repo_name stringlengths 6 100 | path stringlengths 4 294 | copies stringlengths 1 5 | size stringlengths 4 6 | content stringlengths 606 896k | license stringclasses 15
values |
|---|---|---|---|---|---|
lmazuel/ansible | lib/ansible/modules/cloud/rackspace/rax_clb_ssl.py | 70 | 9636 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# This is a DOCUMENTATION stub specific to this module, it extends
# a documentation fragment located in ansible.utils.module_docs_fragments
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION='''
module: rax_clb_ssl
short_description: Manage SSL termination for a Rackspace Cloud Load Balancer.
description:
- Set up, reconfigure, or remove SSL termination for an existing load balancer.
version_added: "2.0"
options:
loadbalancer:
description:
- Name or ID of the load balancer on which to manage SSL termination.
required: true
state:
description:
- If set to "present", SSL termination will be added to this load balancer.
- If "absent", SSL termination will be removed instead.
choices:
- present
- absent
default: present
enabled:
description:
- If set to "false", temporarily disable SSL termination without discarding
- existing credentials.
default: true
private_key:
description:
- The private SSL key as a string in PEM format.
certificate:
description:
- The public SSL certificates as a string in PEM format.
intermediate_certificate:
description:
- One or more intermediate certificate authorities as a string in PEM
- format, concatenated into a single string.
secure_port:
description:
- The port to listen for secure traffic.
default: 443
secure_traffic_only:
description:
- If "true", the load balancer will *only* accept secure traffic.
default: false
https_redirect:
description:
- If "true", the load balancer will redirect HTTP traffic to HTTPS.
- Requires "secure_traffic_only" to be true. Incurs an implicit wait if SSL
- termination is also applied or removed.
wait:
description:
- Wait for the balancer to be in state "running" before turning.
default: false
wait_timeout:
description:
- How long before "wait" gives up, in seconds.
default: 300
author: Ash Wilson
extends_documentation_fragment: rackspace
'''
EXAMPLES = '''
- name: Enable SSL termination on a load balancer
rax_clb_ssl:
loadbalancer: the_loadbalancer
state: present
private_key: "{{ lookup('file', 'credentials/server.key' ) }}"
certificate: "{{ lookup('file', 'credentials/server.crt' ) }}"
intermediate_certificate: "{{ lookup('file', 'credentials/trust-chain.crt') }}"
secure_traffic_only: true
wait: true
- name: Disable SSL termination
rax_clb_ssl:
loadbalancer: "{{ registered_lb.balancer.id }}"
state: absent
wait: true
'''
try:
import pyrax
HAS_PYRAX = True
except ImportError:
HAS_PYRAX = False
def cloud_load_balancer_ssl(module, loadbalancer, state, enabled, private_key,
                            certificate, intermediate_certificate, secure_port,
                            secure_traffic_only, https_redirect,
                            wait, wait_timeout):
    """Add, reconfigure, or remove SSL termination on a Rackspace CLB.

    This function terminates the module run: it always ends in either
    module.exit_json() or module.fail_json() and never returns normally.

    :param module: AnsibleModule used for argument failure and result reporting
    :param loadbalancer: name or ID of the target load balancer
    :param state: 'present' to apply/reconfigure SSL termination, 'absent'
        to remove it
    :param enabled: whether SSL termination is active (can be toggled off
        without discarding credentials)
    :param private_key: PEM private key (required when state == 'present')
    :param certificate: PEM certificate (required when state == 'present')
    :param intermediate_certificate: optional PEM CA chain
    :param secure_port: port the balancer listens on for secure traffic
    :param secure_traffic_only: if True, only secure traffic is accepted
    :param https_redirect: if not None, desired value of the balancer's
        httpsRedirect flag
    :param wait: wait for the balancer to finish building before exiting
    :param wait_timeout: seconds before 'wait' gives up
    """
    # Validate arguments.
    if state == 'present':
        if not private_key:
            module.fail_json(msg="private_key must be provided.")
        else:
            # Strip surrounding whitespace so comparisons against the
            # API's stored values don't produce spurious "changes".
            private_key = private_key.strip()

        if not certificate:
            module.fail_json(msg="certificate must be provided.")
        else:
            certificate = certificate.strip()

    # wait_for_build polls every 5 seconds, so convert the timeout into a
    # poll count.  NOTE(review): under Python 3 this is float division;
    # pyrax presumably accepts a float attempts value — confirm.
    attempts = wait_timeout / 5

    # Locate the load balancer.
    balancer = rax_find_loadbalancer(module, pyrax, loadbalancer)
    existing_ssl = balancer.get_ssl_termination()

    changed = False

    if state == 'present':
        # Apply or reconfigure SSL termination on the load balancer.
        ssl_attrs = dict(
            securePort=secure_port,
            privatekey=private_key,
            certificate=certificate,
            intermediateCertificate=intermediate_certificate,
            enabled=enabled,
            secureTrafficOnly=secure_traffic_only
        )

        needs_change = False

        if existing_ssl:
            for ssl_attr, value in ssl_attrs.items():
                if ssl_attr == 'privatekey':
                    # The private key is not included in get_ssl_termination's
                    # output (as it shouldn't be). Also, if you're changing the
                    # private key, you'll also be changing the certificate,
                    # so we don't lose anything by not checking it.
                    continue

                if value is not None and existing_ssl.get(ssl_attr) != value:
                    # module.fail_json(msg='Unnecessary change', attr=ssl_attr, value=value, existing=existing_ssl.get(ssl_attr))
                    needs_change = True
        else:
            # No SSL termination configured yet; adding it is a change.
            needs_change = True

        if needs_change:
            try:
                # add_ssl_termination both creates and updates termination.
                balancer.add_ssl_termination(**ssl_attrs)
            except pyrax.exceptions.PyraxException as e:
                module.fail_json(msg='%s' % e.message)
            changed = True
    elif state == 'absent':
        # Remove SSL termination if it's already configured.
        if existing_ssl:
            try:
                balancer.delete_ssl_termination()
            except pyrax.exceptions.PyraxException as e:
                module.fail_json(msg='%s' % e.message)
            changed = True

    if https_redirect is not None and balancer.httpsRedirect != https_redirect:
        if changed:
            # This wait is unavoidable because load balancers are immutable
            # while the SSL termination changes above are being applied.
            pyrax.utils.wait_for_build(balancer, interval=5, attempts=attempts)

        try:
            balancer.update(httpsRedirect=https_redirect)
        except pyrax.exceptions.PyraxException as e:
            module.fail_json(msg='%s' % e.message)
        changed = True

    if changed and wait:
        pyrax.utils.wait_for_build(balancer, interval=5, attempts=attempts)

    # Refresh local state before reporting results.
    balancer.get()
    new_ssl_termination = balancer.get_ssl_termination()

    # Intentionally omit the private key from the module output, so you don't
    # accidentally echo it with `ansible-playbook -v` or `debug`, and the
    # certificate, which is just long. Convert other attributes to snake_case
    # and include https_redirect at the top-level.
    if new_ssl_termination:
        new_ssl = dict(
            enabled=new_ssl_termination['enabled'],
            secure_port=new_ssl_termination['securePort'],
            secure_traffic_only=new_ssl_termination['secureTrafficOnly']
        )
    else:
        new_ssl = None

    result = dict(
        changed=changed,
        https_redirect=balancer.httpsRedirect,
        ssl_termination=new_ssl,
        balancer=rax_to_dict(balancer, 'clb')
    )
    success = True

    if balancer.status == 'ERROR':
        result['msg'] = '%s failed to build' % balancer.id
        success = False
    elif wait and balancer.status not in ('ACTIVE', 'ERROR'):
        result['msg'] = 'Timeout waiting on %s' % balancer.id
        success = False

    if success:
        module.exit_json(**result)
    else:
        module.fail_json(**result)
def main():
    """Module entry point: build the argument spec, validate availability
    of pyrax, and hand the parsed parameters to cloud_load_balancer_ssl.
    """
    arg_spec = rax_argument_spec()
    arg_spec.update(dict(
        loadbalancer=dict(required=True),
        state=dict(default='present', choices=['present', 'absent']),
        enabled=dict(type='bool', default=True),
        private_key=dict(),
        certificate=dict(),
        intermediate_certificate=dict(),
        secure_port=dict(type='int', default=443),
        secure_traffic_only=dict(type='bool', default=False),
        https_redirect=dict(type='bool'),
        wait=dict(type='bool', default=False),
        wait_timeout=dict(type='int', default=300)
    ))

    module = AnsibleModule(
        argument_spec=arg_spec,
        required_together=rax_required_together(),
    )

    if not HAS_PYRAX:
        module.fail_json(msg='pyrax is required for this module.')

    params = module.params

    # Authenticate pyrax with the rax credentials from the module args.
    setup_rax_module(module, pyrax)

    cloud_load_balancer_ssl(
        module,
        params.get('loadbalancer'),
        params.get('state'),
        module.boolean(params.get('enabled')),
        params.get('private_key'),
        params.get('certificate'),
        params.get('intermediate_certificate'),
        params.get('secure_port'),
        module.boolean(params.get('secure_traffic_only')),
        module.boolean(params.get('https_redirect')),
        module.boolean(params.get('wait')),
        params.get('wait_timeout'),
    )
from ansible.module_utils.basic import *
from ansible.module_utils.rax import *
if __name__ == '__main__':
main()
| gpl-3.0 |
jolyonb/edx-platform | cms/djangoapps/contentstore/management/commands/tests/test_fix_not_found.py | 87 | 2147 | """
Tests for the fix_not_found management command
"""
from django.core.management import CommandError, call_command
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
class TestFixNotFound(ModuleStoreTestCase):
    """
    Tests for the fix_not_found management command
    """
    def test_no_args(self):
        """
        Test fix_not_found command with no arguments
        """
        with self.assertRaisesRegexp(CommandError, "Error: too few arguments"):
            call_command('fix_not_found')

    def test_fix_not_found_non_split(self):
        """
        The management command doesn't work on non split courses
        """
        # Old-mongo courses are not supported by the command.
        course = CourseFactory.create(default_store=ModuleStoreEnum.Type.mongo)
        with self.assertRaisesRegexp(CommandError, "The owning modulestore does not support this command."):
            call_command("fix_not_found", unicode(course.id))

    def test_fix_not_found(self):
        """
        A dangling child pointer is removed from a split course's children
        list after running the command.
        """
        course = CourseFactory.create(default_store=ModuleStoreEnum.Type.split)
        ItemFactory.create(category='chapter', parent_location=course.location)

        # get course again in order to update its children list
        course = self.store.get_course(course.id)

        # create a dangling usage key that we'll add to the course's children list
        dangling_pointer = course.id.make_usage_key('chapter', 'DanglingPointer')
        course.children.append(dangling_pointer)
        self.store.update_item(course, self.user.id)

        # the course block should now point to two children, one of which
        # doesn't actually exist
        self.assertEqual(len(course.children), 2)
        self.assertIn(dangling_pointer, course.children)

        call_command("fix_not_found", unicode(course.id))

        # make sure the dangling pointer was removed from
        # the course block's children
        course = self.store.get_course(course.id)
        self.assertEqual(len(course.children), 1)
        self.assertNotIn(dangling_pointer, course.children)
| agpl-3.0 |
eayunstack/nova | nova/tests/console/test_console.py | 11 | 7615 | # Copyright (c) 2010 OpenStack Foundation
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests For Console proxy."""
from oslo.config import cfg
from nova.compute import rpcapi as compute_rpcapi
from nova.console import api as console_api
from nova.console import rpcapi as console_rpcapi
from nova import context
from nova import db
from nova import exception
from nova.openstack.common import importutils
from nova import test
CONF = cfg.CONF
CONF.import_opt('console_manager', 'nova.service')
CONF.import_opt('console_driver', 'nova.console.manager')
class ConsoleTestCase(test.TestCase):
    """Test case for console proxy manager."""

    def setUp(self):
        super(ConsoleTestCase, self).setUp()
        # Use the fake console driver so no real console service is needed.
        self.flags(console_driver='nova.console.fake.FakeConsoleProxy',
                   stub_compute=True)
        self.console = importutils.import_object(CONF.console_manager)
        self.user_id = 'fake'
        self.project_id = 'fake'
        self.context = context.RequestContext(self.user_id, self.project_id)
        self.host = 'test_compute_host'

    def _create_instance(self):
        """Create a test instance."""
        inst = {}
        inst['image_id'] = 1
        inst['reservation_id'] = 'r-fakeres'
        inst['user_id'] = self.user_id
        inst['project_id'] = self.project_id
        inst['instance_type_id'] = 1
        inst['ami_launch_index'] = 0
        return db.instance_create(self.context, inst)

    def test_get_pool_for_instance_host(self):
        # The pool returned for a compute host must reference that host.
        pool = self.console._get_pool_for_instance_host(self.context,
                                                        self.host)
        self.assertEqual(pool['compute_host'], self.host)

    def test_get_pool_creates_new_pool_if_needed(self):
        # No pool exists for this host/type combination yet...
        self.assertRaises(exception.NotFound,
                          db.console_pool_get_by_host_type,
                          self.context,
                          self.host,
                          self.console.host,
                          self.console.driver.console_type)
        # ...so asking for one should create it.
        pool = self.console._get_pool_for_instance_host(self.context,
                                                        self.host)
        pool2 = db.console_pool_get_by_host_type(self.context,
                                                 self.host,
                                                 self.console.host,
                                                 self.console.driver.console_type)
        self.assertEqual(pool['id'], pool2['id'])

    def test_get_pool_does_not_create_new_pool_if_exists(self):
        # Pre-create a pool; the manager must reuse it rather than
        # creating a duplicate.
        pool_info = {'address': '127.0.0.1',
                     'username': 'test',
                     'password': '1234pass',
                     'host': self.console.host,
                     'console_type': self.console.driver.console_type,
                     'compute_host': 'sometesthostname'}
        new_pool = db.console_pool_create(self.context, pool_info)
        pool = self.console._get_pool_for_instance_host(self.context,
                                                        'sometesthostname')
        self.assertEqual(pool['id'], new_pool['id'])

    def test_add_console(self):
        instance = self._create_instance()
        self.console.add_console(self.context, instance['id'])
        instance = db.instance_get(self.context, instance['id'])
        pool = db.console_pool_get_by_host_type(self.context,
                                                instance['host'],
                                                self.console.host,
                                                self.console.driver.console_type)
        # The new console should appear in the pool's console list.
        console_instances = [con['instance_uuid'] for con in pool['consoles']]
        self.assertIn(instance['uuid'], console_instances)
        db.instance_destroy(self.context, instance['uuid'])

    def test_add_console_does_not_duplicate(self):
        instance = self._create_instance()
        cons1 = self.console.add_console(self.context, instance['id'])
        # Adding a console twice for the same instance returns the same one.
        cons2 = self.console.add_console(self.context, instance['id'])
        self.assertEqual(cons1, cons2)
        db.instance_destroy(self.context, instance['uuid'])

    def test_remove_console(self):
        instance = self._create_instance()
        console_id = self.console.add_console(self.context, instance['id'])
        self.console.remove_console(self.context, console_id)
        # The console record must be gone from the database.
        self.assertRaises(exception.NotFound,
                          db.console_get,
                          self.context,
                          console_id)
        db.instance_destroy(self.context, instance['uuid'])
class ConsoleAPITestCase(test.TestCase):
    """Test case for console API."""

    def setUp(self):
        super(ConsoleAPITestCase, self).setUp()
        self.context = context.RequestContext('fake', 'fake')
        self.console_api = console_api.API()
        self.fake_uuid = '00000000-aaaa-bbbb-cccc-000000000000'
        self.fake_instance = {
            'id': 1,
            'uuid': self.fake_uuid,
            'host': 'fake_host'
        }
        self.fake_console = {
            'pool': {'host': 'fake_host'},
            'id': 'fake_id'
        }

        # Stub out the db layer so API calls never touch a real database.
        def _fake_db_console_get(_ctxt, _console_uuid, _instance_uuid):
            return self.fake_console
        self.stubs.Set(db, 'console_get', _fake_db_console_get)

        def _fake_db_console_get_all_by_instance(_ctxt, _instance_uuid,
                                                 columns_to_join):
            return [self.fake_console]
        self.stubs.Set(db, 'console_get_all_by_instance',
                       _fake_db_console_get_all_by_instance)

        def _fake_instance_get_by_uuid(_ctxt, _instance_uuid):
            return self.fake_instance
        self.stubs.Set(db, 'instance_get_by_uuid', _fake_instance_get_by_uuid)

    def test_get_consoles(self):
        console = self.console_api.get_consoles(self.context, self.fake_uuid)
        self.assertEqual(console, [self.fake_console])

    def test_get_console(self):
        console = self.console_api.get_console(self.context, self.fake_uuid,
                                               'fake_id')
        self.assertEqual(console, self.fake_console)

    def test_delete_console(self):
        # Expect delete_console to fan out to the console RPC API.
        self.mox.StubOutWithMock(console_rpcapi.ConsoleAPI, 'remove_console')
        console_rpcapi.ConsoleAPI.remove_console(self.context, 'fake_id')

        self.mox.ReplayAll()

        self.console_api.delete_console(self.context, self.fake_uuid,
                                        'fake_id')

    def test_create_console(self):
        # create_console first resolves the compute host's console topic...
        self.mox.StubOutWithMock(compute_rpcapi.ComputeAPI,
                                 'get_console_topic')
        compute_rpcapi.ComputeAPI.get_console_topic(
            self.context, 'fake_host').AndReturn('compute.fake_host')

        # ...then asks the console service on that host to add a console.
        self.mox.StubOutClassWithMocks(console_rpcapi, 'ConsoleAPI')
        console_api_mock = console_rpcapi.ConsoleAPI(
            topic='compute', server='fake_host')
        console_api_mock.add_console(self.context,
                                     self.fake_instance['id'])

        self.mox.ReplayAll()

        self.console_api.create_console(self.context, self.fake_uuid)
| apache-2.0 |
EraYaN/CouchPotatoServer | libs/pyutil/mathutil.py | 92 | 2257 | # Copyright (c) 2005-2010 Zooko Wilcox-O'Hearn
# This file is part of pyutil; see README.rst for licensing terms.
"""
A few commonly needed functions.
"""
import math
def div_ceil(n, d):
    """
    The smallest integer k such that k*d >= n.
    """
    quotient, remainder = divmod(n, d)
    return int(quotient + (remainder != 0))
def next_multiple(n, k):
    """
    The smallest multiple of k which is >= n.  Note that if n is 0 then the
    answer is 0.
    """
    quotient, remainder = divmod(n, k)
    if remainder:
        quotient += 1
    return quotient * k
def pad_size(n, k):
    """
    The smallest number that has to be added to n to equal a multiple of k.
    """
    remainder = n % k
    if remainder:
        return k - remainder
    return 0
def is_power_of_k(n, k):
    """
    Return True iff n is an exact power of k (k**0 == 1 counts as a power).

    Bug fix: math.log raises ValueError for n <= 0, so non-positive n used
    to crash instead of returning False.  No positive integer k has a
    power <= 0, so guard explicitly and return False.
    """
    if n < 1:
        return False
    # Round the float log to the nearest integer exponent, then verify
    # exactly with integer arithmetic to avoid float imprecision.
    return k**int(math.log(n, k) + 0.5) == n
def next_power_of_k(n, k):
    """
    The smallest power of k that is >= n (1 when n <= 1, since k**0 == 1).
    """
    power = 1
    while power < n:
        power = power * k
    return power
def ave(l):
    """Arithmetic mean of the values in l."""
    total = sum(l)
    return total / len(l)
def log_ceil(n, b):
    """
    The smallest integer k such that b^k >= n.

    log_ceil(n, 2) is the number of bits needed to store any of n values, e.g.
    the number of bits needed to store any of 128 possible values is 7.
    """
    exponent = 0
    reached = 1
    while reached < n:
        reached *= b
        exponent += 1
    return exponent
def log_floor(n, b):
    """
    The largest integer k such that b^k <= n.
    """
    exponent = -1
    reached = 1
    while reached <= n:
        reached *= b
        exponent += 1
    return exponent
def linear_fit_slope(ps):
    """
    Single-independent-variable linear regression slope, least-squares method.

    @param ps a sequence of tuples of (x, y)
    @return the fitted slope, or None when the x values have zero variance
        (a vertical line has no finite slope)
    """
    mean_x = ave([p[0] for p in ps])
    mean_y = ave([p[1] for p in ps])
    sxy = sum((x - mean_x) * (y - mean_y) for (x, y) in ps)
    sxx = sum((x - mean_x) ** 2 for (x, _) in ps)
    if sxx == 0:
        return None
    return sxy / sxx
def permute(l):
    """
    Return all possible permutations of l.

    @type l: sequence
    @rtype a list of lists, one per permutation
    """
    if len(l) == 1:
        return [l, ]
    out = []
    for idx in range(len(l)):
        remaining = list(l[:])
        chosen = remaining.pop(idx)
        # Append the removed element to every permutation of the rest.
        for sub in permute(remaining):
            sub.append(chosen)
            out.append(sub)
    return out
| gpl-3.0 |
agry/NGECore2 | scripts/mobiles/endor/deranged_ravisher.py | 2 | 1387 | import sys
from services.spawn import MobileTemplate
from services.spawn import WeaponTemplate
from resources.datatables import WeaponType
from resources.datatables import Difficulty
from resources.datatables import Options
from java.util import Vector
def addTemplate(core):
    """Build the 'deranged_ravisher' squill mobile template and register it
    with the core spawn service.  Jython script; invoked by the NGECore2
    script loader with the game core as its argument.
    """
    mobileTemplate = MobileTemplate()

    mobileTemplate.setCreatureName('squill_deranged_ravisher')
    mobileTemplate.setLevel(35)
    mobileTemplate.setDifficulty(Difficulty.NORMAL)
    # Spawn distances — presumably in meters; confirm against spawn service.
    mobileTemplate.setMinSpawnDistance(4)
    mobileTemplate.setMaxSpawnDistance(8)
    mobileTemplate.setDeathblow(False)
    mobileTemplate.setScale(1)
    mobileTemplate.setSocialGroup("squill")
    mobileTemplate.setAssistRange(12)
    mobileTemplate.setStalker(True)
    mobileTemplate.setOptionsBitmask(Options.AGGRESSIVE | Options.ATTACKABLE)

    # Client model(s) for this creature.
    templates = Vector()
    templates.add('object/mobile/shared_squill.iff')
    mobileTemplate.setTemplates(templates)

    # Unarmed melee "weapon" dealing kinetic damage.
    weaponTemplates = Vector()
    weapontemplate = WeaponTemplate('object/weapon/melee/unarmed/shared_unarmed_default.iff', WeaponType.UNARMED, 1.0, 6, 'kinetic')
    weaponTemplates.add(weapontemplate)
    mobileTemplate.setWeaponTemplateVector(weaponTemplates)

    # No special attacks; only the default melee attack is configured.
    attacks = Vector()
    mobileTemplate.setDefaultAttack('creatureMeleeAttack')
    mobileTemplate.setAttacks(attacks)

    core.spawnService.addMobileTemplate('deranged_ravisher', mobileTemplate)
    return
kevinmel2000/sl4a | python/src/Lib/encodings/shift_jis_2004.py | 816 | 1059 | #
# shift_jis_2004.py: Python Unicode Codec for SHIFT_JIS_2004
#
# Written by Hye-Shik Chang <perky@FreeBSD.org>
#
import _codecs_jp, codecs
import _multibytecodec as mbc
codec = _codecs_jp.getcodec('shift_jis_2004')
class Codec(codecs.Codec):
    # Stateless encode/decode delegated directly to the C codec object
    # obtained from _codecs_jp above.
    encode = codec.encode
    decode = codec.decode
class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
                         codecs.IncrementalEncoder):
    # The multibyte base class reads this class attribute to pick the codec.
    codec = codec
class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
                         codecs.IncrementalDecoder):
    # The multibyte base class reads this class attribute to pick the codec.
    codec = codec
class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
    # The multibyte base class reads this class attribute to pick the codec.
    codec = codec
class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
    # The multibyte base class reads this class attribute to pick the codec.
    codec = codec
def getregentry():
    """Return the CodecInfo entry the codecs registry uses for this encoding."""
    return codecs.CodecInfo(
        name='shift_jis_2004',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
jlspyaozhongkai/Uter | third_party_backup/Python-2.7.9/Lib/plat-mac/bundlebuilder.py | 40 | 33754 | #! /usr/bin/env python
"""\
bundlebuilder.py -- Tools to assemble MacOS X (application) bundles.
This module contains two classes to build so called "bundles" for
MacOS X. BundleBuilder is a general tool, AppBuilder is a subclass
specialized in building application bundles.
[Bundle|App]Builder objects are instantiated with a bunch of keyword
arguments, and have a build() method that will do all the work. See
the class doc strings for a description of the constructor arguments.
The module contains a main program that can be used in two ways:
% python bundlebuilder.py [options] build
% python buildapp.py [options] build
Where "buildapp.py" is a user-supplied setup.py-like script following
this model:
from bundlebuilder import buildapp
buildapp(<lots-of-keyword-args>)
"""
__all__ = ["BundleBuilder", "BundleBuilderError", "AppBuilder", "buildapp"]
from warnings import warnpy3k
warnpy3k("In 3.x, the bundlebuilder module is removed.", stacklevel=2)
import sys
import os, errno, shutil
import imp, marshal
import re
from copy import deepcopy
import getopt
from plistlib import Plist
from types import FunctionType as function
class BundleBuilderError(Exception): pass
class Defaults:
    """Class attributes that don't start with an underscore and are
    not functions or classmethods are (deep)copied to self.__dict__.
    This allows for mutable default values.
    """

    def __init__(self, **kwargs):
        # Start from the class-level defaults, then let keyword arguments
        # override them on the instance.
        defaults = self._getDefaults()
        defaults.update(kwargs)
        self.__dict__.update(defaults)

    def _getDefaults(cls):
        # Walk the bases first so subclasses can shadow inherited defaults.
        defaults = {}
        for base in cls.__bases__:
            if hasattr(base, "_getDefaults"):
                defaults.update(base._getDefaults())
        # Deep-copy each eligible attribute so mutable defaults (lists,
        # dicts, Plist objects) are never shared between instances.
        for name, value in cls.__dict__.items():
            if name[0] != "_" and not isinstance(value,
                                                 (function, classmethod)):
                defaults[name] = deepcopy(value)
        return defaults
    # Pre-decorator-syntax way of declaring a classmethod.
    _getDefaults = classmethod(_getDefaults)
class BundleBuilder(Defaults):
    """BundleBuilder is a barebones class for assembling bundles. It
    knows nothing about executables or icons, it only copies files
    and creates the PkgInfo and Info.plist files.
    """

    # (Note that Defaults.__init__ (deep)copies these values to
    # instance variables. Mutable defaults are therefore safe.)

    # Name of the bundle, with or without extension.
    name = None

    # The property list ("plist")
    plist = Plist(CFBundleDevelopmentRegion = "English",
                  CFBundleInfoDictionaryVersion = "6.0")

    # The type of the bundle.
    type = "BNDL"
    # The creator code of the bundle.
    creator = None

    # the CFBundleIdentifier (this is used for the preferences file name)
    bundle_id = None

    # List of files that have to be copied to <bundle>/Contents/Resources.
    resources = []

    # List of (src, dest) tuples; dest should be a path relative to the bundle
    # (eg. "Contents/Resources/MyStuff/SomeFile.ext).
    files = []

    # List of shared libraries (dylibs, Frameworks) to bundle with the app
    # will be placed in Contents/Frameworks
    libs = []

    # Directory where the bundle will be assembled.
    builddir = "build"

    # Make symlinks instead copying files. This is handy during debugging, but
    # makes the bundle non-distributable.
    symlink = 0

    # Verbosity level.
    verbosity = 1

    # Destination root directory
    destroot = ""

    def setup(self):
        """Derive the bundle path and fill in required plist keys."""
        # XXX rethink self.name munging, this is brittle.
        self.name, ext = os.path.splitext(self.name)
        if not ext:
            ext = ".bundle"
        bundleextension = ext
        # misc (derived) attributes
        self.bundlepath = pathjoin(self.builddir, self.name + bundleextension)

        plist = self.plist
        plist.CFBundleName = self.name
        plist.CFBundlePackageType = self.type
        if self.creator is None:
            # Fall back to a plist-supplied signature, then the
            # conventional "unknown creator" code.
            if hasattr(plist, "CFBundleSignature"):
                self.creator = plist.CFBundleSignature
            else:
                self.creator = "????"
        plist.CFBundleSignature = self.creator
        if self.bundle_id:
            plist.CFBundleIdentifier = self.bundle_id
        elif not hasattr(plist, "CFBundleIdentifier"):
            plist.CFBundleIdentifier = self.name

    def build(self):
        """Build the bundle."""
        builddir = self.builddir
        if builddir and not os.path.exists(builddir):
            os.mkdir(builddir)
        self.message("Building %s" % repr(self.bundlepath), 1)
        # Remove any previous build products, including a leftover
        # temporary bundle from an interrupted build.
        if os.path.exists(self.bundlepath):
            shutil.rmtree(self.bundlepath)
        if os.path.exists(self.bundlepath + '~'):
            shutil.rmtree(self.bundlepath + '~')
        bp = self.bundlepath

        # Create the app bundle in a temporary location and then
        # rename the completed bundle. This way the Finder will
        # never see an incomplete bundle (where it might pick up
        # and cache the wrong meta data)
        self.bundlepath = bp + '~'
        try:
            os.mkdir(self.bundlepath)
            self.preProcess()
            self._copyFiles()
            self._addMetaFiles()
            self.postProcess()
            os.rename(self.bundlepath, bp)
        finally:
            # Restore the real path even if the build blew up midway.
            self.bundlepath = bp
        self.message("Done.", 1)

    def preProcess(self):
        """Hook for subclasses."""
        pass

    def postProcess(self):
        """Hook for subclasses."""
        pass

    def _addMetaFiles(self):
        """Write Contents/PkgInfo and Contents/Info.plist."""
        contents = pathjoin(self.bundlepath, "Contents")
        makedirs(contents)
        #
        # Write Contents/PkgInfo
        assert len(self.type) == len(self.creator) == 4, \
            "type and creator must be 4-byte strings."
        pkginfo = pathjoin(contents, "PkgInfo")
        f = open(pkginfo, "wb")
        f.write(self.type + self.creator)
        f.close()
        #
        # Write Contents/Info.plist
        infoplist = pathjoin(contents, "Info.plist")
        self.plist.write(infoplist)

    def _copyFiles(self):
        """Copy (or symlink) self.files, self.resources and self.libs
        into the bundle."""
        files = self.files[:]
        for path in self.resources:
            files.append((path, pathjoin("Contents", "Resources",
                os.path.basename(path))))
        for path in self.libs:
            files.append((path, pathjoin("Contents", "Frameworks",
                os.path.basename(path))))
        if self.symlink:
            self.message("Making symbolic links", 1)
            msg = "Making symlink from"
        else:
            self.message("Copying files", 1)
            msg = "Copying"
        files.sort()
        for src, dst in files:
            if os.path.isdir(src):
                self.message("%s %s/ to %s/" % (msg, src, dst), 2)
            else:
                self.message("%s %s to %s" % (msg, src, dst), 2)
            dst = pathjoin(self.bundlepath, dst)
            if self.symlink:
                symlink(src, dst, mkdirs=1)
            else:
                copy(src, dst, mkdirs=1)

    def message(self, msg, level=0):
        """Write msg to stderr when level is within the verbosity setting,
        indenting deeper levels for readability."""
        if level <= self.verbosity:
            indent = ""
            if level > 1:
                indent = (level - 1) * "  "
            sys.stderr.write(indent + msg + "\n")

    def report(self):
        # XXX something decent
        pass
if __debug__:
PYC_EXT = ".pyc"
else:
PYC_EXT = ".pyo"
MAGIC = imp.get_magic()
USE_ZIPIMPORT = "zipimport" in sys.builtin_module_names
# For standalone apps, we have our own minimal site.py. We don't need
# all the cruft of the real site.py.
SITE_PY = """\
import sys
if not %(semi_standalone)s:
del sys.path[1:] # sys.path[0] is Contents/Resources/
"""
ZIP_ARCHIVE = "Modules.zip"
SITE_PY_ZIP = SITE_PY + ("sys.path.append(sys.path[0] + '/%s')\n" % ZIP_ARCHIVE)
def getPycData(fullname, code, ispkg):
    """Return (archive_path, pyc_bytes) for a compiled code object.

    Packages are stored under their __init__ module name so the archive
    layout mirrors a normal package directory.
    """
    modname = fullname + ".__init__" if ispkg else fullname
    path = modname.replace(".", os.sep) + PYC_EXT
    # .pyc header: magic number plus a zeroed timestamp field.
    data = MAGIC + '\0\0\0\0' + marshal.dumps(code)
    return path, data
#
# Extension modules can't be in the modules zip archive, so a placeholder
# is added instead, that loads the extension from a specified location.
#
EXT_LOADER = """\
def __load():
import imp, sys, os
for p in sys.path:
path = os.path.join(p, "%(filename)s")
if os.path.exists(path):
break
else:
assert 0, "file not found: %(filename)s"
mod = imp.load_dynamic("%(name)s", path)
__load()
del __load
"""
MAYMISS_MODULES = ['os2', 'nt', 'ntpath', 'dos', 'dospath',
'win32api', 'ce', '_winreg', 'nturl2path', 'sitecustomize',
'org.python.core', 'riscos', 'riscosenviron', 'riscospath'
]
STRIP_EXEC = "/usr/bin/strip"
#
# We're using a stock interpreter to run the app, yet we need
# a way to pass the Python main program to the interpreter. The
# bootstrapping script fires up the interpreter with the right
# arguments. os.execve() is used as OSX doesn't like us to
# start a real new process. Also, the executable name must match
# the CFBundleExecutable value in the Info.plist, so we lie
# deliberately with argv[0]. The actual Python executable is
# passed in an environment variable so we can "repair"
# sys.executable later.
#
BOOTSTRAP_SCRIPT = """\
#!%(hashbang)s
import sys, os
execdir = os.path.dirname(sys.argv[0])
executable = os.path.join(execdir, "%(executable)s")
resdir = os.path.join(os.path.dirname(execdir), "Resources")
libdir = os.path.join(os.path.dirname(execdir), "Frameworks")
mainprogram = os.path.join(resdir, "%(mainprogram)s")
if %(optimize)s:
sys.argv.insert(1, '-O')
sys.argv.insert(1, mainprogram)
if %(standalone)s or %(semi_standalone)s:
os.environ["PYTHONPATH"] = resdir
if %(standalone)s:
os.environ["PYTHONHOME"] = resdir
else:
pypath = os.getenv("PYTHONPATH", "")
if pypath:
pypath = ":" + pypath
os.environ["PYTHONPATH"] = resdir + pypath
os.environ["PYTHONEXECUTABLE"] = executable
os.environ["DYLD_LIBRARY_PATH"] = libdir
os.environ["DYLD_FRAMEWORK_PATH"] = libdir
os.execve(executable, sys.argv, os.environ)
"""
#
# Optional wrapper that converts "dropped files" into sys.argv values.
#
ARGV_EMULATOR = """\
import argvemulator, os
argvemulator.ArgvCollector().mainloop()
execfile(os.path.join(os.path.split(__file__)[0], "%(realmainprogram)s"))
"""
#
# When building a standalone app with Python.framework, we need to copy
# a subset from Python.framework to the bundle. The following list
# specifies exactly what items we'll copy.
#
PYTHONFRAMEWORKGOODIES = [
"Python", # the Python core library
"Resources/English.lproj",
"Resources/Info.plist",
]
def isFramework():
    """Return True when the running interpreter lives inside
    Python.framework (i.e. a framework build of MacPython)."""
    location = sys.exec_prefix.find("Python.framework")
    return location > 0
LIB = os.path.join(sys.prefix, "lib", "python" + sys.version[:3])
SITE_PACKAGES = os.path.join(LIB, "site-packages")
class AppBuilder(BundleBuilder):
    """Builder for Mac OS X ".app" application bundles.

    Supports three flavours: a plain wrapper app around an installed
    Python, a semi-standalone app (bundles third-party modules only)
    and a fully standalone app (also bundles Python.framework and the
    needed parts of the standard library).
    """
    use_zipimport = USE_ZIPIMPORT
    # Override type of the bundle.
    type = "APPL"
    # platform, name of the subfolder of Contents that contains the executable.
    platform = "MacOS"
    # A Python main program. If this argument is given, the main
    # executable in the bundle will be a small wrapper that invokes
    # the main program. (XXX Discuss why.)
    mainprogram = None
    # The main executable. If a Python main program is specified
    # the executable will be copied to Resources and be invoked
    # by the wrapper program mentioned above. Otherwise it will
    # simply be used as the main executable.
    executable = None
    # The name of the main nib, for Cocoa apps. *Must* be specified
    # when building a Cocoa app.
    nibname = None
    # The name of the icon file to be copied to Resources and used for
    # the Finder icon.
    iconfile = None
    # Symlink the executable instead of copying it.
    symlink_exec = 0
    # If True, build standalone app.
    standalone = 0
    # If True, build semi-standalone app (only includes third-party modules).
    semi_standalone = 0
    # If set, use this for #! lines in stead of sys.executable
    python = None
    # If True, add a real main program that emulates sys.argv before calling
    # mainprogram
    argv_emulation = 0
    # The following attributes are only used when building a standalone app.
    # Exclude these modules.
    excludeModules = []
    # Include these modules.
    includeModules = []
    # Include these packages.
    includePackages = []
    # Strip binaries from debug info.
    strip = 0
    # Found Python modules: [(name, codeobject, ispkg), ...]
    pymodules = []
    # Modules that modulefinder couldn't find:
    missingModules = []
    maybeMissingModules = []
    def setup(self):
        """Validate the option combination, derive defaults (bundle name,
        executable, plist entries) and delegate to BundleBuilder.setup()."""
        if ((self.standalone or self.semi_standalone)
            and self.mainprogram is None):
            raise BundleBuilderError, ("must specify 'mainprogram' when "
                                       "building a standalone application.")
        if self.mainprogram is None and self.executable is None:
            raise BundleBuilderError, ("must specify either or both of "
                                       "'executable' and 'mainprogram'")
        self.execdir = pathjoin("Contents", self.platform)
        if self.name is not None:
            pass
        elif self.mainprogram is not None:
            self.name = os.path.splitext(os.path.basename(self.mainprogram))[0]
        elif self.executable is not None:
            self.name = os.path.splitext(os.path.basename(self.executable))[0]
        if self.name[-4:] != ".app":
            self.name += ".app"
        if self.executable is None:
            if not self.standalone and not isFramework():
                self.symlink_exec = 1
            if self.python:
                self.executable = self.python
            else:
                self.executable = sys.executable
        if self.nibname:
            self.plist.NSMainNibFile = self.nibname
            if not hasattr(self.plist, "NSPrincipalClass"):
                self.plist.NSPrincipalClass = "NSApplication"
        if self.standalone and isFramework():
            self.addPythonFramework()
        BundleBuilder.setup(self)
        self.plist.CFBundleExecutable = self.name
        if self.standalone or self.semi_standalone:
            self.findDependencies()
    def preProcess(self):
        """Stage the executable, the (optionally argv-emulated) main program,
        the bootstrap script and the icon before files are copied."""
        resdir = "Contents/Resources"
        if self.executable is not None:
            if self.mainprogram is None:
                execname = self.name
            else:
                execname = os.path.basename(self.executable)
            execpath = pathjoin(self.execdir, execname)
            if not self.symlink_exec:
                self.files.append((self.destroot + self.executable, execpath))
            self.execpath = execpath
        if self.mainprogram is not None:
            mainprogram = os.path.basename(self.mainprogram)
            self.files.append((self.mainprogram, pathjoin(resdir, mainprogram)))
            if self.argv_emulation:
                # Change the main program, and create the helper main program (which
                # does argv collection and then calls the real main).
                # Also update the included modules (if we're creating a standalone
                # program) and the plist
                # NOTE: realmainprogram is consumed via locals() in the
                # ARGV_EMULATOR template below, not referenced directly.
                realmainprogram = mainprogram
                mainprogram = '__argvemulator_' + mainprogram
                resdirpath = pathjoin(self.bundlepath, resdir)
                mainprogrampath = pathjoin(resdirpath, mainprogram)
                makedirs(resdirpath)
                open(mainprogrampath, "w").write(ARGV_EMULATOR % locals())
                if self.standalone or self.semi_standalone:
                    self.includeModules.append("argvemulator")
                    self.includeModules.append("os")
                if "CFBundleDocumentTypes" not in self.plist:
                    self.plist["CFBundleDocumentTypes"] = [
                        { "CFBundleTypeOSTypes" : [
                            "****",
                            "fold",
                            "disk"],
                          "CFBundleTypeRole": "Viewer"}]
            # Write bootstrap script
            executable = os.path.basename(self.executable)
            execdir = pathjoin(self.bundlepath, self.execdir)
            bootstrappath = pathjoin(execdir, self.name)
            makedirs(execdir)
            if self.standalone or self.semi_standalone:
                # XXX we're screwed when the end user has deleted
                # /usr/bin/python
                hashbang = "/usr/bin/python"
            elif self.python:
                hashbang = self.python
            else:
                hashbang = os.path.realpath(sys.executable)
            # These locals feed the BOOTSTRAP_SCRIPT template via locals().
            standalone = self.standalone
            semi_standalone = self.semi_standalone
            optimize = sys.flags.optimize
            open(bootstrappath, "w").write(BOOTSTRAP_SCRIPT % locals())
            os.chmod(bootstrappath, 0775)
        if self.iconfile is not None:
            iconbase = os.path.basename(self.iconfile)
            self.plist.CFBundleIconFile = iconbase
            self.files.append((self.iconfile, pathjoin(resdir, iconbase)))
    def postProcess(self):
        """After files are copied: add bundled modules, strip binaries,
        create the executable symlink, and report missing modules."""
        if self.standalone or self.semi_standalone:
            self.addPythonModules()
        if self.strip and not self.symlink:
            self.stripBinaries()
        if self.symlink_exec and self.executable:
            self.message("Symlinking executable %s to %s" % (self.executable,
                self.execpath), 2)
            dst = pathjoin(self.bundlepath, self.execpath)
            makedirs(os.path.dirname(dst))
            os.symlink(os.path.abspath(self.executable), dst)
        if self.missingModules or self.maybeMissingModules:
            self.reportMissing()
    def addPythonFramework(self):
        """Schedule a minimal Python.framework subset for copying."""
        # If we're building a standalone app with Python.framework,
        # include a minimal subset of Python.framework, *unless*
        # Python.framework was specified manually in self.libs.
        for lib in self.libs:
            if os.path.basename(lib) == "Python.framework":
                # a Python.framework was specified as a library
                return
        frameworkpath = sys.exec_prefix[:sys.exec_prefix.find(
            "Python.framework") + len("Python.framework")]
        version = sys.version[:3]
        frameworkpath = pathjoin(frameworkpath, "Versions", version)
        destbase = pathjoin("Contents", "Frameworks", "Python.framework",
                            "Versions", version)
        for item in PYTHONFRAMEWORKGOODIES:
            src = pathjoin(frameworkpath, item)
            dst = pathjoin(destbase, item)
            self.files.append((src, dst))
    def _getSiteCode(self):
        """Return the compiled code object for our custom site.py when using
        zipimport; implicitly returns None otherwise."""
        if self.use_zipimport:
            return compile(SITE_PY % {"semi_standalone": self.semi_standalone},
                     "<-bundlebuilder.py->", "exec")
    def addPythonModules(self):
        """Write the collected pymodules into the bundle, either as a single
        zip archive (zipimport) or as individual .pyc files."""
        self.message("Adding Python modules", 1)
        if self.use_zipimport:
            # Create a zip file containing all modules as pyc.
            import zipfile
            relpath = pathjoin("Contents", "Resources", ZIP_ARCHIVE)
            abspath = pathjoin(self.bundlepath, relpath)
            zf = zipfile.ZipFile(abspath, "w", zipfile.ZIP_DEFLATED)
            for name, code, ispkg in self.pymodules:
                self.message("Adding Python module %s" % name, 2)
                path, pyc = getPycData(name, code, ispkg)
                zf.writestr(path, pyc)
            zf.close()
            # add site.pyc
            sitepath = pathjoin(self.bundlepath, "Contents", "Resources",
                                "site" + PYC_EXT)
            writePyc(self._getSiteCode(), sitepath)
        else:
            # Create individual .pyc files.
            for name, code, ispkg in self.pymodules:
                if ispkg:
                    name += ".__init__"
                path = name.split(".")
                path = pathjoin("Contents", "Resources", *path) + PYC_EXT
                if ispkg:
                    self.message("Adding Python package %s" % path, 2)
                else:
                    self.message("Adding Python module %s" % path, 2)
                abspath = pathjoin(self.bundlepath, path)
                makedirs(os.path.dirname(abspath))
                writePyc(code, abspath)
    def stripBinaries(self):
        """Run the external strip(1) tool over every executable file in the
        bundle (symlinks skipped); failures are only reported, not fatal."""
        if not os.path.exists(STRIP_EXEC):
            self.message("Error: can't strip binaries: no strip program at "
                "%s" % STRIP_EXEC, 0)
        else:
            import stat
            self.message("Stripping binaries", 1)
            def walk(top):
                for name in os.listdir(top):
                    path = pathjoin(top, name)
                    if os.path.islink(path):
                        continue
                    if os.path.isdir(path):
                        walk(path)
                    else:
                        mod = os.stat(path)[stat.ST_MODE]
                        if not (mod & 0100):
                            # not owner-executable: nothing to strip
                            continue
                        relpath = path[len(self.bundlepath):]
                        self.message("Stripping %s" % relpath, 2)
                        inf, outf = os.popen4("%s -S \"%s\"" %
                                              (STRIP_EXEC, path))
                        output = outf.read().strip()
                        if output:
                            # usually not a real problem, like when we're
                            # trying to strip a script
                            self.message("Problem stripping %s:" % relpath, 3)
                            self.message(output, 3)
            walk(self.bundlepath)
    def findDependencies(self):
        """Use modulefinder to collect every module the main program needs,
        populating self.pymodules, self.files and the missing-module lists."""
        self.message("Finding module dependencies", 1)
        import modulefinder
        mf = modulefinder.ModuleFinder(excludes=self.excludeModules)
        if self.use_zipimport:
            # zipimport imports zlib, must add it manually
            mf.import_hook("zlib")
            # manually add our own site.py
            site = mf.add_module("site")
            site.__code__ = self._getSiteCode()
            mf.scan_code(site.__code__, site)
        # warnings.py gets imported implicitly from C
        mf.import_hook("warnings")
        includeModules = self.includeModules[:]
        for name in self.includePackages:
            includeModules.extend(findPackageContents(name).keys())
        for name in includeModules:
            try:
                mf.import_hook(name)
            except ImportError:
                self.missingModules.append(name)
        mf.run_script(self.mainprogram)
        modules = mf.modules.items()
        modules.sort()
        for name, mod in modules:
            path = mod.__file__
            if path and self.semi_standalone:
                # skip the standard library
                if path.startswith(LIB) and not path.startswith(SITE_PACKAGES):
                    continue
            if path and mod.__code__ is None:
                # C extension
                filename = os.path.basename(path)
                pathitems = name.split(".")[:-1] + [filename]
                dstpath = pathjoin(*pathitems)
                if self.use_zipimport:
                    if name != "zlib":
                        # neatly pack all extension modules in a subdirectory,
                        # except zlib, since it's necessary for bootstrapping.
                        dstpath = pathjoin("ExtensionModules", dstpath)
                    # Python modules are stored in a Zip archive, but put
                    # extensions in Contents/Resources/. Add a tiny "loader"
                    # program in the Zip archive. Due to Thomas Heller.
                    source = EXT_LOADER % {"name": name, "filename": dstpath}
                    code = compile(source, "<dynloader for %s>" % name, "exec")
                    mod.__code__ = code
                self.files.append((path, pathjoin("Contents", "Resources", dstpath)))
            if mod.__code__ is not None:
                ispkg = mod.__path__ is not None
                if not self.use_zipimport or name != "site":
                    # Our site.py is doing the bootstrapping, so we must
                    # include a real .pyc file if self.use_zipimport is True.
                    self.pymodules.append((name, mod.__code__, ispkg))
        if hasattr(mf, "any_missing_maybe"):
            missing, maybe = mf.any_missing_maybe()
        else:
            missing = mf.any_missing()
            maybe = []
        self.missingModules.extend(missing)
        self.maybeMissingModules.extend(maybe)
    def reportMissing(self):
        """Print a human-readable summary of modules modulefinder could not
        resolve, separating definite misses from possible false alarms."""
        missing = [name for name in self.missingModules
                   if name not in MAYMISS_MODULES]
        if self.maybeMissingModules:
            maybe = self.maybeMissingModules
        else:
            maybe = [name for name in missing if "." in name]
            missing = [name for name in missing if "." not in name]
        missing.sort()
        maybe.sort()
        if maybe:
            self.message("Warning: couldn't find the following submodules:", 1)
            self.message("    (Note that these could be false alarms -- "
                         "it's not always", 1)
            self.message("    possible to distinguish between \"from package "
                         "import submodule\" ", 1)
            self.message("    and \"from package import name\")", 1)
            for name in maybe:
                self.message("  ? " + name, 1)
        if missing:
            self.message("Warning: couldn't find the following modules:", 1)
            for name in missing:
                self.message("  ? " + name, 1)
    def report(self):
        """Debug dump of the builder's state.  XXX something decent."""
        # XXX something decent
        import pprint
        pprint.pprint(self.__dict__)
        if self.standalone or self.semi_standalone:
            self.reportMissing()
#
# Utilities.
#
# All module filename suffixes the import machinery recognizes
# (e.g. ".py", ".pyc", ".so").
# NOTE(review): not referenced anywhere in this part of the file.
SUFFIXES = [_suf for _suf, _mode, _tp in imp.get_suffixes()]
identifierRE = re.compile(r"[_a-zA-z][_a-zA-Z0-9]*$")
def findPackageContents(name, searchpath=None):
    """Return a dict mapping dotted module names to None for *name* and,
    if it is a package, every submodule found recursively inside it.

    Returns {} when *name* is not a valid identifier or cannot be found
    on *searchpath*.
    """
    head = name.split(".")[-1]
    if identifierRE.match(head) is None:
        return {}
    try:
        fp, path, (ext, mode, tp) = imp.find_module(head, searchpath)
    except ImportError:
        return {}
    # BUGFIX: imp.find_module() returns an *open* file object for plain
    # modules (it's None for packages); it was previously leaked.  Only
    # the metadata is needed here, so close it immediately.
    if fp is not None:
        fp.close()
    modules = {name: None}
    if tp == imp.PKG_DIRECTORY and path:
        files = os.listdir(path)
        for sub in files:
            sub, ext = os.path.splitext(sub)
            fullname = name + "." + sub
            if sub != "__init__" and fullname not in modules:
                modules.update(findPackageContents(fullname, [path]))
    return modules
def writePyc(code, path):
    """Write code object *code* to *path* in .pyc layout: MAGIC, a zeroed
    4-byte timestamp, then the marshalled code object.

    BUGFIX: the file handle used to leak if marshal.dump() raised; a
    context manager now closes it deterministically.  The zero padding is
    a bytes literal (identical bytes on Python 2, and also correct when
    MAGIC is bytes).
    """
    with open(path, "wb") as f:
        f.write(MAGIC)
        f.write(b"\0" * 4)  # don't bother about a time stamp
        marshal.dump(code, f)
def copy(src, dst, mkdirs=0):
    """Copy *src* to *dst*.  Directories are copied recursively with
    symlinks preserved; with a true *mkdirs*, dst's parent directories
    are created first."""
    if mkdirs:
        makedirs(os.path.dirname(dst))
    if not os.path.isdir(src):
        shutil.copy2(src, dst)
    else:
        shutil.copytree(src, dst, symlinks=1)
def copytodir(src, dstdir):
    """Copy a file or a directory into the existing directory *dstdir*,
    keeping its base name."""
    copy(src, pathjoin(dstdir, os.path.basename(src)))
def makedirs(dir):
    """Make all directories leading up to 'dir' including the leaf
    directory. Don't moan if any path element already exists.

    Portability fix: the Python-2-only ``except OSError, why`` syntax is
    replaced with the 2/3-compatible sys.exc_info() form; behavior on
    Python 2 is unchanged (EEXIST is swallowed, anything else re-raised).
    """
    try:
        os.makedirs(dir)
    except OSError:
        why = sys.exc_info()[1]
        if why.errno != errno.EEXIST:
            raise
def symlink(src, dst, mkdirs=0):
    """Create *dst* as a symlink to the absolute path of *src*; with a
    true *mkdirs*, create dst's parent directories first.

    Raises IOError if *src* does not exist.  (Fixes the copy/pasted
    "Copy a file or a directory." docstring, and uses the 2/3-compatible
    ``raise IOError(...)`` call form instead of the Python-2-only
    ``raise IOError, msg`` statement; the exception raised is identical.)
    """
    if not os.path.exists(src):
        raise IOError("No such file or directory: '%s'" % src)
    if mkdirs:
        makedirs(os.path.dirname(dst))
    os.symlink(os.path.abspath(src), dst)
def pathjoin(*args):
    """Safe wrapper for os.path.join: asserts that all but the first
    argument are relative paths."""
    assert all(seg[0] != "/" for seg in args[1:])
    return os.path.join(*args)
cmdline_doc = """\
Usage:
python bundlebuilder.py [options] command
python mybuildscript.py [options] command
Commands:
build build the application
report print a report
Options:
-b, --builddir=DIR the build directory; defaults to "build"
-n, --name=NAME application name
-r, --resource=FILE extra file or folder to be copied to Resources
-f, --file=SRC:DST extra file or folder to be copied into the bundle;
DST must be a path relative to the bundle root
-e, --executable=FILE the executable to be used
-m, --mainprogram=FILE the Python main program
-a, --argv add a wrapper main program to create sys.argv
-p, --plist=FILE .plist file (default: generate one)
--nib=NAME main nib name
-c, --creator=CCCC 4-char creator code (default: '????')
--iconfile=FILE filename of the icon (an .icns file) to be used
as the Finder icon
--bundle-id=ID the CFBundleIdentifier, in reverse-dns format
(eg. org.python.BuildApplet; this is used for
the preferences file name)
-l, --link symlink files/folder instead of copying them
--link-exec symlink the executable instead of copying it
--standalone build a standalone application, which is fully
independent of a Python installation
--semi-standalone build a standalone application, which depends on
an installed Python, yet includes all third-party
modules.
--no-zipimport Do not copy code into a zip file
--python=FILE Python to use in #! line in stead of current Python
--lib=FILE shared library or framework to be copied into
the bundle
-x, --exclude=MODULE exclude module (with --(semi-)standalone)
-i, --include=MODULE include module (with --(semi-)standalone)
--package=PACKAGE include a whole package (with --(semi-)standalone)
--strip strip binaries (remove debug info)
-v, --verbose increase verbosity level
-q, --quiet decrease verbosity level
-h, --help print this message
"""
def usage(msg=None):
    """Print an optional error message followed by the command-line help
    text, then exit with status 1.

    Portability fix: the Python-2-only ``print`` statements are replaced
    with equivalent sys.stdout.write() calls (same output bytes).
    """
    if msg:
        sys.stdout.write("%s\n" % (msg,))
    sys.stdout.write("%s\n" % (cmdline_doc,))
    sys.exit(1)
def main(builder=None):
    """Command-line driver: parse sys.argv options into *builder* (a fresh
    AppBuilder by default) and run the requested command.

    Commands: 'build' (setup + build), 'report' (setup + report), 'help'.
    Any parse error or unknown command goes through usage(), which exits.
    """
    if builder is None:
        builder = AppBuilder(verbosity=1)
    shortopts = "b:n:r:f:e:m:c:p:lx:i:hvqa"
    # BUGFIX: a missing comma after "destroot=" concatenated it with
    # "no-zipimport" into the single bogus long option
    # "destroot=no-zipimport", so neither --destroot nor --no-zipimport
    # was ever recognized by getopt.
    longopts = ("builddir=", "name=", "resource=", "file=", "executable=",
                "mainprogram=", "creator=", "nib=", "plist=", "link",
                "link-exec", "help", "verbose", "quiet", "argv", "standalone",
                "exclude=", "include=", "package=", "strip", "iconfile=",
                "lib=", "python=", "semi-standalone", "bundle-id=",
                "destroot=", "no-zipimport"
                )
    try:
        options, args = getopt.getopt(sys.argv[1:], shortopts, longopts)
    except getopt.error:
        usage()
    for opt, arg in options:
        if opt in ('-b', '--builddir'):
            builder.builddir = arg
        elif opt in ('-n', '--name'):
            builder.name = arg
        elif opt in ('-r', '--resource'):
            builder.resources.append(os.path.normpath(arg))
        elif opt in ('-f', '--file'):
            srcdst = arg.split(':')
            if len(srcdst) != 2:
                usage("-f or --file argument must be two paths, "
                      "separated by a colon")
            builder.files.append(srcdst)
        elif opt in ('-e', '--executable'):
            builder.executable = arg
        elif opt in ('-m', '--mainprogram'):
            builder.mainprogram = arg
        elif opt in ('-a', '--argv'):
            builder.argv_emulation = 1
        elif opt in ('-c', '--creator'):
            builder.creator = arg
        elif opt == '--bundle-id':
            builder.bundle_id = arg
        elif opt == '--iconfile':
            builder.iconfile = arg
        elif opt == "--lib":
            builder.libs.append(os.path.normpath(arg))
        elif opt == "--nib":
            builder.nibname = arg
        elif opt in ('-p', '--plist'):
            builder.plist = Plist.fromFile(arg)
        elif opt in ('-l', '--link'):
            builder.symlink = 1
        elif opt == '--link-exec':
            builder.symlink_exec = 1
        elif opt in ('-h', '--help'):
            usage()
        elif opt in ('-v', '--verbose'):
            builder.verbosity += 1
        elif opt in ('-q', '--quiet'):
            builder.verbosity -= 1
        elif opt == '--standalone':
            builder.standalone = 1
        elif opt == '--semi-standalone':
            builder.semi_standalone = 1
        elif opt == '--python':
            builder.python = arg
        elif opt in ('-x', '--exclude'):
            builder.excludeModules.append(arg)
        elif opt in ('-i', '--include'):
            builder.includeModules.append(arg)
        elif opt == '--package':
            builder.includePackages.append(arg)
        elif opt == '--strip':
            builder.strip = 1
        elif opt == '--destroot':
            builder.destroot = arg
        elif opt == '--no-zipimport':
            builder.use_zipimport = False
    if len(args) != 1:
        usage("Must specify one command ('build', 'report' or 'help')")
    command = args[0]
    if command == "build":
        builder.setup()
        builder.build()
    elif command == "report":
        builder.setup()
        builder.report()
    elif command == "help":
        usage()
    else:
        usage("Unknown command '%s'" % command)
def buildapp(**kwargs):
    """Script-friendly entry point: construct an AppBuilder from keyword
    arguments and hand it to main() for command-line processing."""
    main(AppBuilder(**kwargs))
# Allow running this module directly as a build script.
if __name__ == "__main__":
    main()
| gpl-3.0 |
neerja28/Tempest | tempest/api/identity/admin/v3/test_tokens.py | 10 | 6798 | # Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest_lib.common.utils import data_utils
from tempest_lib import exceptions as lib_exc
from tempest.api.identity import base
from tempest import test
class TokensV3TestJSON(base.BaseIdentityV3AdminTest):
    """Keystone v3 token lifecycle tests: issue, inspect, revoke and
    rescope tokens through the admin identity client."""
    @test.idempotent_id('0f9f5a5f-d5cd-4a86-8a5b-c5ded151f212')
    def test_tokens(self):
        """Issue a token for a fresh user, read it back, then revoke it."""
        # Valid user's token is authenticated
        # Create a User
        u_name = data_utils.rand_name('user')
        u_desc = '%s-description' % u_name
        u_email = '%s@testmail.tm' % u_name
        u_password = data_utils.rand_name('pass')
        user = self.client.create_user(
            u_name, description=u_desc, password=u_password,
            email=u_email)
        self.addCleanup(self.client.delete_user, user['id'])
        # Perform Authentication
        resp = self.token.auth(user_id=user['id'],
                               password=u_password).response
        subject_token = resp['x-subject-token']
        # Perform GET Token
        token_details = self.client.get_token(subject_token)
        self.assertEqual(resp['x-subject-token'], subject_token)
        self.assertEqual(token_details['user']['id'], user['id'])
        self.assertEqual(token_details['user']['name'], u_name)
        # Perform Delete Token; a subsequent GET must 404
        self.client.delete_token(subject_token)
        self.assertRaises(lib_exc.NotFound, self.client.get_token,
                          subject_token)
    @test.idempotent_id('565fa210-1da1-4563-999b-f7b5b67cf112')
    def test_rescope_token(self):
        """Rescope a token.
        An unscoped token can be requested, that token can be used to request a
        scoped token. The scoped token can be revoked, and the original token
        used to get a token in a different project.
        """
        # Create a user.
        user_name = data_utils.rand_name(name='user')
        user_password = data_utils.rand_name(name='pass')
        user = self.client.create_user(user_name, password=user_password)
        self.addCleanup(self.client.delete_user, user['id'])
        # Create a couple projects
        project1_name = data_utils.rand_name(name='project')
        project1 = self.client.create_project(project1_name)
        self.addCleanup(self.client.delete_project, project1['id'])
        project2_name = data_utils.rand_name(name='project')
        project2 = self.client.create_project(project2_name)
        self.addCleanup(self.client.delete_project, project2['id'])
        # Create a role
        role_name = data_utils.rand_name(name='role')
        role = self.client.create_role(role_name)
        self.addCleanup(self.client.delete_role, role['id'])
        # Grant the user the role on both projects.
        self.client.assign_user_role(project1['id'], user['id'],
                                     role['id'])
        self.client.assign_user_role(project2['id'], user['id'],
                                     role['id'])
        # Get an unscoped token.
        token_auth = self.token.auth(user_id=user['id'],
                                     password=user_password)
        token_id = token_auth.response['x-subject-token']
        orig_expires_at = token_auth['token']['expires_at']
        orig_issued_at = token_auth['token']['issued_at']
        orig_user = token_auth['token']['user']
        # NOTE: 'unicode' is the Python 2 string type -- this file predates
        # a Python 3 port.
        self.assertIsInstance(token_auth['token']['expires_at'], unicode)
        self.assertIsInstance(token_auth['token']['issued_at'], unicode)
        self.assertEqual(['password'], token_auth['token']['methods'])
        self.assertEqual(user['id'], token_auth['token']['user']['id'])
        self.assertEqual(user['name'], token_auth['token']['user']['name'])
        self.assertEqual('default',
                         token_auth['token']['user']['domain']['id'])
        self.assertEqual('Default',
                         token_auth['token']['user']['domain']['name'])
        # An unscoped token carries no catalog, project or roles.
        self.assertNotIn('catalog', token_auth['token'])
        self.assertNotIn('project', token_auth['token'])
        self.assertNotIn('roles', token_auth['token'])
        # Use the unscoped token to get a scoped token.
        token_auth = self.token.auth(token=token_id,
                                     project_name=project1_name,
                                     project_domain_name='Default')
        token1_id = token_auth.response['x-subject-token']
        self.assertEqual(orig_expires_at, token_auth['token']['expires_at'],
                         'Expiration time should match original token')
        self.assertIsInstance(token_auth['token']['issued_at'], unicode)
        self.assertNotEqual(orig_issued_at, token_auth['token']['issued_at'])
        self.assertEqual(set(['password', 'token']),
                         set(token_auth['token']['methods']))
        self.assertEqual(orig_user, token_auth['token']['user'],
                         'User should match original token')
        self.assertIsInstance(token_auth['token']['catalog'], list)
        self.assertEqual(project1['id'],
                         token_auth['token']['project']['id'])
        self.assertEqual(project1['name'],
                         token_auth['token']['project']['name'])
        self.assertEqual('default',
                         token_auth['token']['project']['domain']['id'])
        self.assertEqual('Default',
                         token_auth['token']['project']['domain']['name'])
        self.assertEqual(1, len(token_auth['token']['roles']))
        self.assertEqual(role['id'], token_auth['token']['roles'][0]['id'])
        self.assertEqual(role['name'], token_auth['token']['roles'][0]['name'])
        # Revoke the scoped token (the original unscoped one stays valid).
        self.client.delete_token(token1_id)
        # Now get another scoped token using the unscoped token.
        token_auth = self.token.auth(token=token_id,
                                     project_name=project2_name,
                                     project_domain_name='Default')
        self.assertEqual(project2['id'],
                         token_auth['token']['project']['id'])
        self.assertEqual(project2['name'],
                         token_auth['token']['project']['name'])
| apache-2.0 |
rouge8/moto | tests/test_ec2/test_vpc_peering.py | 23 | 3117 | from __future__ import unicode_literals
# Ensure 'assert_raises' context manager support for Python 2.6
import tests.backport_assert_raises
from nose.tools import assert_raises
import boto
from boto.exception import EC2ResponseError
import sure # noqa
from moto import mock_ec2
from tests.helpers import requires_boto_gte
@requires_boto_gte("2.32.0")
@mock_ec2
def test_vpc_peering_connections():
    """Create a peering connection between two fresh mocked VPCs.

    Doubles as a fixture: the tests below call this function directly
    for its returned vpc_pcx (re-entering @mock_ec2 keeps the same
    mocked backend state).
    """
    conn = boto.connect_vpc('the_key', 'the_secret')
    vpc = conn.create_vpc("10.0.0.0/16")
    peer_vpc = conn.create_vpc("11.0.0.0/16")
    vpc_pcx = conn.create_vpc_peering_connection(vpc.id, peer_vpc.id)
    vpc_pcx._status.code.should.equal('initiating-request')
    return vpc_pcx
@requires_boto_gte("2.32.0")
@mock_ec2
def test_vpc_peering_connections_get_all():
    """Listing peering connections returns the one just created, and its
    status has moved from initiating-request to pending-acceptance."""
    conn = boto.connect_vpc('the_key', 'the_secret')
    vpc_pcx = test_vpc_peering_connections()
    vpc_pcx._status.code.should.equal('initiating-request')
    all_vpc_pcxs = conn.get_all_vpc_peering_connections()
    all_vpc_pcxs.should.have.length_of(1)
    all_vpc_pcxs[0]._status.code.should.equal('pending-acceptance')
@requires_boto_gte("2.32.0")
@mock_ec2
def test_vpc_peering_connections_accept():
    """Accepting a pending connection makes it active; rejecting an
    already-accepted one must fail with InvalidStateTransition."""
    conn = boto.connect_vpc('the_key', 'the_secret')
    vpc_pcx = test_vpc_peering_connections()
    vpc_pcx = conn.accept_vpc_peering_connection(vpc_pcx.id)
    vpc_pcx._status.code.should.equal('active')
    with assert_raises(EC2ResponseError) as cm:
        conn.reject_vpc_peering_connection(vpc_pcx.id)
    cm.exception.code.should.equal('InvalidStateTransition')
    cm.exception.status.should.equal(400)
    cm.exception.request_id.should_not.be.none
    all_vpc_pcxs = conn.get_all_vpc_peering_connections()
    all_vpc_pcxs.should.have.length_of(1)
    all_vpc_pcxs[0]._status.code.should.equal('active')
@requires_boto_gte("2.32.0")
@mock_ec2
def test_vpc_peering_connections_reject():
    """Rejecting a pending connection works; accepting a rejected one
    must fail with InvalidStateTransition."""
    conn = boto.connect_vpc('the_key', 'the_secret')
    vpc_pcx = test_vpc_peering_connections()
    verdict = conn.reject_vpc_peering_connection(vpc_pcx.id)
    verdict.should.equal(True)
    with assert_raises(EC2ResponseError) as cm:
        conn.accept_vpc_peering_connection(vpc_pcx.id)
    cm.exception.code.should.equal('InvalidStateTransition')
    cm.exception.status.should.equal(400)
    cm.exception.request_id.should_not.be.none
    all_vpc_pcxs = conn.get_all_vpc_peering_connections()
    all_vpc_pcxs.should.have.length_of(1)
    all_vpc_pcxs[0]._status.code.should.equal('rejected')
@requires_boto_gte("2.32.1")
@mock_ec2
def test_vpc_peering_connections_delete():
    """Deleting a connection empties the listing; deleting an unknown id
    must fail with InvalidVpcPeeringConnectionId.NotFound."""
    conn = boto.connect_vpc('the_key', 'the_secret')
    vpc_pcx = test_vpc_peering_connections()
    verdict = vpc_pcx.delete()
    verdict.should.equal(True)
    all_vpc_pcxs = conn.get_all_vpc_peering_connections()
    all_vpc_pcxs.should.have.length_of(0)
    with assert_raises(EC2ResponseError) as cm:
        conn.delete_vpc_peering_connection("pcx-1234abcd")
    cm.exception.code.should.equal('InvalidVpcPeeringConnectionId.NotFound')
    cm.exception.status.should.equal(400)
    cm.exception.request_id.should_not.be.none
jamesmunns/wate_backend | prototyping/create_schema.py | 1 | 1046 | import psycopg2
import getpass
# Connection parameters: the database role is assumed to match the OS
# login name (getpass.getuser()); the password is prompted interactively
# so it never appears in the source or shell history.
username = getpass.getuser()
password = getpass.getpass("Database password for {}: ".format(username))
# Target database name.
database = "wate"
def create_user_table(cursor):
    """Create the ``users`` table via *cursor*.

    NOTE(review): the ``citext`` column type requires the citext
    extension to be installed in the target database -- confirm.
    """
    ddl = """
    CREATE TABLE users (
    id serial PRIMARY KEY,
    name text NOT NULL,
    username text NOT NULL,
    email citext UNIQUE NOT NULL,
    joindate date NOT NULL,
    passhash character (60),
    use_metric_units boolean,
    emails_disabled boolean
    );
    """
    cursor.execute(ddl)
def create_weight_table(cursor):
    """Create the ``weights`` table (one row per weigh-in, keyed to a
    ``users`` row) via *cursor*."""
    ddl = """
    CREATE TABLE weights (
    user_id integer REFERENCES users(id) NOT NULL,
    weight_lbs numeric CHECK (weight_lbs > 0) NOT NULL,
    measure_date date NOT NULL,
    measure_time time);
    """
    cursor.execute(ddl)
# One-shot schema creation: connect, create both tables, exit.
# NOTE(review): psycopg2's connection context manager commits (or rolls
# back) the transaction but does not close the connection -- acceptable
# for a script that exits immediately, but confirm.
with psycopg2.connect(dbname=database, user=username, password=password) as conn:
    with conn.cursor() as cur:
        create_user_table(cur)
        create_weight_table(cur)
SrNetoChan/Quantum-GIS | python/plugins/db_manager/info_viewer.py | 67 | 5508 | # -*- coding: utf-8 -*-
"""
/***************************************************************************
Name : DB Manager
Description : Database manager plugin for QGIS
Date : May 23, 2011
copyright : (C) 2011 by Giuseppe Sucameli
email : brush.tyler@gmail.com
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from builtins import str
from qgis.PyQt.QtCore import Qt
from qgis.PyQt.QtWidgets import QTextBrowser, QApplication
from qgis.utils import OverrideCursor
from .db_plugins.plugin import BaseError, DbError, DBPlugin, Schema, Table
from .dlg_db_error import DlgDbError
class InfoViewer(QTextBrowser):
    """Read-only HTML pane showing details of the currently selected
    DB Manager item (connection, schema or table).

    Tracks the shown item and a dirty flag so repeated selections of the
    same, unchanged item skip a refresh; the item's aboutToChange signal
    marks the view dirty.
    """
    def __init__(self, parent=None):
        QTextBrowser.__init__(self, parent)
        # Links are handled by _linkClicked instead of being navigated.
        self.setOpenLinks(False)
        self.item = None
        self.dirty = False
        self._clear()
        self._showPluginInfo()
        self.anchorClicked.connect(self._linkClicked)
    def _linkClicked(self, url):
        """Run an item action for "action:" URLs and refresh on success."""
        if self.item is None:
            return
        if url.scheme() == "action":
            with OverrideCursor(Qt.WaitCursor):
                try:
                    if self.item.runAction(url.path()):
                        self.refresh()
                except BaseError as e:
                    DlgDbError.showError(e, self)
    def refresh(self):
        """Force a re-render of the current item."""
        self.setDirty(True)
        self.showInfo(self.item)
    def showInfo(self, item):
        """Render *item* (DBPlugin, Schema or Table); no-op when the same
        item is already shown and not dirty."""
        if item == self.item and not self.dirty:
            return
        self._clear()
        if item is None:
            return
        if isinstance(item, DBPlugin):
            self._showDatabaseInfo(item)
        elif isinstance(item, Schema):
            self._showSchemaInfo(item)
        elif isinstance(item, Table):
            self._showTableInfo(item)
        else:
            return
        self.item = item
        item.aboutToChange.connect(self.setDirty)
    def setDirty(self, val=True):
        self.dirty = val
    def _clear(self):
        """Disconnect from the previous item and blank the view."""
        if self.item is not None:
            # skip exception on RuntimeError fixes #6892
            try:
                self.item.aboutToChange.disconnect(self.setDirty)
            except RuntimeError:
                pass
            self.item = None
        self.dirty = False
        self.item = None
        self.setHtml("")
    def _showPluginInfo(self):
        """Initial page: plugin title plus any DB driver load errors."""
        from .db_plugins import getDbPluginErrors
        html = u'<div style="background-color:#ffffcc;"><h1>&nbsp;' + self.tr("DB Manager") + '</h1></div>'
        html += '<div style="margin-left:8px;">'
        for msg in getDbPluginErrors():
            html += u"<p>%s" % msg
        self.setHtml(html)
    def _showDatabaseInfo(self, connection):
        html = u'<div style="background-color:#ccffcc;"><h1>&nbsp;%s</h1></div>' % connection.connectionName()
        html += '<div style="margin-left:8px;">'
        try:
            if connection.database() is None:
                html += connection.info().toHtml()
            else:
                html += connection.database().info().toHtml()
        except DbError as e:
            html += u'<p style="color:red">%s</p>' % str(e).replace('\n', '<br>')
        html += '</div>'
        self.setHtml(html)
    def _showSchemaInfo(self, schema):
        html = u'<div style="background-color:#ffcccc;"><h1>&nbsp;%s</h1></div>' % schema.name
        html += '<div style="margin-left:8px;">'
        try:
            html += schema.info().toHtml()
        except DbError as e:
            html += u'<p style="color:red">%s</p>' % str(e).replace('\n', '<br>')
        html += "</div>"
        self.setHtml(html)
    def _showTableInfo(self, table):
        html = u'<div style="background-color:#ccccff"><h1>&nbsp;%s</h1></div>' % table.name
        html += '<div style="margin-left:8px;">'
        try:
            html += table.info().toHtml()
        except DbError as e:
            html += u'<p style="color:red">%s</p>' % str(e).replace('\n', '<br>')
        html += '</div>'
        self.setHtml(html)
        return True
    def setHtml(self, html):
        """Wrap *html* in the default stylesheet and expand special tags
        before delegating to QTextBrowser.setHtml()."""
        # convert special tags :)
        html = str(html).replace('<warning>', '<img src=":/db_manager/warning">&nbsp;&nbsp; ')
        # add default style
        html = u"""
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">
<html>
<head>
   <style type="text/css">
       .section { margin-top: 25px; }
       table.header th { background-color: #dddddd; }
       table.header td { background-color: #f5f5f5; }
       table.header th, table.header td { padding: 0px 10px; }
       table td { padding-right: 20px; }
       .underline { text-decoration:underline; }
   </style>
</head>
<body>
%s <br>
</body>
</html>
""" % html
        # print ">>>>>\n", html, "\n<<<<<<"
        return QTextBrowser.setHtml(self, html)
| gpl-2.0 |
jpwhite3/wilmu-linux-toolkit | lab_toolkit/includes/requests/packages/chardet/sbcharsetprober.py | 2927 | 4793 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
import sys
from . import constants
from .charsetprober import CharSetProber
from .compat import wrap_ord
# Tunables for the single-byte sequence model (see SingleByteCharSetProber
# below for how each is used).
SAMPLE_SIZE = 64                 # orders below this take part in the precedence matrix
SB_ENOUGH_REL_THRESHOLD = 1024   # min sequences observed before shortcut decisions
POSITIVE_SHORTCUT_THRESHOLD = 0.95   # confidence above this -> eFoundIt
NEGATIVE_SHORTCUT_THRESHOLD = 0.05   # confidence below this -> eNotMe
SYMBOL_CAT_ORDER = 250           # orders >= this are symbols, excluded from counts
NUMBER_OF_SEQ_CAT = 4            # number of sequence-likelihood categories
POSITIVE_CAT = NUMBER_OF_SEQ_CAT - 1   # index of the most-likely category
#NEGATIVE_CAT = 0
class SingleByteCharSetProber(CharSetProber):
    """Charset prober for single-byte encodings.

    Scores input against a language model: each byte is mapped to a
    frequency "order", and pairs of consecutive frequent characters are
    looked up in a precedence matrix that buckets them into
    NUMBER_OF_SEQ_CAT likelihood categories.
    """
    def __init__(self, model, reversed=False, nameProber=None):
        CharSetProber.__init__(self)
        self._mModel = model
        # TRUE if we need to reverse every pair in the model lookup
        self._mReversed = reversed
        # Optional auxiliary prober for name decision
        self._mNameProber = nameProber
        self.reset()
    def reset(self):
        """Reset all counters for a fresh detection run."""
        CharSetProber.reset(self)
        # char order of last character
        self._mLastOrder = 255
        self._mSeqCounters = [0] * NUMBER_OF_SEQ_CAT
        self._mTotalSeqs = 0
        self._mTotalChar = 0
        # characters that fall in our sampling range
        self._mFreqChar = 0
    def get_charset_name(self):
        if self._mNameProber:
            return self._mNameProber.get_charset_name()
        else:
            return self._mModel['charsetName']
    def feed(self, aBuf):
        """Consume a chunk of bytes, update the sequence counters, and
        possibly shortcut to a found/not-me state.  Returns the state."""
        if not self._mModel['keepEnglishLetter']:
            aBuf = self.filter_without_english_letters(aBuf)
        aLen = len(aBuf)
        if not aLen:
            return self.get_state()
        for c in aBuf:
            order = self._mModel['charToOrderMap'][wrap_ord(c)]
            if order < SYMBOL_CAT_ORDER:
                self._mTotalChar += 1
            if order < SAMPLE_SIZE:
                self._mFreqChar += 1
                # Only count a sequence when both characters are frequent.
                if self._mLastOrder < SAMPLE_SIZE:
                    self._mTotalSeqs += 1
                    if not self._mReversed:
                        i = (self._mLastOrder * SAMPLE_SIZE) + order
                        model = self._mModel['precedenceMatrix'][i]
                    else: # reverse the order of the letters in the lookup
                        i = (order * SAMPLE_SIZE) + self._mLastOrder
                        model = self._mModel['precedenceMatrix'][i]
                    self._mSeqCounters[model] += 1
            self._mLastOrder = order
        if self.get_state() == constants.eDetecting:
            if self._mTotalSeqs > SB_ENOUGH_REL_THRESHOLD:
                cf = self.get_confidence()
                if cf > POSITIVE_SHORTCUT_THRESHOLD:
                    if constants._debug:
                        sys.stderr.write('%s confidence = %s, we have a'
                                         'winner\n' %
                                         (self._mModel['charsetName'], cf))
                    self._mState = constants.eFoundIt
                elif cf < NEGATIVE_SHORTCUT_THRESHOLD:
                    if constants._debug:
                        sys.stderr.write('%s confidence = %s, below negative'
                                         'shortcut threshhold %s\n' %
                                         (self._mModel['charsetName'], cf,
                                          NEGATIVE_SHORTCUT_THRESHOLD))
                    self._mState = constants.eNotMe
        return self.get_state()
    def get_confidence(self):
        """Ratio of positive-category sequences, normalized by the model's
        typical positive ratio and the fraction of frequent characters;
        capped at 0.99, floored at 0.01 when no sequences were seen."""
        r = 0.01
        if self._mTotalSeqs > 0:
            r = ((1.0 * self._mSeqCounters[POSITIVE_CAT]) / self._mTotalSeqs
                 / self._mModel['mTypicalPositiveRatio'])
            r = r * self._mFreqChar / self._mTotalChar
            if r >= 1.0:
                r = 0.99
        return r
| mit |
skycucumber/xuemc | python/venv/lib/python2.7/site-packages/migrate/tests/versioning/test_schemadiff.py | 78 | 6755 | # -*- coding: utf-8 -*-
import os
from sqlalchemy import *
from migrate.versioning import schemadiff
from migrate.tests import fixture
class SchemaDiffBase(fixture.DB):
    """Shared helpers for schema-diff tests: build a small table and assert
    on the rendered diff text."""
    level = fixture.DB.CONNECT
    def _make_table(self,*cols,**kw):
        """Create 'xtable' with an integer pk plus *cols; create=False only
        registers it in the metadata without touching the database."""
        self.table = Table('xtable', self.meta,
            Column('id',Integer(), primary_key=True),
            *cols
        )
        if kw.get('create',True):
            self.table.create()
    def _assert_diff(self,col_A,col_B):
        """Create col_A in the db and col_B in the model, then assert the
        diff reports exactly one differing column with both renderings."""
        self._make_table(col_A)
        self.meta.clear()
        self._make_table(col_B,create=False)
        diff = self._run_diff()
        # print diff
        self.assertTrue(diff)
        self.assertEqual(1,len(diff.tables_different))
        td = list(diff.tables_different.values())[0]
        self.assertEqual(1,len(td.columns_different))
        cd = list(td.columns_different.values())[0]
        label_width = max(len(self.name1), len(self.name2))
        self.assertEqual(('Schema diffs:\n'
                          '  table with differences: xtable\n'
                          '    column with differences: data\n'
                          '      %*s: %r\n'
                          '      %*s: %r')%(
                label_width,
                self.name1,
                cd.col_A,
                label_width,
                self.name2,
                cd.col_B
                ),str(diff))
class Test_getDiffOfModelAgainstDatabase(SchemaDiffBase):
    """Diff the in-memory model metadata against the live database schema."""
    name1 = 'model'
    name2 = 'database'
    def _run_diff(self,**kw):
        return schemadiff.getDiffOfModelAgainstDatabase(
            self.meta, self.engine, **kw
            )
    @fixture.usedb()
    def test_table_missing_in_db(self):
        # Table registered in the model only, never created in the db.
        self._make_table(create=False)
        diff = self._run_diff()
        self.assertTrue(diff)
        self.assertEqual('Schema diffs:\n  tables missing from %s: xtable' % self.name2,
                         str(diff))
    @fixture.usedb()
    def test_table_missing_in_model(self):
        # Table created in the db, then dropped from the model metadata.
        self._make_table()
        self.meta.clear()
        diff = self._run_diff()
        self.assertTrue(diff)
        self.assertEqual('Schema diffs:\n  tables missing from %s: xtable' % self.name1,
                         str(diff))
    @fixture.usedb()
    def test_column_missing_in_db(self):
        # db
        Table('xtable', self.meta,
              Column('id',Integer(), primary_key=True),
              ).create()
        self.meta.clear()
        # model
        self._make_table(
            Column('xcol',Integer()),
            create=False
            )
        # run diff
        diff = self._run_diff()
        self.assertTrue(diff)
        self.assertEqual('Schema diffs:\n'
                         '  table with differences: xtable\n'
                         '    %s missing these columns: xcol' % self.name2,
                         str(diff))
    @fixture.usedb()
    def test_column_missing_in_model(self):
        # db
        self._make_table(
            Column('xcol',Integer()),
            )
        self.meta.clear()
        # model
        self._make_table(
            create=False
            )
        # run diff
        diff = self._run_diff()
        self.assertTrue(diff)
        self.assertEqual('Schema diffs:\n'
                         '  table with differences: xtable\n'
                         '    %s missing these columns: xcol' % self.name1,
                         str(diff))
    @fixture.usedb()
    def test_exclude_tables(self):
        # db
        Table('ytable', self.meta,
              Column('id',Integer(), primary_key=True),
              ).create()
        Table('ztable', self.meta,
              Column('id',Integer(), primary_key=True),
              ).create()
        self.meta.clear()
        # model
        self._make_table(
            create=False
            )
        Table('ztable', self.meta,
              Column('id',Integer(), primary_key=True),
              )
        # run diff
        diff = self._run_diff(excludeTables=('xtable','ytable'))
        # ytable only in database
        # xtable only in model
        # ztable identical on both
        # ...so we expect no diff!
        self.assertFalse(diff)
        self.assertEqual('No schema diffs',str(diff))
    @fixture.usedb()
    def test_identical_just_pk(self):
        self._make_table()
        diff = self._run_diff()
        self.assertFalse(diff)
        self.assertEqual('No schema diffs',str(diff))
    @fixture.usedb()
    def test_different_type(self):
        self._assert_diff(
            Column('data', String(10)),
            Column('data', Integer()),
            )
    @fixture.usedb()
    def test_int_vs_float(self):
        self._assert_diff(
            Column('data', Integer()),
            Column('data', Float()),
            )
    # NOTE(mriedem): The ibm_db_sa driver handles the Float() as a DOUBLE()
    # which extends Numeric() but isn't defined in sqlalchemy.types, so we
    # can't check for it as a special case like is done in schemadiff.ColDiff.
    @fixture.usedb(not_supported='ibm_db_sa')
    def test_float_vs_numeric(self):
        self._assert_diff(
            Column('data', Float()),
            Column('data', Numeric()),
            )
    @fixture.usedb()
    def test_numeric_precision(self):
        self._assert_diff(
            Column('data', Numeric(precision=5)),
            Column('data', Numeric(precision=6)),
            )
    @fixture.usedb()
    def test_numeric_scale(self):
        self._assert_diff(
            Column('data', Numeric(precision=6,scale=0)),
            Column('data', Numeric(precision=6,scale=1)),
            )
    @fixture.usedb()
    def test_string_length(self):
        self._assert_diff(
            Column('data', String(10)),
            Column('data', String(20)),
            )
    @fixture.usedb()
    def test_integer_identical(self):
        self._make_table(
            Column('data', Integer()),
            )
        diff = self._run_diff()
        self.assertEqual('No schema diffs',str(diff))
        self.assertFalse(diff)
    @fixture.usedb()
    def test_string_identical(self):
        self._make_table(
            Column('data', String(10)),
            )
        diff = self._run_diff()
        self.assertEqual('No schema diffs',str(diff))
        self.assertFalse(diff)
    @fixture.usedb()
    def test_text_identical(self):
        self._make_table(
            Column('data', Text),
            )
        diff = self._run_diff()
        self.assertEqual('No schema diffs',str(diff))
        self.assertFalse(diff)
class Test_getDiffOfModelAgainstModel(Test_getDiffOfModelAgainstDatabase):
    """Same test matrix, but diffing two MetaData objects: the model versus
    a metadata freshly reflected from the database."""
    name1 = 'metadataA'
    name2 = 'metadataB'
    def _run_diff(self,**kw):
        db_meta= MetaData()
        db_meta.reflect(self.engine)
        return schemadiff.getDiffOfModelAgainstModel(
            self.meta, db_meta, **kw
            )
| gpl-2.0 |
Wilo/barcampMilagro2015 | pushfeed/pushfeed/pipelines.py | 1 | 1083 | # -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import logging
from scrapy.conf import settings
from scrapy.exceptions import DropItem
import rethinkdb as r
#class PushfeedPipeline(object):
# def process_item(self, item, spider):
# return item
class RethinkdbPipeline(object):
    """Scrapy item pipeline that stores scraped feed entries in RethinkDB.

    Connection parameters come from the scrapy settings:
    RETHINKDB_SERVER, RETHINKDB_PORT, RETHINKDB_DB and RETHINKDB_TABLE.
    """
    def __init__(self):
        # repl() registers the connection as the implicit default for .run().
        r.connect(settings['RETHINKDB_SERVER'], settings['RETHINKDB_PORT']).repl()
        self.db = r.db(settings['RETHINKDB_DB']).table(settings['RETHINKDB_TABLE'])
    def process_item(self, item, spider):
        # NOTE(review): iterating `item` yields its field names, so this
        # check only fires on a falsy key — presumably it was meant to
        # validate field *values*; confirm before relying on it.
        for data in item:
            if not data:
                raise DropItem
        # Each scraped field is a one-element list; flatten to plain values.
        data = dict(title=item['title'][0], description=item['description'][0],
            date=item['date'][0], link=item['link'][0], img=item['img'][0])
        self.db.insert(data).run()
        logging.log(logging.INFO,"Feed added to rethinkdb database!")
        return item
| mit |
trivoldus28/pulsarch-verilog | tools/local/bas-release/bas,3.9/lib/python/lib/python2.3/test/test_select.py | 9 | 1647 | # Testing select module
from test.test_support import verbose
import select
import os
# test some known error conditions
try:
    # all three arguments must be sequences, not plain ints
    rfd, wfd, xfd = select.select(1, 2, 3)
except TypeError:
    pass
else:
    print 'expected TypeError exception not raised'
class Nope:
    # no fileno() at all
    pass
class Almost:
    # has fileno(), but it returns a non-integer, so still unusable
    def fileno(self):
        return 'fileno'
try:
    rfd, wfd, xfd = select.select([Nope()], [], [])
except TypeError:
    pass
else:
    print 'expected TypeError exception not raised'
try:
    rfd, wfd, xfd = select.select([Almost()], [], [])
except TypeError:
    pass
else:
    print 'expected TypeError exception not raised'
try:
    # timeout must be a number or None
    rfd, wfd, xfd = select.select([], [], [], 'not a number')
except TypeError:
    pass
else:
    print 'expected TypeError exception not raised'
def test():
    # Exercise select() against a real pipe fed by a slow shell command,
    # cycling through several timeouts until the command's output ends.
    import sys
    if sys.platform[:3] in ('win', 'mac', 'os2', 'riscos'):
        if verbose:
            print "Can't test select easily on", sys.platform
        return
    cmd = 'for i in 0 1 2 3 4 5 6 7 8 9; do echo testing...; sleep 1; done'
    p = os.popen(cmd, 'r')
    for tout in (0, 1, 2, 4, 8, 16) + (None,)*10:
        if verbose:
            print 'timeout =', tout
        rfd, wfd, xfd = select.select([p], [], [], tout)
        if (rfd, wfd, xfd) == ([], [], []):
            # timed out with nothing readable yet
            continue
        if (rfd, wfd, xfd) == ([p], [], []):
            line = p.readline()
            if verbose:
                print `line`
            if not line:
                if verbose:
                    print 'EOF'
                break
            continue
        print 'Unexpected return values from select():', rfd, wfd, xfd
    p.close()
test()
| gpl-2.0 |
martonw/phantomjs | src/breakpad/src/tools/gyp/test/configurations/inheritance/gyptest-inheritance.py | 430 | 1047 | #!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies build of an executable in three different configurations.
"""
import TestGyp
# Build the same target under each configuration and check that the
# inherited configuration fragments appear in the program's output.
test = TestGyp.TestGyp()
test.run_gyp('configurations.gyp')
test.set_configuration('Release')
test.build('configurations.gyp')
test.run_built_executable('configurations',
                          stdout=('Base configuration\n'
                                  'Common configuration\n'
                                  'Common2 configuration\n'
                                  'Release configuration\n'))
test.set_configuration('Debug')
test.build('configurations.gyp')
test.run_built_executable('configurations',
                          stdout=('Base configuration\n'
                                  'Common configuration\n'
                                  'Common2 configuration\n'
                                  'Debug configuration\n'))
test.pass_test()
| bsd-3-clause |
ask/kamqp | kamqp/client_0_8/__init__.py | 1 | 1253 | """AMQP Client implementing the 0-8 spec."""
# Copyright (C) 2007-2008 Barry Pederson <bp@barryp.org>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
#
# Pull in the public items from the various sub-modules
#
from .basic_message import Message
from .channel import Channel
from .connection import Connection
from .exceptions import (AMQPError, AMQPConnectionError,
AMQPChannelError, AMQPInternalError)
# Public API of the kamqp 0-8 client package.
__all__ = ["Connection", "Channel", "Message", "AMQPError",
           "AMQPConnectionError", "AMQPChannelError",
           "AMQPInternalError"]
| lgpl-2.1 |
# Transliterations for code points U+C600..U+C6FF (precomposed Hangul
# syllables).  Rather than hand-maintaining 256 literals, derive each entry
# arithmetically: a syllable decomposes into lead consonant, vowel and
# optional tail jamo, and its romanization is the concatenation of the
# three parts (Unicode Standard, Hangul syllable decomposition).

_LEADS = ('g', 'gg', 'n', 'd', 'dd', 'r', 'm', 'b', 'bb', 's', 'ss', '',
          'j', 'jj', 'c', 'k', 't', 'p', 'h')
_VOWELS = ('a', 'ae', 'ya', 'yae', 'eo', 'e', 'yeo', 'ye', 'o', 'wa', 'wae',
           'oe', 'yo', 'u', 'weo', 'we', 'wi', 'yu', 'eu', 'yi', 'i')
_TAILS = ('', 'g', 'gg', 'gs', 'n', 'nj', 'nh', 'd', 'l', 'lg', 'lm', 'lb',
          'ls', 'lt', 'lp', 'lh', 'm', 'b', 'bs', 's', 'ss', 'ng', 'j', 'c',
          'k', 't', 'p', 'h')


def _romanize(codepoint):
    """Romanize one precomposed Hangul syllable (U+AC00 is syllable 0)."""
    index = codepoint - 0xAC00
    lead, rest = divmod(index, len(_VOWELS) * len(_TAILS))
    vowel, tail = divmod(rest, len(_TAILS))
    return _LEADS[lead] + _VOWELS[vowel] + _TAILS[tail]


data = tuple(_romanize(cp) for cp in range(0xC600, 0xC700))
| gpl-3.0 |
hmpf/nav | python/nav/web/portadmin/urls.py | 1 | 1470 | #
# Copyright (C) 2011, 2013-2015 Uninett AS
#
# This file is part of Network Administration Visualized (NAV).
#
# NAV is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 3 as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details. You should have received a copy of the GNU General Public License
# along with NAV. If not, see <http://www.gnu.org/licenses/>.
#
"""PortAdmin Django URL config"""
from django.conf.urls import url
from nav.web.portadmin import views
urlpatterns = [
    # Landing page with the search form.
    url(r'^$',
        views.index,
        name='portadmin-index'),
    # Look up a netbox by IP address.
    url(r'^ip=(?P<ip>[\d\.]+)',
        views.search_by_ip,
        name='portadmin-ip'),
    # Look up a netbox by sysname.
    url(r'^sysname=(?P<sysname>\S+)',
        views.search_by_sysname,
        name='portadmin-sysname'),
    # Look up a single interface by its database id.
    url(r'^interfaceid=(?P<interfaceid>\d+)',
        views.search_by_interfaceid,
        name='portadmin-interface'),
    # AJAX endpoints used by the port admin UI.
    url(r'^save_interfaceinfo',
        views.save_interfaceinfo),
    url(r'^restart_interface',
        views.restart_interface),
    url(r'^write_mem',
        views.write_mem),
    # Edit form for trunk configuration on an interface.
    url(r'^trunk/(?P<interfaceid>\d+)',
        views.render_trunk_edit,
        name="portadmin-render-trunk-edit"),
]
| gpl-3.0 |
shangwuhencc/shogun | examples/undocumented/python_modular/classifier_svmlight_batch_linadd_modular.py | 24 | 1587 | #!/usr/bin/env python
from tools.load import LoadMatrix
# Load the DNA training/test sequences and labels shipped with the examples.
lm=LoadMatrix()
train_dna=lm.load_dna('../data/fm_train_dna.dat')
test_dna=lm.load_dna('../data/fm_test_dna.dat')
label=lm.load_labels('../data/label_train_dna.dat')
# Two parameter sets: (train, test, labels, degree, C, epsilon, num_threads).
parameter_list=[[train_dna, test_dna, label, 20, 0.9, 1e-7, 1],
		[train_dna, test_dna, label, 20, 2.3, 1e-7, 4]]
def classifier_svmlight_batch_linadd_modular (fm_train_dna, fm_test_dna,
		label_train_dna, degree, C, epsilon, num_threads):
	"""Train SVMLight with a weighted-degree string kernel on DNA data and
	classify the test set twice: once with batch/linadd optimizations
	disabled, once enabled.  Returns (labels, svm)."""
	from modshogun import StringCharFeatures, BinaryLabels, DNA
	from modshogun import WeightedDegreeStringKernel, MSG_DEBUG
	try:
		from modshogun import SVMLight
	except ImportError:
		print('No support for SVMLight available.')
		return
	feats_train=StringCharFeatures(DNA)
	#feats_train.io.set_loglevel(MSG_DEBUG)
	feats_train.set_features(fm_train_dna)
	feats_test=StringCharFeatures(DNA)
	feats_test.set_features(fm_test_dna)
	# NOTE(review): this overrides the `degree` argument with 20 — confirm
	# whether the parameter was meant to be honoured.
	degree=20
	kernel=WeightedDegreeStringKernel(feats_train, feats_train, degree)
	labels=BinaryLabels(label_train_dna)
	svm=SVMLight(C, kernel, labels)
	svm.set_epsilon(epsilon)
	svm.parallel.set_num_threads(num_threads)
	svm.train()
	kernel.init(feats_train, feats_test)
	#print('SVMLight Objective: %f num_sv: %d' % \)
	#	(svm.get_objective(), svm.get_num_support_vectors())
	svm.set_batch_computation_enabled(False)
	svm.set_linadd_enabled(False)
	svm.apply().get_labels()
	svm.set_batch_computation_enabled(True)
	labels = svm.apply().get_labels()
	return labels, svm
# Run the first parameter set when executed as a script.
if __name__=='__main__':
	print('SVMlight batch')
	classifier_svmlight_batch_linadd_modular(*parameter_list[0])
| gpl-3.0 |
virneo/opencog | opencog/python/blending/src/connector/connect_conflict_viable.py | 4 | 6602 | import itertools
from opencog.atomspace import *
from blending.src.connector.base_connector import BaseConnector
from blending.src.connector.connect_util import *
import blending.src.connector.equal_link_key as eq_link
from blending.util.blending_config import BlendConfig
from blending.util.blending_error import blending_status
__author__ = 'DongMin Kim'
class ConnectConflictAllViable(BaseConnector):
    """Make 2^k available(viable) new blend atoms if there exists k conflicts.
    Attributes:
        check_type: A link type to check conflict.
        strength_diff_limit: A limit of difference between links strength value.
        confidence_above_limit: A threshold of both links confidence value.
        viable_atoms_count_threshold: A max count limit of new blend atoms.
    :type check_type: opencog.type_constructors.types
    :type strength_diff_limit: float
    :type confidence_above_limit: float
    :type viable_atoms_count_threshold: int
    """
    # TODO: Currently, this class can handle
    # when the number of decided atom is only 2.
    def __init__(self, a):
        super(self.__class__, self).__init__(a)
        self.check_type = None
        self.strength_diff_limit = None
        self.confidence_above_limit = None
        self.viable_atoms_count_threshold = None
    def make_default_config(self):
        """Register this connector's default config values in the AtomSpace."""
        super(self.__class__, self).make_default_config()
        BlendConfig().update(self.a, "connect-check-type", "SimilarityLink")
        BlendConfig().update(self.a, "connect-strength-diff-limit", "0.3")
        BlendConfig().update(self.a, "connect-confidence-above-limit", "0.7")
        BlendConfig().update(self.a, "connect-viable-atoms-count-limit", "100")
    def __prepare_blended_atoms(self, conflict_links, merged_atom):
        """Create the 2^k candidate blend atoms (stored in self.ret)."""
        # 1. Make all available new blended atoms.
        # Number of new atoms is expected to 2^k, if there exists k conflicts.
        if self.viable_atoms_count_threshold is not None:
            if self.viable_atoms_count_threshold < 2 ** len(conflict_links):
                # TODO: Control if expected result atoms count
                # is bigger than threshold
                log.warn(
                    "ConnectConflictAllViable: Too many atoms! ->" +
                    str(2 ** len(conflict_links)) +
                    " atoms will produce."
                )
        # 1-a. Prepare 2^k new blend atoms.
        self.ret = [merged_atom]
        for i in xrange(1, 2 ** len(conflict_links)):
            self.ret.append(
                self.a.add_node(
                    merged_atom.t,
                    merged_atom.name + '-' + str(i), merged_atom.tv
                )
            )
    def __connect_conflict_links(self, conflict_links):
        """Wire each candidate atom to one side of every conflict pair."""
        # 1-b. Prepare cartesian product iterator.
        # if number of conflict_links is 3, this iterator produces:
        # (0, 0, 0), (0, 0, 1), (0, 1, 0), (0, 1, 1), ... (1, 1, 1)
        cartesian_binary_iterator = \
            itertools.product([0, 1], repeat=len(conflict_links))
        # 1-c. Connect to each viable atoms.
        for i, viable_case_binary in enumerate(cartesian_binary_iterator):
            for j, selector in enumerate(viable_case_binary):
                eq_link.key_to_link(
                    self.a,
                    conflict_links[j][selector],
                    self.ret[i],
                    conflict_links[j][selector].tv
                )
    def __connect_non_conflict_links(self, non_conflict_links):
        """Attach duplicate-but-compatible links with an averaged truth value."""
        # 2. Others, evaluate the weighted average of truth value between
        # the duplicate links.
        for merged_atom in self.ret:
            for links in non_conflict_links:
                # array elements are same except original node information.
                link_key_sample = links[0]
                eq_link.key_to_link(
                    self.a, link_key_sample, merged_atom,
                    get_weighted_tv(links)
                )
    def __connect_non_duplicate_links(self, non_duplicate_links):
        """Copy links that exist on only one source atom unchanged."""
        # Just copy.
        for links in non_duplicate_links:
            for link in links:
                for merged_atom in self.ret:
                    eq_link.key_to_link(self.a, link, merged_atom, link.tv)
    def __connect_to_blended_atoms(self, decided_atoms):
        """Link every source atom to every new blend via AssociativeLink."""
        # Make the links between source nodes and newly blended node.
        # TODO: Give proper truth value, not average of truthvalue.
        for merged_atom in self.ret:
            weighted_tv = get_weighted_tv(self.a.get_incoming(merged_atom.h))
            for decided_atom in decided_atoms:
                self.a.add_link(
                    types.AssociativeLink,
                    [decided_atom, merged_atom],
                    weighted_tv
                )
    def __connect_viable_conflict_links(self, decided_atoms, merged_atom):
        """Full pipeline: classify links, spawn candidates, wire everything."""
        duplicate_links, non_duplicate_links = \
            find_duplicate_links(self.a, decided_atoms)
        conflict_links, non_conflict_links = \
            find_conflict_links(
                self.a, duplicate_links,
                self.check_type,
                self.strength_diff_limit,
                self.confidence_above_limit
            )
        self.__prepare_blended_atoms(conflict_links, merged_atom)
        self.__connect_conflict_links(conflict_links)
        self.__connect_non_conflict_links(non_conflict_links)
        self.__connect_non_duplicate_links(non_duplicate_links)
        self.__connect_to_blended_atoms(decided_atoms)
    def link_connect_impl(self, decided_atoms, merged_atom, config_base):
        """Entry point: read config, validate the check type, then connect."""
        check_type_str = BlendConfig().get_str(
            self.a, "connect-check-type", config_base
        )
        strength_diff_threshold = BlendConfig().get_str(
            self.a, "connect-strength-diff-limit", config_base
        )
        confidence_above_threshold = BlendConfig().get_str(
            self.a, "connect-confidence-above-limit", config_base
        )
        viable_atoms_count_threshold = BlendConfig().get_int(
            self.a, "connect-viable-atoms-count-limit", config_base
        )
        self.check_type = get_type(check_type_str)
        # get_type falls back to a default for unknown names; round-trip the
        # name to detect that case and bail out with an error status.
        if check_type_str != get_type_name(self.check_type):
            self.last_status = blending_status.UNKNOWN_TYPE
            return
        self.strength_diff_limit = float(strength_diff_threshold)
        self.confidence_above_limit = float(confidence_above_threshold)
        self.viable_atoms_count_threshold = viable_atoms_count_threshold
        self.__connect_viable_conflict_links(decided_atoms, merged_atom)
| agpl-3.0 |
bmcage/centrifuge-1d | centrifuge1d/modules/direct_consolidation_saturated/options.py | 1 | 7263 | from __future__ import division, print_function
import sys
from ..shared.functions import lagrangian_derivative_coefs
from numpy import linspace, power, empty, array, log
from ..shared.consolidation import (create_CON, CON_SLURRY, CON_GOMPERTZ,
CON_FREEFORM, CON_SLURRY_CC, CON_SLURRY_KWA,
CON_WEIBULL)
def dtype_deps(cfg):
    """Return extra config options required by the chosen discretization.

    Discretization types 2 and 3 (geometric spacing) additionally need the
    'k_dx' ratio; type 1 (linear) and anything else need nothing extra.
    """
    discretization = cfg.get_value('dtype')
    if discretization in (2, 3):
        return ['k_dx']
    return []
PARENTAL_MODULES = ['base']
# Options read from the config file.  Entries are plain option names,
# (name, default) pairs, or (predicate, options-if-true[, defaults-if-false])
# conditional groups evaluated against the parsed configuration.
CONFIG_OPTIONS = ['inner_points', 'dtype',
                  ('con_type', CON_SLURRY),
                  ('con_max_refine', 0),
                  (lambda cfg: cfg.get_value('con_type') == CON_SLURRY,
                    ['a', 'b', 'c', 'd']),
                  (lambda cfg: cfg.get_value('con_type') == CON_SLURRY_CC,
                    ['a', 'cc', 'c', 'd']),
                  (lambda cfg: cfg.get_value('con_type') in [CON_GOMPERTZ,],
                    ['a', 'b', 'c', 'd', 'cc']),
                  (lambda cfg: cfg.get_value('con_type') in [CON_WEIBULL,CON_SLURRY_KWA],
                    ['b', 'e', 'f', 'c', 'd']),
                  (lambda cfg: cfg.get_value('con_type') in [CON_FREEFORM],
                    [('ei', None), ('si', None), ('ki', None), ('eiadd', None)]),
                  'porosity',
                  'estimate_zp0',
                  ('L_atol', 1e-8),
                  dtype_deps,
                  # dependent
                  (lambda cfg: cfg.get_value('fl1') > 0.0,
                    ['fp1'], [('fp1', -1.0)]),
                  (lambda cfg: cfg.get_value('fl2') > 0.0,
                    ['fp2'], [('fp2', -1.0)]),
                  #
                  'rb_type',
                  # dependent
                  (lambda cfg: cfg.get_value('rb_type') == 2,
                    ['h_last']),
                  (lambda cfg: cfg.get_value('rb_type') == 3,
                    ['dip_height']),
                  'h_last',
                  'l0',
                  'wl0',
                  'density_s', #density sample in g/(cm^3)
                  ('excess_load', [0]),
                  ('excess_load_t',[0]),
                  ('numerfact_e0', 0.999),
                  ('e0_overshoot_factor', 0.),
                 ]
# Values computed by adjust_cfg() rather than read from the config file.
INTERNAL_OPTIONS = ['m', 'y', 'y12', 'dy', 'alpha', 'ldc1', 'ldc2', 'ldc3',
                    'k_dx', 'wm0', 'CON',
                    'first_idx', 'last_idx', 'wl_idx', 'L_idx',
                    'mass_in_idx', 'mass_out_idx',
                    'z_size', 'gamma_w', 'gamma_s', 'e0']
EXCLUDE_FROM_MODEL = ['dtype']
PROVIDE_OPTIONS = []
OPTIONS_ITERABLE_LISTS = ['porosity']
def load_func(x, atimes, aloads, duration_change=10):
    """Excess load at time ``x``.

    ``atimes``/``aloads`` are parallel numpy arrays of step times and step
    values; the load at ``x`` is the last step whose time has passed.  When
    a new step is less than ``duration_change`` seconds away, the value
    ramps linearly towards it instead of jumping.
    """
    current = aloads[x >= atimes][-1]
    upcoming = aloads[x + duration_change >= atimes][-1]
    if current == upcoming:
        return current
    # A step change is imminent: interpolate linearly towards the new value.
    t_switch = atimes[x + duration_change >= atimes][-1]
    return (x - (t_switch - duration_change)) / duration_change \
        * (upcoming - current) + current
def create_excess_load(times, loads, duration_change=10):
    """Build a callable ``t -> excess load`` from paired time/load lists.

    Exits the program on mismatched list lengths.  An empty schedule (or the
    trivial single entry t=0, load=0) yields a constant-zero function.
    """
    if len(times) != len(loads):
        print ("ERROR: excess loads and excess load times don't have same array sizes!")
        sys.exit(0)
    trivial = (len(times) == 0
               or (len(times) == 1 and times[0] == 0 and loads[0] == 0))
    if trivial:
        return lambda x: 0.
    atimes = array(times)
    aloads = array(loads)
    return lambda x: load_func(x, atimes, aloads, duration_change)
def adjust_cfg(cfg):
    """Derive all internal solver options from the parsed configuration.

    Computes specific weights, the spatial discretization grid and its
    Lagrangian derivative coefficients, the initial void ratio, a
    consolidation-curve-consistent ks (confirmed interactively), and the
    index layout of the solver state vector.  Mutates ``cfg`` in place.
    """
    #specific weight water in g/(s cm^2)
    cfg.set_value('gamma_w', cfg.get_value('density')*cfg.get_value('g'))
    #specific weight sample in g/(s cm^2)
    cfg.set_value('gamma_s', cfg.get_value('density_s')*cfg.get_value('g'))
    # Discretization
    inner_points = cfg.get_value('inner_points')
    discretization_type = cfg.get_value('dtype')
    if discretization_type == 1:   # linear discretization
        y = linspace(0, 1, inner_points + 2)
    elif discretization_type in [2,3]: # L= a+ka+(k^2)a+...+(k^inner_points)a
        # L=1 (as we use transformed interval <0,1>)
        # L = a*[(1-k^(inner_points +1))/(1-k)]
        k = cfg.get_value('k_dx')
        a=(1-k)/(1-power(k, inner_points+1))
        y= empty([inner_points+2, ])
        y[0] = 0.0; y[-1] = 1.0
        for i in range(1, inner_points+1):
            y[i] = y[i-1] + a
            a = a*k
        if discretization_type == 3:
            # invert it
            tmp = y[::-1]
            y[:] = 1. - tmp[:]
    else:
        print('Unsupported discretization type:', discretization_type)
        exit(1)
    #porosity and void ratio
    por = cfg.get_value('porosity')
    if not (0<por<1):
        print ('Porosity must be a value between 0 and 1. Given:', por)
        exit(1)
    e0 = por/(1-por)
    cfg.set_value('e0', e0)
    print ('Consolidation: Calculated initial void ratio is', cfg.get_value('e0'))
    ksguess = cfg.get_value('ks')
    ks = ksguess
    if cfg.get_value('con_type') in [CON_SLURRY, CON_GOMPERTZ]:
        ks = (1+e0)*(cfg.get_value('c')+cfg.get_value('d')*e0)
        cfg.set_value('ks', ks)
    elif cfg.get_value('con_type') in [CON_SLURRY_CC, CON_SLURRY_KWA, CON_WEIBULL]:
        # NOTE(review): unlike the branch above, the recomputed ks is not
        # written back with cfg.set_value('ks', ks) here, although the
        # message below claims it was changed — confirm whether intentional.
        ks = log(e0/cfg.get_value('c')) / cfg.get_value('d')
    else:
        print ("ERROR: cannot calculate the start ks as consolidation type is not known!")
        sys.exit(0)
    print ('Consolidation: Your guessed ks', ksguess, 'has been changed into calculated', ks, 'cm/s')
    back = raw_input("Continue? [Y/n] ")
    if back.strip().lower() in ['n', "no"]:
        sys.exit(0)
    # Determine consolidation curve model used, all data is now available
    cfg.set_value('CON', create_CON(cfg))
    cfg.set_value('excess_load_f', create_excess_load(
                    cfg.get_value('excess_load_t'),
                    cfg.get_value('excess_load'),
                    duration_change=10))
    cfg.set_value('y', y)
    cfg.set_value('y12', (y[1:]+y[:-1])/2.)
    dy = y[1:]-y[:-1]
    alpha = empty([len(dy)+1, ])
    alpha[0] = 0.
    alpha[1:] = dy
    cfg.set_value('dy', dy)
    cfg.set_value('alpha', alpha)
    ldc1, ldc2, ldc3 = lagrangian_derivative_coefs(dy)
    cfg.set_value('ldc1', ldc1)
    cfg.set_value('ldc2', ldc2)
    cfg.set_value('ldc3', ldc3)
    inner_points = cfg.get_value('inner_points')
    cfg.set_value('sc_max_refine', 0)
    # Layout of the solver state vector 'z': discretization points first,
    # then the auxiliary scalar unknowns.
    cfg.set_value('first_idx', 0)
    cfg.set_value('last_idx', inner_points+1)
    cfg.set_value('mass_in_idx', inner_points+2)
    cfg.set_value('wl_idx', inner_points+3)
    cfg.set_value('L_idx', inner_points+4)
    cfg.set_value('mass_out_idx', inner_points+5)
    # total length of 'z' array (discretization points + s1,s2,mass_in,...)
    cfg.set_value('z_size', inner_points+6)
def check_cfg(cfg):
    """Validate the configuration; at least one of 'wl0'/'ww0' must be set."""
    wl0_missing = cfg.get_value('wl0') is None
    ww0_missing = cfg.get_value('ww0') is None
    # Same condition as before, restated via De Morgan: fail only when
    # both options are absent.
    if wl0_missing and ww0_missing:
        print("One of 'wl0' or 'ww0' parameters must be specified.")
        return False
    return True
| gpl-2.0 |
# Transliterations for code points U+D000..U+D0FF (precomposed Hangul
# syllables).  Rather than hand-maintaining 256 literals, derive each entry
# arithmetically: a syllable decomposes into lead consonant, vowel and
# optional tail jamo, and its romanization is the concatenation of the
# three parts (Unicode Standard, Hangul syllable decomposition).

_LEADS = ('g', 'gg', 'n', 'd', 'dd', 'r', 'm', 'b', 'bb', 's', 'ss', '',
          'j', 'jj', 'c', 'k', 't', 'p', 'h')
_VOWELS = ('a', 'ae', 'ya', 'yae', 'eo', 'e', 'yeo', 'ye', 'o', 'wa', 'wae',
           'oe', 'yo', 'u', 'weo', 'we', 'wi', 'yu', 'eu', 'yi', 'i')
_TAILS = ('', 'g', 'gg', 'gs', 'n', 'nj', 'nh', 'd', 'l', 'lg', 'lm', 'lb',
          'ls', 'lt', 'lp', 'lh', 'm', 'b', 'bs', 's', 'ss', 'ng', 'j', 'c',
          'k', 't', 'p', 'h')


def _romanize(codepoint):
    """Romanize one precomposed Hangul syllable (U+AC00 is syllable 0)."""
    index = codepoint - 0xAC00
    lead, rest = divmod(index, len(_VOWELS) * len(_TAILS))
    vowel, tail = divmod(rest, len(_TAILS))
    return _LEADS[lead] + _VOWELS[vowel] + _TAILS[tail]


data = tuple(_romanize(cp) for cp in range(0xD000, 0xD100))
| gpl-3.0 |
Notxor/Neuronal | neuronal/neuromotor.py | 1 | 3081 | # -*- coding: utf-8 -*-
# Neuronal - Framework for Neural Networks and Artificial Intelligence
#
# Copyright (C) 2012 dddddd <dddddd@pyphiverses.org>
# Copyright (C) 2012 Notxor <gnotxor@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from glioblasto import Glioblasto
class _Motor(Glioblasto):
    """A single motor-output element owned by a NeuroMotor bank."""

    def __init__(self, neuromotor):
        # Initialise the underlying Glioblasto cell first.
        Glioblasto.__init__(self)
        # Afferent (incoming) synapse set starts out empty.
        self.vias_aferentes = set()
        # Back-reference to the owning NeuroMotor bank.
        self.neuromotor = neuromotor
class NeuroMotor(object):
    """Bank of motor outputs that receives activation from a network.

    (The previous docstring described a *sensor* bank providing input
    data — a copy/paste error; this class is the output side.)
    """
    def __init__(self, cantidad_de_motores, red=None):
        """Create ``cantidad_de_motores`` motor cells, optionally wiring
        them to ``red``.

        The motor neurons live in an immutable tuple, so the set of
        motors never changes during the lifetime of this neuromotor.
        """
        self.motoras = tuple(
            _Motor(self) for i in xrange(cantidad_de_motores)
        )
        self._red = None
        if red is not None:
            self._conectar_a_red_aferente(red)
    def _conectar_a_red_aferente(self, red):
        """Create one output neuron in ``red`` per motor cell and connect
        them (output -> motor synapses).

        Those neurons should ideally be the last ones in the nucleus's
        neuron list, so they fire at the end of the cycle and reduce the
        number of passes the network needs.  Therefore this should be
        called as the last step of building the network.
        """
        n_conexiones = len(self.motoras)
        # Create neurons in the network that will act as emitters.
        nuevas_salidas = red.crear_neuronas_de_salida(n_conexiones)
        # Connect the new outputs (via synapses) to this bank's motors.
        for i in xrange(n_conexiones):
            nuevas_salidas[i].crear_sinapsis_saliente(self.motoras[i])
        # Keep a reference to the network.
        self._red = red
    def _conectar_motorizacion(self, funciones):
        """Validate a list of callables to be bound to the motor neurons.

        Raises ValueError if the number of callables does not match the
        number of motor neurons.

        NOTE(review): no actual binding is performed after validation —
        this looks unfinished upstream; confirm intended behaviour.
        """
        if len(funciones) != len(self.motoras):
            # BUG FIX: the original raised a plain string, which is a
            # TypeError on Python >= 2.6; raise a real exception instead.
            raise ValueError("No coincide el número de neuronas con las acciones.")
| agpl-3.0 |
thiagoveras/implicit-mf | mf.py | 2 | 3154 | import numpy as np
import scipy.sparse as sparse
from scipy.sparse.linalg import spsolve
import time
def load_matrix(filename, num_users, num_items):
    """Load a TSV file of ``user\titem\tcount`` rows into a scaled CSR matrix.

    Rows whose user or item index falls outside the given bounds are
    skipped.  The dense count matrix is multiplied by
    ``alpha = (#zero cells) / (sum of counts)`` before being converted to
    ``scipy.sparse.csr_matrix``.

    NOTE(review): raises ZeroDivisionError if the file contributes no
    non-zero counts (total stays 0.0) — confirm inputs are never empty.
    """
    t0 = time.time()
    counts = np.zeros((num_users, num_items))
    total = 0.0
    # Start by assuming every cell is zero; decrement as cells are filled.
    num_zeros = num_users * num_items
    for i, line in enumerate(open(filename, 'r')):
        user, item, count = line.strip().split('\t')
        user = int(user)
        item = int(item)
        count = float(count)
        if user >= num_users:
            continue
        if item >= num_items:
            continue
        if count != 0:
            counts[user, item] = count
            total += count
            num_zeros -= 1
        # Progress report every 100k input lines (Python 2 print statement).
        if i % 100000 == 0:
            print 'loaded %i counts...' % i
    # Scale observed counts by the zero/非zero balance of the matrix.
    alpha = num_zeros / total
    print 'alpha %.2f' % alpha
    counts *= alpha
    counts = sparse.csr_matrix(counts)
    t1 = time.time()
    print 'Finished loading matrix in %f seconds' % (t1 - t0)
    return counts
class ImplicitMF():
    """Matrix factorization for implicit-feedback count data.

    Alternates between solving the user vectors with the item vectors
    held fixed and vice versa, with L2 regularization ``reg_param``
    (alternating least squares).  Python 2 code (print statements,
    ``xrange``).
    """
    def __init__(self, counts, num_factors=40, num_iterations=30,
                 reg_param=0.8):
        # counts: sparse (num_users x num_items) confidence matrix.
        self.counts = counts
        self.num_users = counts.shape[0]
        self.num_items = counts.shape[1]
        self.num_factors = num_factors
        self.num_iterations = num_iterations
        self.reg_param = reg_param
    def train_model(self):
        # Random normal initialisation of both factor matrices.
        self.user_vectors = np.random.normal(size=(self.num_users,
                                                   self.num_factors))
        self.item_vectors = np.random.normal(size=(self.num_items,
                                                   self.num_factors))
        for i in xrange(self.num_iterations):
            t0 = time.time()
            print 'Solving for user vectors...'
            self.user_vectors = self.iteration(True, sparse.csr_matrix(self.item_vectors))
            print 'Solving for item vectors...'
            self.item_vectors = self.iteration(False, sparse.csr_matrix(self.user_vectors))
            t1 = time.time()
            print 'iteration %i finished in %f seconds' % (i + 1, t1 - t0)
    def iteration(self, user, fixed_vecs):
        """Solve one side of the ALS update with the other side fixed.

        user=True solves user vectors against fixed item vectors;
        user=False solves item vectors against fixed user vectors.
        """
        num_solve = self.num_users if user else self.num_items
        num_fixed = fixed_vecs.shape[0]
        # Y^T Y is shared by every row solve; compute it once.
        YTY = fixed_vecs.T.dot(fixed_vecs)
        eye = sparse.eye(num_fixed)
        lambda_eye = self.reg_param * sparse.eye(self.num_factors)
        solve_vecs = np.zeros((num_solve, self.num_factors))
        t = time.time()
        for i in xrange(num_solve):
            if user:
                counts_i = self.counts[i].toarray()
            else:
                counts_i = self.counts[:, i].T.toarray()
            # Diagonal confidence matrix Cu for this row.
            CuI = sparse.diags(counts_i, [0])
            # Binary preference vector pu: 1 wherever a count is present.
            pu = counts_i.copy()
            pu[np.where(pu != 0)] = 1.0
            YTCuIY = fixed_vecs.T.dot(CuI).dot(fixed_vecs)
            YTCupu = fixed_vecs.T.dot(CuI + eye).dot(sparse.csr_matrix(pu).T)
            # Solve (Y^T Y + Y^T Cu Y + lambda I) x = Y^T Cu pu.
            xu = spsolve(YTY + YTCuIY + lambda_eye, YTCupu)
            solve_vecs[i] = xu
            if i % 1000 == 0:
                print 'Solved %i vecs in %d seconds' % (i, time.time() - t)
                t = time.time()
        return solve_vecs
| bsd-2-clause |
Ali-aqrabawi/ezclinic | lib/django/conf/locale/cy/formats.py | 504 | 1822 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
# Date/time display formats for the Welsh (cy) locale.
DATE_FORMAT = 'j F Y' # '25 Hydref 2006'
TIME_FORMAT = 'P' # '2:30 y.b.'
DATETIME_FORMAT = 'j F Y, P' # '25 Hydref 2006, 2:30 y.b.'
YEAR_MONTH_FORMAT = 'F Y' # 'Hydref 2006'
MONTH_DAY_FORMAT = 'j F' # '25 Hydref'
SHORT_DATE_FORMAT = 'd/m/Y' # '25/10/2006'
SHORT_DATETIME_FORMAT = 'd/m/Y P' # '25/10/2006 2:30 y.b.'
FIRST_DAY_OF_WEEK = 1 # 'Dydd Llun'
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = [
    '%d/%m/%Y', '%d/%m/%y', # '25/10/2006', '25/10/06'
]
DATETIME_INPUT_FORMATS = [
    '%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
    '%Y-%m-%d %H:%M:%S.%f', # '2006-10-25 14:30:59.000200'
    '%Y-%m-%d %H:%M', # '2006-10-25 14:30'
    '%Y-%m-%d', # '2006-10-25'
    '%d/%m/%Y %H:%M:%S', # '25/10/2006 14:30:59'
    '%d/%m/%Y %H:%M:%S.%f', # '25/10/2006 14:30:59.000200'
    '%d/%m/%Y %H:%M', # '25/10/2006 14:30'
    '%d/%m/%Y', # '25/10/2006'
    '%d/%m/%y %H:%M:%S', # '25/10/06 14:30:59'
    '%d/%m/%y %H:%M:%S.%f', # '25/10/06 14:30:59.000200'
    '%d/%m/%y %H:%M', # '25/10/06 14:30'
    '%d/%m/%y', # '25/10/06'
]
# Numeric formatting: '.' decimal point, ',' thousands separator, groups of 3.
DECIMAL_SEPARATOR = '.'
THOUSAND_SEPARATOR = ','
NUMBER_GROUPING = 3
| mit |
Medigate/cutiuta-server | cutiuta-server/env/lib/python3.4/site-packages/pip/compat/dictconfig.py | 921 | 23096 | # This is a copy of the Python logging.config.dictconfig module,
# reproduced with permission. It is provided here for backwards
# compatibility for Python versions prior to 2.7.
#
# Copyright 2009-2010 by Vinay Sajip. All Rights Reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
# provided that the above copyright notice appear in all copies and that
# both that copyright notice and this permission notice appear in
# supporting documentation, and that the name of Vinay Sajip
# not be used in advertising or publicity pertaining to distribution
# of the software without specific, written prior permission.
# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from __future__ import absolute_import
import logging.handlers
import re
import sys
import types
from pip._vendor import six
# flake8: noqa
# ASCII identifier pattern (case-insensitive letters/digits/underscore).
IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I)


def valid_ident(s):
    """Return True if *s* is a valid Python identifier; raise ValueError otherwise."""
    if IDENTIFIER.match(s) is None:
        raise ValueError('Not a valid Python identifier: %r' % s)
    return True
#
# This function is defined in logging only in recent versions of Python
#
try:
    # Python 2.7+ ships _checkLevel in the logging module itself.
    from logging import _checkLevel
except ImportError:
    def _checkLevel(level):
        # Fallback for older Pythons: accept either a numeric level or a
        # level *name* registered with the logging module.
        if isinstance(level, int):
            rv = level
        elif str(level) == level:
            # NOTE(review): logging._levelNames is a Python 2 internal
            # (renamed in Python 3) -- confirm this backport only ever
            # runs on Python 2 interpreters.
            if level not in logging._levelNames:
                raise ValueError('Unknown level: %r' % level)
            rv = logging._levelNames[level]
        else:
            raise TypeError('Level not an integer or a '
                            'valid string: %r' % level)
        return rv
# The ConvertingXXX classes are wrappers around standard Python containers,
# and they serve to convert any suitable values in the container. The
# conversion converts base dicts, lists and tuples to their wrapped
# equivalents, whereas strings which match a conversion format are converted
# appropriately.
#
# Each wrapper should have a configurator attribute holding the actual
# configurator to use for conversion.
class ConvertingDict(dict):
    """A dict wrapper that lazily converts values on access."""

    def __getitem__(self, key):
        raw = dict.__getitem__(self, key)
        converted = self.configurator.convert(raw)
        if converted is not raw:
            # Cache the converted value so conversion runs only once.
            self[key] = converted
            if type(converted) in (ConvertingDict, ConvertingList,
                                   ConvertingTuple):
                converted.parent = self
                converted.key = key
        return converted

    def get(self, key, default=None):
        raw = dict.get(self, key, default)
        converted = self.configurator.convert(raw)
        if converted is not raw:
            # Same caching behaviour as __getitem__.
            self[key] = converted
            if type(converted) in (ConvertingDict, ConvertingList,
                                   ConvertingTuple):
                converted.parent = self
                converted.key = key
        return converted

    def pop(self, key, default=None):
        raw = dict.pop(self, key, default)
        converted = self.configurator.convert(raw)
        if converted is not raw:
            # The value is no longer stored, so only annotate it.
            if type(converted) in (ConvertingDict, ConvertingList,
                                   ConvertingTuple):
                converted.parent = self
                converted.key = key
        return converted


class ConvertingList(list):
    """A list wrapper that lazily converts items on access."""

    def __getitem__(self, key):
        raw = list.__getitem__(self, key)
        converted = self.configurator.convert(raw)
        if converted is not raw:
            # Cache the converted item back into the list.
            self[key] = converted
            if type(converted) in (ConvertingDict, ConvertingList,
                                   ConvertingTuple):
                converted.parent = self
                converted.key = key
        return converted

    def pop(self, idx=-1):
        raw = list.pop(self, idx)
        converted = self.configurator.convert(raw)
        if converted is not raw:
            if type(converted) in (ConvertingDict, ConvertingList,
                                   ConvertingTuple):
                converted.parent = self
        return converted


class ConvertingTuple(tuple):
    """A tuple wrapper that converts items on access.

    Tuples are immutable, so converted values are never cached back.
    """

    def __getitem__(self, key):
        raw = tuple.__getitem__(self, key)
        converted = self.configurator.convert(raw)
        if converted is not raw:
            if type(converted) in (ConvertingDict, ConvertingList,
                                   ConvertingTuple):
                converted.parent = self
                converted.key = key
        return converted
class BaseConfigurator(object):
    """
    The configurator base class which defines some useful defaults.
    """
    # 'prefix://suffix' detection, e.g. 'ext://sys.stderr', 'cfg://handlers'.
    CONVERT_PATTERN = re.compile(r'^(?P<prefix>[a-z]+)://(?P<suffix>.*)$')
    # Tokenizers for cfg:// path expressions: word, '.word', '[index]'.
    WORD_PATTERN = re.compile(r'^\s*(\w+)\s*')
    DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*')
    INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*')
    DIGIT_PATTERN = re.compile(r'^\d+$')
    # Maps URL-style prefixes to converter method names on this class.
    value_converters = {
        'ext' : 'ext_convert',
        'cfg' : 'cfg_convert',
    }
    # We might want to use a different one, e.g. importlib
    importer = __import__
    def __init__(self, config):
        # Wrap the raw dict so nested values convert lazily on access.
        self.config = ConvertingDict(config)
        self.config.configurator = self
    def resolve(self, s):
        """
        Resolve strings to objects using standard import and attribute
        syntax.
        """
        name = s.split('.')
        used = name.pop(0)
        try:
            found = self.importer(used)
            for frag in name:
                used += '.' + frag
                try:
                    found = getattr(found, frag)
                except AttributeError:
                    # Attribute missing: try importing the longer dotted
                    # path (it may be a submodule) and retry.
                    self.importer(used)
                    found = getattr(found, frag)
            return found
        except ImportError:
            # Re-raise as ValueError, preserving the original cause
            # and traceback.
            e, tb = sys.exc_info()[1:]
            v = ValueError('Cannot resolve %r: %s' % (s, e))
            v.__cause__, v.__traceback__ = e, tb
            raise v
    def ext_convert(self, value):
        """Default converter for the ext:// protocol."""
        return self.resolve(value)
    def cfg_convert(self, value):
        """Default converter for the cfg:// protocol.

        Walks a path like 'handlers.console[0]' through self.config.
        """
        rest = value
        m = self.WORD_PATTERN.match(rest)
        if m is None:
            raise ValueError("Unable to convert %r" % value)
        else:
            rest = rest[m.end():]
            d = self.config[m.groups()[0]]
            # print d, rest
            while rest:
                m = self.DOT_PATTERN.match(rest)
                if m:
                    d = d[m.groups()[0]]
                else:
                    m = self.INDEX_PATTERN.match(rest)
                    if m:
                        idx = m.groups()[0]
                        if not self.DIGIT_PATTERN.match(idx):
                            d = d[idx]
                        else:
                            try:
                                n = int(idx) # try as number first (most likely)
                                d = d[n]
                            except TypeError:
                                # Container is keyed by strings after all.
                                d = d[idx]
                if m:
                    rest = rest[m.end():]
                else:
                    raise ValueError('Unable to convert '
                                     '%r at %r' % (value, rest))
        # rest should be empty
        return d
    def convert(self, value):
        """
        Convert values to an appropriate type. dicts, lists and tuples are
        replaced by their converting alternatives. Strings are checked to
        see if they have a conversion format and are converted if they do.
        """
        if not isinstance(value, ConvertingDict) and isinstance(value, dict):
            value = ConvertingDict(value)
            value.configurator = self
        elif not isinstance(value, ConvertingList) and isinstance(value, list):
            value = ConvertingList(value)
            value.configurator = self
        elif not isinstance(value, ConvertingTuple) and\
                 isinstance(value, tuple):
            value = ConvertingTuple(value)
            value.configurator = self
        elif isinstance(value, six.string_types): # str for py3k
            m = self.CONVERT_PATTERN.match(value)
            if m:
                d = m.groupdict()
                prefix = d['prefix']
                converter = self.value_converters.get(prefix, None)
                if converter:
                    suffix = d['suffix']
                    converter = getattr(self, converter)
                    value = converter(suffix)
        return value
    def configure_custom(self, config):
        """Configure an object with a user-supplied factory."""
        c = config.pop('()')
        # NOTE(review): types.ClassType exists only on Python 2 (old-style
        # classes); on Python 3 the hasattr() guard makes this a no-op.
        if not hasattr(c, '__call__') and hasattr(types, 'ClassType') and type(c) != types.ClassType:
            c = self.resolve(c)
        props = config.pop('.', None)
        # Check for valid identifiers
        kwargs = dict((k, config[k]) for k in config if valid_ident(k))
        result = c(**kwargs)
        if props:
            # '.' holds extra attributes to set on the constructed object.
            for name, value in props.items():
                setattr(result, name, value)
        return result
    def as_tuple(self, value):
        """Utility function which converts lists to tuples."""
        if isinstance(value, list):
            value = tuple(value)
        return value
class DictConfigurator(BaseConfigurator):
    """
    Configure logging using a dictionary-like object to describe the
    configuration.

    NOTE(review): the except clauses below catch StandardError, which is
    Python 2-only (NameError on Python 3) -- confirm this backport is
    intended strictly for Python 2 interpreters.
    """
    def configure(self):
        """Do the configuration."""
        config = self.config
        if 'version' not in config:
            raise ValueError("dictionary doesn't specify a version")
        if config['version'] != 1:
            raise ValueError("Unsupported version: %s" % config['version'])
        incremental = config.pop('incremental', False)
        EMPTY_DICT = {}
        # Serialize configuration against other users of the logging module.
        logging._acquireLock()
        try:
            if incremental:
                handlers = config.get('handlers', EMPTY_DICT)
                # incremental handler config only if handler name
                # ties in to logging._handlers (Python 2.7)
                if sys.version_info[:2] == (2, 7):
                    for name in handlers:
                        if name not in logging._handlers:
                            raise ValueError('No handler found with '
                                             'name %r' % name)
                        else:
                            try:
                                handler = logging._handlers[name]
                                handler_config = handlers[name]
                                level = handler_config.get('level', None)
                                if level:
                                    handler.setLevel(_checkLevel(level))
                            except StandardError as e:
                                raise ValueError('Unable to configure handler '
                                                 '%r: %s' % (name, e))
                loggers = config.get('loggers', EMPTY_DICT)
                for name in loggers:
                    try:
                        self.configure_logger(name, loggers[name], True)
                    except StandardError as e:
                        raise ValueError('Unable to configure logger '
                                         '%r: %s' % (name, e))
                root = config.get('root', None)
                if root:
                    try:
                        self.configure_root(root, True)
                    except StandardError as e:
                        raise ValueError('Unable to configure root '
                                         'logger: %s' % e)
            else:
                disable_existing = config.pop('disable_existing_loggers', True)
                # Full (non-incremental) configure: start from a clean slate.
                logging._handlers.clear()
                del logging._handlerList[:]
                # Do formatters first - they don't refer to anything else
                formatters = config.get('formatters', EMPTY_DICT)
                for name in formatters:
                    try:
                        formatters[name] = self.configure_formatter(
                            formatters[name])
                    except StandardError as e:
                        raise ValueError('Unable to configure '
                                         'formatter %r: %s' % (name, e))
                # Next, do filters - they don't refer to anything else, either
                filters = config.get('filters', EMPTY_DICT)
                for name in filters:
                    try:
                        filters[name] = self.configure_filter(filters[name])
                    except StandardError as e:
                        raise ValueError('Unable to configure '
                                         'filter %r: %s' % (name, e))
                # Next, do handlers - they refer to formatters and filters
                # As handlers can refer to other handlers, sort the keys
                # to allow a deterministic order of configuration
                handlers = config.get('handlers', EMPTY_DICT)
                for name in sorted(handlers):
                    try:
                        handler = self.configure_handler(handlers[name])
                        handler.name = name
                        handlers[name] = handler
                    except StandardError as e:
                        raise ValueError('Unable to configure handler '
                                         '%r: %s' % (name, e))
                # Next, do loggers - they refer to handlers and filters
                # we don't want to lose the existing loggers,
                # since other threads may have pointers to them.
                # existing is set to contain all existing loggers,
                # and as we go through the new configuration we
                # remove any which are configured. At the end,
                # what's left in existing is the set of loggers
                # which were in the previous configuration but
                # which are not in the new configuration.
                root = logging.root
                existing = list(root.manager.loggerDict)
                # The list needs to be sorted so that we can
                # avoid disabling child loggers of explicitly
                # named loggers. With a sorted list it is easier
                # to find the child loggers.
                existing.sort()
                # We'll keep the list of existing loggers
                # which are children of named loggers here...
                child_loggers = []
                # now set up the new ones...
                loggers = config.get('loggers', EMPTY_DICT)
                for name in loggers:
                    if name in existing:
                        i = existing.index(name)
                        prefixed = name + "."
                        pflen = len(prefixed)
                        num_existing = len(existing)
                        i = i + 1 # look at the entry after name
                        while (i < num_existing) and\
                              (existing[i][:pflen] == prefixed):
                            child_loggers.append(existing[i])
                            i = i + 1
                        existing.remove(name)
                    try:
                        self.configure_logger(name, loggers[name])
                    except StandardError as e:
                        raise ValueError('Unable to configure logger '
                                         '%r: %s' % (name, e))
                # Disable any old loggers. There's no point deleting
                # them as other threads may continue to hold references
                # and by disabling them, you stop them doing any logging.
                # However, don't disable children of named loggers, as that's
                # probably not what was intended by the user.
                for log in existing:
                    logger = root.manager.loggerDict[log]
                    if log in child_loggers:
                        logger.level = logging.NOTSET
                        logger.handlers = []
                        logger.propagate = True
                    elif disable_existing:
                        logger.disabled = True
                # And finally, do the root logger
                root = config.get('root', None)
                if root:
                    try:
                        self.configure_root(root)
                    except StandardError as e:
                        raise ValueError('Unable to configure root '
                                         'logger: %s' % e)
        finally:
            logging._releaseLock()
    def configure_formatter(self, config):
        """Configure a formatter from a dictionary."""
        if '()' in config:
            factory = config['()'] # for use in exception handler
            try:
                result = self.configure_custom(config)
            except TypeError as te:
                if "'format'" not in str(te):
                    raise
                # Name of parameter changed from fmt to format.
                # Retry with old name.
                # This is so that code can be used with older Python versions
                #(e.g. by Django)
                config['fmt'] = config.pop('format')
                config['()'] = factory
                result = self.configure_custom(config)
        else:
            fmt = config.get('format', None)
            dfmt = config.get('datefmt', None)
            result = logging.Formatter(fmt, dfmt)
        return result
    def configure_filter(self, config):
        """Configure a filter from a dictionary."""
        if '()' in config:
            result = self.configure_custom(config)
        else:
            name = config.get('name', '')
            result = logging.Filter(name)
        return result
    def add_filters(self, filterer, filters):
        """Add filters to a filterer from a list of names."""
        for f in filters:
            try:
                filterer.addFilter(self.config['filters'][f])
            except StandardError as e:
                raise ValueError('Unable to add filter %r: %s' % (f, e))
    def configure_handler(self, config):
        """Configure a handler from a dictionary."""
        formatter = config.pop('formatter', None)
        if formatter:
            try:
                # Look up the already-configured Formatter by name.
                formatter = self.config['formatters'][formatter]
            except StandardError as e:
                raise ValueError('Unable to set formatter '
                                 '%r: %s' % (formatter, e))
        level = config.pop('level', None)
        filters = config.pop('filters', None)
        if '()' in config:
            c = config.pop('()')
            if not hasattr(c, '__call__') and hasattr(types, 'ClassType') and type(c) != types.ClassType:
                c = self.resolve(c)
            factory = c
        else:
            klass = self.resolve(config.pop('class'))
            # Special case for handler which refers to another handler
            if issubclass(klass, logging.handlers.MemoryHandler) and\
                'target' in config:
                try:
                    config['target'] = self.config['handlers'][config['target']]
                except StandardError as e:
                    raise ValueError('Unable to set target handler '
                                     '%r: %s' % (config['target'], e))
            elif issubclass(klass, logging.handlers.SMTPHandler) and\
                'mailhost' in config:
                config['mailhost'] = self.as_tuple(config['mailhost'])
            elif issubclass(klass, logging.handlers.SysLogHandler) and\
                'address' in config:
                config['address'] = self.as_tuple(config['address'])
            factory = klass
        kwargs = dict((k, config[k]) for k in config if valid_ident(k))
        try:
            result = factory(**kwargs)
        except TypeError as te:
            if "'stream'" not in str(te):
                raise
            # The argument name changed from strm to stream
            # Retry with old name.
            # This is so that code can be used with older Python versions
            #(e.g. by Django)
            kwargs['strm'] = kwargs.pop('stream')
            result = factory(**kwargs)
        if formatter:
            result.setFormatter(formatter)
        if level is not None:
            result.setLevel(_checkLevel(level))
        if filters:
            self.add_filters(result, filters)
        return result
    def add_handlers(self, logger, handlers):
        """Add handlers to a logger from a list of names."""
        for h in handlers:
            try:
                logger.addHandler(self.config['handlers'][h])
            except StandardError as e:
                raise ValueError('Unable to add handler %r: %s' % (h, e))
    def common_logger_config(self, logger, config, incremental=False):
        """
        Perform configuration which is common to root and non-root loggers.
        """
        level = config.get('level', None)
        if level is not None:
            logger.setLevel(_checkLevel(level))
        if not incremental:
            # Remove any existing handlers
            for h in logger.handlers[:]:
                logger.removeHandler(h)
            handlers = config.get('handlers', None)
            if handlers:
                self.add_handlers(logger, handlers)
            filters = config.get('filters', None)
            if filters:
                self.add_filters(logger, filters)
    def configure_logger(self, name, config, incremental=False):
        """Configure a non-root logger from a dictionary."""
        logger = logging.getLogger(name)
        self.common_logger_config(logger, config, incremental)
        propagate = config.get('propagate', None)
        if propagate is not None:
            logger.propagate = propagate
    def configure_root(self, config, incremental=False):
        """Configure a root logger from a dictionary."""
        root = logging.getLogger()
        self.common_logger_config(root, config, incremental)
# Hook: replace this class reference to customize dictConfig() behaviour.
dictConfigClass = DictConfigurator


def dictConfig(config):
    """Configure logging using a dictionary."""
    configurator = dictConfigClass(config)
    configurator.configure()
| gpl-3.0 |
dlenski/stravacli | stravacli/QueryGrabber.py | 1 | 1046 | from http.server import HTTPServer, BaseHTTPRequestHandler
import socket
from urllib.parse import parse_qs
class handler(BaseHTTPRequestHandler):
    """Record the query string of a single GET and answer with a canned body."""

    def do_GET(self):
        # Everything after the first '?' is the query payload we want.
        query = self.path.split('?', 1)[1]
        self.server.received = parse_qs(query)
        self.send_response(200)
        self.end_headers()
        self.wfile.write(self.server.response.encode())
class QueryGrabber(HTTPServer):
    """One-shot HTTP server that captures the query string of a GET request.

    If no address is given, binds to the first free localhost port in
    the range 1024-65535.
    """

    def __init__(self, response='', address=None):
        import errno  # local import: only needed for the port-scan fallback
        self.received = None        # parsed query dict, filled in by handler
        self.response = response    # body returned to the client
        if address is not None:
            # BUG FIX: was HTTPServer.__init__(self, self.address, handler);
            # no self.address attribute exists -- use the parameter.
            HTTPServer.__init__(self, address, handler)
        else:
            for port in range(1024, 65536):
                try:
                    HTTPServer.__init__(self, ('localhost', port), handler)
                except socket.error as e:
                    # BUG FIX: the old code compared against the literal 98,
                    # which is EADDRINUSE only on Linux; use errno constant.
                    if e.errno != errno.EADDRINUSE:
                        raise
                else:
                    break
            else:
                # BUG FIX: the old code did `raise e` here, which fails on
                # Python 3 because the except target is cleared when the
                # handler block exits.
                raise socket.error('no free port found in range 1024-65535')

    def root_uri(self):
        """Return the base URI clients should request, e.g. http://localhost:1234."""
        return 'http://{}:{:d}'.format(*self.server_address)
jarshwah/django | django/contrib/gis/serializers/geojson.py | 31 | 2876 | from __future__ import unicode_literals
from django.contrib.gis.gdal import HAS_GDAL
from django.core.serializers.base import SerializerDoesNotExist
from django.core.serializers.json import Serializer as JSONSerializer
if HAS_GDAL:
from django.contrib.gis.gdal import CoordTransform, SpatialReference
class Serializer(JSONSerializer):
    """
    Convert a queryset to GeoJSON, http://geojson.org/
    """
    def _init_options(self):
        """Pop GeoJSON-specific options before the JSON serializer sees them."""
        super(Serializer, self)._init_options()
        self.geometry_field = self.json_kwargs.pop('geometry_field', None)
        self.srid = self.json_kwargs.pop('srid', 4326)
        # Ensure the geometry field is serialized even when the caller
        # restricted the field list.
        if (self.selected_fields is not None and self.geometry_field is not None and
                self.geometry_field not in self.selected_fields):
            self.selected_fields = list(self.selected_fields) + [self.geometry_field]

    def start_serialization(self):
        self._init_options()
        self._cts = {}  # cache of CoordTransform's, keyed by source srid
        self.stream.write(
            '{"type": "FeatureCollection", "crs": {"type": "name", "properties": {"name": "EPSG:%d"}},'
            ' "features": [' % self.srid)

    def end_serialization(self):
        self.stream.write(']}')

    def start_object(self, obj):
        super(Serializer, self).start_object(obj)
        self._geometry = None
        if self.geometry_field is None:
            # Find the first declared geometry field
            for field in obj._meta.fields:
                if hasattr(field, 'geom_type'):
                    self.geometry_field = field.name
                    break

    def get_dump_object(self, obj):
        """Return one GeoJSON Feature dict for *obj*."""
        import json  # local import keeps the module's import block untouched
        data = {
            "type": "Feature",
            "properties": self._current,
        }
        if ((self.selected_fields is None or 'pk' in self.selected_fields) and
                'pk' not in data["properties"]):
            data["properties"]["pk"] = obj._meta.pk.value_to_string(obj)
        if self._geometry:
            if self._geometry.srid != self.srid:
                # If needed, transform the geometry into the global srid.
                if self._geometry.srid not in self._cts:
                    srs = SpatialReference(self.srid)
                    self._cts[self._geometry.srid] = CoordTransform(self._geometry.srs, srs)
                self._geometry.transform(self._cts[self._geometry.srid])
            # BUG/SECURITY FIX: geojson is a JSON string; parse it with
            # json.loads instead of eval(), which executed arbitrary Python
            # syntax and also cannot parse JSON literals like true/null.
            data["geometry"] = json.loads(self._geometry.geojson)
        else:
            data["geometry"] = None
        return data

    def handle_field(self, obj, field):
        if field.name == self.geometry_field:
            # Capture the geometry separately; it becomes the "geometry"
            # member of the Feature rather than a property.
            self._geometry = field.value_from_object(obj)
        else:
            super(Serializer, self).handle_field(obj, field)
class Deserializer(object):
    """Stub: GeoJSON can be serialized but never deserialized."""

    def __init__(self, *args, **kwargs):
        raise SerializerDoesNotExist("geojson is a serialization-only serializer")
| bsd-3-clause |
hanlind/nova | nova/tests/functional/db/test_aggregate.py | 1 | 28761 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from copy import deepcopy
import mock
from oslo_db import exception as db_exc
from oslo_utils import timeutils
from nova import context
from nova import db
from nova.db.sqlalchemy import api as db_api
from nova.db.sqlalchemy import api_models
from nova import exception
import nova.objects.aggregate as aggregate_obj
from nova import test
from nova.tests.unit import matchers
from nova.tests.unit.objects.test_objects import compare_obj as base_compare
from nova.tests import uuidsentinel
# Field-name substitutions used when comparing objects against fake dicts.
SUBS = {'metadata': 'metadetails'}
# Shared timestamp for all fake aggregates, with microseconds stripped.
NOW = timeutils.utcnow().replace(microsecond=0)
def _get_fake_aggregate(db_id, in_api=True, result=True):
    """Build a fake aggregate dict shaped like a DB-layer result row."""
    suffix = str(db_id)
    agg = {
        'created_at': NOW,
        'updated_at': None,
        'deleted_at': None,
        'id': db_id,
        'uuid': getattr(uuidsentinel, suffix),
        'name': 'name_' + suffix,
    }
    if not in_api:
        # Cell-DB rows carry a soft-delete marker the API DB lacks.
        agg['deleted'] = False
    if result:
        # Result-style rows also include joined hosts and metadata.
        agg['hosts'] = _get_fake_hosts(db_id)
        agg['metadetails'] = _get_fake_metadata(db_id)
    return agg
def _get_fake_hosts(db_id):
return ['constant_host', 'unique_host_' + str(db_id)]
def _get_fake_metadata(db_id):
return {'constant_key': 'constant_value',
'unique_key': 'unique_value_' + str(db_id)}
@db_api.api_context_manager.writer
def _create_aggregate(context, values=_get_fake_aggregate(1, result=False),
                      metadata=_get_fake_metadata(1)):
    """Insert an aggregate row (and optional metadata rows) into the API DB."""
    aggregate = api_models.Aggregate()
    aggregate.update(values)
    aggregate.save(context.session)
    # Persist one AggregateMetadata row per key/value pair, if any.
    if metadata:
        for key, value in metadata.items():
            meta_row = api_models.AggregateMetadata()
            meta_row.update({'key': key,
                             'value': value,
                             'aggregate_id': aggregate['id']})
            meta_row.save(context.session)
    return aggregate
@db_api.api_context_manager.writer
def _create_aggregate_with_hosts(context,
                                 values=_get_fake_aggregate(1, result=False),
                                 metadata=_get_fake_metadata(1),
                                 hosts=_get_fake_hosts(1)):
    """Insert an aggregate plus one AggregateHost row per host name."""
    aggregate = _create_aggregate(context, values, metadata)
    for host in hosts:
        host_row = api_models.AggregateHost()
        host_row.update({'host': host,
                         'aggregate_id': aggregate.id})
        host_row.save(context.session)
    return aggregate
@db_api.api_context_manager.reader
def _aggregate_host_get_all(context, aggregate_id):
    """Return every AggregateHost row belonging to the given aggregate."""
    query = context.session.query(api_models.AggregateHost)
    return query.filter_by(aggregate_id=aggregate_id).all()
@db_api.api_context_manager.reader
def _aggregate_metadata_get_all(context, aggregate_id):
    """Return the aggregate's metadata as a plain {key: value} dict."""
    rows = context.session.query(api_models.AggregateMetadata).\
        filter_by(aggregate_id=aggregate_id).all()
    return {row['key']: row['value'] for row in rows}
class AggregateObjectDbTestCase(test.TestCase):
def setUp(self):
super(AggregateObjectDbTestCase, self).setUp()
self.context = context.RequestContext('fake-user', 'fake-project')
def test_in_api(self):
ca1 = _create_aggregate(self.context, values={'name': 'fake_agg_1',
'id': 1,
'uuid': 'nonce'})
ca2 = db.aggregate_create(self.context, {'name': 'fake_agg_2',
'id': 2,
'uuid': 'nonce'})
api_db_agg = aggregate_obj.Aggregate.get_by_id(self.context, ca1['id'])
cell_db_agg = aggregate_obj.Aggregate.get_by_id(
self.context, ca2['id'])
self.assertTrue(api_db_agg.in_api)
self.assertFalse(cell_db_agg.in_api)
def test_aggregate_get_from_db(self):
result = _create_aggregate_with_hosts(self.context)
expected = aggregate_obj._aggregate_get_from_db(self.context,
result['id'])
self.assertEqual(_get_fake_hosts(1), expected.hosts)
self.assertEqual(_get_fake_metadata(1), expected['metadetails'])
def test_aggregate_get_from_db_by_uuid(self):
result = _create_aggregate_with_hosts(self.context)
expected = aggregate_obj._aggregate_get_from_db_by_uuid(
self.context, result['uuid'])
self.assertEqual(result.uuid, expected.uuid)
self.assertEqual(_get_fake_hosts(1), expected.hosts)
self.assertEqual(_get_fake_metadata(1), expected['metadetails'])
def test_aggregate_get_from_db_raise_not_found(self):
aggregate_id = 5
self.assertRaises(exception.AggregateNotFound,
aggregate_obj._aggregate_get_from_db,
self.context, aggregate_id)
    # --- Retrieval and creation tests for the API-DB aggregate helpers. ---

    def test_aggregate_get_all_from_db(self):
        """All created aggregates are returned by _get_all_from_db."""
        for c in range(3):
            _create_aggregate(self.context,
                              values={'name': 'fake_aggregate_%d' % c})
        results = aggregate_obj._get_all_from_db(self.context)
        self.assertEqual(len(results), 3)

    def test_aggregate_get_by_host_from_db(self):
        """Host lookup returns only aggregates containing that host."""
        _create_aggregate_with_hosts(self.context,
                                     values={'name': 'fake_aggregate_1'},
                                     hosts=['host.1.openstack.org'])
        _create_aggregate_with_hosts(self.context,
                                     values={'name': 'fake_aggregate_2'},
                                     hosts=['host.1.openstack.org'])
        _create_aggregate(self.context,
                          values={'name': 'no_host_aggregate'})
        rh1 = aggregate_obj._get_all_from_db(self.context)
        rh2 = aggregate_obj._get_by_host_from_db(self.context,
                                                 'host.1.openstack.org')
        self.assertEqual(3, len(rh1))
        self.assertEqual(2, len(rh2))

    def test_aggregate_get_by_host_with_key_from_db(self):
        """Host lookup filtered by metadata key matches only tagged aggregates."""
        ah1 = _create_aggregate_with_hosts(self.context,
                                           values={'name': 'fake_aggregate_1'},
                                           metadata={'goodkey': 'good'},
                                           hosts=['host.1.openstack.org'])
        _create_aggregate_with_hosts(self.context,
                                     values={'name': 'fake_aggregate_2'},
                                     hosts=['host.1.openstack.org'])
        rh1 = aggregate_obj._get_by_host_from_db(self.context,
                                                 'host.1.openstack.org',
                                                 key='goodkey')
        self.assertEqual(1, len(rh1))
        self.assertEqual(ah1['id'], rh1[0]['id'])

    def test_aggregate_get_by_metadata_key_from_db(self):
        """Metadata-key lookup matches on key regardless of value."""
        _create_aggregate(self.context,
                          values={'name': 'aggregate_1'},
                          metadata={'goodkey': 'good'})
        _create_aggregate(self.context,
                          values={'name': 'aggregate_2'},
                          metadata={'goodkey': 'bad'})
        _create_aggregate(self.context,
                          values={'name': 'aggregate_3'},
                          metadata={'badkey': 'good'})
        rl1 = aggregate_obj._get_by_metadata_key_from_db(self.context,
                                                         key='goodkey')
        self.assertEqual(2, len(rl1))

    def test_aggregate_create_in_db(self):
        """_aggregate_create_in_db persists an aggregate retrievable by id."""
        fake_create_aggregate = {
            'name': 'fake-aggregate',
        }
        agg = aggregate_obj._aggregate_create_in_db(self.context,
                                                    fake_create_aggregate)
        result = aggregate_obj._aggregate_get_from_db(self.context,
                                                      agg.id)
        self.assertEqual(result.name, fake_create_aggregate['name'])

    def test_aggregate_create_in_db_with_metadata(self):
        """Creation with metadata makes the aggregate visible via key lookup."""
        fake_create_aggregate = {
            'name': 'fake-aggregate',
        }
        agg = aggregate_obj._aggregate_create_in_db(self.context,
                                                    fake_create_aggregate,
                                                    metadata={'goodkey': 'good'})
        result = aggregate_obj._aggregate_get_from_db(self.context,
                                                      agg.id)
        md = aggregate_obj._get_by_metadata_key_from_db(self.context,
                                                        key='goodkey')
        self.assertEqual(len(md), 1)
        self.assertEqual(md[0]['id'], agg.id)
        self.assertEqual(result.name, fake_create_aggregate['name'])
    # --- Creation-conflict and deletion tests. ---

    def test_aggregate_create_raise_exist_exc(self):
        """Creating a second aggregate with a duplicate name raises."""
        fake_create_aggregate = {
            'name': 'fake-aggregate',
        }
        aggregate_obj._aggregate_create_in_db(self.context,
                                              fake_create_aggregate)
        self.assertRaises(exception.AggregateNameExists,
                          aggregate_obj._aggregate_create_in_db,
                          self.context,
                          fake_create_aggregate,
                          metadata=None)

    def test_aggregate_delete(self):
        """Deleting an aggregate makes a subsequent get raise NotFound."""
        result = _create_aggregate(self.context, metadata=None)
        aggregate_obj._aggregate_delete_from_db(self.context, result['id'])
        self.assertRaises(exception.AggregateNotFound,
                          aggregate_obj._aggregate_get_from_db,
                          self.context, result['id'])

    def test_aggregate_delete_raise_not_found(self):
        """Deleting a non-existent aggregate raises NotFound."""
        # this does not exist!
        aggregate_id = 45
        self.assertRaises(exception.AggregateNotFound,
                          aggregate_obj._aggregate_delete_from_db,
                          self.context, aggregate_id)

    def test_aggregate_delete_with_metadata(self):
        """Deletion succeeds even when the aggregate carries metadata."""
        result = _create_aggregate(self.context,
                                   metadata={'availability_zone': 'fake_avail_zone'})
        aggregate_obj._aggregate_delete_from_db(self.context, result['id'])
        self.assertRaises(exception.AggregateNotFound,
                          aggregate_obj._aggregate_get_from_db,
                          self.context, result['id'])
    # --- Update tests (values, metadata merging, error paths). ---

    def test_aggregate_update(self):
        """_aggregate_update_to_db replaces top-level aggregate values."""
        created = _create_aggregate(self.context,
                                    metadata={'availability_zone': 'fake_avail_zone'})
        result = aggregate_obj._aggregate_get_from_db(self.context,
                                                      created['id'])
        self.assertEqual('fake_avail_zone', result['availability_zone'])
        new_values = deepcopy(_get_fake_aggregate(1, result=False))
        new_values['availability_zone'] = 'different_avail_zone'
        updated = aggregate_obj._aggregate_update_to_db(self.context,
                                                        result['id'], new_values)
        self.assertEqual('different_avail_zone', updated['availability_zone'])

    def test_aggregate_update_with_metadata(self):
        """Updating with a metadata dict persists the merged metadata."""
        result = _create_aggregate(self.context, metadata=None)
        values = deepcopy(_get_fake_aggregate(1, result=False))
        values['metadata'] = deepcopy(_get_fake_metadata(1))
        values['availability_zone'] = 'different_avail_zone'
        # availability_zone is stored as a metadata key alongside the rest.
        expected_metadata = deepcopy(values['metadata'])
        expected_metadata['availability_zone'] = values['availability_zone']
        aggregate_obj._aggregate_update_to_db(self.context, result['id'],
                                              values)
        metadata = _aggregate_metadata_get_all(self.context, result['id'])
        updated = aggregate_obj._aggregate_get_from_db(self.context,
                                                       result['id'])
        self.assertThat(metadata,
                        matchers.DictMatches(expected_metadata))
        self.assertEqual('different_avail_zone', updated['availability_zone'])

    def test_aggregate_update_with_existing_metadata(self):
        """An update overwrites values for metadata keys that already exist."""
        result = _create_aggregate(self.context)
        values = deepcopy(_get_fake_aggregate(1, result=False))
        values['metadata'] = deepcopy(_get_fake_metadata(1))
        values['metadata']['fake_key1'] = 'foo'
        expected_metadata = deepcopy(values['metadata'])
        aggregate_obj._aggregate_update_to_db(self.context, result['id'],
                                              values)
        metadata = _aggregate_metadata_get_all(self.context, result['id'])
        self.assertThat(metadata, matchers.DictMatches(expected_metadata))

    def test_aggregate_update_zone_with_existing_metadata(self):
        """Changing only the availability zone leaves other metadata intact."""
        result = _create_aggregate(self.context)
        new_zone = {'availability_zone': 'fake_avail_zone_2'}
        metadata = deepcopy(_get_fake_metadata(1))
        metadata.update(new_zone)
        aggregate_obj._aggregate_update_to_db(self.context, result['id'],
                                              new_zone)
        expected = _aggregate_metadata_get_all(self.context, result['id'])
        self.assertThat(metadata, matchers.DictMatches(expected))

    def test_aggregate_update_raise_not_found(self):
        """Updating a non-existent aggregate raises NotFound."""
        # this does not exist!
        aggregate_id = 2
        new_values = deepcopy(_get_fake_aggregate(1, result=False))
        self.assertRaises(exception.AggregateNotFound,
                          aggregate_obj._aggregate_update_to_db,
                          self.context, aggregate_id, new_values)

    def test_aggregate_update_raise_name_exist(self):
        """Renaming an aggregate to an existing name raises NameExists."""
        _create_aggregate(self.context, values={'name': 'test1'},
                          metadata={'availability_zone': 'fake_avail_zone'})
        _create_aggregate(self.context, values={'name': 'test2'},
                          metadata={'availability_zone': 'fake_avail_zone'})
        aggregate_id = 1
        new_values = {'name': 'test2'}
        self.assertRaises(exception.AggregateNameExists,
                          aggregate_obj._aggregate_update_to_db,
                          self.context, aggregate_id, new_values)
    # --- Host membership tests. ---

    def test_aggregate_host_add_to_db(self):
        """_host_add_to_db attaches a host visible on the aggregate."""
        result = _create_aggregate(self.context, metadata=None)
        host = _get_fake_hosts(1)[0]
        aggregate_obj._host_add_to_db(self.context, result['id'], host)
        expected = aggregate_obj._aggregate_get_from_db(self.context,
                                                        result['id'])
        self.assertEqual([_get_fake_hosts(1)[0]], expected.hosts)

    def test_aggregate_host_re_add_to_db(self):
        """A host can be re-added after being deleted from the aggregate."""
        result = _create_aggregate_with_hosts(self.context,
                                              metadata=None)
        host = _get_fake_hosts(1)[0]
        aggregate_obj._host_delete_from_db(self.context, result['id'], host)
        aggregate_obj._host_add_to_db(self.context, result['id'], host)
        expected = _aggregate_host_get_all(self.context, result['id'])
        self.assertEqual(len(expected), 2)

    def test_aggregate_host_add_to_db_duplicate_works(self):
        """The same host may belong to two different aggregates."""
        r1 = _create_aggregate_with_hosts(self.context,
                                          metadata=None)
        r2 = _create_aggregate_with_hosts(self.context,
                                          values={'name': 'fake_aggregate2'},
                                          metadata={'availability_zone': 'fake_avail_zone2'})
        h1 = _aggregate_host_get_all(self.context, r1['id'])
        self.assertEqual(len(h1), 2)
        self.assertEqual(r1['id'], h1[0]['aggregate_id'])
        h2 = _aggregate_host_get_all(self.context, r2['id'])
        self.assertEqual(len(h2), 2)
        self.assertEqual(r2['id'], h2[0]['aggregate_id'])

    def test_aggregate_host_add_to_db_duplicate_raise_exist_exc(self):
        """Adding a host twice to one aggregate raises HostExists."""
        result = _create_aggregate_with_hosts(self.context,
                                              metadata=None)
        self.assertRaises(exception.AggregateHostExists,
                          aggregate_obj._host_add_to_db,
                          self.context, result['id'],
                          _get_fake_hosts(1)[0])

    def test_aggregate_host_add_to_db_raise_not_found(self):
        """Adding a host to a missing aggregate raises NotFound."""
        # this does not exist!
        aggregate_id = 1
        host = _get_fake_hosts(1)[0]
        self.assertRaises(exception.AggregateNotFound,
                          aggregate_obj._host_add_to_db,
                          self.context, aggregate_id, host)

    def test_aggregate_host_delete_from_db(self):
        """_host_delete_from_db removes exactly the requested host."""
        result = _create_aggregate_with_hosts(self.context,
                                              metadata=None)
        aggregate_obj._host_delete_from_db(self.context, result['id'],
                                           _get_fake_hosts(1)[0])
        expected = _aggregate_host_get_all(self.context, result['id'])
        self.assertEqual(len(expected), 1)

    def test_aggregate_host_delete_from_db_raise_not_found(self):
        """Deleting a host that is not in the aggregate raises HostNotFound."""
        result = _create_aggregate(self.context)
        self.assertRaises(exception.AggregateHostNotFound,
                          aggregate_obj._host_delete_from_db,
                          self.context, result['id'],
                          _get_fake_hosts(1)[0])
    # --- Metadata add/update/delete tests. ---

    def test_aggregate_metadata_add(self):
        """_metadata_add_to_db stores every key/value pair supplied."""
        result = _create_aggregate(self.context, metadata=None)
        metadata = deepcopy(_get_fake_metadata(1))
        aggregate_obj._metadata_add_to_db(self.context, result['id'], metadata)
        expected = _aggregate_metadata_get_all(self.context, result['id'])
        self.assertThat(metadata, matchers.DictMatches(expected))

    def test_aggregate_metadata_add_empty_metadata(self):
        """Adding an empty dict is a harmless no-op."""
        result = _create_aggregate(self.context, metadata=None)
        metadata = {}
        aggregate_obj._metadata_add_to_db(self.context, result['id'], metadata)
        expected = _aggregate_metadata_get_all(self.context, result['id'])
        self.assertThat(metadata, matchers.DictMatches(expected))

    def test_aggregate_metadata_add_and_update(self):
        """Adding overlapping metadata updates old keys and adds new ones."""
        result = _create_aggregate(self.context)
        metadata = deepcopy(_get_fake_metadata(1))
        key = list(metadata.keys())[0]
        new_metadata = {key: 'foo',
                        'fake_new_key': 'fake_new_value'}
        metadata.update(new_metadata)
        aggregate_obj._metadata_add_to_db(self.context,
                                          result['id'], new_metadata)
        expected = _aggregate_metadata_get_all(self.context, result['id'])
        self.assertThat(metadata, matchers.DictMatches(expected))

    def test_aggregate_metadata_add_retry(self):
        """A persistent duplicate-entry error surfaces after max_retries."""
        result = _create_aggregate(self.context, metadata=None)
        with mock.patch('nova.db.sqlalchemy.api_models.'
                        'AggregateMetadata.__table__.insert') as insert_mock:
            insert_mock.side_effect = db_exc.DBDuplicateEntry
            self.assertRaises(db_exc.DBDuplicateEntry,
                              aggregate_obj._metadata_add_to_db,
                              self.context,
                              result['id'],
                              {'fake_key2': 'fake_value2'},
                              max_retries=5)

    def test_aggregate_metadata_update(self):
        """A deleted key can be re-added with a new value."""
        result = _create_aggregate(self.context)
        metadata = deepcopy(_get_fake_metadata(1))
        key = list(metadata.keys())[0]
        aggregate_obj._metadata_delete_from_db(self.context, result['id'], key)
        new_metadata = {key: 'foo'}
        aggregate_obj._metadata_add_to_db(self.context,
                                          result['id'], new_metadata)
        expected = _aggregate_metadata_get_all(self.context, result['id'])
        metadata[key] = 'foo'
        self.assertThat(metadata, matchers.DictMatches(expected))

    def test_aggregate_metadata_delete(self):
        """_metadata_delete_from_db removes only the named key."""
        result = _create_aggregate(self.context, metadata=None)
        metadata = deepcopy(_get_fake_metadata(1))
        aggregate_obj._metadata_add_to_db(self.context, result['id'], metadata)
        aggregate_obj._metadata_delete_from_db(self.context, result['id'],
                                               list(metadata.keys())[0])
        expected = _aggregate_metadata_get_all(self.context, result['id'])
        del metadata[list(metadata.keys())[0]]
        self.assertThat(metadata, matchers.DictMatches(expected))

    def test_aggregate_remove_availability_zone(self):
        """Deleting the availability_zone key clears the aggregate's AZ."""
        result = _create_aggregate(self.context, metadata={'availability_zone':
                                                           'fake_avail_zone'})
        aggregate_obj._metadata_delete_from_db(self.context,
                                               result['id'],
                                               'availability_zone')
        aggr = aggregate_obj._aggregate_get_from_db(self.context, result['id'])
        self.assertIsNone(aggr['availability_zone'])

    def test_aggregate_metadata_delete_raise_not_found(self):
        """Deleting a metadata key that was never set raises NotFound."""
        result = _create_aggregate(self.context)
        self.assertRaises(exception.AggregateMetadataNotFound,
                          aggregate_obj._metadata_delete_from_db,
                          self.context, result['id'], 'foo_key')
def create_aggregate(context, db_id, in_api=True):
    """Seed one fake aggregate (with metadata and hosts) for the tests.

    When in_api is True the aggregate goes into the API DB via the
    aggregate_obj helpers; otherwise into the cell DB via the db API.
    """
    if in_api:
        fake_aggregate = _get_fake_aggregate(db_id, in_api=False, result=False)
        aggregate_obj._aggregate_create_in_db(context, fake_aggregate,
                                              metadata=_get_fake_metadata(db_id))
        for host in _get_fake_hosts(db_id):
            aggregate_obj._host_add_to_db(context, fake_aggregate['id'], host)
    else:
        fake_aggregate = _get_fake_aggregate(db_id, in_api=False, result=False)
        db.aggregate_create(context, fake_aggregate,
                            metadata=_get_fake_metadata(db_id))
        for host in _get_fake_hosts(db_id):
            db.aggregate_host_add(context, fake_aggregate['id'], host)
def compare_obj(test, result, source):
    """Compare an Aggregate object against its fixture dict, ignoring updated_at."""
    source['deleted'] = False

    def updated_at_comparator(result, source):
        # updated_at changes on every DB write, so treat any value as equal.
        return True

    return base_compare(test, result, source, subs=SUBS,
                        comparators={'updated_at': updated_at_comparator})
class AggregateObjectCellTestCase(test.TestCase):
    """Tests for the case where all aggregate data is in Cell DB"""

    def setUp(self):
        super(AggregateObjectCellTestCase, self).setUp()
        self.context = context.RequestContext('fake-user', 'fake-project')
        self._seed_data()

    def _seed_data(self):
        # Aggregates 1-9, all created in the cell DB; subclasses override
        # this to place the data elsewhere.
        for i in range(1, 10):
            create_aggregate(self.context, i, in_api=False)

    def test_get_by_id(self):
        for i in range(1, 10):
            agg = aggregate_obj.Aggregate.get_by_id(self.context, i)
            compare_obj(self, agg, _get_fake_aggregate(i))

    def test_save(self):
        for i in range(1, 10):
            agg = aggregate_obj.Aggregate.get_by_id(self.context, i)
            fake_agg = _get_fake_aggregate(i)
            fake_agg['name'] = 'new-name' + str(i)
            agg.name = 'new-name' + str(i)
            agg.save()
            result = aggregate_obj.Aggregate.get_by_id(self.context, i)
            # Both the in-memory object and a fresh DB read must match.
            compare_obj(self, agg, fake_agg)
            compare_obj(self, result, fake_agg)

    def test_update_metadata(self):
        for i in range(1, 10):
            agg = aggregate_obj.Aggregate.get_by_id(self.context, i)
            fake_agg = _get_fake_aggregate(i)
            fake_agg['metadetails'] = {'constant_key': 'constant_value'}
            # Setting a key to None removes it from the metadata.
            agg.update_metadata({'unique_key': None})
            agg.save()
            result = aggregate_obj.Aggregate.get_by_id(self.context, i)
            compare_obj(self, agg, fake_agg)
            compare_obj(self, result, fake_agg)

    def test_destroy(self):
        for i in range(1, 10):
            agg = aggregate_obj.Aggregate.get_by_id(self.context, i)
            agg.destroy()
        aggs = aggregate_obj.AggregateList.get_all(self.context)
        self.assertEqual(len(aggs), 0)

    def test_add_host(self):
        for i in range(1, 10):
            agg = aggregate_obj.Aggregate.get_by_id(self.context, i)
            fake_agg = _get_fake_aggregate(i)
            fake_agg['hosts'].append('barbar')
            agg.add_host('barbar')
            agg.save()
            result = aggregate_obj.Aggregate.get_by_id(self.context, i)
            compare_obj(self, agg, fake_agg)
            compare_obj(self, result, fake_agg)

    def test_delete_host(self):
        for i in range(1, 10):
            agg = aggregate_obj.Aggregate.get_by_id(self.context, i)
            fake_agg = _get_fake_aggregate(i)
            fake_agg['hosts'].remove('constant_host')
            agg.delete_host('constant_host')
            result = aggregate_obj.Aggregate.get_by_id(self.context, i)
            compare_obj(self, agg, fake_agg)
            compare_obj(self, result, fake_agg)
class AggregateObjectApiTestCase(AggregateObjectCellTestCase):
    """Tests the aggregate in the case where all data is in the API DB"""

    def _seed_data(self):
        # Same fixtures as the parent, but created in the API DB.
        for i in range(1, 10):
            create_aggregate(self.context, i)

    def test_create(self):
        """New aggregates can be created directly in the API DB."""
        new_agg = aggregate_obj.Aggregate(self.context)
        new_agg.name = 'new-aggregate'
        new_agg.create()
        result = aggregate_obj.Aggregate.get_by_id(self.context, new_agg.id)
        self.assertEqual(new_agg.name, result.name)
class AggregateObjectMixedTestCase(AggregateObjectCellTestCase):
    """Tests the aggregate in the case where data is in both databases"""

    def _seed_data(self):
        # Aggregates 1-5 live in the API DB, 6-9 in the cell DB.
        for i in range(1, 6):
            create_aggregate(self.context, i)
        for i in range(6, 10):
            create_aggregate(self.context, i, in_api=False)

    def test_create(self):
        """Creation is refused while cell-DB aggregates remain unmigrated."""
        new_agg = aggregate_obj.Aggregate(self.context)
        new_agg.name = 'new-aggregate'
        self.assertRaises(exception.ObjectActionError,
                          new_agg.create)
class AggregateObjectMigrationTestCase(AggregateObjectCellTestCase):
    """Tests the aggregate in the case where data is migrated to the API db"""

    def _seed_data(self):
        # Seed in the cell DB, then migrate everything to the API DB.
        for i in range(1, 10):
            create_aggregate(self.context, i, in_api=False)
        aggregate_obj.migrate_aggregates(self.context, 50)

    def test_create(self):
        """After migration completes, creation in the API DB works again."""
        new_agg = aggregate_obj.Aggregate(self.context)
        new_agg.name = 'new-aggregate'
        new_agg.create()
        result = aggregate_obj.Aggregate.get_by_id(self.context, new_agg.id)
        self.assertEqual(new_agg.name, result.name)
class AggregateMigrationTestCase(test.TestCase):
    """Tests migrating aggregates from the main DB into the API DB."""

    def setUp(self):
        super(AggregateMigrationTestCase, self).setUp()
        self.context = context.get_admin_context()

    def test_migration(self):
        """Main-DB aggregates move wholesale into the API DB."""
        db.aggregate_create(self.context, {'name': 'foo'})
        main_aggregates_len = len(db.aggregate_get_all(self.context))
        match, done = aggregate_obj.migrate_aggregates(self.context, 50)
        self.assertEqual(1, main_aggregates_len)
        self.assertEqual(main_aggregates_len, match)
        self.assertEqual(main_aggregates_len, done)
        # Source DB is emptied; API DB now holds everything.
        self.assertEqual(0, len(db.aggregate_get_all(self.context)))
        self.assertEqual(main_aggregates_len,
                         len(aggregate_obj.AggregateList.get_all(
                             self.context)))

    def test_migrate_aggregate_reset_autoincrement(self):
        """Resetting autoincrement touches no rows when data is in the API DB."""
        agg = aggregate_obj.Aggregate(self.context, name='foo')
        agg.create()
        match, done = aggregate_obj.migrate_aggregate_reset_autoincrement(
            self.context, 0)
        self.assertEqual(0, match)
        self.assertEqual(0, done)

    def test_migrate_aggregate_reset_autoincrement_no_aggregates(self):
        # NOTE(danms): This validates the "or 0" default if there are no
        # aggregates (and thus no max id).
        match, done = aggregate_obj.migrate_aggregate_reset_autoincrement(
            self.context, 0)
        self.assertEqual(0, match)
        self.assertEqual(0, done)
| apache-2.0 |
EntityFXCode/arsenalsuite | python/scripts/joberror_handler.py | 10 | 2441 | #!/usr/bin/python
from PyQt4.QtCore import *
from PyQt4.QtSql import *
from blur.Stone import *
from blur.Classes import *
import blur.email, blur.jabber
import sys, time, re, os
from math import ceil
import traceback
# Detach into the background when launched with -daemonize.
if sys.argv.count('-daemonize'):
    from blur.daemonize import createDaemon
    createDaemon()

app = QCoreApplication(sys.argv)

# Service-specific config first, then shared DB defaults.
initConfig( "/etc/joberror_handler.ini", "/var/log/ab/joberror_handler.log" )
# Read values from db.ini, but dont overwrite values from reaper.ini
# This allows db.ini defaults to work even if reaper.ini is non-existent
config().readFromFile( "/etc/db.ini", False )
blur.RedirectOutputToLog()
classes_loader()

# Flip to True to echo SQL statements to the log for debugging.
VERBOSE_DEBUG = False
if VERBOSE_DEBUG:
    Database.current().setEchoMode( Database.EchoUpdate | Database.EchoDelete )# | Database.EchoSelect )
    Database.current().connection().reconnect()

# Register this daemon in the service table so it can heartbeat via pulse().
service = Service.ensureServiceExists('AB_JobErrorHandler')
def perform_script_action( handler_script, error ):
    """Compile the handler script's handleError() entry point and invoke it on error."""
    func = getCompiledFunction( 'handleError', handler_script, handler_script.script(), handler_script.name() )
    if func and callable(func):
        func( error )
def handle_error( handler, error ):
    """Run the handler's script action when its regex matches the error message."""
    # Does the handler match the error?
    if error.message().contains( handler.errorRegEx() ):
        perform_script_action( handler.jobErrorHandlerScript(), error )
def joberror_handler():
    """Main service loop: poll unchecked job errors every 5s and run the
    handlers registered for each error's job type.

    Job types and their handlers are cached per polling pass to avoid
    re-querying for every error row.
    """
    # Config: managerDriveLetter, managerSpoolDir, assburnerErrorStep
    print "JobError Handler is starting up"
    while True:
        service.pulse()
        errorsToCheck = JobError.select( "checked=false" )
        handlersByJobType = {}
        jobTypeByJob = {}
        for error in errorsToCheck:
            # Get the job key, but don't load the job(if we don't have to)
            jobKey = error.getValue('fkeyJob').toInt()[0]
            # Hmm, if we haven't looked at the job yet, then we have to load
            # it to figure out the jobtype.
            # NOTE(review): the cache is written under job.key() but read
            # under jobKey — assumes job.key() == fkeyJob; verify.
            if not jobKey in jobTypeByJob:
                job = error.job()
                jobTypeByJob[job.key()] = job.jobType()
            # Get the jobtype
            jobType = jobTypeByJob[jobKey]
            if not jobType.isRecord():
                continue
            # Load the handlers for this jobtype if not already loaded
            if not jobType in handlersByJobType:
                handlersByJobType[jobType] = jobType.jobErrorHandlers()
            # Run each handler
            for handler in handlersByJobType[jobType]:
                handle_error( handler, error )
        # Mark the whole batch processed so it is not picked up again.
        errorsToCheck.setChecked( True )
        errorsToCheck.commit()
        time.sleep(5)

if __name__ == "__main__":
    joberror_handler()
| gpl-2.0 |
failys/CAIRIS | cairis/tools/PseudoClasses.py | 1 | 16629 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from collections import OrderedDict
import logging
from flask_restful import fields
from cairis.core.ReferenceSynopsis import ReferenceSynopsis
from cairis.core.ReferenceContribution import ReferenceContribution
__author__ = 'Robin Quetin, Shamal Faily'
obj_id_field = '__python_obj__'
def gen_class_metadata(class_ref):
  """Return a JSON-schema 'enum' entry holding the class's fully-qualified name."""
  qualified_name = '.'.join((class_ref.__module__, class_ref.__name__))
  return {"enum": [qualified_name]}
class CharacteristicReferenceSynopsis(object):
  """REST marshalling wrapper for a characteristic reference synopsis."""

  resource_fields = {
    obj_id_field: fields.String,
    "theSynopsis": fields.String,
    "theDimension": fields.String,
    "theActorType": fields.String,
    "theActor": fields.String,
    "theInitialSatisfaction": fields.String
  }
  # Every marshalled field except the python object marker is required.
  required = [k for k in resource_fields.keys() if k != obj_id_field]

  def __init__(self, rsName='', rsDim='', rsActorType='', rsActor='', gSat=''):
    self.theSynopsis = rsName
    self.theDimension = rsDim
    self.theActorType = rsActorType
    self.theActor = rsActor
    self.theInitialSatisfaction = gSat

  def __getitem__(self, varName):
    """Dict-style access to the known attributes; unknown keys yield None."""
    known = ('theSynopsis', 'theDimension', 'theActorType', 'theActor',
             'theInitialSatisfaction')
    return getattr(self, varName) if varName in known else None
class CharacteristicReferenceContribution(object):
  """Means/end contribution made by a characteristic reference."""

  resource_fields = {
    obj_id_field: fields.String,
    "theMeansEnd": fields.String,
    "theContribution": fields.String
  }
  # Every marshalled field except the python object marker is required.
  required = [k for k in resource_fields.keys() if k != obj_id_field]

  def __init__(self, rcMe='', rcCont=''):
    self.theMeansEnd = rcMe
    self.theContribution = rcCont

  def __getitem__(self, varName):
    """Dict-style access to the known attributes; unknown keys yield None."""
    if varName in ('theMeansEnd', 'theContribution'):
      return getattr(self, varName)
    return None
class CharacteristicReference(object):
  """A reference (grounds, warrant or rebuttal) backing a characteristic."""

  resource_fields = {
    obj_id_field: fields.String,
    'theReferenceName': fields.String,
    'theCharacteristicType': fields.String,
    'theReferenceDescription': fields.String,
    'theDimensionName': fields.String,
    'theReferenceSynopsis': fields.Nested(CharacteristicReferenceSynopsis.resource_fields),
    'theReferenceContribution': fields.Nested(CharacteristicReferenceContribution.resource_fields)
  }
  # Every marshalled field except the python object marker is required.
  required = [k for k in resource_fields.keys() if k != obj_id_field]

  def __init__(self, refName=None, crTypeName='grounds', refDesc=None, dimName='document', rSyn=None, rCont=None):
    """Defaults describe an empty 'grounds' reference in the 'document' dimension.

    :type refName: str
    :type crTypeName: str
    :type refDesc: str
    :type dimName: str
    """
    self.theReferenceName = refName
    self.theCharacteristicType = crTypeName
    self.theReferenceDescription = refDesc
    self.theDimensionName = dimName
    self.theReferenceSynopsis = rSyn
    self.theReferenceContribution = rCont
class Definition(object):
  """A named project definition (glossary entry) for REST marshalling."""

  resource_fields = {
    obj_id_field: fields.String,
    'name': fields.String,
    'value': fields.String
  }
  # Every marshalled field except the python object marker is required.
  required = [k for k in resource_fields.keys() if k != obj_id_field]
class Contributor(object):
  """A project contributor: name, affiliation and role.

  Can be built either from individual values or from a
  (firstName, surname, affiliation, role) tuple.
  """

  resource_fields = {
    obj_id_field: fields.String,
    'firstName': fields.String,
    'surname': fields.String,
    'affiliation': fields.String,
    'role': fields.String
  }
  required = list(resource_fields.keys())
  required.remove(obj_id_field)

  def __init__(self, first_name=None, surname=None, affiliation=None, role=None, tuple_form=None):
    """
    :type first_name: str
    :type surname: str
    :type affiliation: str
    :type role: str
    :type tuple_form: tuple
    """
    if tuple_form is None:
      self.firstName = first_name or ''
      self.surname = surname or ''
      self.affiliation = affiliation or ''
      self.role = role or ''
    else:
      attrs = ['firstName', 'surname', 'affiliation', 'role']
      # zip() tolerates short tuples (as before) and also ignores surplus
      # trailing values, where the previous index-based loop raised
      # IndexError on tuples longer than four elements.
      for attr, value in zip(attrs, tuple_form):
        setattr(self, attr, value or '')
class EnvironmentTensionModel(object):
  """Tension value between a base security attribute and a privacy sub-attribute."""
  resource_fields = {
    obj_id_field: fields.String,
    "base_attr_id": fields.Integer,
    "attr_id": fields.Integer,
    "value": fields.Integer,
    "rationale": fields.String
  }
  required = list(resource_fields.keys())
  required.remove(obj_id_field)
  # Attribute-name -> numeric id table, re-ordered by id below.
  attr_dictionary = {
    'Confidentiality': 0,
    'Integrity': 1,
    'Availability': 2,
    'Accountability': 3,
    'Anonymity': 4,
    'Pseudonymity': 5,
    'Unlinkability': 6,
    'Unobservability': 7
  }
  attr_dictionary = OrderedDict(sorted(list(attr_dictionary.items()), key=lambda t: t[1]))
  # endregion
  # Valid id ranges: base attributes -1..3; sub-attributes 4..7 plus -1.
  base_attr_values = list(range(-1,4))
  attr_values = list(range(4,8))
  attr_values.append(-1)
  def __init__(self, base_attr_id=-1, attr_id=-1, value=0, rationale='None', key=None):
    """
    :type base_attr_id: int
    :type attr_id: int
    :type value: int|tuple
    :type rationale: str
    :type key: tuple
    """
    # Alternate construction: key is (base_attr_id, attr_id) and value is
    # then a (value, rationale) pair that gets unpacked here.
    if key is not None:
      base_attr_id = key[0]
      attr_id = key[1]
      rationale = value[1]
      value = value[0]
    if base_attr_id not in self.base_attr_values or attr_id not in self.attr_values:
      raise ValueError('Base attribute or subattribute value is incorrect.')
    self.base_attr_id = base_attr_id
    self.attr_id = attr_id
    self.value = value
    self.rationale = rationale
class Revision(object):
  """A numbered project revision with a date and description.

  Can be built either from individual values or from an
  (id, date, description) tuple.
  """

  resource_fields = {
    obj_id_field: fields.String,
    'id': fields.Integer,
    'date': fields.String,
    'description': fields.String
  }
  required = list(resource_fields.keys())
  required.remove(obj_id_field)

  def __init__(self, id=None, date=None, description=None, tuple_form=None):
    """
    :type id: int
    :type date: str
    :type description: str
    :type tuple_form: tuple
    """
    if tuple_form is None:
      self.id = id
      self.date = date
      self.description = description
    else:
      attrs = ['id', 'date', 'description']
      # zip() tolerates short tuples (as before) and also ignores surplus
      # trailing values, where the previous index-based loop raised
      # IndexError on tuples longer than three elements.
      for attr, value in zip(attrs, tuple_form):
        setattr(self, attr, value or '')
class ProjectSettings(object):
  """Project-level settings: name, scope, goals, contributors and revisions.

  Non-tuple items in the contributors/revisions iterables are skipped with
  a warning rather than aborting construction.
  """

  resource_fields = {
    obj_id_field: fields.String,
    'projectName': fields.String,
    'richPicture': fields.String,
    'projectScope': fields.String,
    'definitions': fields.List(fields.Nested(Definition.resource_fields)),
    'projectGoals': fields.String,
    'contributions': fields.List(fields.Nested(Contributor.resource_fields)),
    'projectBackground': fields.String,
    'revisions': fields.List(fields.Nested(Revision.resource_fields))
  }
  required = list(resource_fields.keys())
  required.remove(obj_id_field)
  # Keys expected in the pSettings dict.
  req_p_settings_keys = ['Project Background', 'Project Goals', 'Project Name', 'Project Scope', 'Rich Picture']

  def __init__(self, pSettings=None, pDict=None, contributors=None, revisions=None):
    """
    :param pSettings: dict keyed by req_p_settings_keys (missing keys -> '')
    :param pDict: list of project definitions
    :param contributors: iterable of contributor tuples
    :param revisions: iterable of revision tuples
    """
    logger = logging.getLogger('cairisd')
    project_settings = pSettings or {}
    self.projectBackground = project_settings.get("Project Background", "")
    self.projectGoals = project_settings.get("Project Goals", "")
    self.projectName = project_settings.get("Project Name", "")
    self.projectScope = project_settings.get("Project Scope", "")
    self.richPicture = project_settings.get("Rich Picture", "")
    self.definitions = pDict or []
    self.contributions = []
    for contributor in contributors or []:
      if isinstance(contributor, tuple):
        new_contr = Contributor(tuple_form=contributor)
        self.contributions.append(new_contr)
      else:
        logger.warning('Item does not meet typical contributor structure. Passing this one.')
    self.revisions = []
    for revision in revisions or []:
      if isinstance(revision, tuple):
        new_rev = Revision(tuple_form=revision)
        self.revisions.append(new_rev)
      else:
        # Fixed copy-paste defect: this warning previously said
        # "contributor structure" in the revisions loop.
        logger.warning('Item does not meet typical revision structure. Passing this one.')
class RiskScore(object):
  """Risk score for one response, before and after mitigation."""

  resource_fields = {
    obj_id_field: fields.String,
    'responseName': fields.String,
    'unmitScore': fields.Integer,
    'mitScore': fields.Integer,
    'details': fields.String
  }
  # Every marshalled field except the python object marker is required.
  required = [k for k in resource_fields.keys() if k != obj_id_field]

  def __init__(self, response_name, unmit_score, mit_score, details):
    """Falsy scores (None or 0) are normalised to -1, meaning 'not scored'.

    :type response_name: str
    :type unmit_score: int
    :type mit_score: int
    :type details: str
    """
    self.responseName = response_name
    self.unmitScore = unmit_score if unmit_score else -1
    self.mitScore = mit_score if mit_score else -1
    self.details = details
class RiskRating(object):
  """Qualitative rating of a threat/vulnerability pair within an environment."""

  resource_fields = {
    obj_id_field: fields.String,
    'rating': fields.String,
    'threat': fields.String,
    'vulnerability': fields.String,
    'environment': fields.String
  }
  # Every marshalled field except the python object marker is required.
  required = [k for k in resource_fields.keys() if k != obj_id_field]

  def __init__(self, threat, vulnerability, environment, rating=None):
    self.rating = rating
    self.threat = threat
    self.vulnerability = vulnerability
    self.environment = environment
class CountermeasureTarget(object):
  """A target addressed by a countermeasure, with effectiveness and rationale."""

  def __init__(self, tName=None, tEffectiveness=None, tRat=None):
    """
    :type tName: str
    :type tEffectiveness: str
    :type tRat: str
    """
    self.theName = tName
    self.theEffectiveness = tEffectiveness
    self.theRationale = tRat

  resource_fields = {
    "__python_obj__": fields.String,
    "theName": fields.String,
    "theEffectiveness": fields.String,
    "theRationale": fields.String
  }
  # Every marshalled field except the python object marker is required.
  required = [k for k in resource_fields.keys() if k != obj_id_field]

  # Read-only accessors.
  def name(self):
    return self.theName

  def effectiveness(self):
    return self.theEffectiveness

  def rationale(self):
    return self.theRationale
class PersonaTaskCharacteristics(object):
  """How a persona experiences a task: duration, frequency, demands, conflict."""

  def __init__(self, pName, pDur, pFreq, pDemands, pGoalConflict):
    """
    :type pName: str
    :type pDur: str
    :type pFreq: str
    :type pDemands: str
    :type pGoalConflict: str
    """
    self.thePersona = pName
    self.theDuration = pDur
    self.theFrequency = pFreq
    self.theDemands = pDemands
    self.theGoalConflict = pGoalConflict

  resource_fields = {
    "__python_obj__": fields.String,
    "thePersona": fields.String,
    "theDuration": fields.String,
    "theFrequency": fields.String,
    "theDemands": fields.String,
    "theGoalConflict": fields.String
  }
  # Every marshalled field except the python object marker is required.
  required = [k for k in resource_fields.keys() if k != obj_id_field]

  # Read-only accessors.
  def persona(self):
    return self.thePersona

  def duration(self):
    return self.theDuration

  def frequency(self):
    return self.theFrequency

  def demands(self):
    return self.theDemands

  def goalConflict(self):
    return self.theGoalConflict
class CountermeasureTaskCharacteristics(object):
  """Persona task characteristics as affected by a countermeasure for one task."""

  def __init__(self, pTask, pName, pDur, pFreq, pDemands, pGoalConflict):
    """
    :type pTask: str
    :type pName: str
    :type pDur: str
    :type pFreq: str
    :type pDemands: str
    :type pGoalConflict: str
    """
    self.theTask = pTask
    self.thePersona = pName
    self.theDuration = pDur
    self.theFrequency = pFreq
    self.theDemands = pDemands
    self.theGoalConflict = pGoalConflict

  resource_fields = {
    "__python_obj__": fields.String,
    "theTask": fields.String,
    "thePersona": fields.String,
    "theDuration": fields.String,
    "theFrequency": fields.String,
    "theDemands": fields.String,
    "theGoalConflict": fields.String
  }
  # Every marshalled field except the python object marker is required.
  required = [k for k in resource_fields.keys() if k != obj_id_field]

  # Read-only accessors.
  def task(self):
    return self.theTask

  def persona(self):
    return self.thePersona

  def duration(self):
    return self.theDuration

  def frequency(self):
    return self.theFrequency

  def demands(self):
    return self.theDemands

  def goalConflict(self):
    return self.theGoalConflict
class SecurityAttribute(object):
  """A named security property with a qualitative value and rationale."""

  def __init__(self, name=None, value=None, rationale=None):
    """
    :type name: str
    :type value: str
    :type rationale: str
    """
    self.name = name
    self.value = value
    self.rationale = rationale

  resource_fields = {
    "__python_obj__": fields.String,
    "name": fields.String,
    "value": fields.String,
    "rationale": fields.String
  }
  required = list(resource_fields.keys())
  required.remove(obj_id_field)

  def get_attr_value(self, enum_obj):
    """Return the index of self.value within enum_obj, or 0 when unset/absent.

    :type enum_obj: list|tuple
    """
    if self.value is None:
      return 0
    try:
      # sequence.index replaces the original hand-rolled linear scan;
      # behaviour is identical (first match wins, 0 when not found).
      return list(enum_obj).index(self.value)
    except ValueError:
      return 0
class ValuedRole(object):
  """A role paired with the cost of involving it."""

  resource_fields = {
    obj_id_field: fields.String,
    'roleName': fields.String,
    'cost': fields.String
  }
  # Every marshalled field except the python object marker is required.
  required = [k for k in resource_fields.keys() if k != obj_id_field]

  def __init__(self, role_name, cost):
    self.roleName = role_name
    self.cost = cost
class ExceptionAttributes(object):
  """A use-case step exception: name, dimension, category and description."""

  resource_fields = {
    obj_id_field: fields.String,
    'theName': fields.String,
    'theDimensionType': fields.String,
    'theDimensionValue': fields.String,
    'theCategoryName': fields.String,
    'theDescription': fields.String
  }
  # Every marshalled field except the python object marker is required.
  required = [k for k in resource_fields.keys() if k != obj_id_field]

  def __init__(self, excName, dimType, dimValue, catName, excDesc):
    self.theName = excName
    self.theDimensionType = dimType
    self.theDimensionValue = dimValue
    self.theCategoryName = catName
    self.theDescription = excDesc
class StepAttributes(object):
  """A single use-case step with its synopsis, actor and exceptions."""

  resource_fields = {
    obj_id_field: fields.String,
    'theStepText': fields.String,
    'theSynopsis': fields.String,
    'theActor': fields.String,
    'theActorType': fields.String,
    'theExceptions': fields.List(fields.Nested(ExceptionAttributes.resource_fields)),
  }
  # Every marshalled field except the python object marker is required.
  required = [k for k in resource_fields.keys() if k != obj_id_field]

  def __init__(self, stepTxt, stepSyn, stepActor, stepActorType, stepExceptions):
    self.theStepText = stepTxt
    self.theSynopsis = stepSyn
    self.theActor = stepActor
    self.theActorType = stepActorType
    self.theExceptions = stepExceptions

  def synopsis(self):
    return self.theSynopsis

  def actor(self):
    return self.theActor

  def actorType(self):
    return self.theActorType

  def tags(self):
    # NOTE(review): theTags is never assigned by this class, so this raises
    # AttributeError unless a caller sets it externally first — confirm intent.
    return self.theTags

  def setSynopsis(self, s):
    self.theSynopsis = s

  def setActor(self, a):
    self.theActor = a
class StepsAttributes(object):
  """Ordered collection of use-case steps with list-like helpers."""

  resource_fields = {
    obj_id_field: fields.String,
    'theSteps': fields.List(fields.Nested(StepAttributes.resource_fields)),
  }
  # Every marshalled field except the python object marker is required.
  required = [k for k in resource_fields.keys() if k != obj_id_field]

  def __init__(self):
    self.theSteps = []

  def __getitem__(self, stepNo):
    return self.theSteps[stepNo]

  def __setitem__(self, stepNo, s):
    self.theSteps[stepNo] = s

  def size(self):
    """Number of steps held."""
    return len(self.theSteps)

  def append(self, s):
    self.theSteps.append(s)

  def remove(self, stepNo):
    # Removes by position (despite the name), matching list.pop semantics.
    del self.theSteps[stepNo]

  def insert(self, pos, s):
    self.theSteps.insert(pos, s)
class ObjectDependency(object):
  """Serializable (dimension, object name) reference to another model object."""
  resource_fields = {
    obj_id_field: fields.String,
    'theDimensionName': fields.String,
    'theObjectName': fields.String,
  }
  # Every serialized field except the object id is mandatory.
  required = [key for key in resource_fields.keys() if key != obj_id_field]
  def __init__(self, dimension_name, object_name):
    """Record the dimension (object type) and the referenced object's name."""
    self.theDimensionName = dimension_name
    self.theObjectName = object_name
class TaskGoalContribution(object):
  """Serializable contribution link between a task and a goal."""
  resource_fields = {
    obj_id_field: fields.String,
    'theSource': fields.String,
    'theDestination': fields.String,
    'theEnvironment': fields.String,
    'theContribution': fields.String,
  }
  # Every serialized field except the object id is mandatory.
  required = [key for key in resource_fields.keys() if key != obj_id_field]
  def __init__(self, src, dest, env, cont):
    """Store source/destination endpoints, the environment and the contribution."""
    self.theSource = src
    self.theDestination = dest
    self.theEnvironment = env
    self.theContribution = cont
| apache-2.0 |
ZoranPavlovic/kombu | kombu/utils/objects.py | 1 | 1945 | """Object Utilities."""
class cached_property:
    """Descriptor that caches the getter's return value per instance.

    The wrapped getter runs once; its result is then stored in the
    instance ``__dict__`` under the property name, so subsequent reads
    bypass the descriptor entirely.  Optional setter/deleter hooks may
    transform the stored value / react to deletion.

    Examples:
        .. code-block:: python

            @cached_property
            def connection(self):
                return Connection()

            @connection.setter  # Prepares stored value
            def connection(self, value):
                if value is None:
                    raise TypeError('Connection must be a connection')
                return value

            @connection.deleter
            def connection(self, value):
                # Additional action to do at del(self.attr)
                if value is not None:
                    print('Connection {0!r} deleted'.format(value))
    """

    def __init__(self, fget=None, fset=None, fdel=None, doc=None):
        self._fget = fget
        self._fset = fset
        self._fdel = fdel
        self.__doc__ = doc or fget.__doc__
        self.__name__ = fget.__name__
        self.__module__ = fget.__module__

    def __get__(self, obj, type=None):
        if obj is None:
            return self
        try:
            # Cache hit: the value already lives on the instance.
            return obj.__dict__[self.__name__]
        except KeyError:
            # Cache miss: compute once and store on the instance.
            value = obj.__dict__[self.__name__] = self._fget(obj)
            return value

    def __set__(self, obj, value):
        if obj is None:
            return self
        if self._fset is not None:
            # The setter may normalise/validate the value before storage.
            value = self._fset(obj, value)
        obj.__dict__[self.__name__] = value

    def __delete__(self, obj, _sentinel=object()):
        if obj is None:
            return self
        value = obj.__dict__.pop(self.__name__, _sentinel)
        if self._fdel is not None and value is not _sentinel:
            self._fdel(obj, value)

    def setter(self, fset):
        """Return a copy of this descriptor with ``fset`` attached."""
        return self.__class__(self._fget, fset, self._fdel)

    def deleter(self, fdel):
        """Return a copy of this descriptor with ``fdel`` attached."""
        return self.__class__(self._fget, self._fset, fdel)
| bsd-3-clause |
tecan/xchat-rt | plugins/scripts/Supybot-0.83.4.1-bitcoinotc-bot/build/lib/supybot/plugins/Factoids/test.py | 7 | 8060 | ###
# Copyright (c) 2002-2005, Jeremiah Fincher
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
from supybot.test import *
try:
import sqlite
except ImportError:
sqlite = None
if sqlite:
    class FactoidsTestCase(ChannelPluginTestCase):
        """Functional tests for the Factoids plugin.

        Each test drives the bot through IRC-style commands in order, so the
        assertion sequence within a method is stateful: a 'learn' must
        precede the 'whatis'/'forget' that exercises it.
        """
        plugins = ('Factoids',)
        def testRandomfactoid(self):
            # 'random' errors with an empty database, works once taught.
            self.assertError('random')
            self.assertNotError('learn jemfinch as my primary author')
            self.assertRegexp('random', 'primary author')
        def testLearn(self):
            self.assertNotError('learn jemfinch as my primary author')
            self.assertNotError('info jemfinch')
            # Lookup is case-insensitive, with an optional 1-based index.
            self.assertRegexp('whatis jemfinch', 'my primary author')
            self.assertRegexp('whatis JEMFINCH', 'my primary author')
            self.assertRegexp('whatis JEMFINCH 1', 'my primary author')
            self.assertNotError('learn jemfinch as a bad assembly programmer')
            self.assertRegexp('whatis jemfinch 2', 'bad assembly')
            self.assertNotRegexp('whatis jemfinch 2', 'primary author')
            self.assertRegexp('whatis jemfinch', r'.*primary author.*assembly')
            # 'forget' requires an index while several factoids exist, and
            # indices are 1-based.
            self.assertError('forget jemfinch')
            self.assertError('forget jemfinch 3')
            self.assertError('forget jemfinch 0')
            self.assertNotError('forget jemfinch 2')
            self.assertNotError('forget jemfinch 1')
            self.assertError('whatis jemfinch')
            self.assertError('info jemfinch')
            # Multi-word keys behave the same way.
            self.assertNotError('learn foo bar as baz')
            self.assertNotError('info foo bar')
            self.assertRegexp('whatis foo bar', 'baz')
            self.assertNotError('learn foo bar as quux')
            self.assertRegexp('whatis foo bar', '.*baz.*quux')
            self.assertError('forget foo bar')
            self.assertNotError('forget foo bar 2')
            self.assertNotError('forget foo bar 1')
            self.assertError('whatis foo bar')
            self.assertError('info foo bar')
            self.assertError('learn foo bar baz') # No 'as'
            self.assertError('learn foo bar') # No 'as'
        def testChangeFactoid(self):
            self.assertNotError('learn foo as bar')
            self.assertNotError('change foo 1 s/bar/baz/')
            self.assertRegexp('whatis foo', 'baz')
            # Out-of-range indices are rejected.
            self.assertError('change foo 2 s/bar/baz/')
            self.assertError('change foo 0 s/bar/baz/')
        def testSearchFactoids(self):
            self.assertNotError('learn jemfinch as my primary author')
            self.assertNotError('learn strike as a cool guy working on me')
            self.assertNotError('learn inkedmn as another of my developers')
            self.assertNotError('learn jamessan as a developer of much python')
            self.assertNotError('learn bwp as author of my weather command')
            # Search accepts /regexp/, m/regexp/ and glob-style patterns.
            self.assertRegexp('factoids search --regexp /.w./', 'bwp')
            self.assertRegexp('factoids search --regexp /^.+i/',
                              'jemfinch.*strike')
            self.assertNotRegexp('factoids search --regexp /^.+i/', 'inkedmn')
            self.assertRegexp('factoids search --regexp m/j/ --regexp m/ss/',
                              'jamessan')
            self.assertRegexp('factoids search --regexp m/^j/ *ss*',
                              'jamessan')
            self.assertRegexp('factoids search --regexp /^j/',
                              'jemfinch.*jamessan')
            self.assertRegexp('factoids search j*', 'jemfinch.*jamessan')
            self.assertRegexp('factoids search *ke*',
                              'inkedmn.*strike|strike.*inkedmn')
            self.assertRegexp('factoids search ke',
                              'inkedmn.*strike|strike.*inkedmn')
            self.assertRegexp('factoids search jemfinch',
                              'my primary author')
            self.assertRegexp('factoids search --values primary author',
                              'my primary author')
        def testWhatisOnNumbers(self):
            self.assertNotError('learn 911 as emergency number')
            self.assertRegexp('whatis 911', 'emergency number')
        def testNotZeroIndexed(self):
            # Factoid numbering shown to users must start at #1, never #0.
            self.assertNotError('learn foo as bar')
            self.assertNotRegexp('info foo', '#0')
            self.assertNotRegexp('whatis foo', '#0')
            self.assertNotError('learn foo as baz')
            self.assertNotRegexp('info foo', '#0')
            self.assertNotRegexp('whatis foo', '#0')
        def testInfoReturnsRightNumber(self):
            self.assertNotError('learn foo as bar')
            self.assertNotRegexp('info foo', '2 factoids')
        def testLearnSeparator(self):
            # The 'as' separator is configurable; restore it afterwards.
            self.assertError('learn foo is bar')
            self.assertNotError('learn foo as bar')
            self.assertRegexp('whatis foo', 'bar')
            orig = conf.supybot.plugins.Factoids.learnSeparator()
            try:
                conf.supybot.plugins.Factoids.learnSeparator.setValue('is')
                self.assertError('learn bar as baz')
                self.assertNotError('learn bar is baz')
                self.assertRegexp('whatis bar', 'baz')
            finally:
                conf.supybot.plugins.Factoids.learnSeparator.setValue(orig)
        def testShowFactoidIfOnlyOneMatch(self):
            m1 = self.assertNotError('factoids search m/foo|bar/')
            orig = conf.supybot.plugins.Factoids.showFactoidIfOnlyOneMatch()
            try:
                conf.supybot.plugins.Factoids. \
                        showFactoidIfOnlyOneMatch.setValue(False)
                m2 = self.assertNotError('factoids search m/foo/')
                # With the option off the reply is a prefix of the full one.
                self.failUnless(m1.args[1].startswith(m2.args[1]))
            finally:
                conf.supybot.plugins.Factoids. \
                        showFactoidIfOnlyOneMatch.setValue(orig)
        def testInvalidCommand(self):
            orig = conf.supybot.plugins.Factoids.replyWhenInvalidCommand()
            try:
                conf.supybot.plugins.Factoids.\
                        replyWhenInvalidCommand.setValue(True)
                self.assertNotError('learn foo as bar')
                # An unknown command matching a factoid key replies with it.
                self.assertRegexp('foo', 'bar')
            finally:
                conf.supybot.plugins.Factoids.\
                        replyWhenInvalidCommand.setValue(orig)
        def testQuoteHandling(self):
            self.assertNotError('learn foo as "\\"bar\\""')
            self.assertRegexp('whatis foo', r'"bar"')
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:
| gpl-2.0 |
Fuzen-py/BladeAndSoulAPI | BladeAndSoul/bns.py | 1 | 21211 | import asyncio
import json
import aiohttp
from bs4 import BeautifulSoup
from .errors import (CharacterNotFound, FailedToParse, InvalidData,
ServiceUnavialable)
try:
import lxml
parser = 'lxml'
except ImportError:
parser = 'html.parser'
# types of weapons in game
VALID_WEAPONS = ['dagger', 'sword', 'staff', 'razor', 'axe', 'bangle', 'gauntlet', 'lynblade', 'bracer']
# types of accessories in game
VALID_ACCESSORIES = ['necklace', 'earring', 'bracelet', 'ring', 'belt', 'energy', 'soul']
PROFILE_URL = 'http://na-bns.ncsoft.com/ingame/bs/character/profile' # ?c=Char
SEARCH_URL = 'http://na-bns.ncsoft.com/ingame/bs/character/search/info' # ?c=Char
SUGGEST_URL = 'http://na-search.ncsoft.com/openapi/suggest.jsp' # ?site=bns&display=10&collection=bnsusersuggest&query=char
MARKET_API_ENDPOINT = 'http://na.bnsbazaar.com/api/market' # ITEM NAME
ITEM_NAME_SUGGEST = 'http://na-search.ncsoft.com/openapi/bnsmarketsuggest.jsp' #?site=bns&display=1&collection=bnsitemsuggest&lang=en&callback=suggestKeyword&query=items
BASE_ITEM_IMAGE_URL = 'http://static.ncsoft.com/bns_resource/ui_resource'
def _float(var):
"""
Attempts to an entry to a float (normally works for this)
"""
if var in [None, False]:
return 0
if var is True:
return 1
if isinstance(var, float):
return var
if isinstance(var, int):
return float(var)
assert isinstance(var, str)
assert any(x.isnumeric() for x in var)
var = var.split()[-1]
while len(var) > 0 and not var[-1].isnumeric():
var = var[:-1]
while len(var) > 0 and not var[0].isnumeric():
var = var[1:]
return float(var)
def _subtract(var1, var2, string=True, percent=False):
"""
Visually do math
"""
if string:
if percent:
return '{}% - {}% = {}%'.format(var1, var2, var1-var2)
return '{} - {} = {}'.format(var1, var2, var1-var2)
if percent:
return str(var1) + '%', str(var2) + '%', str(var1-var2) + '%'
return var1, var2, var1-var2
def get_name(gear_item):
    """Extract an item's display name from a profile gear/outfit node.

    Returns None when the slot node is missing or childless, is flagged
    as an empty slot, or the markup does not match the expected shape
    (AttributeError from the chained lookups).
    """
    try:
        name_node = gear_item.find('div', class_='name')
        # Falsy covers both "not found" (None) and a childless tag.
        if not name_node:
            return None
        if name_node.find('span', class_='empty') is not None:
            return None
        return name_node.span.text
    except AttributeError:
        return None
def set_bonus(set_) -> tuple:
    """Yield 'description:\\neffect' strings for a soul-shield set node.

    Each 'discription' <p> is paired with its positional 'setEffect' <p>;
    ``dict(zip(...))`` also collapses duplicate descriptions, exactly as
    the original one-liner did.  Whitespace is normalised line by line.
    NOTE(review): despite the annotation this returns a generator, not a
    tuple; annotation kept for interface compatibility.
    """
    pairs = dict(zip(set_.find_all('p', class_='discription'),
                     set_.find_all('p', class_='setEffect')))
    def clean(node):
        lines = (ln.strip() for ln in node.text.strip().split('\n'))
        return '\n'.join(ln for ln in lines if ln != '')
    return (':\n'.join(clean(node) for node in pair) for pair in pairs.items())
async def fetch_url(url, params=None):
    """Fetch ``url`` and return the response body parsed by BeautifulSoup.

    :param url: URL to request.
    :param params: optional mapping of query-string parameters.
    """
    # ``params`` now defaults to None instead of a shared mutable ``{}``
    # literal (mutable-default pitfall); an empty dict is substituted at
    # call time, which aiohttp treats the same as "no parameters".
    async with aiohttp.ClientSession() as session:
        async with session.get(url, params=params or {}) as response:
            return BeautifulSoup(await response.text(), parser)
async def search_user(user, suggest=True, max_count=3) -> list:
    # Scrape the official character-search page for ``user``.
    # With suggest=True: return up to ``max_count`` (name, [other account
    # characters]) tuples.  With suggest=False: return a single such tuple
    # for the top hit (a tuple, despite the ``-> list`` hint).
    # NOTE(review): when there is no match, the chained attribute lookups
    # below raise AttributeError -- callers (get_character) rely on that.
    soup = await fetch_url(SEARCH_URL, params={'c': user})
    search = soup.find('div', class_='searchList')
    if suggest:
        return [(x.dl.dt.a.text, [b.text for b in x.dl.find('dd', class_='other').dd.find_all('li')]) for x in
                search.find_all('li') if x.dt is not None][:max_count]
    return (search.li.dl.dt.a.text,
            [x.text for x in search.li.dl.find('dd', class_='other').dd.find_all('li') if x is not None])
async def fetch_profile(user) -> dict:
    """
    Fetches a user and returns the data as a dict
    Dictionary Keys:
    Account Name - The display name for their account (str).
    Character Name - The Name of the given character (str).
    Level - Character's level (int).
    HM Level - Character's HM level (int).
    Server - Server the character is on (str).
    Faction - The Faction the character is in (str).
    Picture - Link to the character's profile picture (str).
    Stats - A dictionary object with stats (each stat is also a dict).
    Gear - The gear of the Given Character (dict).
    SoulSheild - SoulSheild stats (tuple/list of str).
    Set Bonus - Set bonus affects, joined into one string (str).
    Outfit - The outfit of the character (dict).
    Other Characters - A list of the other characters on that user's account (list).
    Region - The region the user is from.
    :param user: The name of the character you wish to fetch data for
    """
    # Resolve the canonical character name and sibling characters first.
    CharacterName, other_chars = await search_user(user, suggest=False)
    soup = await fetch_url(PROFILE_URL, params={'c': CharacterName})
    # Bug fix: this check previously used the misspelled keyword ``clas_``,
    # which bs4 treats as a filter on a nonexistent HTML attribute, so the
    # service-down page was never detected.
    if len(soup.find_all('div', class_='pCharacter error', id='container')):
        raise ServiceUnavialable('Cannot Access BNS At this time')
    # INFORMATION
    Name = soup.find('a', href='#').text
    CharacterName = soup.find('dt').span.text[1:-1]
    Class, Level, Server, *Faction = [x.text.strip().replace('\xa0', ' ') for x in
                                      soup.find('dd', class_='desc').find_all('li')]
    # Faction list may hold: nothing, "<faction> <rank words>", or that
    # plus a clan name as a second entry.
    if len(Faction) == 0:
        Clan = Rank = Faction = None
    elif len(Faction) == 1:
        Faction = Faction[0].split()
        Rank = ' '.join(Faction[2:])
        Faction = ' '.join(Faction[:2])
        Clan = None
    else:
        Clan = Faction[1]
        Faction = Faction[0].split()
        Rank = ' '.join(Faction[2:])
        Faction = ' '.join(Faction[:2])
    # "Level N" or "Level N Hongmoon Level M".
    Level = Level.split()
    if len(Level) > 2:
        HM = int(Level[-1])
    else:
        HM = 0
    Level = int(Level[1])
    # ATTACK
    ATK = soup.find('div', class_='attack').dl
    # Per-stat breakdown tables (title -> value), minus the last two rows.
    sub = [z for z in (dict(zip((z.text for z in x.find_all('span', class_='title')),
                                (z.text for z in x.find_all('span', class_='stat-point')))) for x in ATK.find_all('dd')) if len(z)][:-2]
    temp = ATK.find_all('dt')[:-2]
    ATK = dict(
        zip([t.find('span', class_='title').text for t in temp], [t.find('span', 'stat-point').text for t in temp]))
    del ATK['Mastery']
    # Wrap each total so the breakdown dicts can be merged in below
    # (was a side-effect list comprehension).
    for key in ATK:
        ATK[key] = {'Total': ATK[key]}
    ATK['Attack Power'].update(sub[0])
    ATK['Piercing'].update(sub[2])
    ATK['Accuracy'].update(sub[3])
    ATK['Critical Hit'].update(sub[5])
    ATK['Critical Damage'].update(sub[6])
    # DEFENSE
    Defense = soup.find('div', class_='defense')
    temp = Defense.dl.find_all('dt')
    sub = [z for z in (dict(zip((z.text for z in x.find_all('span', class_='title')),
                                (z.text for z in x.find_all('span', class_='stat-point')))) for x in Defense.find_all('dd')) if len(z)]
    Defense = dict(
        zip([t.find('span', class_='title').text for t in temp], [t.find('span', 'stat-point').text for t in temp]))
    for key in Defense:
        Defense[key] = {'Total': Defense[key]}
    del Defense['Debuff Defense']
    Defense['Defense'].update(sub[1])
    Defense['Evolved Defense'].update(sub[2])
    Defense['Evasion'].update(sub[3])
    Defense['Block'].update(sub[4])
    Defense['Critical Defense'].update(sub[5])
    Defense['Health Regen'].update(sub[7])
    Defense['Recovery'].update(sub[8])
    # GEAR
    Weapon = get_name(soup.find('div', class_='wrapWeapon'))
    Necklace = get_name(soup.find('div', class_='wrapAccessory necklace'))
    Earring = get_name(soup.find('div', class_='wrapAccessory earring'))
    Ring = get_name(soup.find('div', class_='wrapAccessory ring'))
    Bracelet = get_name(soup.find('div', class_='wrapAccessory bracelet'))
    Belt = get_name(soup.find('div', class_='wrapAccessory belt'))
    Soul = get_name(soup.find('div', class_='wrapAccessory soul'))
    # SoulSheild
    SS = soup.find('div', class_='wrapGem')
    BONUS = ()
    Stats = ()
    # A soul shield is equipped iff at least one gem slot shows an image.
    if any(x.img is not None for x in SS.find_all('span')):
        BONUS = set_bonus(SS.find('div', class_='lyCharmEffect'))
        Stats = ([': '.join([tr.th.text, tr.td.text]) for tr in SS.table.find_all('tr')])
    # OUTFIT
    Clothes = get_name(soup.find('div', class_='wrapAccessory clothes'))
    Head = get_name(soup.find('div', class_='wrapAccessory tire'))
    Face = get_name(soup.find('div', class_='wrapAccessory faceDecoration'))
    Adornment = get_name(soup.find('div', class_='wrapAccessory clothesDecoration'))
    # PROFILEPICTURE
    Picture = soup.find('section').div.div.img.get('src')
    del soup, temp, sub
    # NOTE: 'SoulSheild' is misspelled on purpose -- it is the public key
    # existing callers (Character.pretty_gear etc.) rely on.
    r = {'Account Name': Name,
         'Character Name': CharacterName,
         'Class': Class,
         'Level': Level,
         'HM Level': HM,
         'Server': Server,
         'Faction': Faction,
         'Clan': Clan,
         'Faction Rank': Rank,
         'Picture': Picture,
         'Stats': {},
         'Gear': {
             'Weapon': Weapon,
             'Necklace': Necklace,
             'Earring': Earring,
             'Ring': Ring,
             'Bracelet': Bracelet,
             'Belt': Belt,
             'Soul': Soul},
         'SoulSheild': Stats,
         'Set Bonus': '\n\n'.join(BONUS),
         'Outfit': {'Clothes': Clothes,
                    'Head': Head,
                    'Face': Face,
                    'Adornment': Adornment},
         'Other Characters': other_chars,
         'Region': 'NA'}
    r['Stats'].update(ATK)
    r['Stats'].update(Defense)
    return r
async def get_item_name_suggestions(item, display, session):
    # Query the marketplace suggest endpoint (JSONP).  The body looks like
    # ``suggestKeyword(<json>);`` so the slice [17:-4] strips the callback
    # wrapper before json.loads.  NOTE(review): the offsets assume the
    # exact callback name requested below -- verify if it ever changes.
    async with session.get(ITEM_NAME_SUGGEST, params={'site': 'bns', 'display': display, 'collection': 'bnsitemsuggest', 'callback': 'suggestKeyword', 'query': item}) as re:
        data: dict = json.loads((await re.text())[17:-4])
        # A non-"0" result code means the suggest service is unhealthy.
        if data['result'] != "0":
            raise ServiceUnavialable
        return data
async def search_item(item, display: int = 1):
    """Search the marketplace for ``item``.

    Returns a list of dicts, one per suggested item name, each with keys
    ``icon`` (image URL), ``prices`` (list of ((gold, silver, bronze),
    amount) tuples) and ``name``.

    :raises InvalidData: when the market API returns a non-list or empty
        payload for a suggested item.
    """
    def price_parse(html):
        # Extract (gold, silver, bronze) integers from a price snippet;
        # a missing denomination counts as 0.
        soup = BeautifulSoup(html, parser)
        nodes = [soup.find(name='span', attrs={'class': c}) or 0 for c in ('gold', 'silver', 'bronze')]
        # Bug fix: was ``x is not 0`` -- identity comparison with an int
        # literal; use equality instead.
        return [int(node.text.split()[0]) if node != 0 else 0 for node in nodes]
    async def get_item_data(titem, session):
        async with session.get(f'{MARKET_API_ENDPOINT}/{titem}/true') as resp:
            data = await resp.json()
        if (not isinstance(data, list)) or len(data) == 0:
            raise InvalidData("Market Returned Invalid Data")
        return {'icon': ''.join([BASE_ITEM_IMAGE_URL, data[0]['iconImg']]),
                'prices': [(price_parse(e['price_html']), int(e['sale_data']['amount'])) for e in data],
                'name': titem}
    # Bug fix: ClientSession is an *async* context manager; the previous
    # plain ``with`` raises at runtime on current aiohttp.
    async with aiohttp.ClientSession() as session:
        data = await get_item_name_suggestions(item, display, session)
        # Keep only [name, 0] suggestion pairs whose name is a string.
        suggestions = [x[0] for x in data["front"] if len(x) == 2 and x[1] == 0 and isinstance(x[0], str)]
        return [await get_item_data(suggestion, session) for suggestion in suggestions]
class Character(object):
    """
    Character Object

    pretty_profile - Return a prettified profile overview as a string.
    pretty_gear - Return a prettified gear overview as a string.
    pretty_stats - Return a prettified stats overview as a string.
    pretty_outfit - Return a prettified outfit overview as a string.

    Notice: The following items can be used as self.item with spaces replaced
    with "_"; that form is not case sensitive.
    Notice: They can also be used as self[item]; that form is case sensitive
    only in that the key must match ignoring case (no replacement is done).

    Account Name - The display name for their account (str).
    Character Name - The Name of the given character (str).
    Level - Character's level (int).
    HM Level - Character's HM level (int).
    Server - Server the character is on (str).
    Faction - The Faction the character is in (str).
    Picture - Link to the character's profile picture (str).
    Stats - A dictionary object with stats (each stat is also a dict).
    Gear - The gear of the Given Character (dict).
    SoulSheild - SoulSheild stats (note: key is intentionally misspelled).
    Set Bonus - Set bonus affects (str).
    Outfit - The outfit of the character (dict).
    Other Characters - A list of the other characters on that user's account (list).
    Region - The region the user is from.
    """
    def __init__(self, data: dict):
        """Wrap a profile dict (as produced by fetch_profile)."""
        data = data.copy()
        self.name = data['Character Name']
        self.__data = data
        self.items = self.__data.items
        self.keys = self.__data.keys
        self.account = data['Account Name']
    async def refresh(self):
        """Re-fetch the profile from the website and replace the data."""
        self.__data = await fetch_profile(self.name)
        self.items = self.__data.items
        self.keys = self.__data.keys
    def __call__(self):
        """returns an awaitable to refresh"""
        return self.refresh()
    def __getattr__(self, item):
        # Fallback attribute access: character.hm_level -> self['hm level'].
        return self[str(item)]
    def __getitem__(self, item):
        # Case-insensitive lookup; underscores are treated as spaces.
        item = str(item).replace('_', ' ')
        k = list(self.__data.keys())
        k = dict(zip([z.lower() for z in k], k))
        try:
            return self.__data[k.get(item.lower())]
        except KeyError:
            return self.__data[k.get(item.lower().replace(' ', '_'))]
    def pretty_profile(self):
        """Return a prettified profile overview as a string."""
        if self['HM Level']:
            temp = 'Level {} Hongmoon Level {}'.format(self['Level'], self['HM Level'])
        else:
            temp = 'Level {}'.format(self['Level'])
        text = ['**Display Name:** {}'.format(self['Account Name']),
                '**Character**: {} {}'.format(self['Character Name'], temp),
                '**Weapon**: {}'.format(self['Gear']['Weapon']),
                '**Server:** {}'.format(self['Server'])]
        if self['Faction']:
            if self['Faction'] == 'Cerulean Order':
                text.append('**Faction:** Cerulean Order :blue_heart:')
            else:
                # Bug fix: this line previously emitted '**Faction"**'
                # (stray quote) so the markdown label was broken.
                text.append('**Faction:** Crimson Legion :heart:')
            text.append('**Faction Rank:** {}'.format(self['Faction Rank']))
        if self['Clan']:
            text.append('**Clan:** {}'.format(self['Clan']))
        if len(self['Other Characters']):
            temp = ['[', ']']
            temp.insert(1, ', '.join(self['Other Characters']))
            text.append('**Other Characters:**\n {}'.format(''.join(temp)))
        text.append(self['Picture'])
        return '\n'.join(text).strip()
    def pretty_gear(self):
        """Return a prettified gear overview as a string."""
        temp = [self['Character Name'], '[' + self['Class'],'Level', str(self['Level'])]
        if self['HM Level']:
            temp += ['Hongmoon Level', str(self['HM Level'])]
        temp = ' '.join(temp) + ']'
        divider = '─'*len(temp)
        stats = self['Stats']
        send_this = ['```', temp, divider, 'Total HP {} Attack Power {}'.format(stats['HP']['Total'], stats['Attack Power']['Total']),
                     divider, 'Soul Shield Attributes (Base + Fused + Set)', '\n'.join(self['SoulSheild']),
                     ''.join(self['Set Bonus']), '']
        gear = self['Gear']
        temp = list(gear.keys())
        temp.sort()
        for k in temp:
            send_this.append('{}: {}'.format(k, gear[k]))
        send_this.append(divider)
        send_this.append('```')
        return '\n'.join(send_this).strip()
    def pretty_stats(self):
        """Return a prettified stats overview as a string."""
        temp = [self['Character Name'], '[' + self['Class'] + ',','Level', str(self['Level'])]
        if self['HM Level']:
            temp += ['Hongmoon Level', str(self['HM Level'])]
        temp = ' '.join(temp) + ']'
        divider = '─'*len(temp)
        stats = self['Stats']
        send_this = ['```ruby', temp, divider, 'HP: {}'.format(stats['HP']['Total']),
                     'Attack Power: {}'.format(stats['Attack Power']['Total']),
                     'Piercing: {}'.format(stats['Piercing']['Total']),
                     '+Defense Piercing: {}'.format(stats['Piercing']['Defense Piercing']),
                     '+Block Piercing: {}'.format(stats['Piercing']['Block Piercing']),
                     'Accuracy: {0[Total]} ({0[Hit Rate]})'.format(stats['Accuracy']),
                     'Critical Hit: {0[Total]} ({0[Critical Rate]})'.format(stats['Critical Hit']),
                     'Critical Damage: {0[Total]} ({0[Increase Damage]})'.format(stats['Critical Damage']), divider,
                     'Defense: {0[Total]} ({0[Damage Reduction]})'.format(stats['Defense']),
                     'Evasion: {}'.format(stats['Evasion']['Total']),
                     '+Evasion Rate: {}'.format(stats['Evasion']['Evasion Rate']),
                     '+Counter Bonus: {}'.format(stats['Evasion']['Counter Bonus']),
                     ('Block: {0[Total]}\n'
                      '+Damage Reduction: {0[Damage Reduction]}\n'
                      '+Block Bonus: {0[Block Bonus]}\n'
                      '+Block Rate: {0[Block Rate]}').format(stats['Block']),
                     'Health Regen (IN/OUT): {0[In Combat]}/{0[Out of Combat]}'.format(stats['Health Regen']),
                     'Recovery Rate: {}'.format(stats['Recovery']['Total']),
                     '```']
        return '\n'.join(send_this)
    def pretty_outfit(self):
        """Return a prettified outfit overview as a string."""
        outfit = self['Outfit']
        o = list(outfit.keys())
        o.sort()
        return '\n'.join(['```'] + ['{}\'s Outfit:'.format(self['Character Name'])] +
                         ['{}: {}'.format(k, outfit[k]) for k in o] + ['```'])
    def avg_dmg(self):
        """Return (normal, blue-buff) average damage via the module helper."""
        stats = self['Stats']
        return avg_dmg(stats['Attack Power']['Total'],
                       stats['Critical Hit']['Critical Rate'],
                       stats['Critical Damage']['Total'],
                       elemental_bonus='100%')
async def get_character(user: str) -> Character:
    """
    Return a Character Object for the given user.

    :param user: The user to create an object for
    :raises InvalidData: if ``user`` is not a string.
    :raises CharacterNotFound: if the search page has no such character
        (surfaces as AttributeError from the scraper).
    :return: Returns A Character Object for the given user
    """
    if not isinstance(user, str):
        raise InvalidData('Expected type str for user, found {} instead'.format(type(user).__name__))
    try:
        return Character(await fetch_profile(user))
    except AttributeError:
        raise CharacterNotFound('Failed to find character "{}"'.format(user))
    except Exception as e:
        print('[!] Error:', e)
        # Bug fix: previously re-raised as ``Exception(e)``, which destroyed
        # the original exception type and traceback; re-raise as-is.
        raise
async def compare(user1: Character, user2: Character, update=False):
    """A WIP compare function."""
    assert isinstance(user1, Character) and isinstance(user2, Character)
    if update:
        await user1.refresh()
        await user2.refresh()
    temp = '{} - {}'.format(user1['Character Name'], user2['Character Name'])
    divider = '─'*len(temp)
    # NOTE(review): the loops below convert stat strings to floats *in
    # place*, mutating the Stats dicts held by the Character objects.
    user1 = user1['Stats']
    user2 = user2['Stats']
    for k,v in user1.items():
        for k2,v2 in v.items():
            v[k2] = _float(v2)
        user1[k] = v
    for k,v in user2.items():
        for k2,v2 in v.items():
            v[k2] = _float(v2)
        user2[k] = v
    send_this = [temp, divider, 'HP: {}'.format(_subtract(user1['HP']['Total'], user2['HP']['Total'])),
                 'Attack Power: {}'.format(_subtract(user1['Attack Power']['Total'],
                                                     user2['Attack Power']['Total'])),
                 'Piercing: {}'.format(_subtract(user1['Piercing']['Total'], user2['Piercing']['Total'])),
                 '+Defense Piercing: {}'.format(_subtract(user1['Piercing']['Defense Piercing'],
                                                          user2['Piercing']['Defense Piercing'],
                                                          percent=True)),
                 '+Block Piercing: {}'.format(_subtract(user1['Piercing']['Block Piercing'],
                                                        user2['Piercing']['Block Piercing'],
                                                        percent=True)),
                 'Accuracy: {}'.format(_subtract(user1['Accuracy']['Total'],
                                                 user2['Accuracy']['Total'])),
                 # Trailing '+' is a placeholder: the remaining stat lines
                 # were never implemented (function is explicitly WIP).
                 '+']
    return '\n'.join(send_this)
def avg_dmg(attack_power: str, critical_rate: str, critical_damage: str, elemental_bonus: str='100%'):
    """Estimate average damage per hit.

    Returns a ``(no_blue_buff, with_blue_buff)`` pair, each rounded to
    two decimal places.  Inputs may be numeric strings; '%' and spaces
    are stripped from the rate values.  An ``elemental_bonus`` of 0 or
    100 is treated as "no elemental scaling".

    :param attack_power: Attack Power (Total)
    :param critical_rate: Critical Hit -> Critical Rate
    :param critical_damage: Critical Damage (Total)
    :param elemental_bonus: Total elemental bonus percentage
    """
    ap = float(attack_power)
    rate = float(critical_rate.strip(' %'))
    cdmg = float(critical_damage)
    elem = float(elemental_bonus.strip(' %'))
    # Expected damage without the "blue buff" (Soul Burn).
    plain = ap * (1 - (rate * 0.01) + (rate * cdmg * 0.0001))
    # Buffed form adds +50 crit rate / +40 crit damage; at >=60% base
    # rate the formula collapses to the always-crit case (presumably
    # because 60 + 50 exceeds 100%).
    if rate < 60:
        buffed = ap * (1 - ((rate + 50) * 0.01) + (rate + 50) * (cdmg + 40) * .0001)
    else:
        buffed = ap * ((cdmg + 40) * .01)
    if elem not in (0, 100):
        plain *= (elem * 0.01)
        buffed *= (elem * 0.01)
    return round(plain, 2), round(buffed, 2)
| mit |
damonkohler/sl4a | python/gdata/src/gdata/tlslite/utils/compat.py | 361 | 4060 | """Miscellaneous functions to mask Python version differences."""
import sys
import os
if sys.version_info < (2,2):
    raise AssertionError("Python 2.2 or later required")

if sys.version_info < (2,3):
    # Backports for Python 2.2: enumerate() and a minimal Set class.
    # (Intentionally shadows the 2.3+ builtin; the branch never runs there.)
    def enumerate(collection):
        """Backport of the 2.3 builtin enumerate()."""
        return zip(range(len(collection)), collection)

    class Set:
        """Minimal dict-backed stand-in for the 2.3 sets.Set type."""
        def __init__(self, seq=None):
            self.values = {}
            if seq:
                for e in seq:
                    self.values[e] = None

        def add(self, e):
            self.values[e] = None

        def discard(self, e):
            if e in self.values.keys():
                del(self.values[e])

        def union(self, s):
            ret = Set()
            for e in self.values.keys():
                ret.values[e] = None
            for e in s.values.keys():
                ret.values[e] = None
            return ret

        def issubset(self, other):
            for e in self.values.keys():
                if e not in other.values.keys():
                    return False
            return True

        def __nonzero__( self):
            return len(self.values.keys())

        def __contains__(self, e):
            return e in self.values.keys()

        def __iter__(self):
            # Bug fix: previously iterated ``set.values`` (the builtin type
            # object), which raised at iteration time; iterate this
            # instance's keys instead.
            return iter(self.values.keys())
if os.name != "java":
    # CPython branch: byte sequences are array('B') arrays.
    import array
    def createByteArraySequence(seq):
        """Return ``seq`` as an array of unsigned bytes."""
        return array.array('B', seq)
    def createByteArrayZeros(howMany):
        """Return a zero-filled byte array of length ``howMany``."""
        return array.array('B', [0] * howMany)
    def concatArrays(a1, a2):
        """Concatenate two byte arrays into a new one."""
        return a1+a2

    def bytesToString(bytes):
        # NOTE(review): array.tostring()/fromstring() were removed in
        # Python 3.9; this module targets Python 2.
        return bytes.tostring()
    def stringToBytes(s):
        bytes = createByteArrayZeros(0)
        bytes.fromstring(s)
        return bytes

    import math
    def numBits(n):
        """Return the bit length of the non-negative integer ``n``."""
        if n==0:
            return 0
        s = "%x" % n
        # Four bits per full hex digit plus the bits of the most
        # significant digit (looked up by its value).  An unreachable
        # duplicate computation via math.log that followed this return
        # was removed.
        return ((len(s)-1)*4) + \
               {'0':0, '1':1, '2':2, '3':2,
                '4':3, '5':3, '6':3, '7':3,
                '8':4, '9':4, 'a':4, 'b':4,
                'c':4, 'd':4, 'e':4, 'f':4,
                }[s[0]]

    BaseException = Exception

    import sys
    import traceback

    def formatExceptionTrace(e):
        """Format the currently-handled exception as a traceback string."""
        # NOTE(review): sys.exc_type/exc_value/exc_traceback are Python 2
        # only; under Python 3 use sys.exc_info() instead.
        newStr = "".join(traceback.format_exception(sys.exc_type, sys.exc_value, sys.exc_traceback))
        return newStr

else:
    #Jython 2.1 is missing lots of python 2.3 stuff,
    #which we have to emulate here:
    #NOTE: JYTHON SUPPORT NO LONGER WORKS, DUE TO USE OF GENERATORS.
    #THIS CODE IS LEFT IN SO THAT ONE JYTHON UPDATES TO 2.2, IT HAS A
    #CHANCE OF WORKING AGAIN.

    import java
    import jarray

    def createByteArraySequence(seq):
        if isinstance(seq, type("")): #If it's a string, convert
            seq = [ord(c) for c in seq]
        return jarray.array(seq, 'h') #use short instead of bytes, cause bytes are signed
    def createByteArrayZeros(howMany):
        return jarray.zeros(howMany, 'h') #use short instead of bytes, cause bytes are signed
    def concatArrays(a1, a2):
        l = list(a1)+list(a2)
        return createByteArraySequence(l)

    #WAY TOO SLOW - MUST BE REPLACED------------
    def bytesToString(bytes):
        return "".join([chr(b) for b in bytes])

    def stringToBytes(s):
        bytes = createByteArrayZeros(len(s))
        for count, c in enumerate(s):
            bytes[count] = ord(c)
        return bytes
    #WAY TOO SLOW - MUST BE REPLACED------------

    def numBits(n):
        if n==0:
            return 0
        # Was ``1L * n``: the long literal is a SyntaxError on Python 3,
        # making the whole module unparseable there; long(1) has identical
        # Python 2 semantics (force promotion to long).
        n = long(1) * n #convert to long, if it isn't already
        return n.__tojava__(java.math.BigInteger).bitLength()

    #Adjust the string to an array of bytes
    def stringToJavaByteArray(s):
        bytes = jarray.zeros(len(s), 'b')
        for count, c in enumerate(s):
            x = ord(c)
            if x >= 128: x -= 256
            bytes[count] = x
        return bytes

    BaseException = java.lang.Exception

    import sys
    import traceback

    def formatExceptionTrace(e):
        newStr = "".join(traceback.format_exception(sys.exc_type, sys.exc_value, sys.exc_traceback))
        return newStr
programadorjc/django | django/contrib/messages/storage/cookie.py | 471 | 6545 | import json
from django.conf import settings
from django.contrib.messages.storage.base import BaseStorage, Message
from django.http import SimpleCookie
from django.utils import six
from django.utils.crypto import constant_time_compare, salted_hmac
from django.utils.safestring import SafeData, mark_safe
class MessageEncoder(json.JSONEncoder):
    """
    Compactly serializes instances of the ``Message`` class as JSON.
    """
    message_key = '__json_message'

    def default(self, obj):
        if not isinstance(obj, Message):
            return super(MessageEncoder, self).default(obj)
        # Encoded shape: [marker, is_safe (0/1 for compact json), level,
        # text, extra_tags?] -- the tags slot is appended only when set.
        encoded = [
            self.message_key,
            1 if isinstance(obj.message, SafeData) else 0,
            obj.level,
            obj.message,
        ]
        if obj.extra_tags:
            encoded.append(obj.extra_tags)
        return encoded
class MessageDecoder(json.JSONDecoder):
    """
    Decodes JSON that includes serialized ``Message`` instances.
    """

    def process_messages(self, obj):
        # Recursively walk the decoded structure, reviving any list that
        # carries the MessageEncoder marker back into a Message.
        if isinstance(obj, dict):
            return {key: self.process_messages(value)
                    for key, value in six.iteritems(obj)}
        if not isinstance(obj, list) or not obj:
            return obj
        if obj[0] != MessageEncoder.message_key:
            return [self.process_messages(item) for item in obj]
        if len(obj) == 3:
            # Compatibility with previously-encoded messages
            return Message(*obj[1:])
        if obj[1]:
            # Slot 1 is the is_safe flag; restore the SafeData marking.
            obj[3] = mark_safe(obj[3])
        return Message(*obj[2:])

    def decode(self, s, **kwargs):
        return self.process_messages(
            super(MessageDecoder, self).decode(s, **kwargs))
class CookieStorage(BaseStorage):
    """
    Stores messages in a cookie.

    Messages are JSON-encoded (via MessageEncoder/MessageDecoder) and
    signed with an HMAC so client-side tampering is detectable.
    """
    cookie_name = 'messages'
    # uwsgi's default configuration enforces a maximum size of 4kb for all the
    # HTTP headers. In order to leave some room for other cookies and headers,
    # restrict the session cookie to 1/2 of 4kb. See #18781.
    max_cookie_size = 2048
    # Sentinel appended to the stored list when not all messages fit in
    # the cookie, so another storage backend can hold the remainder.
    not_finished = '__messagesnotfinished__'
    def _get(self, *args, **kwargs):
        """
        Retrieves a list of messages from the messages cookie. If the
        not_finished sentinel value is found at the end of the message list,
        remove it and return a result indicating that not all messages were
        retrieved by this storage.
        """
        data = self.request.COOKIES.get(self.cookie_name)
        # _decode returns None for a missing, tampered or malformed cookie.
        messages = self._decode(data)
        all_retrieved = not (messages and messages[-1] == self.not_finished)
        if messages and not all_retrieved:
            # remove the sentinel value
            messages.pop()
        return messages, all_retrieved
    def _update_cookie(self, encoded_data, response):
        """
        Either sets the cookie with the encoded data if there is any data to
        store, or deletes the cookie.
        """
        if encoded_data:
            response.set_cookie(self.cookie_name, encoded_data,
                                domain=settings.SESSION_COOKIE_DOMAIN,
                                secure=settings.SESSION_COOKIE_SECURE or None,
                                httponly=settings.SESSION_COOKIE_HTTPONLY or None)
        else:
            response.delete_cookie(self.cookie_name,
                                   domain=settings.SESSION_COOKIE_DOMAIN)
    def _store(self, messages, response, remove_oldest=True, *args, **kwargs):
        """
        Stores the messages to a cookie, returning a list of any messages which
        could not be stored.
        If the encoded data is larger than ``max_cookie_size``, removes
        messages until the data fits (these are the messages which are
        returned), and add the not_finished sentinel value to indicate as much.
        """
        unstored_messages = []
        encoded_data = self._encode(messages)
        if self.max_cookie_size:
            # data is going to be stored eventually by SimpleCookie, which
            # adds its own overhead, which we must account for.
            cookie = SimpleCookie() # create outside the loop
            def stored_length(val):
                # Length of the value as it would actually appear in the
                # Set-Cookie header (value_encode may quote/escape it).
                return len(cookie.value_encode(val)[1])
            while encoded_data and stored_length(encoded_data) > self.max_cookie_size:
                if remove_oldest:
                    unstored_messages.append(messages.pop(0))
                else:
                    unstored_messages.insert(0, messages.pop())
                # Re-encode with the sentinel appended so readers know the
                # list is truncated; encode_empty keeps the cookie non-empty
                # even when every message had to be evicted.
                encoded_data = self._encode(messages + [self.not_finished],
                                            encode_empty=unstored_messages)
        self._update_cookie(encoded_data, response)
        return unstored_messages
    def _hash(self, value):
        """
        Creates an HMAC/SHA1 hash based on the value and the project setting's
        SECRET_KEY, modified to make it unique for the present purpose.
        """
        key_salt = 'django.contrib.messages'
        return salted_hmac(key_salt, value).hexdigest()
    def _encode(self, messages, encode_empty=False):
        """
        Returns an encoded version of the messages list which can be stored as
        plain text.
        Since the data will be retrieved from the client-side, the encoded data
        also contains a hash to ensure that the data was not tampered with.

        Returns None (implicitly) when there is nothing to encode and
        ``encode_empty`` is false, which makes _update_cookie delete the
        cookie instead.
        """
        if messages or encode_empty:
            encoder = MessageEncoder(separators=(',', ':'))
            value = encoder.encode(messages)
            # Format: "<hmac-hex>$<json-payload>".
            return '%s$%s' % (self._hash(value), value)
    def _decode(self, data):
        """
        Safely decodes an encoded text stream back into a list of messages.
        If the encoded text stream contained an invalid hash or was in an
        invalid format, ``None`` is returned.
        """
        if not data:
            return None
        bits = data.split('$', 1)
        if len(bits) == 2:
            hash, value = bits
            # constant_time_compare avoids leaking hash information through
            # timing differences.
            if constant_time_compare(hash, self._hash(value)):
                try:
                    # If we get here (and the JSON decode works), everything is
                    # good. In any other case, drop back and return None.
                    return json.loads(value, cls=MessageDecoder)
                except ValueError:
                    pass
        # Mark the data as used (so it gets removed) since something was wrong
        # with the data.
        self.used = True
        return None
| bsd-3-clause |
noironetworks/python-neutronclient | neutronclient/tests/unit/osc/v2/trunk/test_network_trunk.py | 2 | 25882 | # Copyright 2016 ZTE Corporation.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import argparse
import copy
import mock
from mock import call
from osc_lib.cli import format_columns
from osc_lib import exceptions
from osc_lib.tests import utils as tests_utils
import testtools
from neutronclient.osc.v2.trunk import network_trunk as trunk
from neutronclient.osc.v2 import utils as v2_utils
from neutronclient.tests.unit.osc.v2 import fakes as test_fakes
from neutronclient.tests.unit.osc.v2.trunk import fakes
def _get_id(client, id_or_name, resource):
return id_or_name
class TestCreateNetworkTrunk(test_fakes.TestNeutronClientOSCV2):
    """Tests for the ``network trunk create`` OSC command."""
    # The new trunk created
    _trunk = fakes.FakeTrunk.create_one_trunk()
    # Column names the command is expected to display, in order.
    columns = (
        'admin_state_up',
        'description',
        'id',
        'name',
        'port_id',
        'project_id',
        'status',
        'sub_ports',
    )
    def get_data(self):
        # Expected row data matching ``columns`` for the fake trunk.
        return (
            v2_utils.AdminStateColumn(self._trunk['admin_state_up']),
            self._trunk['description'],
            self._trunk['id'],
            self._trunk['name'],
            self._trunk['port_id'],
            self._trunk['project_id'],
            self._trunk['status'],
            format_columns.ListDictColumn(self._trunk['sub_ports']),
        )
    def setUp(self):
        super(TestCreateNetworkTrunk, self).setUp()
        # Bypass name->ID resolution so test names are used verbatim.
        mock.patch('neutronclient.osc.v2.trunk.network_trunk._get_id',
                   new=_get_id).start()
        self.neutronclient.create_trunk = mock.Mock(
            return_value={trunk.TRUNK: self._trunk})
        self.data = self.get_data()
        # Get the command object to test
        self.cmd = trunk.CreateNetworkTrunk(self.app, self.namespace)
    def test_create_no_options(self):
        arglist = []
        verifylist = []
        self.assertRaises(tests_utils.ParserException, self.check_parser,
                          self.cmd, arglist, verifylist)
    def test_create_default_options(self):
        arglist = [
            "--parent-port", self._trunk['port_id'],
            self._trunk['name'],
        ]
        verifylist = [
            ('parent_port', self._trunk['port_id']),
            ('name', self._trunk['name']),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = (self.cmd.take_action(parsed_args))
        self.neutronclient.create_trunk.assert_called_once_with({
            trunk.TRUNK: {'name': self._trunk['name'],
                          'admin_state_up': self._trunk['admin_state_up'],
                          'port_id': self._trunk['port_id']}
        })
        self.assertEqual(self.columns, columns)
        self.assertItemEqual(self.data, data)
    def test_create_full_options(self):
        self._trunk['description'] = 'foo description'
        self.data = self.get_data()
        subport = self._trunk['sub_ports'][0]
        arglist = [
            "--disable",
            "--description", self._trunk['description'],
            "--parent-port", self._trunk['port_id'],
            "--subport", 'port=%(port)s,segmentation-type=%(seg_type)s,'
                         'segmentation-id=%(seg_id)s' % {
                             'seg_id': subport['segmentation_id'],
                             'seg_type': subport['segmentation_type'],
                             'port': subport['port_id']},
            self._trunk['name'],
        ]
        verifylist = [
            ('name', self._trunk['name']),
            ('description', self._trunk['description']),
            ('parent_port', self._trunk['port_id']),
            ('add_subports', [{
                'port': subport['port_id'],
                'segmentation-id': str(subport['segmentation_id']),
                'segmentation-type': subport['segmentation_type']}]),
            ('disable', True),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = (self.cmd.take_action(parsed_args))
        self.neutronclient.create_trunk.assert_called_once_with({
            trunk.TRUNK: {'name': self._trunk['name'],
                          'description': self._trunk['description'],
                          'admin_state_up': False,
                          'sub_ports': [subport],
                          'port_id': self._trunk['port_id']}
        })
        self.assertEqual(self.columns, columns)
        self.assertItemEqual(self.data, data)
    def test_create_trunk_with_subport_invalid_segmentation_id_fail(self):
        subport = self._trunk['sub_ports'][0]
        arglist = [
            "--parent-port", self._trunk['port_id'],
            "--subport", "port=%(port)s,segmentation-type=%(seg_type)s,"
                         "segmentation-id=boom" % {
                             'seg_type': subport['segmentation_type'],
                             'port': subport['port_id']},
            self._trunk['name'],
        ]
        verifylist = [
            ('name', self._trunk['name']),
            ('parent_port', self._trunk['port_id']),
            ('add_subports', [{
                'port': subport['port_id'],
                'segmentation-id': 'boom',
                'segmentation-type': subport['segmentation_type']}]),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        with testtools.ExpectedException(exceptions.CommandError) as e:
            self.cmd.take_action(parsed_args)
            # NOTE(review): unreachable when take_action raises as expected;
            # consider moving this assertion outside the 'with' block.
            self.assertEqual("Segmentation-id 'boom' is not an integer",
                             str(e))
    def test_create_network_trunk_subports_without_optional_keys(self):
        subport = copy.copy(self._trunk['sub_ports'][0])
        # Pop out the segmentation-id and segmentation-type
        subport.pop('segmentation_type')
        subport.pop('segmentation_id')
        arglist = [
            '--parent-port', self._trunk['port_id'],
            '--subport', 'port=%(port)s' % {'port': subport['port_id']},
            self._trunk['name'],
        ]
        verifylist = [
            ('name', self._trunk['name']),
            ('parent_port', self._trunk['port_id']),
            ('add_subports', [{
                'port': subport['port_id']}]),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = (self.cmd.take_action(parsed_args))
        self.neutronclient.create_trunk.assert_called_once_with({
            trunk.TRUNK: {'name': self._trunk['name'],
                          'admin_state_up': True,
                          'sub_ports': [subport],
                          'port_id': self._trunk['port_id']}
        })
        self.assertEqual(self.columns, columns)
        self.assertItemEqual(self.data, data)
    def test_create_network_trunk_subports_without_required_key_fail(self):
        subport = self._trunk['sub_ports'][0]
        arglist = [
            '--parent-port', self._trunk['port_id'],
            '--subport', 'segmentation-type=%(seg_type)s,'
                         'segmentation-id=%(seg_id)s' % {
                             'seg_id': subport['segmentation_id'],
                             'seg_type': subport['segmentation_type']},
            self._trunk['name'],
        ]
        verifylist = [
            ('name', self._trunk['name']),
            ('parent_port', self._trunk['port_id']),
            ('add_subports', [{
                'segmentation-id': str(subport['segmentation_id']),
                'segmentation-type': subport['segmentation_type']}]),
        ]
        with testtools.ExpectedException(argparse.ArgumentTypeError):
            self.check_parser(self.cmd, arglist, verifylist)
class TestDeleteNetworkTrunk(test_fakes.TestNeutronClientOSCV2):
    """Tests for the ``network trunk delete`` OSC command."""
    # The trunk to be deleted.
    _trunks = fakes.FakeTrunk.create_trunks(count=2)
    def setUp(self):
        super(TestDeleteNetworkTrunk, self).setUp()
        # Bypass name->ID resolution so test names are used verbatim.
        mock.patch('neutronclient.osc.v2.trunk.network_trunk._get_id',
                   new=_get_id).start()
        self.neutronclient.delete_trunk = mock.Mock(return_value=None)
        # Get the command object to test
        self.cmd = trunk.DeleteNetworkTrunk(self.app, self.namespace)
    def test_delete_trunk(self):
        arglist = [
            self._trunks[0]['name'],
        ]
        verifylist = [
            ('trunk', [self._trunks[0]['name']]),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        result = self.cmd.take_action(parsed_args)
        self.neutronclient.delete_trunk.assert_called_once_with(
            self._trunks[0]['name'])
        self.assertIsNone(result)
    def test_delete_trunk_multiple(self):
        arglist = []
        verifylist = []
        for t in self._trunks:
            arglist.append(t['name'])
        verifylist = [
            ('trunk', arglist),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        result = self.cmd.take_action(parsed_args)
        # One delete call per trunk, in input order.
        calls = []
        for t in self._trunks:
            calls.append(call(t['name']))
        self.neutronclient.delete_trunk.assert_has_calls(calls)
        self.assertIsNone(result)
    def test_delete_trunk_multiple_with_exception(self):
        arglist = [
            self._trunks[0]['name'],
            'unexist_trunk',
        ]
        verifylist = [
            ('trunk',
             [self._trunks[0]['name'], 'unexist_trunk']),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        # First lookup succeeds, second raises, so only one delete happens.
        get_mock_result = [self._trunks[0], exceptions.CommandError]
        trunk._get_id = (
            mock.Mock(side_effect=get_mock_result)
        )
        with testtools.ExpectedException(exceptions.CommandError) as e:
            self.cmd.take_action(parsed_args)
            # NOTE(review): unreachable when take_action raises as expected;
            # consider moving this assertion outside the 'with' block.
            self.assertEqual('1 of 2 trunks failed to delete.', str(e))
        self.neutronclient.delete_trunk.assert_called_once_with(
            self._trunks[0]
        )
class TestShowNetworkTrunk(test_fakes.TestNeutronClientOSCV2):
    """Tests for the ``network trunk show`` OSC command."""
    # The trunk to set.
    _trunk = fakes.FakeTrunk.create_one_trunk()
    # Column names the command is expected to display, in order.
    columns = (
        'admin_state_up',
        'description',
        'id',
        'name',
        'port_id',
        'project_id',
        'status',
        'sub_ports',
    )
    # Expected row data matching ``columns`` for the fake trunk.
    data = (
        v2_utils.AdminStateColumn(_trunk['admin_state_up']),
        _trunk['description'],
        _trunk['id'],
        _trunk['name'],
        _trunk['port_id'],
        _trunk['project_id'],
        _trunk['status'],
        format_columns.ListDictColumn(_trunk['sub_ports']),
    )
    def setUp(self):
        super(TestShowNetworkTrunk, self).setUp()
        # Bypass name->ID resolution so test names are used verbatim.
        mock.patch('neutronclient.osc.v2.trunk.network_trunk._get_id',
                   new=_get_id).start()
        self.neutronclient.show_trunk = mock.Mock(
            return_value={trunk.TRUNK: self._trunk})
        # Get the command object to test
        self.cmd = trunk.ShowNetworkTrunk(self.app, self.namespace)
    def test_show_no_options(self):
        arglist = []
        verifylist = []
        self.assertRaises(tests_utils.ParserException, self.check_parser,
                          self.cmd, arglist, verifylist)
    def test_show_all_options(self):
        arglist = [
            self._trunk['id'],
        ]
        verifylist = [
            ('trunk', self._trunk['id']),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        self.neutronclient.show_trunk.assert_called_once_with(
            self._trunk['id'])
        self.assertEqual(self.columns, columns)
        self.assertItemEqual(self.data, data)
class TestListNetworkTrunk(test_fakes.TestNeutronClientOSCV2):
    """Tests for the ``network trunk list`` OSC command."""
    # Create trunks to be listed.
    _trunks = fakes.FakeTrunk.create_trunks(
        {'created_at': '2001-01-01 00:00:00',
         'updated_at': '2001-01-01 00:00:00'}, count=3)
    # Columns for the short (default) listing.
    columns = (
        'ID',
        'Name',
        'Parent Port',
        'Description'
    )
    # Additional columns shown with --long.
    columns_long = columns + (
        'Status',
        'State',
        'Created At',
        'Updated At'
    )
    # Expected short-listing rows, built at class-definition time.
    data = []
    for t in _trunks:
        data.append((
            t['id'],
            t['name'],
            t['port_id'],
            t['description']
        ))
    # Expected --long rows.
    data_long = []
    for t in _trunks:
        data_long.append((
            t['id'],
            t['name'],
            t['port_id'],
            t['description'],
            t['status'],
            v2_utils.AdminStateColumn(t['admin_state_up']),
            '2001-01-01 00:00:00',
            '2001-01-01 00:00:00',
        ))
    def setUp(self):
        super(TestListNetworkTrunk, self).setUp()
        # Bypass name->ID resolution so test names are used verbatim.
        mock.patch('neutronclient.osc.v2.trunk.network_trunk._get_id',
                   new=_get_id).start()
        self.neutronclient.list_trunks = mock.Mock(
            return_value={trunk.TRUNKS: self._trunks})
        # Get the command object to test
        self.cmd = trunk.ListNetworkTrunk(self.app, self.namespace)
    def test_trunk_list_no_option(self):
        arglist = []
        verifylist = []
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        self.neutronclient.list_trunks.assert_called_once_with()
        self.assertEqual(self.columns, columns)
        self.assertListItemEqual(self.data, list(data))
    def test_trunk_list_long(self):
        arglist = [
            '--long',
        ]
        verifylist = [
            ('long', True),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        self.neutronclient.list_trunks.assert_called_once_with()
        self.assertEqual(self.columns_long, columns)
        self.assertListItemEqual(self.data_long, list(data))
class TestSetNetworkTrunk(test_fakes.TestNeutronClientOSCV2):
    """Tests for the ``network trunk set`` OSC command."""
    # Create trunks to be listed.
    _trunk = fakes.FakeTrunk.create_one_trunk()
    columns = (
        'admin_state_up',
        'id',
        'name',
        'description',
        'port_id',
        'project_id',
        'status',
        'sub_ports',
    )
    data = (
        v2_utils.AdminStateColumn(_trunk['admin_state_up']),
        _trunk['id'],
        _trunk['name'],
        _trunk['description'],
        _trunk['port_id'],
        _trunk['project_id'],
        _trunk['status'],
        format_columns.ListDictColumn(_trunk['sub_ports']),
    )
    def setUp(self):
        super(TestSetNetworkTrunk, self).setUp()
        # Bypass name->ID resolution so test names are used verbatim.
        mock.patch('neutronclient.osc.v2.trunk.network_trunk._get_id',
                   new=_get_id).start()
        self.neutronclient.update_trunk = mock.Mock(
            return_value={trunk.TRUNK: self._trunk})
        self.neutronclient.trunk_add_subports = mock.Mock(
            return_value=self._trunk)
        # Get the command object to test
        self.cmd = trunk.SetNetworkTrunk(self.app, self.namespace)
    def _test_set_network_trunk_attr(self, attr, value):
        # Shared helper: set a single scalar attribute and verify the
        # resulting update_trunk call.
        arglist = [
            '--%s' % attr, value,
            self._trunk[attr],
        ]
        verifylist = [
            (attr, value),
            ('trunk', self._trunk[attr]),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        result = self.cmd.take_action(parsed_args)
        attrs = {
            attr: value,
        }
        self.neutronclient.update_trunk.assert_called_once_with(
            self._trunk[attr], {trunk.TRUNK: attrs})
        self.assertIsNone(result)
    def test_set_network_trunk_name(self):
        self._test_set_network_trunk_attr('name', 'trunky')
    def test_test_set_network_trunk_description(self):
        self._test_set_network_trunk_attr('description', 'description')
    def test_set_network_trunk_admin_state_up_disable(self):
        arglist = [
            '--disable',
            self._trunk['name'],
        ]
        verifylist = [
            ('disable', True),
            ('trunk', self._trunk['name']),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        result = self.cmd.take_action(parsed_args)
        attrs = {
            'admin_state_up': False,
        }
        self.neutronclient.update_trunk.assert_called_once_with(
            self._trunk['name'], {trunk.TRUNK: attrs})
        self.assertIsNone(result)
    def test_set_network_trunk_admin_state_up_enable(self):
        arglist = [
            '--enable',
            self._trunk['name'],
        ]
        verifylist = [
            ('enable', True),
            ('trunk', self._trunk['name']),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        result = self.cmd.take_action(parsed_args)
        attrs = {
            'admin_state_up': True,
        }
        self.neutronclient.update_trunk.assert_called_once_with(
            self._trunk['name'], {trunk.TRUNK: attrs})
        self.assertIsNone(result)
    def test_set_network_trunk_nothing(self):
        # No options at all still issues an (empty) update call.
        arglist = [self._trunk['name'], ]
        verifylist = [('trunk', self._trunk['name']), ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        result = self.cmd.take_action(parsed_args)
        attrs = {}
        self.neutronclient.update_trunk.assert_called_once_with(
            self._trunk['name'], {trunk.TRUNK: attrs})
        self.assertIsNone(result)
    def test_set_network_trunk_subports(self):
        subport = self._trunk['sub_ports'][0]
        arglist = [
            '--subport', 'port=%(port)s,segmentation-type=%(seg_type)s,'
            'segmentation-id=%(seg_id)s' % {
                'seg_id': subport['segmentation_id'],
                'seg_type': subport['segmentation_type'],
                'port': subport['port_id']},
            self._trunk['name'],
        ]
        verifylist = [
            ('trunk', self._trunk['name']),
            ('set_subports', [{
                'port': subport['port_id'],
                'segmentation-id': str(subport['segmentation_id']),
                'segmentation-type': subport['segmentation_type']}]),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        result = self.cmd.take_action(parsed_args)
        self.neutronclient.trunk_add_subports.assert_called_once_with(
            self._trunk['name'], {'sub_ports': [subport]}
        )
        self.assertIsNone(result)
    def test_set_network_trunk_subports_without_optional_keys(self):
        subport = copy.copy(self._trunk['sub_ports'][0])
        # Pop out the segmentation-id and segmentation-type
        subport.pop('segmentation_type')
        subport.pop('segmentation_id')
        arglist = [
            '--subport', 'port=%(port)s' % {'port': subport['port_id']},
            self._trunk['name'],
        ]
        verifylist = [
            ('trunk', self._trunk['name']),
            ('set_subports', [{
                'port': subport['port_id']}]),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        result = self.cmd.take_action(parsed_args)
        self.neutronclient.trunk_add_subports.assert_called_once_with(
            self._trunk['name'], {'sub_ports': [subport]}
        )
        self.assertIsNone(result)
    def test_set_network_trunk_subports_without_required_key_fail(self):
        subport = self._trunk['sub_ports'][0]
        arglist = [
            '--subport', 'segmentation-type=%(seg_type)s,'
            'segmentation-id=%(seg_id)s' % {
                'seg_id': subport['segmentation_id'],
                'seg_type': subport['segmentation_type']},
            self._trunk['name'],
        ]
        verifylist = [
            ('trunk', self._trunk['name']),
            ('set_subports', [{
                'segmentation-id': str(subport['segmentation_id']),
                'segmentation-type': subport['segmentation_type']}]),
        ]
        with testtools.ExpectedException(argparse.ArgumentTypeError):
            self.check_parser(self.cmd, arglist, verifylist)
        self.neutronclient.trunk_add_subports.assert_not_called()
    def test_set_trunk_attrs_with_exception(self):
        arglist = [
            '--name', 'reallylongname',
            self._trunk['name'],
        ]
        verifylist = [
            ('trunk', self._trunk['name']),
            ('name', 'reallylongname'),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.neutronclient.update_trunk = (
            mock.Mock(side_effect=exceptions.CommandError)
        )
        with testtools.ExpectedException(exceptions.CommandError) as e:
            self.cmd.take_action(parsed_args)
            # NOTE(review): unreachable when take_action raises as expected;
            # consider moving this assertion outside the 'with' block.
            self.assertEqual(
                "Failed to set trunk '%s': " % self._trunk['name'],
                str(e))
        attrs = {'name': 'reallylongname'}
        self.neutronclient.update_trunk.assert_called_once_with(
            self._trunk['name'], {trunk.TRUNK: attrs})
        self.neutronclient.trunk_add_subports.assert_not_called()
    def test_set_trunk_add_subport_with_exception(self):
        arglist = [
            '--subport', 'port=invalid_subport',
            self._trunk['name'],
        ]
        verifylist = [
            ('trunk', self._trunk['name']),
            ('set_subports', [{'port': 'invalid_subport'}]),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.neutronclient.trunk_add_subports = (
            mock.Mock(side_effect=exceptions.CommandError)
        )
        with testtools.ExpectedException(exceptions.CommandError) as e:
            self.cmd.take_action(parsed_args)
            # NOTE(review): unreachable when take_action raises as expected;
            # consider moving this assertion outside the 'with' block.
            self.assertEqual(
                "Failed to add subports to trunk '%s': " % self._trunk['name'],
                str(e))
        self.neutronclient.update_trunk.assert_called_once_with(
            self._trunk['name'], {trunk.TRUNK: {}})
        self.neutronclient.trunk_add_subports.assert_called_once_with(
            self._trunk['name'],
            {'sub_ports': [{'port_id': 'invalid_subport'}]}
        )
class TestListNetworkSubport(test_fakes.TestNeutronClientOSCV2):
    """Tests for the ``network subport list`` OSC command."""
    _trunk = fakes.FakeTrunk.create_one_trunk()
    _subports = _trunk['sub_ports']
    # Column names the command is expected to display, in order.
    columns = (
        'Port',
        'Segmentation Type',
        'Segmentation ID',
    )
    # Expected rows, built at class-definition time from the fake subports.
    data = []
    for s in _subports:
        data.append((
            s['port_id'],
            s['segmentation_type'],
            s['segmentation_id'],
        ))
    def setUp(self):
        super(TestListNetworkSubport, self).setUp()
        # Bypass name->ID resolution so test names are used verbatim.
        mock.patch('neutronclient.osc.v2.trunk.network_trunk._get_id',
                   new=_get_id).start()
        self.neutronclient.trunk_get_subports = mock.Mock(
            return_value={trunk.SUB_PORTS: self._subports})
        # Get the command object to test
        self.cmd = trunk.ListNetworkSubport(self.app, self.namespace)
    def test_subport_list(self):
        arglist = [
            '--trunk', self._trunk['name'],
        ]
        verifylist = [
            ('trunk', self._trunk['name']),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        self.neutronclient.trunk_get_subports.assert_called_once_with(
            self._trunk['name'])
        self.assertEqual(self.columns, columns)
        self.assertEqual(self.data, list(data))
class TestUnsetNetworkTrunk(test_fakes.TestNeutronClientOSCV2):
    """Tests for the ``network trunk unset`` OSC command."""
    _trunk = fakes.FakeTrunk.create_one_trunk()
    columns = (
        'admin_state_up',
        'id',
        'name',
        'port_id',
        'project_id',
        'status',
        'sub_ports',
    )
    data = (
        v2_utils.AdminStateColumn(_trunk['admin_state_up']),
        _trunk['id'],
        _trunk['name'],
        _trunk['port_id'],
        _trunk['project_id'],
        _trunk['status'],
        format_columns.ListDictColumn(_trunk['sub_ports']),
    )
    def setUp(self):
        super(TestUnsetNetworkTrunk, self).setUp()
        # Bypass name->ID resolution so test names are used verbatim.
        mock.patch('neutronclient.osc.v2.trunk.network_trunk._get_id',
                   new=_get_id).start()
        self.neutronclient.trunk_remove_subports = mock.Mock(
            return_value=None)
        # Get the command object to test
        self.cmd = trunk.UnsetNetworkTrunk(self.app, self.namespace)
    def test_unset_network_trunk_subport(self):
        subport = self._trunk['sub_ports'][0]
        arglist = [
            "--subport", subport['port_id'],
            self._trunk['name'],
        ]
        verifylist = [
            ('trunk', self._trunk['name']),
            ('unset_subports', [subport['port_id']]),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        result = self.cmd.take_action(parsed_args)
        self.neutronclient.trunk_remove_subports.assert_called_once_with(
            self._trunk['name'],
            {trunk.SUB_PORTS: [{'port_id': subport['port_id']}]}
        )
        self.assertIsNone(result)
    def test_unset_subport_no_arguments_fail(self):
        # --subport is mandatory for unset.
        arglist = [
            self._trunk['name'],
        ]
        verifylist = [
            ('trunk', self._trunk['name']),
        ]
        self.assertRaises(tests_utils.ParserException,
                          self.check_parser, self.cmd, arglist, verifylist)
| apache-2.0 |
andyliuliming/azure-linux-extensions | VMEncryption/main/DecryptionMarkConfig.py | 8 | 2300 | #!/usr/bin/env python
#
# VM Backup extension
#
# Copyright 2015 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import os.path
import traceback
from ConfigUtil import *
from Common import CommonVariables
class DecryptionMarkConfig(object):
    """Persists a pending disk-decryption request in an on-disk queue file.

    The queue file lives at
    ``encryption_environment.azure_decrypt_request_queue_path`` and holds
    the requested operation plus the volume type, managed via ConfigUtil.
    """

    def __init__(self, logger, encryption_environment):
        self.logger = logger
        self.encryption_environment = encryption_environment
        self.command = None
        self.volume_type = None
        self.decryption_mark_config = ConfigUtil(
            self.encryption_environment.azure_decrypt_request_queue_path,
            'decryption_request_queue',
            self.logger)

    def get_current_command(self):
        """Return the operation currently queued, or None when absent."""
        return self.decryption_mark_config.get_config(
            CommonVariables.EncryptionEncryptionOperationKey)

    def config_file_exists(self):
        """True when a decryption request is currently queued on disk."""
        return self.decryption_mark_config.config_file_exists()

    def commit(self):
        """Write the current command and volume type to the queue file."""
        pairs = [
            ConfigKeyValuePair(
                CommonVariables.EncryptionEncryptionOperationKey,
                self.command),
            ConfigKeyValuePair(
                CommonVariables.EncryptionVolumeTypeKey,
                self.volume_type),
        ]
        self.decryption_mark_config.save_configs(pairs)

    def clear_config(self):
        """Delete the queue file; True on success, False on OSError."""
        queue_path = self.encryption_environment.azure_decrypt_request_queue_path
        try:
            if os.path.exists(queue_path):
                os.remove(queue_path)
            return True
        except OSError as e:
            self.logger.log("Failed to clear_queue with error: {0}, stack trace: {1}".format(e, traceback.format_exc()))
            return False
| apache-2.0 |
jinnawat/laradock | jupyterhub/jupyterhub_config.py | 10 | 5035 | # Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
# Configuration file for JupyterHub
import os
# ``get_config()`` is injected into this file's namespace by JupyterHub
# when it loads the configuration.
c = get_config()
# create system users that don't exist yet
c.LocalAuthenticator.create_system_users = True
def create_dir_hook(spawner):
    """Pre-spawn hook: ensure the spawning user's data directory exists.

    JupyterHub calls this with the ``Spawner`` instance before the
    single-user container starts; the directory is bind-mounted into it
    (see ``c.DockerSpawner.volumes`` below).
    """
    username = spawner.user.name  # get the username
    volume_path = os.path.join('/user-data', username)
    try:
        # Mode 0755: hub and container user must share the same UID for the
        # directory to be writeable; it stays readable by other users.
        # EAFP instead of an os.path.exists() pre-check avoids the race
        # when several spawners start concurrently.
        os.mkdir(volume_path, 0o755)
    except FileExistsError:
        pass  # already provisioned by an earlier spawn
    else:
        # uid 1000 / gid 100 -- presumably the default 'jovyan' user of the
        # jupyter/docker-stacks images; TODO confirm for custom images.
        os.chown(volume_path, 1000, 100)
    # now do whatever you think your user needs
    # ...
# attach the hook function to the spawner
c.Spawner.pre_spawn_hook = create_dir_hook
# We rely on environment variables to configure JupyterHub so that we
# avoid having to rebuild the JupyterHub container every time we change a
# configuration parameter.
# Spawn single-user servers as Docker containers
c.JupyterHub.spawner_class = 'dockerspawner.DockerSpawner'
# Spawn containers from this image
c.DockerSpawner.image = os.environ['JUPYTERHUB_LOCAL_NOTEBOOK_IMAGE']
# JupyterHub requires a single-user instance of the Notebook server, so we
# default to using the `start-singleuser.sh` script included in the
# jupyter/docker-stacks *-notebook images as the Docker run command when
# spawning containers. Optionally, you can override the Docker run command
# using the DOCKER_SPAWN_CMD environment variable.
spawn_cmd = os.environ.get('JUPYTERHUB_DOCKER_SPAWN_CMD', "start-singleuser.sh")
c.DockerSpawner.extra_create_kwargs.update({ 'command': spawn_cmd })
# Connect containers to this Docker network
network_name = os.environ.get('JUPYTERHUB_NETWORK_NAME', 'laradock_backend')
c.DockerSpawner.use_internal_ip = True
c.DockerSpawner.network_name = network_name
enable_nvidia = os.environ.get('JUPYTERHUB_ENABLE_NVIDIA', 'false')
# Pass the network name as argument to spawned containers; when NVIDIA
# support is requested, also select the nvidia container runtime.
c.DockerSpawner.extra_host_config = { 'network_mode': network_name }
if enable_nvidia == 'true':
    c.DockerSpawner.extra_host_config = { 'network_mode': network_name, 'runtime': 'nvidia' }
# c.DockerSpawner.extra_host_config = { 'network_mode': network_name, "devices":["/dev/nvidiactl","/dev/nvidia-uvm","/dev/nvidia0"] }
# Explicitly set notebook directory because we'll be mounting a host volume to
# it. Most jupyter/docker-stacks *-notebook images run the Notebook server as
# user `jovyan`, and set the notebook directory to `/home/jovyan/work`.
# We follow the same convention.
# notebook_dir = os.environ.get('JUPYTERHUB_DOCKER_NOTEBOOK_DIR') or '/home/jovyan/work'
notebook_dir = '/notebooks'
c.DockerSpawner.notebook_dir = notebook_dir
# Mount the real user's Docker volume on the host to the notebook user's
# notebook directory in the container
user_data = os.environ.get('JUPYTERHUB_USER_DATA', '/jupyterhub')
c.DockerSpawner.volumes = {
    user_data + '/{username}': notebook_dir
}
c.DockerSpawner.extra_create_kwargs.update({ 'user': 'root'})
# volume_driver is no longer a keyword argument to create_container()
# c.DockerSpawner.extra_create_kwargs.update({ 'volume_driver': 'local' })
# Remove containers once they are stopped
c.DockerSpawner.remove_containers = True
# For debugging arguments passed to spawned containers
c.DockerSpawner.debug = True
# User containers will access hub by container name on the Docker network
c.JupyterHub.hub_ip = 'jupyterhub'
c.JupyterHub.hub_port = 8000
# TLS config
c.JupyterHub.port = 80
# c.JupyterHub.ssl_key = os.environ['SSL_KEY']
# c.JupyterHub.ssl_cert = os.environ['SSL_CERT']
# Authenticate users with GitHub OAuth
c.JupyterHub.authenticator_class = 'oauthenticator.GitHubOAuthenticator'
c.GitHubOAuthenticator.oauth_callback_url = os.environ['JUPYTERHUB_OAUTH_CALLBACK_URL']
c.GitHubOAuthenticator.client_id = os.environ['JUPYTERHUB_OAUTH_CLIENT_ID']
c.GitHubOAuthenticator.client_secret = os.environ['JUPYTERHUB_OAUTH_CLIENT_SECRET']
# Persist hub data on volume mounted inside container
data_dir = '/data'
c.JupyterHub.cookie_secret_file = os.path.join(data_dir,
    'jupyterhub_cookie_secret')
# NOTE: a debug `print(os.environ)` was removed here -- it dumped the OAuth
# client secret and the database password into the hub log on every start.
c.JupyterHub.db_url = 'postgresql://{user}:{password}@{host}/{db}'.format(
    user=os.environ['JUPYTERHUB_POSTGRES_USER'],
    host=os.environ['JUPYTERHUB_POSTGRES_HOST'],
    password=os.environ['JUPYTERHUB_POSTGRES_PASSWORD'],
    db=os.environ['JUPYTERHUB_POSTGRES_DB'],
)
# Whitelist users and admins
c.Authenticator.whitelist = whitelist = set()
c.Authenticator.admin_users = admin = set()
c.JupyterHub.admin_access = True
# Load the user list shipped next to this config file. Each line is
# "<username> [admin]"; the optional second column grants admin rights.
pwd = os.path.dirname(__file__)
with open(os.path.join(pwd, 'userlist')) as f:
    for line in f:
        # Skip blank/whitespace-only lines. The previous check
        # ("if not line") never fired because iterated lines keep their
        # trailing newline, so a blank line crashed on parts[0].
        if not line.strip():
            continue
        parts = line.split()
        name = parts[0]
        print(name)
        whitelist.add(name)
        if len(parts) > 1 and parts[1] == 'admin':
            admin.add(name)
admin.add('laradock')
| mit |
atmark-techno/atmark-dist | user/python/Lib/posixpath.py | 8 | 10563 | """Common operations on Posix pathnames.
Instead of importing this module directly, import os and refer to
this module as os.path. The "os.path" name is an alias for this
module on Posix systems; on other systems (e.g. Mac, Windows),
os.path provides the same operations in a manner specific to that
platform, and is an alias to another module (e.g. macpath, ntpath).
Some of this can actually be useful on non-Posix systems too, e.g.
for manipulation of the pathname component of URLs.
"""
import os
import stat
# Normalize the case of a pathname. Trivial in Posix, string.lower on Mac.
# On MS-DOS this may also turn slashes into backslashes; however, other
# normalizations (such as optimizing '../' away) are not allowed
# (another function should be defined to do that).
def normcase(s):
    """Normalize the case of a pathname.

    POSIX filesystems are case-sensitive, so this is the identity
    function; it exists for API parity with macpath/ntpath.
    """
    return s
# Return whether a path is absolute.
# Trivial in Posix, harder on the Mac or MS-DOS.
def isabs(s):
    """Test whether a path is absolute, i.e. begins with a slash."""
    return s.startswith('/')
# Join pathnames.
# Ignore the previous parts if a part is absolute.
# Insert a '/' unless the first part is empty or already ends in '/'.
def join(a, *p):
    """Join two or more pathname components, inserting '/' as needed.

    An absolute component discards everything joined so far.
    """
    path = a
    for component in p:
        if component.startswith('/'):
            # Absolute component: restart the result from it.
            path = component
        elif not path or path.endswith('/'):
            path = path + component
        else:
            path = path + '/' + component
    return path
# Split a path in head (everything up to the last '/') and tail (the
# rest). If the path ends in '/', tail will be empty. If there is no
# '/' in the path, head will be empty.
# Trailing '/'es are stripped from head unless it is the root.
def split(p):
    """Split a pathname.  Returns tuple "(head, tail)" where "tail" is
    everything after the final slash.  Either part may be empty."""
    i = p.rfind('/') + 1
    head, tail = p[:i], p[i:]
    # Strip trailing slashes from head unless head consists only of slashes
    # (i.e. it is the root), which must be preserved verbatim.
    # The legacy `<>` operator was replaced with `!=` (removed in Python 3,
    # identical meaning in Python 2).
    if head and head != '/' * len(head):
        while head[-1] == '/':
            head = head[:-1]
    return head, tail
# Split a path in root and extension.
# The extension is everything starting at the last dot in the last
# pathname component; the root is everything before that.
# It is always true that root + ext == p.
def splitext(p):
    """Split the extension from a pathname.  Extension is everything from the
    last dot to the end.  Returns "(root, ext)", either part may be empty."""
    root = ext = ''
    for ch in p:
        if ch == '/':
            # A directory separator invalidates any extension seen so far.
            root = root + ext + ch
            ext = ''
        elif ch == '.':
            if ext:
                # A later dot restarts the extension; fold the old one
                # back into the root.
                root, ext = root + ext, ch
            else:
                ext = ch
        elif ext:
            ext += ch
        else:
            root += ch
    return root, ext
# Split a pathname into a drive specification and the rest of the
# path. Useful on DOS/Windows/NT; on Unix, the drive is always empty.
def splitdrive(p):
    """Split a pathname into drive and path. On Posix, drive is always
    empty."""
    # Kept for interface parity with ntpath/macpath; POSIX has no drives.
    return '', p
# Return the tail (basename) part of a path.
def basename(p):
    """Returns the final component of a pathname"""
    # Delegate to split() and keep only the tail part.
    head, tail = split(p)
    return tail
# Return the head (dirname) part of a path.
def dirname(p):
    """Returns the directory component of a pathname"""
    # Delegate to split() and keep only the head part.
    head, tail = split(p)
    return head
# Return the longest prefix of all list elements.
def commonprefix(m):
    "Given a list of pathnames, returns the longest common leading component"
    # Character-wise prefix: the result is not guaranteed to end on a path
    # component boundary.  The legacy `<>` operator was replaced with `!=`
    # (removed in Python 3, identical meaning in Python 2).
    if not m:
        return ''
    prefix = m[0]
    for item in m:
        for i in range(len(prefix)):
            if prefix[:i + 1] != item[:i + 1]:
                prefix = prefix[:i]
                if i == 0:
                    return ''
                break
    return prefix
# Get size, mtime, atime of files.
def getsize(filename):
    """Return the size of a file, reported by os.stat()."""
    # Index the stat tuple directly instead of binding it to a temporary.
    return os.stat(filename)[stat.ST_SIZE]
def getmtime(filename):
    """Return the last modification time of a file, reported by os.stat()."""
    # Index the stat tuple directly instead of binding it to a temporary.
    return os.stat(filename)[stat.ST_MTIME]
def getatime(filename):
    """Return the last access time of a file, reported by os.stat()."""
    # Index the stat tuple directly instead of binding it to a temporary.
    return os.stat(filename)[stat.ST_ATIME]
# Is a path a symbolic link?
# This will always return false on systems where os.lstat doesn't exist.
def islink(path):
    """Test whether a path is a symbolic link"""
    # AttributeError covers platforms where os.lstat does not exist;
    # os.error covers nonexistent paths.  Both mean "not a link" here.
    try:
        mode = os.lstat(path)[stat.ST_MODE]
    except (os.error, AttributeError):
        return 0
    return stat.S_ISLNK(mode)
# Does a path exist?
# This is false for dangling symbolic links.
def exists(path):
    """Test whether a path exists.  Returns false for broken symbolic links"""
    # os.stat follows symlinks, so a dangling link raises and yields 0.
    try:
        os.stat(path)
    except os.error:
        return 0
    return 1
# Is a path a directory?
# This follows symbolic links, so both islink() and isdir() can be true
# for the same path.
def isdir(path):
    """Test whether a path is a directory"""
    # Follows symlinks: a link to a directory also counts as a directory.
    try:
        mode = os.stat(path)[stat.ST_MODE]
    except os.error:
        return 0
    return stat.S_ISDIR(mode)
# Is a path a regular file?
# This follows symbolic links, so both islink() and isfile() can be true
# for the same path.
def isfile(path):
    """Test whether a path is a regular file"""
    # Follows symlinks: a link to a regular file also counts as a file.
    try:
        mode = os.stat(path)[stat.ST_MODE]
    except os.error:
        return 0
    return stat.S_ISREG(mode)
# Are two filenames really pointing to the same file?
def samefile(f1, f2):
    """Test whether two pathnames reference the same actual file"""
    # Compare the (inode, device) pairs of both stat results.
    return samestat(os.stat(f1), os.stat(f2))
# Are two open files really referencing the same file?
# (Not necessarily the same file descriptor!)
def sameopenfile(fp1, fp2):
    """Test whether two open file objects reference the same file"""
    # fp1/fp2 are file *descriptors* (ints) as accepted by os.fstat.
    return samestat(os.fstat(fp1), os.fstat(fp2))
# Are two stat buffers (obtained from stat, fstat or lstat)
# describing the same file?
def samestat(s1, s2):
    """Test whether two stat buffers reference the same file"""
    # A file is uniquely identified by its (device, inode) pair.
    return (s1[stat.ST_INO] == s2[stat.ST_INO]
            and s1[stat.ST_DEV] == s2[stat.ST_DEV])
# Is a path a mount point?
# (Does this work for all UNIXes? Is it even guaranteed to work by Posix?)
def ismount(path):
    """Test whether a path is a mount point"""
    # Heuristic: compare path with path/.. -- a mount point lives on a
    # different device than its parent, or is its parent (the root).
    try:
        here = os.stat(path)
        parent = os.stat(join(path, '..'))
    except os.error:
        return 0  # It doesn't exist -- so not a mount point :-)
    if here[stat.ST_DEV] != parent[stat.ST_DEV]:
        return 1  # path/.. on a different device as path
    if here[stat.ST_INO] == parent[stat.ST_INO]:
        return 1  # path/.. is the same i-node as path
    return 0
# Directory tree walk.
# For each directory under top (including top itself, but excluding
# '.' and '..'), func(arg, dirname, filenames) is called, where
# dirname is the name of the directory and filenames is the list
# of files (and subdirectories etc.) in the directory.
# The func may modify the filenames list, to implement a filter,
# or to impose a different order of visiting.
def walk(top, func, arg):
    """walk(top,func,arg) calls func(arg, d, files) for each directory "d"
    in the tree rooted at "top" (including "top" itself).  "files" is a list
    of all the files and subdirs in directory "d".
    """
    try:
        entries = os.listdir(top)
    except os.error:
        # Unreadable / vanished directory: silently prune this subtree.
        return
    # The callback may mutate `entries` to filter or reorder the walk.
    func(arg, top, entries)
    for entry in entries:
        full = join(top, entry)
        # lstat (not stat) so symlinks to directories are not descended into.
        if stat.S_ISDIR(os.lstat(full)[stat.ST_MODE]):
            walk(full, func, arg)
# Expand paths beginning with '~' or '~user'.
# '~' means $HOME; '~user' means that user's home directory.
# If the path doesn't begin with '~', or if the user or $HOME is unknown,
# the path is returned unchanged (leaving error reporting to whatever
# function is called with the expanded path as argument).
# See also module 'glob' for expansion of *, ? and [...] in pathnames.
# (A function should also be defined to do full *sh-style environment
# variable expansion.)
def expanduser(path):
    """Expand ~ and ~user constructions.  If user or $HOME is unknown,
    do nothing."""
    # Legacy `<>` replaced with `!=` and dict.has_key() with `in`
    # (both removed in Python 3; the replacements work in Python 2 as well).
    if path[:1] != '~':
        return path
    # Find the end of the user name (up to the first slash).
    i, n = 1, len(path)
    while i < n and path[i] != '/':
        i = i + 1
    if i == 1:
        # Bare '~': use $HOME if set, otherwise leave the path alone.
        if 'HOME' not in os.environ:
            return path
        userhome = os.environ['HOME']
    else:
        # '~user': look the user up in the password database.
        import pwd
        try:
            pwent = pwd.getpwnam(path[1:i])
        except KeyError:
            return path
        userhome = pwent[5]
    # Avoid a double slash when the home directory ends in '/'.
    if userhome[-1:] == '/':
        i = i + 1
    return userhome + path[i:]
# Expand paths containing shell variable substitutions.
# This expands the forms $variable and ${variable} only.
# Non-existent variables are left unchanged.
# Lazily-compiled pattern matching $var and ${var}; shared across calls.
_varprog = None


def expandvars(path):
    """Expand shell variables of form $var and ${var}.  Unknown variables
    are left unchanged."""
    # dict.has_key() was replaced with the `in` operator (has_key was
    # removed in Python 3; `in` behaves identically in Python 2).
    global _varprog
    if '$' not in path:
        return path
    if not _varprog:
        import re
        _varprog = re.compile(r'\$(\w+|\{[^}]*\})')
    i = 0
    while 1:
        m = _varprog.search(path, i)
        if not m:
            break
        i, j = m.span(0)
        name = m.group(1)
        if name[:1] == '{' and name[-1:] == '}':
            name = name[1:-1]
        if name in os.environ:
            # Splice in the value; resume scanning after it so the
            # substituted text itself is not re-expanded.
            tail = path[j:]
            path = path[:i] + os.environ[name]
            i = len(path)
            path = path + tail
        else:
            i = j
    return path
# Normalize a path, e.g. A//B, A/./B and A/foo/../B all become A/B.
# It should be understood that this may change the meaning of the path
# if it contains symbolic links!
def normpath(path):
    """Normalize path, eliminating double slashes, etc."""
    if not path:
        return '.'
    rooted = path.startswith('/')
    kept = []
    for part in path.split('/'):
        # '' comes from doubled slashes, '.' is a no-op component.
        if part in ('', '.'):
            continue
        if (part != '..' or (not rooted and not kept) or
                (kept and kept[-1] == '..')):
            # Keep real names; also keep '..' when it cannot be resolved
            # (leading '..' of a relative path, or stacked on another '..').
            kept.append(part)
        elif kept:
            # Resolvable '..': cancel the previous component.
            kept.pop()
    result = '/'.join(kept)
    if rooted:
        result = '/' + result
    return result or '.'
def abspath(path):
    """Return an absolute path."""
    # Anchor relative paths at the current working directory, then clean up.
    if isabs(path):
        return normpath(path)
    return normpath(join(os.getcwd(), path))
| gpl-2.0 |
Llamatech/sis-fibo | model/vos/cuenta.py | 1 | 2245 | #-*- coding:iso-8859-1 -*-
"""
Clase que modela la información de una cuenta en el sistema
"""
# NUMERO.SALDO.TIPO_CUENTA,CERRADA,CLIENTE,OFICINA
class Cuenta(object):
    """Plain value object holding the raw fields of an account:
    number, balance, account type, closed flag, client and office."""

    def __init__(self, numero, saldo, tipo_cuenta, cerrada, cliente, oficina):
        self.numero = numero
        self.saldo = saldo
        self.tipo_cuenta = tipo_cuenta
        self.cerrada = cerrada
        self.cliente = cliente
        self.oficina = oficina

    def __repr__(self):
        # Render every field through str() so None / numbers format uniformly.
        values = tuple(str(v) for v in (self.numero, self.saldo,
                                        self.tipo_cuenta, self.cerrada,
                                        self.cliente, self.oficina))
        return "numero: %s; saldo: %s; tipo_cuenta:%s; cerrada:%s; cliente: %s; oficina: %s" % values

    def __str__(self):
        return repr(self)
class CuentaR(object):
    """Account row enriched with client/office data; dates are stored
    pre-formatted as dd/mm/YYYY strings (or passed through when absent)."""

    def __init__(self, numero, fecha_creacion, saldo, tipo, cerrada,
                 id_cliente, nom_cliente, ap_cliente, id_of, of_nombre,
                 fecha_umov):
        self.numero = numero
        # Creation date: formatted string, or None when missing.
        self.fecha_creacion = (fecha_creacion.strftime('%d/%m/%Y')
                               if fecha_creacion is not None else None)
        self.saldo = saldo
        self.tipo = tipo
        self.cerrada = cerrada
        self.id_cliente = id_cliente
        self.nom_cliente = nom_cliente
        self.ap_cliente = ap_cliente
        self.id_of = id_of
        self.of_nombre = of_nombre
        # Last-movement date: formatted string, or the original falsy value.
        self.fecha_umov = (fecha_umov.strftime('%d/%m/%Y')
                           if fecha_umov is not None else fecha_umov)

    def dict_repr(self):
        # Only open accounts ('N' = not closed) expose a delete URL.
        url = ('/cuentas?numero=' + str(self.numero)
               if self.cerrada == 'N' else None)
        return {
            'numero': self.numero,
            'fecha_creacion': self.fecha_creacion,
            'saldo': self.saldo,
            'tipo': self.tipo,
            'cerrada': self.cerrada,
            'id_cliente': self.id_cliente,
            'nom_cliente': self.nom_cliente,
            'ap_cliente': self.ap_cliente,
            'id_of': self.id_of,
            'of_nombre': self.of_nombre,
            'fecha_umov': self.fecha_umov,
            'delete': url
        }
| gpl-2.0 |
kjung/scikit-learn | sklearn/pipeline.py | 12 | 21283 | """
The :mod:`sklearn.pipeline` module implements utilities to build a composite
estimator, as a chain of transforms and estimators.
"""
# Author: Edouard Duchesnay
# Gael Varoquaux
# Virgile Fritsch
# Alexandre Gramfort
# Lars Buitinck
# Licence: BSD
from collections import defaultdict
from warnings import warn
import numpy as np
from scipy import sparse
from .base import BaseEstimator, TransformerMixin
from .externals.joblib import Parallel, delayed
from .externals import six
from .utils import tosequence
from .utils.metaestimators import if_delegate_has_method
from .externals.six import iteritems
__all__ = ['Pipeline', 'FeatureUnion']
class Pipeline(BaseEstimator):
    """Pipeline of transforms with a final estimator.
    Sequentially apply a list of transforms and a final estimator.
    Intermediate steps of the pipeline must be 'transforms', that is, they
    must implement fit and transform methods.
    The final estimator only needs to implement fit.
    The purpose of the pipeline is to assemble several steps that can be
    cross-validated together while setting different parameters.
    For this, it enables setting parameters of the various steps using their
    names and the parameter name separated by a '__', as in the example below.
    Read more in the :ref:`User Guide <pipeline>`.
    Parameters
    ----------
    steps : list
        List of (name, transform) tuples (implementing fit/transform) that are
        chained, in the order in which they are chained, with the last object
        an estimator.
    Attributes
    ----------
    named_steps : dict
        Read-only attribute to access any step parameter by user given name.
        Keys are step names and values are steps parameters.
    Examples
    --------
    >>> from sklearn import svm
    >>> from sklearn.datasets import samples_generator
    >>> from sklearn.feature_selection import SelectKBest
    >>> from sklearn.feature_selection import f_regression
    >>> from sklearn.pipeline import Pipeline
    >>> # generate some data to play with
    >>> X, y = samples_generator.make_classification(
    ...     n_informative=5, n_redundant=0, random_state=42)
    >>> # ANOVA SVM-C
    >>> anova_filter = SelectKBest(f_regression, k=5)
    >>> clf = svm.SVC(kernel='linear')
    >>> anova_svm = Pipeline([('anova', anova_filter), ('svc', clf)])
    >>> # You can set the parameters using the names issued
    >>> # For instance, fit using a k of 10 in the SelectKBest
    >>> # and a parameter 'C' of the svm
    >>> anova_svm.set_params(anova__k=10, svc__C=.1).fit(X, y)
    ... # doctest: +ELLIPSIS
    Pipeline(steps=[...])
    >>> prediction = anova_svm.predict(X)
    >>> anova_svm.score(X, y)  # doctest: +ELLIPSIS
    0.77...
    >>> # getting the selected features chosen by anova_filter
    >>> anova_svm.named_steps['anova'].get_support()
    ... # doctest: +NORMALIZE_WHITESPACE
    array([ True, True, True, False, False, True, False, True, True, True,
           False, False, True, False, True, False, False, False, False,
           True], dtype=bool)
    """

    # BaseEstimator interface

    def __init__(self, steps):
        names, estimators = zip(*steps)
        # Step names must be unique: dict(steps) collapses duplicates.
        if len(dict(steps)) != len(steps):
            raise ValueError("Provided step names are not unique: %s" % (names,))
        # shallow copy of steps
        self.steps = tosequence(steps)
        transforms = estimators[:-1]
        estimator = estimators[-1]
        # Every intermediate step must be a transformer (fit or
        # fit_transform, plus transform); only the last step may be a
        # plain estimator with just fit.
        for t in transforms:
            if (not (hasattr(t, "fit") or hasattr(t, "fit_transform")) or not
                    hasattr(t, "transform")):
                raise TypeError("All intermediate steps of the chain should "
                                "be transforms and implement fit and transform"
                                " '%s' (type %s) doesn't)" % (t, type(t)))
        if not hasattr(estimator, "fit"):
            raise TypeError("Last step of chain should implement fit "
                            "'%s' (type %s) doesn't)"
                            % (estimator, type(estimator)))

    @property
    def _estimator_type(self):
        # The pipeline is a classifier/regressor/etc. according to its
        # final estimator.
        return self.steps[-1][1]._estimator_type

    def get_params(self, deep=True):
        # Expose nested step parameters as '<step>__<param>' keys so they
        # can be tuned e.g. via GridSearchCV.
        if not deep:
            return super(Pipeline, self).get_params(deep=False)
        else:
            out = self.named_steps
            for name, step in six.iteritems(self.named_steps):
                for key, value in six.iteritems(step.get_params(deep=True)):
                    out['%s__%s' % (name, key)] = value
            out.update(super(Pipeline, self).get_params(deep=False))
            return out

    @property
    def named_steps(self):
        # Fresh dict each call: mutating it does not affect self.steps.
        return dict(self.steps)

    @property
    def _final_estimator(self):
        return self.steps[-1][1]

    # Estimator interface

    def _pre_transform(self, X, y=None, **fit_params):
        # Route fit params of the form '<step>__<param>' to their steps,
        # then fit+transform every step except the last; returns the
        # transformed X and the fit params destined for the final step.
        fit_params_steps = dict((step, {}) for step, _ in self.steps)
        for pname, pval in six.iteritems(fit_params):
            step, param = pname.split('__', 1)
            fit_params_steps[step][param] = pval
        Xt = X
        for name, transform in self.steps[:-1]:
            if hasattr(transform, "fit_transform"):
                Xt = transform.fit_transform(Xt, y, **fit_params_steps[name])
            else:
                Xt = transform.fit(Xt, y, **fit_params_steps[name]) \
                              .transform(Xt)
        return Xt, fit_params_steps[self.steps[-1][0]]

    def fit(self, X, y=None, **fit_params):
        """Fit all the transforms one after the other and transform the
        data, then fit the transformed data using the final estimator.
        Parameters
        ----------
        X : iterable
            Training data. Must fulfill input requirements of first step of the
            pipeline.
        y : iterable, default=None
            Training targets. Must fulfill label requirements for all steps of
            the pipeline.
        """
        Xt, fit_params = self._pre_transform(X, y, **fit_params)
        self.steps[-1][-1].fit(Xt, y, **fit_params)
        return self

    def fit_transform(self, X, y=None, **fit_params):
        """Fit all the transforms one after the other and transform the
        data, then use fit_transform on transformed data using the final
        estimator.
        Parameters
        ----------
        X : iterable
            Training data. Must fulfill input requirements of first step of the
            pipeline.
        y : iterable, default=None
            Training targets. Must fulfill label requirements for all steps of
            the pipeline.
        """
        Xt, fit_params = self._pre_transform(X, y, **fit_params)
        if hasattr(self.steps[-1][-1], 'fit_transform'):
            return self.steps[-1][-1].fit_transform(Xt, y, **fit_params)
        else:
            return self.steps[-1][-1].fit(Xt, y, **fit_params).transform(Xt)

    @if_delegate_has_method(delegate='_final_estimator')
    def predict(self, X):
        """Applies transforms to the data, and the predict method of the
        final estimator. Valid only if the final estimator implements
        predict.
        Parameters
        ----------
        X : iterable
            Data to predict on. Must fulfill input requirements of first step of
            the pipeline.
        """
        Xt = X
        for name, transform in self.steps[:-1]:
            Xt = transform.transform(Xt)
        return self.steps[-1][-1].predict(Xt)

    @if_delegate_has_method(delegate='_final_estimator')
    def fit_predict(self, X, y=None, **fit_params):
        """Applies fit_predict of last step in pipeline after transforms.
        Applies fit_transforms of a pipeline to the data, followed by the
        fit_predict method of the final estimator in the pipeline. Valid
        only if the final estimator implements fit_predict.
        Parameters
        ----------
        X : iterable
            Training data. Must fulfill input requirements of first step of
            the pipeline.
        y : iterable, default=None
            Training targets. Must fulfill label requirements for all steps
            of the pipeline.
        """
        Xt, fit_params = self._pre_transform(X, y, **fit_params)
        return self.steps[-1][-1].fit_predict(Xt, y, **fit_params)

    @if_delegate_has_method(delegate='_final_estimator')
    def predict_proba(self, X):
        """Applies transforms to the data, and the predict_proba method of the
        final estimator. Valid only if the final estimator implements
        predict_proba.
        Parameters
        ----------
        X : iterable
            Data to predict on. Must fulfill input requirements of first step of
            the pipeline.
        """
        Xt = X
        for name, transform in self.steps[:-1]:
            Xt = transform.transform(Xt)
        return self.steps[-1][-1].predict_proba(Xt)

    @if_delegate_has_method(delegate='_final_estimator')
    def decision_function(self, X):
        """Applies transforms to the data, and the decision_function method of
        the final estimator. Valid only if the final estimator implements
        decision_function.
        Parameters
        ----------
        X : iterable
            Data to predict on. Must fulfill input requirements of first step of
            the pipeline.
        """
        Xt = X
        for name, transform in self.steps[:-1]:
            Xt = transform.transform(Xt)
        return self.steps[-1][-1].decision_function(Xt)

    @if_delegate_has_method(delegate='_final_estimator')
    def predict_log_proba(self, X):
        """Applies transforms to the data, and the predict_log_proba method of
        the final estimator. Valid only if the final estimator implements
        predict_log_proba.
        Parameters
        ----------
        X : iterable
            Data to predict on. Must fulfill input requirements of first step of
            the pipeline.
        """
        Xt = X
        for name, transform in self.steps[:-1]:
            Xt = transform.transform(Xt)
        return self.steps[-1][-1].predict_log_proba(Xt)

    @if_delegate_has_method(delegate='_final_estimator')
    def transform(self, X):
        """Applies transforms to the data, and the transform method of the
        final estimator. Valid only if the final estimator implements
        transform.
        Parameters
        ----------
        X : iterable
            Data to predict on. Must fulfill input requirements of first step of
            the pipeline.
        """
        # Note: unlike predict/score, this iterates over *all* steps,
        # including the final estimator.
        Xt = X
        for name, transform in self.steps:
            Xt = transform.transform(Xt)
        return Xt

    @if_delegate_has_method(delegate='_final_estimator')
    def inverse_transform(self, X):
        """Applies inverse transform to the data.
        Starts with the last step of the pipeline and applies ``inverse_transform`` in
        inverse order of the pipeline steps.
        Valid only if all steps of the pipeline implement inverse_transform.
        Parameters
        ----------
        X : iterable
            Data to inverse transform. Must fulfill output requirements of the
            last step of the pipeline.
        """
        if X.ndim == 1:
            # Deprecated 1d-input reshaping kept for backward compatibility.
            warn("From version 0.19, a 1d X will not be reshaped in"
                 " pipeline.inverse_transform any more.", FutureWarning)
            X = X[None, :]
        Xt = X
        for name, step in self.steps[::-1]:
            Xt = step.inverse_transform(Xt)
        return Xt

    @if_delegate_has_method(delegate='_final_estimator')
    def score(self, X, y=None):
        """Applies transforms to the data, and the score method of the
        final estimator. Valid only if the final estimator implements
        score.
        Parameters
        ----------
        X : iterable
            Data to score. Must fulfill input requirements of first step of the
            pipeline.
        y : iterable, default=None
            Targets used for scoring. Must fulfill label requirements for all steps of
            the pipeline.
        """
        Xt = X
        for name, transform in self.steps[:-1]:
            Xt = transform.transform(Xt)
        return self.steps[-1][-1].score(Xt, y)

    @property
    def classes_(self):
        # Delegated to the final estimator (defined only for classifiers).
        return self.steps[-1][-1].classes_

    @property
    def _pairwise(self):
        # check if first estimator expects pairwise input
        return getattr(self.steps[0][1], '_pairwise', False)
def _name_estimators(estimators):
    """Generate names for estimators."""
    # Default name: the lowercased class name of each estimator.
    names = [type(est).__name__.lower() for est in estimators]
    counts = defaultdict(int)
    for name in names:
        counts[name] += 1
    # Keep only names that occur more than once; those get numeric suffixes.
    for unique_name in [k for k, v in list(counts.items()) if v == 1]:
        del counts[unique_name]
    # Walk backwards so the remaining count doubles as the suffix index,
    # giving '<name>-1', '<name>-2', ... in list order.
    for idx in reversed(range(len(estimators))):
        name = names[idx]
        if name in counts:
            names[idx] += "-%d" % counts[name]
            counts[name] -= 1
    return list(zip(names, estimators))
def make_pipeline(*steps):
    """Construct a Pipeline from the given estimators.
    This is a shorthand for the Pipeline constructor; it does not require, and
    does not permit, naming the estimators. Instead, their names will be set
    to the lowercase of their types automatically.
    Examples
    --------
    >>> from sklearn.naive_bayes import GaussianNB
    >>> from sklearn.preprocessing import StandardScaler
    >>> make_pipeline(StandardScaler(), GaussianNB())    # doctest: +NORMALIZE_WHITESPACE
    Pipeline(steps=[('standardscaler',
                     StandardScaler(copy=True, with_mean=True, with_std=True)),
                    ('gaussiannb', GaussianNB())])
    Returns
    -------
    p : Pipeline
    """
    # Auto-name every step after its lowercased class name.
    named_steps = _name_estimators(steps)
    return Pipeline(named_steps)
def _fit_one_transformer(transformer, X, y):
return transformer.fit(X, y)
def _transform_one(transformer, name, X, transformer_weights):
if transformer_weights is not None and name in transformer_weights:
# if we have a weight for this transformer, multiply output
return transformer.transform(X) * transformer_weights[name]
return transformer.transform(X)
def _fit_transform_one(transformer, name, X, y, transformer_weights,
**fit_params):
if transformer_weights is not None and name in transformer_weights:
# if we have a weight for this transformer, multiply output
if hasattr(transformer, 'fit_transform'):
X_transformed = transformer.fit_transform(X, y, **fit_params)
return X_transformed * transformer_weights[name], transformer
else:
X_transformed = transformer.fit(X, y, **fit_params).transform(X)
return X_transformed * transformer_weights[name], transformer
if hasattr(transformer, 'fit_transform'):
X_transformed = transformer.fit_transform(X, y, **fit_params)
return X_transformed, transformer
else:
X_transformed = transformer.fit(X, y, **fit_params).transform(X)
return X_transformed, transformer
class FeatureUnion(BaseEstimator, TransformerMixin):
    """Concatenates results of multiple transformer objects.
    This estimator applies a list of transformer objects in parallel to the
    input data, then concatenates the results. This is useful to combine
    several feature extraction mechanisms into a single transformer.
    Read more in the :ref:`User Guide <feature_union>`.
    Parameters
    ----------
    transformer_list: list of (string, transformer) tuples
        List of transformer objects to be applied to the data. The first
        half of each tuple is the name of the transformer.
    n_jobs: int, optional
        Number of jobs to run in parallel (default 1).
    transformer_weights: dict, optional
        Multiplicative weights for features per transformer.
        Keys are transformer names, values the weights.
    """

    def __init__(self, transformer_list, n_jobs=1, transformer_weights=None):
        self.transformer_list = transformer_list
        self.n_jobs = n_jobs
        self.transformer_weights = transformer_weights

    def get_feature_names(self):
        """Get feature names from all transformers.
        Returns
        -------
        feature_names : list of strings
            Names of the features produced by transform.
        """
        # Each output name is prefixed with its transformer's name so
        # features from different transformers cannot collide.
        feature_names = []
        for name, trans in self.transformer_list:
            if not hasattr(trans, 'get_feature_names'):
                raise AttributeError("Transformer %s does not provide"
                                     " get_feature_names." % str(name))
            feature_names.extend([name + "__" + f for f in
                                  trans.get_feature_names()])
        return feature_names

    def fit(self, X, y=None):
        """Fit all transformers using X.
        Parameters
        ----------
        X : array-like or sparse matrix, shape (n_samples, n_features)
            Input data, used to fit transformers.
        """
        # Fit every transformer in parallel, then store the fitted clones
        # back into transformer_list.
        transformers = Parallel(n_jobs=self.n_jobs)(
            delayed(_fit_one_transformer)(trans, X, y)
            for name, trans in self.transformer_list)
        self._update_transformer_list(transformers)
        return self

    def fit_transform(self, X, y=None, **fit_params):
        """Fit all transformers using X, transform the data and concatenate
        results.
        Parameters
        ----------
        X : array-like or sparse matrix, shape (n_samples, n_features)
            Input data to be transformed.
        Returns
        -------
        X_t : array-like or sparse matrix, shape (n_samples, sum_n_components)
            hstack of results of transformers. sum_n_components is the
            sum of n_components (output dimension) over transformers.
        """
        result = Parallel(n_jobs=self.n_jobs)(
            delayed(_fit_transform_one)(trans, name, X, y,
                                        self.transformer_weights, **fit_params)
            for name, trans in self.transformer_list)
        Xs, transformers = zip(*result)
        self._update_transformer_list(transformers)
        # Keep the result sparse if any transformer produced sparse output.
        if any(sparse.issparse(f) for f in Xs):
            Xs = sparse.hstack(Xs).tocsr()
        else:
            Xs = np.hstack(Xs)
        return Xs

    def transform(self, X):
        """Transform X separately by each transformer, concatenate results.
        Parameters
        ----------
        X : array-like or sparse matrix, shape (n_samples, n_features)
            Input data to be transformed.
        Returns
        -------
        X_t : array-like or sparse matrix, shape (n_samples, sum_n_components)
            hstack of results of transformers. sum_n_components is the
            sum of n_components (output dimension) over transformers.
        """
        Xs = Parallel(n_jobs=self.n_jobs)(
            delayed(_transform_one)(trans, name, X, self.transformer_weights)
            for name, trans in self.transformer_list)
        # Keep the result sparse if any transformer produced sparse output.
        if any(sparse.issparse(f) for f in Xs):
            Xs = sparse.hstack(Xs).tocsr()
        else:
            Xs = np.hstack(Xs)
        return Xs

    def get_params(self, deep=True):
        # Expose nested transformer parameters as '<name>__<param>' keys,
        # mirroring Pipeline.get_params.
        if not deep:
            return super(FeatureUnion, self).get_params(deep=False)
        else:
            out = dict(self.transformer_list)
            for name, trans in self.transformer_list:
                for key, value in iteritems(trans.get_params(deep=True)):
                    out['%s__%s' % (name, key)] = value
            out.update(super(FeatureUnion, self).get_params(deep=False))
            return out

    def _update_transformer_list(self, transformers):
        # Replace each (name, old) pair with the freshly fitted transformer,
        # mutating the existing list in place ([:] keeps external references
        # valid).
        self.transformer_list[:] = [
            (name, new)
            for ((name, old), new) in zip(self.transformer_list, transformers)
        ]
# XXX it would be nice to have a keyword-only n_jobs argument to this function,
# but that's not allowed in Python 2.x.
def make_union(*transformers):
    """Construct a FeatureUnion from the given transformers.
    This is a shorthand for the FeatureUnion constructor; it does not require,
    and does not permit, naming the transformers. Instead, they will be given
    names automatically based on their types. It also does not allow weighting.
    Examples
    --------
    >>> from sklearn.decomposition import PCA, TruncatedSVD
    >>> make_union(PCA(), TruncatedSVD())    # doctest: +NORMALIZE_WHITESPACE
    FeatureUnion(n_jobs=1,
                 transformer_list=[('pca', PCA(copy=True, n_components=None,
                                               whiten=False)),
                                   ('truncatedsvd',
                                    TruncatedSVD(algorithm='randomized',
                                                 n_components=2, n_iter=5,
                                                 random_state=None, tol=0.0))],
                 transformer_weights=None)
    Returns
    -------
    f : FeatureUnion
    """
    # Auto-name every transformer after its lowercased class name.
    named_transformers = _name_estimators(transformers)
    return FeatureUnion(named_transformers)
| bsd-3-clause |
calfonso/ansible | lib/ansible/modules/cloud/azure/azure_rm_virtualmachine.py | 8 | 79001 | #!/usr/bin/python
#
# Copyright (c) 2016 Matt Davis, <mdavis@ansible.com>
# Chris Houseknecht, <house@redhat.com>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = '''
---
module: azure_rm_virtualmachine
version_added: "2.1"
short_description: Manage Azure virtual machines.
description:
- Create, update, stop and start a virtual machine. Provide an existing storage account and network interface or
allow the module to create these for you. If you choose not to provide a network interface, the resource group
must contain a virtual network with at least one subnet.
- Before Ansible 2.5, this required an image found in the Azure Marketplace which can be discovered with
M(azure_rm_virtualmachineimage_facts). In Ansible 2.5 and newer, custom images can be used as well, see the
examples for more details.
options:
resource_group:
description:
- Name of the resource group containing the virtual machine.
required: true
name:
description:
- Name of the virtual machine.
required: true
custom_data:
description:
- Data which is made available to the virtual machine and used by e.g., cloud-init.
version_added: "2.5"
state:
description:
- Assert the state of the virtual machine.
- State 'present' will check that the machine exists with the requested configuration. If the configuration
of the existing machine does not match, the machine will be updated. Use options started, allocated and restarted to change the machine's power
state.
- State 'absent' will remove the virtual machine.
default: present
choices:
- absent
- present
started:
description:
- Use with state 'present' to start the machine. Set to false to have the machine be 'stopped'.
default: true
allocated:
description:
- Toggle that controls if the machine is allocated/deallocated, only useful with state='present'.
default: True
restarted:
description:
- Use with state 'present' to restart a running VM.
location:
description:
- Valid Azure location. Defaults to location of the resource group.
short_hostname:
description:
- Name assigned internally to the host. On a linux VM this is the name returned by the `hostname` command.
When creating a virtual machine, short_hostname defaults to name.
vm_size:
description:
- A valid Azure VM size value. For example, 'Standard_D4'. The list of choices varies depending on the
subscription and location. Check your subscription for available choices. Required when creating a VM.
admin_username:
description:
- Admin username used to access the host after it is created. Required when creating a VM.
admin_password:
description:
- Password for the admin username. Not required if the os_type is Linux and SSH password authentication
is disabled by setting ssh_password_enabled to false.
ssh_password_enabled:
description:
- When the os_type is Linux, setting ssh_password_enabled to false will disable SSH password authentication
and require use of SSH keys.
default: true
ssh_public_keys:
description:
- "For os_type Linux provide a list of SSH keys. Each item in the list should be a dictionary where the
dictionary contains two keys: path and key_data. Set the path to the default location of the
authorized_keys files. On an Enterprise Linux host, for example, the path will be
/home/<admin username>/.ssh/authorized_keys. Set key_data to the actual value of the public key."
image:
description:
- Specifies the image used to build the VM.
- If a string, the image is sourced from a custom image based on the
name.
- 'If a dict with the keys C(publisher), C(offer), C(sku), and
C(version), the image is sourced from a Marketplace image. NOTE:
set image.version to C(latest) to get the most recent version of a
given image.'
- 'If a dict with the keys C(name) and C(resource_group), the image
is sourced from a custom image based on the C(name) and
C(resource_group) set. NOTE: the key C(resource_group) is optional
and if omitted, all images in the subscription will be searched
for by C(name).'
- Custom image support was added in Ansible 2.5
required: true
availability_set:
description:
- Name or ID of an existing availability set to add the VM to. The availability_set should be in the same resource group as VM.
version_added: "2.5"
storage_account_name:
description:
- Name of an existing storage account that supports creation of VHD blobs. If not specified for a new VM,
a new storage account named <vm name>01 will be created using storage type 'Standard_LRS'.
storage_container_name:
description:
            - Name of the container to use within the storage account to store VHD blobs. If no name is specified a
              default container will be created.
default: vhds
storage_blob_name:
description:
            - Name of the storage blob used to hold the VM's OS disk image. If no name is provided, defaults to
              the VM name + '.vhd'. If you provide a name, it must end with '.vhd'
aliases:
- storage_blob
managed_disk_type:
description:
- Managed OS disk type
choices:
- Standard_LRS
- Premium_LRS
version_added: "2.4"
os_disk_caching:
description:
- Type of OS disk caching.
choices:
- ReadOnly
- ReadWrite
default: ReadOnly
aliases:
- disk_caching
os_type:
description:
- Base type of operating system.
choices:
- Windows
- Linux
default:
- Linux
data_disks:
description:
- Describes list of data disks.
version_added: "2.4"
suboptions:
lun:
description:
- The logical unit number for data disk
default: 0
version_added: "2.4"
disk_size_gb:
description:
- The initial disk size in GB for blank data disks
version_added: "2.4"
managed_disk_type:
description:
- Managed data disk type
choices:
- Standard_LRS
- Premium_LRS
version_added: "2.4"
storage_account_name:
description:
- Name of an existing storage account that supports creation of VHD blobs. If not specified for a new VM,
a new storage account named <vm name>01 will be created using storage type 'Standard_LRS'.
version_added: "2.4"
storage_container_name:
description:
                - Name of the container to use within the storage account to store VHD blobs. If no name is specified a
                  default container will be created.
default: vhds
version_added: "2.4"
storage_blob_name:
description:
                - Name of the storage blob used to hold the VM's OS disk image. If no name is provided, defaults to
                  the VM name + '.vhd'. If you provide a name, it must end with '.vhd'
version_added: "2.4"
caching:
description:
- Type of data disk caching.
choices:
- ReadOnly
- ReadWrite
default: ReadOnly
version_added: "2.4"
public_ip_allocation_method:
description:
- If a public IP address is created when creating the VM (because a Network Interface was not provided),
determines if the public IP address remains permanently associated with the Network Interface. If set
to 'Dynamic' the public IP address may change any time the VM is rebooted or power cycled.
- The C(Disabled) choice was added in Ansible 2.6.
choices:
- Dynamic
- Static
- Disabled
default:
- Static
aliases:
- public_ip_allocation
open_ports:
description:
- If a network interface is created when creating the VM, a security group will be created as well. For
Linux hosts a rule will be added to the security group allowing inbound TCP connections to the default
SSH port 22, and for Windows hosts ports 3389 and 5986 will be opened. Override the default open ports by
providing a list of ports.
network_interface_names:
description:
- List of existing network interface names to add to the VM. If a network interface name is not provided
when the VM is created, a default network interface will be created. In order for the module to create
a network interface, at least one Virtual Network with one Subnet must exist.
virtual_network_resource_group:
description:
- When creating a virtual machine, if a specific virtual network from another resource group should be
used, use this parameter to specify the resource group to use.
version_added: "2.4"
virtual_network_name:
description:
- When creating a virtual machine, if a network interface name is not provided, one will be created.
The new network interface will be assigned to the first virtual network found in the resource group.
Use this parameter to provide a specific virtual network instead.
aliases:
- virtual_network
subnet_name:
description:
- When creating a virtual machine, if a network interface name is not provided, one will be created.
The new network interface will be assigned to the first subnet found in the virtual network.
Use this parameter to provide a specific subnet instead.
aliases:
- subnet
remove_on_absent:
description:
- When removing a VM using state 'absent', also remove associated resources
- "It can be 'all' or a list with any of the following: ['network_interfaces', 'virtual_storage', 'public_ips']"
- Any other input will be ignored
default: ['all']
plan:
description:
- A dictionary describing a third-party billing plan for an instance
version_added: 2.5
suboptions:
name:
description:
- billing plan name
required: true
product:
description:
- product name
required: true
publisher:
description:
- publisher offering the plan
required: true
promotion_code:
description:
- optional promotion code
extends_documentation_fragment:
- azure
- azure_tags
author:
- "Chris Houseknecht (@chouseknecht)"
- "Matt Davis (@nitzmahone)"
'''
EXAMPLES = '''
- name: Create VM with defaults
azure_rm_virtualmachine:
resource_group: Testing
name: testvm10
admin_username: chouseknecht
admin_password: <your password here>
image:
offer: CentOS
publisher: OpenLogic
sku: '7.1'
version: latest
- name: Create a VM with managed disk
azure_rm_virtualmachine:
resource_group: Testing
name: testvm001
vm_size: Standard_D4
managed_disk_type: Standard_LRS
admin_username: adminUser
ssh_public_keys:
- path: /home/adminUser/.ssh/authorized_keys
        key_data: < insert your ssh public key here... >
image:
offer: CoreOS
publisher: CoreOS
sku: Stable
version: latest
- name: Create a VM with existing storage account and NIC
azure_rm_virtualmachine:
resource_group: Testing
name: testvm002
vm_size: Standard_D4
storage_account: testaccount001
admin_username: adminUser
ssh_public_keys:
- path: /home/adminUser/.ssh/authorized_keys
        key_data: < insert your ssh public key here... >
network_interfaces: testvm001
image:
offer: CentOS
publisher: OpenLogic
sku: '7.1'
version: latest
- name: Create a VM with OS and multiple data managed disks
azure_rm_virtualmachine:
resource_group: Testing
name: testvm001
vm_size: Standard_D4
managed_disk_type: Standard_LRS
admin_username: adminUser
ssh_public_keys:
- path: /home/adminUser/.ssh/authorized_keys
        key_data: < insert your ssh public key here... >
image:
offer: CoreOS
publisher: CoreOS
sku: Stable
version: latest
data_disks:
- lun: 0
disk_size_gb: 64
managed_disk_type: Standard_LRS
- lun: 1
disk_size_gb: 128
managed_disk_type: Premium_LRS
- name: Create a VM with OS and multiple data storage accounts
azure_rm_virtualmachine:
resource_group: Testing
name: testvm001
vm_size: Standard_DS1_v2
admin_username: adminUser
ssh_password_enabled: false
ssh_public_keys:
- path: /home/adminUser/.ssh/authorized_keys
        key_data: < insert your ssh public key here... >
network_interfaces: testvm001
storage_container: osdisk
storage_blob: osdisk.vhd
image:
offer: CoreOS
publisher: CoreOS
sku: Stable
version: latest
data_disks:
- lun: 0
disk_size_gb: 64
storage_container_name: datadisk1
storage_blob_name: datadisk1.vhd
- lun: 1
disk_size_gb: 128
storage_container_name: datadisk2
storage_blob_name: datadisk2.vhd
- name: Create a VM with a custom image
azure_rm_virtualmachine:
resource_group: Testing
name: testvm001
vm_size: Standard_DS1_v2
admin_username: adminUser
admin_password: password01
image: customimage001
- name: Create a VM with a custom image from a particular resource group
azure_rm_virtualmachine:
resource_group: Testing
name: testvm001
vm_size: Standard_DS1_v2
admin_username: adminUser
admin_password: password01
image:
name: customimage001
resource_group: Testing
- name: Power Off
azure_rm_virtualmachine:
resource_group: Testing
name: testvm002
started: no
- name: Deallocate
azure_rm_virtualmachine:
resource_group: Testing
name: testvm002
allocated: no
- name: Power On
azure_rm_virtualmachine:
resource_group:
name: testvm002
- name: Restart
azure_rm_virtualmachine:
resource_group:
name: testvm002
restarted: yes
- name: remove vm and all resources except public ips
azure_rm_virtualmachine:
resource_group: Testing
name: testvm002
state: absent
remove_on_absent:
- network_interfaces
- virtual_storage
'''
RETURN = '''
powerstate:
description: Indicates if the state is running, stopped, deallocated
returned: always
type: string
example: running
deleted_vhd_uris:
description: List of deleted Virtual Hard Disk URIs.
returned: 'on delete'
type: list
example: ["https://testvm104519.blob.core.windows.net/vhds/testvm10.vhd"]
deleted_network_interfaces:
description: List of deleted NICs.
returned: 'on delete'
type: list
example: ["testvm1001"]
deleted_public_ips:
description: List of deleted public IP address names.
returned: 'on delete'
type: list
example: ["testvm1001"]
azure_vm:
description: Facts about the current state of the object. Note that facts are not part of the registered output but available directly.
returned: always
type: complex
contains: {
"properties": {
"availabilitySet": {
"id": "/subscriptions/XXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXX/resourceGroups/Testing/providers/Microsoft.Compute/availabilitySets/MYAVAILABILITYSET"
},
"hardwareProfile": {
"vmSize": "Standard_D1"
},
"instanceView": {
"disks": [
{
"name": "testvm10.vhd",
"statuses": [
{
"code": "ProvisioningState/succeeded",
"displayStatus": "Provisioning succeeded",
"level": "Info",
"time": "2016-03-30T07:11:16.187272Z"
}
]
}
],
"statuses": [
{
"code": "ProvisioningState/succeeded",
"displayStatus": "Provisioning succeeded",
"level": "Info",
"time": "2016-03-30T20:33:38.946916Z"
},
{
"code": "PowerState/running",
"displayStatus": "VM running",
"level": "Info"
}
],
"vmAgent": {
"extensionHandlers": [],
"statuses": [
{
"code": "ProvisioningState/succeeded",
"displayStatus": "Ready",
"level": "Info",
"message": "GuestAgent is running and accepting new configurations.",
"time": "2016-03-30T20:31:16.000Z"
}
],
"vmAgentVersion": "WALinuxAgent-2.0.16"
}
},
"networkProfile": {
"networkInterfaces": [
{
"id": "/subscriptions/XXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXX/resourceGroups/Testing/providers/Microsoft.Network/networkInterfaces/testvm10_NIC01",
"name": "testvm10_NIC01",
"properties": {
"dnsSettings": {
"appliedDnsServers": [],
"dnsServers": []
},
"enableIPForwarding": false,
"ipConfigurations": [
{
"etag": 'W/"041c8c2a-d5dd-4cd7-8465-9125cfbe2cf8"',
"id": "/subscriptions/XXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXX/resourceGroups/Testing/providers/Microsoft.Network/networkInterfaces/testvm10_NIC01/ipConfigurations/default",
"name": "default",
"properties": {
"privateIPAddress": "10.10.0.5",
"privateIPAllocationMethod": "Dynamic",
"provisioningState": "Succeeded",
"publicIPAddress": {
"id": "/subscriptions/XXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXX/resourceGroups/Testing/providers/Microsoft.Network/publicIPAddresses/testvm10_PIP01",
"name": "testvm10_PIP01",
"properties": {
"idleTimeoutInMinutes": 4,
"ipAddress": "13.92.246.197",
"ipConfiguration": {
"id": "/subscriptions/XXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXX/resourceGroups/Testing/providers/Microsoft.Network/networkInterfaces/testvm10_NIC01/ipConfigurations/default"
},
"provisioningState": "Succeeded",
"publicIPAllocationMethod": "Static",
"resourceGuid": "3447d987-ca0d-4eca-818b-5dddc0625b42"
}
}
}
}
],
"macAddress": "00-0D-3A-12-AA-14",
"primary": true,
"provisioningState": "Succeeded",
"resourceGuid": "10979e12-ccf9-42ee-9f6d-ff2cc63b3844",
"virtualMachine": {
"id": "/subscriptions/XXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXX/resourceGroups/Testing/providers/Microsoft.Compute/virtualMachines/testvm10"
}
}
}
]
},
"osProfile": {
"adminUsername": "chouseknecht",
"computerName": "test10",
"linuxConfiguration": {
"disablePasswordAuthentication": false
},
"secrets": []
},
"provisioningState": "Succeeded",
"storageProfile": {
"dataDisks": [
{
"caching": "ReadWrite",
"createOption": "empty",
"diskSizeGB": 64,
"lun": 0,
"name": "datadisk1.vhd",
"vhd": {
"uri": "https://testvm10sa1.blob.core.windows.net/datadisk/datadisk1.vhd"
}
}
],
"imageReference": {
"offer": "CentOS",
"publisher": "OpenLogic",
"sku": "7.1",
"version": "7.1.20160308"
},
"osDisk": {
"caching": "ReadOnly",
"createOption": "fromImage",
"name": "testvm10.vhd",
"osType": "Linux",
"vhd": {
"uri": "https://testvm10sa1.blob.core.windows.net/vhds/testvm10.vhd"
}
}
}
},
"type": "Microsoft.Compute/virtualMachines"
}
''' # NOQA
import base64
import random
import re
try:
from msrestazure.azure_exceptions import CloudError
from msrestazure.tools import parse_resource_id
except ImportError:
# This is handled in azure_rm_common
pass
from ansible.module_utils.basic import to_native, to_bytes
from ansible.module_utils.azure_rm_common import AzureRMModuleBase, azure_id_to_dict
# NOTE(review): these constants appear to be consumed by the serialization
# helpers in azure_rm_common (object class name / enum resolution modules) --
# confirm against AzureRMModuleBase before changing.
AZURE_OBJECT_CLASS = 'VirtualMachine'
AZURE_ENUM_MODULES = ['azure.mgmt.compute.models']
def extract_names_from_blob_uri(blob_uri, storage_suffix):
    """Parse an Azure blob URI into its account, container and blob names.

    :param blob_uri: full https URI of a blob, e.g.
        ``https://account.blob.core.windows.net/container/path/to/blob.vhd``
    :param storage_suffix: cloud storage endpoint suffix for the current
        cloud environment, e.g. ``core.windows.net``
    :return: dict with keys ``accountname``, ``containername`` and ``blobname``
    :raises Exception: if the URI does not match the expected layout
    """
    # HACK: ditch this once python SDK supports get by URI
    # re.escape the suffix so its dots match literally instead of acting as
    # regex wildcards (previously 'core.windows.net' matched any characters
    # in the dot positions).
    m = re.match(r'^https://(?P<accountname>[^.]+)\.blob\.{0}/'
                 r'(?P<containername>[^/]+)/(?P<blobname>.+)$'.format(re.escape(storage_suffix)),
                 blob_uri)
    if not m:
        raise Exception("unable to parse blob uri '%s'" % blob_uri)
    return m.groupdict()
class AzureRMVirtualMachine(AzureRMModuleBase):
    def __init__(self):
        """Define the module argument spec and initialize option attributes."""
        # Argument spec consumed by AzureRMModuleBase/AnsibleModule; the keys
        # correspond to the options documented in DOCUMENTATION above.
        self.module_arg_spec = dict(
            resource_group=dict(type='str', required=True),
            name=dict(type='str', required=True),
            custom_data=dict(type='str'),
            state=dict(choices=['present', 'absent'], default='present', type='str'),
            location=dict(type='str'),
            short_hostname=dict(type='str'),
            vm_size=dict(type='str'),
            admin_username=dict(type='str'),
            admin_password=dict(type='str', no_log=True),
            ssh_password_enabled=dict(type='bool', default=True),
            ssh_public_keys=dict(type='list'),
            # 'raw' because image may be either a string (custom image name)
            # or a dict (marketplace or custom image reference).
            image=dict(type='raw'),
            availability_set=dict(type='str'),
            storage_account_name=dict(type='str', aliases=['storage_account']),
            storage_container_name=dict(type='str', aliases=['storage_container'], default='vhds'),
            storage_blob_name=dict(type='str', aliases=['storage_blob']),
            os_disk_caching=dict(type='str', aliases=['disk_caching'], choices=['ReadOnly', 'ReadWrite'],
                                 default='ReadOnly'),
            managed_disk_type=dict(type='str', choices=['Standard_LRS', 'Premium_LRS']),
            os_type=dict(type='str', choices=['Linux', 'Windows'], default='Linux'),
            public_ip_allocation_method=dict(type='str', choices=['Dynamic', 'Static', 'Disabled'], default='Static',
                                             aliases=['public_ip_allocation']),
            open_ports=dict(type='list'),
            network_interface_names=dict(type='list', aliases=['network_interfaces']),
            remove_on_absent=dict(type='list', default=['all']),
            virtual_network_resource_group=dict(type='str'),
            virtual_network_name=dict(type='str', aliases=['virtual_network']),
            subnet_name=dict(type='str', aliases=['subnet']),
            allocated=dict(type='bool', default=True),
            restarted=dict(type='bool', default=False),
            started=dict(type='bool', default=True),
            data_disks=dict(type='list'),
            plan=dict(type='dict')
        )
        # One attribute per module option; exec_module() copies the task
        # parameters onto these via setattr().
        self.resource_group = None
        self.name = None
        self.custom_data = None
        self.state = None
        self.location = None
        self.short_hostname = None
        self.vm_size = None
        self.admin_username = None
        self.admin_password = None
        self.ssh_password_enabled = None
        self.ssh_public_keys = None
        self.image = None
        self.availability_set = None
        self.storage_account_name = None
        self.storage_container_name = None
        self.storage_blob_name = None
        self.os_type = None
        self.os_disk_caching = None
        self.managed_disk_type = None
        self.network_interface_names = None
        # Normalized to a lowercase set at the top of exec_module().
        self.remove_on_absent = set()
        self.tags = None
        # NOTE(review): 'force' is not in module_arg_spec above and is never
        # assigned from parameters in this chunk -- confirm it is still used.
        self.force = None
        self.public_ip_allocation_method = None
        self.open_ports = None
        self.virtual_network_resource_group = None
        self.virtual_network_name = None
        self.subnet_name = None
        self.allocated = None
        self.restarted = None
        self.started = None
        # Populated by exec_module() with the list of detected config
        # differences that trigger a VM update.
        self.differences = None
        self.data_disks = None
        self.plan = None
        # Result structure returned to Ansible; the azure_vm fact and the
        # actions/powerstate_change fields are filled in by exec_module().
        self.results = dict(
            changed=False,
            actions=[],
            powerstate_change=None,
            ansible_facts=dict(azure_vm=None)
        )
        super(AzureRMVirtualMachine, self).__init__(derived_arg_spec=self.module_arg_spec,
                                                    supports_check_mode=True)
def exec_module(self, **kwargs):
for key in list(self.module_arg_spec.keys()) + ['tags']:
setattr(self, key, kwargs[key])
# make sure options are lower case
self.remove_on_absent = set([resource.lower() for resource in self.remove_on_absent])
changed = False
powerstate_change = None
results = dict()
vm = None
network_interfaces = []
requested_vhd_uri = None
data_disk_requested_vhd_uri = None
disable_ssh_password = None
vm_dict = None
image_reference = None
custom_image = False
resource_group = self.get_resource_group(self.resource_group)
if not self.location:
# Set default location
self.location = resource_group.location
if self.state == 'present':
# Verify parameters and resolve any defaults
if self.vm_size and not self.vm_size_is_valid():
self.fail("Parameter error: vm_size {0} is not valid for your subscription and location.".format(
self.vm_size
))
if self.network_interface_names:
for name in self.network_interface_names:
nic = self.get_network_interface(name)
network_interfaces.append(nic.id)
if self.ssh_public_keys:
msg = "Parameter error: expecting ssh_public_keys to be a list of type dict where " \
"each dict contains keys: path, key_data."
for key in self.ssh_public_keys:
if not isinstance(key, dict):
self.fail(msg)
if not key.get('path') or not key.get('key_data'):
self.fail(msg)
if self.image and isinstance(self.image, dict):
if all(key in self.image for key in ('publisher', 'offer', 'sku', 'version')):
marketplace_image = self.get_marketplace_image_version()
if self.image['version'] == 'latest':
self.image['version'] = marketplace_image.name
self.log("Using image version {0}".format(self.image['version']))
image_reference = self.compute_models.ImageReference(
publisher=self.image['publisher'],
offer=self.image['offer'],
sku=self.image['sku'],
version=self.image['version']
)
elif self.image.get('name'):
custom_image = True
image_reference = self.get_custom_image_reference(
self.image.get('name'),
self.image.get('resource_group'))
else:
self.fail("parameter error: expecting image to contain [publisher, offer, sku, version] or [name, resource_group]")
elif self.image and isinstance(self.image, str):
custom_image = True
image_reference = self.get_custom_image_reference(self.image)
elif self.image:
self.fail("parameter error: expecting image to be a string or dict not {0}".format(type(self.image).__name__))
if self.plan:
if not self.plan.get('name') or not self.plan.get('product') or not self.plan.get('publisher'):
self.fail("parameter error: plan must include name, product, and publisher")
if not self.storage_blob_name and not self.managed_disk_type:
self.storage_blob_name = self.name + '.vhd'
elif self.managed_disk_type:
self.storage_blob_name = self.name
if self.storage_account_name and not self.managed_disk_type:
properties = self.get_storage_account(self.storage_account_name)
requested_vhd_uri = '{0}{1}/{2}'.format(properties.primary_endpoints.blob,
self.storage_container_name,
self.storage_blob_name)
disable_ssh_password = not self.ssh_password_enabled
try:
self.log("Fetching virtual machine {0}".format(self.name))
vm = self.compute_client.virtual_machines.get(self.resource_group, self.name, expand='instanceview')
self.check_provisioning_state(vm, self.state)
vm_dict = self.serialize_vm(vm)
if self.state == 'present':
differences = []
current_nics = []
results = vm_dict
# Try to determine if the VM needs to be updated
if self.network_interface_names:
for nic in vm_dict['properties']['networkProfile']['networkInterfaces']:
current_nics.append(nic['id'])
if set(current_nics) != set(network_interfaces):
self.log('CHANGED: virtual machine {0} - network interfaces are different.'.format(self.name))
differences.append('Network Interfaces')
updated_nics = [dict(id=id, primary=(i is 0))
for i, id in enumerate(network_interfaces)]
vm_dict['properties']['networkProfile']['networkInterfaces'] = updated_nics
changed = True
if self.os_disk_caching and \
self.os_disk_caching != vm_dict['properties']['storageProfile']['osDisk']['caching']:
self.log('CHANGED: virtual machine {0} - OS disk caching'.format(self.name))
differences.append('OS Disk caching')
changed = True
vm_dict['properties']['storageProfile']['osDisk']['caching'] = self.os_disk_caching
update_tags, vm_dict['tags'] = self.update_tags(vm_dict.get('tags', dict()))
if update_tags:
differences.append('Tags')
changed = True
if self.short_hostname and self.short_hostname != vm_dict['properties']['osProfile']['computerName']:
self.log('CHANGED: virtual machine {0} - short hostname'.format(self.name))
differences.append('Short Hostname')
changed = True
vm_dict['properties']['osProfile']['computerName'] = self.short_hostname
if self.started and vm_dict['powerstate'] not in ['starting', 'running'] and self.allocated:
self.log("CHANGED: virtual machine {0} not running and requested state 'running'".format(self.name))
changed = True
powerstate_change = 'poweron'
elif self.state == 'present' and vm_dict['powerstate'] == 'running' and self.restarted:
self.log("CHANGED: virtual machine {0} {1} and requested state 'restarted'"
.format(self.name, vm_dict['powerstate']))
changed = True
powerstate_change = 'restarted'
elif self.state == 'present' and not self.allocated and vm_dict['powerstate'] not in ['deallocated', 'deallocating']:
self.log("CHANGED: virtual machine {0} {1} and requested state 'deallocated'"
.format(self.name, vm_dict['powerstate']))
changed = True
powerstate_change = 'deallocated'
elif not self.started and vm_dict['powerstate'] == 'running':
self.log("CHANGED: virtual machine {0} running and requested state 'stopped'".format(self.name))
changed = True
powerstate_change = 'poweroff'
self.differences = differences
elif self.state == 'absent':
self.log("CHANGED: virtual machine {0} exists and requested state is 'absent'".format(self.name))
results = dict()
changed = True
except CloudError:
self.log('Virtual machine {0} does not exist'.format(self.name))
if self.state == 'present':
self.log("CHANGED: virtual machine {0} does not exist but state is 'present'.".format(self.name))
changed = True
self.results['changed'] = changed
self.results['ansible_facts']['azure_vm'] = results
self.results['powerstate_change'] = powerstate_change
if self.check_mode:
return self.results
if changed:
if self.state == 'present':
default_storage_account = None
if not vm:
# Create the VM
self.log("Create virtual machine {0}".format(self.name))
self.results['actions'].append('Created VM {0}'.format(self.name))
# Validate parameters
if not self.admin_username:
self.fail("Parameter error: admin_username required when creating a virtual machine.")
if self.os_type == 'Linux':
if disable_ssh_password and not self.ssh_public_keys:
self.fail("Parameter error: ssh_public_keys required when disabling SSH password.")
if not image_reference:
self.fail("Parameter error: an image is required when creating a virtual machine.")
availability_set_resource = None
if self.availability_set:
parsed_availability_set = parse_resource_id(self.availability_set)
availability_set = self.get_availability_set(parsed_availability_set.get('resource_group', self.resource_group),
parsed_availability_set.get('name'))
availability_set_resource = self.compute_models.SubResource(availability_set.id)
# Get defaults
if not self.network_interface_names:
default_nic = self.create_default_nic()
self.log("network interface:")
self.log(self.serialize_obj(default_nic, 'NetworkInterface'), pretty_print=True)
network_interfaces = [default_nic.id]
# os disk
if not self.storage_account_name and not self.managed_disk_type:
storage_account = self.create_default_storage_account()
self.log("storage account:")
self.log(self.serialize_obj(storage_account, 'StorageAccount'), pretty_print=True)
requested_vhd_uri = 'https://{0}.blob.{1}/{2}/{3}'.format(
storage_account.name,
self._cloud_environment.suffixes.storage_endpoint,
self.storage_container_name,
self.storage_blob_name)
default_storage_account = storage_account # store for use by data disks if necessary
if not self.short_hostname:
self.short_hostname = self.name
nics = [self.compute_models.NetworkInterfaceReference(id=id, primary=(i is 0))
for i, id in enumerate(network_interfaces)]
# os disk
if self.managed_disk_type:
vhd = None
managed_disk = self.compute_models.ManagedDiskParameters(storage_account_type=self.managed_disk_type)
elif custom_image:
vhd = None
managed_disk = None
else:
vhd = self.compute_models.VirtualHardDisk(uri=requested_vhd_uri)
managed_disk = None
plan = None
if self.plan:
plan = self.compute_models.Plan(name=self.plan.get('name'), product=self.plan.get('product'),
publisher=self.plan.get('publisher'),
promotion_code=self.plan.get('promotion_code'))
vm_resource = self.compute_models.VirtualMachine(
self.location,
tags=self.tags,
os_profile=self.compute_models.OSProfile(
admin_username=self.admin_username,
computer_name=self.short_hostname,
),
hardware_profile=self.compute_models.HardwareProfile(
vm_size=self.vm_size
),
storage_profile=self.compute_models.StorageProfile(
os_disk=self.compute_models.OSDisk(
name=self.storage_blob_name,
vhd=vhd,
managed_disk=managed_disk,
create_option=self.compute_models.DiskCreateOptionTypes.from_image,
caching=self.os_disk_caching,
),
image_reference=image_reference,
),
network_profile=self.compute_models.NetworkProfile(
network_interfaces=nics
),
availability_set=availability_set_resource,
plan=plan
)
if self.admin_password:
vm_resource.os_profile.admin_password = self.admin_password
if self.custom_data:
# Azure SDK (erroneously?) wants native string type for this
vm_resource.os_profile.custom_data = to_native(base64.b64encode(to_bytes(self.custom_data)))
if self.os_type == 'Linux':
vm_resource.os_profile.linux_configuration = self.compute_models.LinuxConfiguration(
disable_password_authentication=disable_ssh_password
)
if self.ssh_public_keys:
ssh_config = self.compute_models.SshConfiguration()
ssh_config.public_keys = \
[self.compute_models.SshPublicKey(path=key['path'], key_data=key['key_data']) for key in self.ssh_public_keys]
vm_resource.os_profile.linux_configuration.ssh = ssh_config
# data disk
if self.data_disks:
data_disks = []
count = 0
for data_disk in self.data_disks:
if not data_disk.get('managed_disk_type'):
if not data_disk.get('storage_blob_name'):
data_disk['storage_blob_name'] = self.name + '-data-' + str(count) + '.vhd'
count += 1
if data_disk.get('storage_account_name'):
data_disk_storage_account = self.get_storage_account(data_disk['storage_account_name'])
else:
if(not default_storage_account):
data_disk_storage_account = self.create_default_storage_account()
self.log("data disk storage account:")
self.log(self.serialize_obj(data_disk_storage_account, 'StorageAccount'), pretty_print=True)
default_storage_account = data_disk_storage_account # store for use by future data disks if necessary
else:
data_disk_storage_account = default_storage_account
if not data_disk.get('storage_container_name'):
data_disk['storage_container_name'] = 'vhds'
data_disk_requested_vhd_uri = 'https://{0}.blob.{1}/{2}/{3}'.format(
data_disk_storage_account.name,
self._cloud_environment.suffixes.storage_endpoint,
data_disk['storage_container_name'],
data_disk['storage_blob_name']
)
if not data_disk.get('managed_disk_type'):
data_disk_managed_disk = None
disk_name = data_disk['storage_blob_name']
data_disk_vhd = self.compute_models.VirtualHardDisk(uri=data_disk_requested_vhd_uri)
else:
data_disk_vhd = None
data_disk_managed_disk = self.compute_models.ManagedDiskParameters(storage_account_type=data_disk['managed_disk_type'])
disk_name = self.name + "-datadisk-" + str(count)
count += 1
data_disk['caching'] = data_disk.get(
'caching', 'ReadOnly'
)
data_disks.append(self.compute_models.DataDisk(
lun=data_disk['lun'],
name=disk_name,
vhd=data_disk_vhd,
caching=data_disk['caching'],
create_option=self.compute_models.DiskCreateOptionTypes.empty,
disk_size_gb=data_disk['disk_size_gb'],
managed_disk=data_disk_managed_disk,
))
vm_resource.storage_profile.data_disks = data_disks
self.log("Create virtual machine with parameters:")
self.create_or_update_vm(vm_resource)
elif self.differences and len(self.differences) > 0:
# Update the VM based on detected config differences
self.log("Update virtual machine {0}".format(self.name))
self.results['actions'].append('Updated VM {0}'.format(self.name))
nics = [self.compute_models.NetworkInterfaceReference(id=interface['id'], primary=(i is 0))
for i, interface in enumerate(vm_dict['properties']['networkProfile']['networkInterfaces'])]
# os disk
if not vm_dict['properties']['storageProfile']['osDisk'].get('managedDisk'):
managed_disk = None
vhd = self.compute_models.VirtualHardDisk(uri=vm_dict['properties']['storageProfile']['osDisk']['vhd']['uri'])
else:
vhd = None
managed_disk = self.compute_models.ManagedDiskParameters(
storage_account_type=vm_dict['properties']['storageProfile']['osDisk']['managedDisk']['storageAccountType']
)
availability_set_resource = None
try:
availability_set_resource = self.compute_models.SubResource(vm_dict['properties']['availabilitySet']['id'])
except Exception:
# pass if the availability set is not set
pass
vm_resource = self.compute_models.VirtualMachine(
vm_dict['location'],
os_profile=self.compute_models.OSProfile(
admin_username=vm_dict['properties']['osProfile']['adminUsername'],
computer_name=vm_dict['properties']['osProfile']['computerName']
),
hardware_profile=self.compute_models.HardwareProfile(
vm_size=vm_dict['properties']['hardwareProfile']['vmSize']
),
storage_profile=self.compute_models.StorageProfile(
os_disk=self.compute_models.OSDisk(
name=vm_dict['properties']['storageProfile']['osDisk']['name'],
vhd=vhd,
managed_disk=managed_disk,
create_option=vm_dict['properties']['storageProfile']['osDisk']['createOption'],
os_type=vm_dict['properties']['storageProfile']['osDisk']['osType'],
caching=vm_dict['properties']['storageProfile']['osDisk']['caching'],
),
image_reference=self.compute_models.ImageReference(
publisher=vm_dict['properties']['storageProfile']['imageReference']['publisher'],
offer=vm_dict['properties']['storageProfile']['imageReference']['offer'],
sku=vm_dict['properties']['storageProfile']['imageReference']['sku'],
version=vm_dict['properties']['storageProfile']['imageReference']['version']
),
),
availability_set=availability_set_resource,
network_profile=self.compute_models.NetworkProfile(
network_interfaces=nics
),
)
if vm_dict.get('tags'):
vm_resource.tags = vm_dict['tags']
# Add custom_data, if provided
if vm_dict['properties']['osProfile'].get('customData'):
custom_data = vm_dict['properties']['osProfile']['customData']
# Azure SDK (erroneously?) wants native string type for this
vm_resource.os_profile.custom_data = to_native(base64.b64encode(to_bytes(custom_data)))
# Add admin password, if one provided
if vm_dict['properties']['osProfile'].get('adminPassword'):
vm_resource.os_profile.admin_password = vm_dict['properties']['osProfile']['adminPassword']
# Add linux configuration, if applicable
linux_config = vm_dict['properties']['osProfile'].get('linuxConfiguration')
if linux_config:
ssh_config = linux_config.get('ssh', None)
vm_resource.os_profile.linux_configuration = self.compute_models.LinuxConfiguration(
disable_password_authentication=linux_config.get('disablePasswordAuthentication', False)
)
if ssh_config:
public_keys = ssh_config.get('publicKeys')
if public_keys:
vm_resource.os_profile.linux_configuration.ssh = self.compute_models.SshConfiguration(public_keys=[])
for key in public_keys:
vm_resource.os_profile.linux_configuration.ssh.public_keys.append(
self.compute_models.SshPublicKey(path=key['path'], key_data=key['keyData'])
)
# data disk
if vm_dict['properties']['storageProfile'].get('dataDisks'):
data_disks = []
for data_disk in vm_dict['properties']['storageProfile']['dataDisks']:
if data_disk.get('managedDisk'):
managed_disk_type = data_disk['managedDisk']['storageAccountType']
data_disk_managed_disk = self.compute_models.ManagedDiskParameters(storage_account_type=managed_disk_type)
data_disk_vhd = None
else:
data_disk_vhd = data_disk['vhd']['uri']
data_disk_managed_disk = None
data_disks.append(self.compute_models.DataDisk(
lun=int(data_disk['lun']),
name=data_disk.get('name'),
vhd=data_disk_vhd,
caching=data_disk.get('caching'),
create_option=data_disk.get('createOption'),
disk_size_gb=int(data_disk['diskSizeGB']),
managed_disk=data_disk_managed_disk,
))
vm_resource.storage_profile.data_disks = data_disks
self.log("Update virtual machine with parameters:")
self.create_or_update_vm(vm_resource)
# Make sure we leave the machine in requested power state
if (powerstate_change == 'poweron' and
self.results['ansible_facts']['azure_vm']['powerstate'] != 'running'):
# Attempt to power on the machine
self.power_on_vm()
elif (powerstate_change == 'poweroff' and
self.results['ansible_facts']['azure_vm']['powerstate'] == 'running'):
# Attempt to power off the machine
self.power_off_vm()
elif powerstate_change == 'restarted':
self.restart_vm()
elif powerstate_change == 'deallocated':
self.deallocate_vm()
self.results['ansible_facts']['azure_vm'] = self.serialize_vm(self.get_vm())
elif self.state == 'absent':
# delete the VM
self.log("Delete virtual machine {0}".format(self.name))
self.results['ansible_facts']['azure_vm'] = None
self.delete_vm(vm)
# until we sort out how we want to do this globally
del self.results['actions']
return self.results
def get_vm(self):
    '''
    Fetch this module's VM with an expanded instanceView.

    :return: VirtualMachine object (fails the module on any error)
    '''
    try:
        return self.compute_client.virtual_machines.get(self.resource_group,
                                                        self.name,
                                                        expand='instanceview')
    except Exception as exc:
        self.fail("Error getting virtual machine {0} - {1}".format(self.name, str(exc)))
def serialize_vm(self, vm):
    '''
    Convert a VirtualMachine object to dict.

    Beyond plain serialization this also:
      - copies the top-level identity fields (id, name, type, location, tags),
      - derives a 'powerstate' string from the instance view statuses,
      - expands each network interface (and any public IP on it) in place so
        the result contains the full NIC/PIP configuration.

    :param vm: VirtualMachine object
    :return: dict
    '''
    result = self.serialize_obj(vm, AZURE_OBJECT_CLASS, enum_modules=AZURE_ENUM_MODULES)
    result['id'] = vm.id
    result['name'] = vm.name
    result['type'] = vm.type
    result['location'] = vm.location
    result['tags'] = vm.tags

    # Default to an empty dict; replaced by a string such as 'running' when
    # the instance view carries a PowerState/<state> status code.
    result['powerstate'] = dict()
    if vm.instance_view:
        result['powerstate'] = next((s.code.replace('PowerState/', '')
                                     for s in vm.instance_view.statuses if s.code.startswith('PowerState')), None)

    # Expand network interfaces to include config properties
    for interface in vm.network_profile.network_interfaces:
        int_dict = azure_id_to_dict(interface.id)
        nic = self.get_network_interface(int_dict['networkInterfaces'])
        for interface_dict in result['properties']['networkProfile']['networkInterfaces']:
            if interface_dict['id'] == interface.id:
                # Graft the NIC's serialized properties onto the matching
                # entry in the already-serialized VM dict.
                nic_dict = self.serialize_obj(nic, 'NetworkInterface')
                interface_dict['name'] = int_dict['networkInterfaces']
                interface_dict['properties'] = nic_dict['properties']

    # Expand public IPs to include config properties
    for interface in result['properties']['networkProfile']['networkInterfaces']:
        for config in interface['properties']['ipConfigurations']:
            if config['properties'].get('publicIPAddress'):
                pipid_dict = azure_id_to_dict(config['properties']['publicIPAddress']['id'])
                try:
                    pip = self.network_client.public_ip_addresses.get(self.resource_group,
                                                                      pipid_dict['publicIPAddresses'])
                except Exception as exc:
                    self.fail("Error fetching public ip {0} - {1}".format(pipid_dict['publicIPAddresses'],
                                                                          str(exc)))
                pip_dict = self.serialize_obj(pip, 'PublicIPAddress')
                config['properties']['publicIPAddress']['name'] = pipid_dict['publicIPAddresses']
                config['properties']['publicIPAddress']['properties'] = pip_dict['properties']

    self.log(result, pretty_print=True)
    # A live (non-absent) VM must always report a power state; treat a
    # missing one as an error rather than returning partial facts.
    if self.state != 'absent' and not result['powerstate']:
        self.fail("Failed to determine PowerState of virtual machine {0}".format(self.name))
    return result
def power_off_vm(self):
    '''Power off the VM, blocking until the operation completes.'''
    self.log("Powered off virtual machine {0}".format(self.name))
    self.results['actions'].append("Powered off virtual machine {0}".format(self.name))
    try:
        self.get_poller_result(
            self.compute_client.virtual_machines.power_off(self.resource_group, self.name))
    except Exception as exc:
        self.fail("Error powering off virtual machine {0} - {1}".format(self.name, str(exc)))
    return True
def power_on_vm(self):
    '''Start the VM, blocking until the operation completes.'''
    self.results['actions'].append("Powered on virtual machine {0}".format(self.name))
    self.log("Power on virtual machine {0}".format(self.name))
    try:
        self.get_poller_result(
            self.compute_client.virtual_machines.start(self.resource_group, self.name))
    except Exception as exc:
        self.fail("Error powering on virtual machine {0} - {1}".format(self.name, str(exc)))
    return True
def restart_vm(self):
    '''Restart the VM, blocking until the operation completes.'''
    self.results['actions'].append("Restarted virtual machine {0}".format(self.name))
    self.log("Restart virtual machine {0}".format(self.name))
    try:
        self.get_poller_result(
            self.compute_client.virtual_machines.restart(self.resource_group, self.name))
    except Exception as exc:
        self.fail("Error restarting virtual machine {0} - {1}".format(self.name, str(exc)))
    return True
def deallocate_vm(self):
    '''Deallocate the VM (release compute resources), blocking until done.'''
    self.results['actions'].append("Deallocated virtual machine {0}".format(self.name))
    self.log("Deallocate virtual machine {0}".format(self.name))
    try:
        self.get_poller_result(
            self.compute_client.virtual_machines.deallocate(self.resource_group, self.name))
    except Exception as exc:
        self.fail("Error deallocating virtual machine {0} - {1}".format(self.name, str(exc)))
    return True
def delete_vm(self, vm):
    '''
    Delete the VM and, depending on self.remove_on_absent, its attached
    storage (VHDs / managed disks), NICs and public IPs.

    The linked-resource names/URIs are collected *before* the VM is deleted
    (they are unreadable afterwards), then deleted in dependency order:
    VM first, then disks, then NICs, then public IPs.

    :param vm: VirtualMachine object to delete
    :return: True (fails the module on any error)
    '''
    vhd_uris = []
    managed_disk_ids = []
    nic_names = []
    pip_names = []

    if self.remove_on_absent.intersection(set(['all', 'virtual_storage'])):
        # store the attached vhd info so we can nuke it after the VM is gone
        if(vm.storage_profile.os_disk.managed_disk):
            self.log('Storing managed disk ID for deletion')
            managed_disk_ids.append(vm.storage_profile.os_disk.managed_disk.id)
        elif(vm.storage_profile.os_disk.vhd):
            self.log('Storing VHD URI for deletion')
            vhd_uris.append(vm.storage_profile.os_disk.vhd.uri)

        # Each data disk is either VHD-backed or a managed disk, never both.
        data_disks = vm.storage_profile.data_disks
        for data_disk in data_disks:
            if(data_disk.vhd):
                vhd_uris.append(data_disk.vhd.uri)
            elif(data_disk.managed_disk):
                managed_disk_ids.append(data_disk.managed_disk.id)

        # FUTURE enable diff mode, move these there...
        self.log("VHD URIs to delete: {0}".format(', '.join(vhd_uris)))
        self.results['deleted_vhd_uris'] = vhd_uris

        self.log("Managed disk IDs to delete: {0}".format(', '.join(managed_disk_ids)))
        self.results['deleted_managed_disk_ids'] = managed_disk_ids

    if self.remove_on_absent.intersection(set(['all', 'network_interfaces'])):
        # store the attached nic info so we can nuke them after the VM is gone
        self.log('Storing NIC names for deletion.')
        for interface in vm.network_profile.network_interfaces:
            id_dict = azure_id_to_dict(interface.id)
            nic_names.append(id_dict['networkInterfaces'])
        self.log('NIC names to delete {0}'.format(', '.join(nic_names)))
        self.results['deleted_network_interfaces'] = nic_names

        if self.remove_on_absent.intersection(set(['all', 'public_ips'])):
            # also store each nic's attached public IPs and delete after the NIC is gone
            # NOTE(review): public IPs are only collected when NIC removal is
            # also requested, since they are discovered via the NICs.
            for name in nic_names:
                nic = self.get_network_interface(name)
                for ipc in nic.ip_configurations:
                    if ipc.public_ip_address:
                        pip_dict = azure_id_to_dict(ipc.public_ip_address.id)
                        pip_names.append(pip_dict['publicIPAddresses'])
            self.log('Public IPs to delete are {0}'.format(', '.join(pip_names)))
            self.results['deleted_public_ips'] = pip_names

    self.log("Deleting virtual machine {0}".format(self.name))
    self.results['actions'].append("Deleted virtual machine {0}".format(self.name))
    try:
        poller = self.compute_client.virtual_machines.delete(self.resource_group, self.name)
        # wait for the poller to finish
        self.get_poller_result(poller)
    except Exception as exc:
        self.fail("Error deleting virtual machine {0} - {1}".format(self.name, str(exc)))

    # TODO: parallelize nic, vhd, and public ip deletions with begin_deleting
    # TODO: best-effort to keep deleting other linked resources if we encounter an error
    if self.remove_on_absent.intersection(set(['all', 'virtual_storage'])):
        self.log('Deleting VHDs')
        self.delete_vm_storage(vhd_uris)
        self.log('Deleting managed disks')
        self.delete_managed_disks(managed_disk_ids)

    if self.remove_on_absent.intersection(set(['all', 'network_interfaces'])):
        self.log('Deleting network interfaces')
        for name in nic_names:
            self.delete_nic(name)

    if self.remove_on_absent.intersection(set(['all', 'public_ips'])):
        self.log('Deleting public IPs')
        for name in pip_names:
            self.delete_pip(name)
    return True
def get_network_interface(self, name):
    '''
    Fetch a network interface by name from this module's resource group.

    :param name: NIC name
    :return: NetworkInterface object (fails the module on any error)
    '''
    try:
        return self.network_client.network_interfaces.get(self.resource_group, name)
    except Exception as exc:
        self.fail("Error fetching network interface {0} - {1}".format(name, str(exc)))
def delete_nic(self, name):
    '''
    Delete a network interface by name, blocking until the operation
    completes.

    :param name: NIC name within self.resource_group
    :return: True (fails the module on any error)
    '''
    self.log("Deleting network interface {0}".format(name))
    self.results['actions'].append("Deleted network interface {0}".format(name))
    try:
        poller = self.network_client.network_interfaces.delete(self.resource_group, name)
        # Poll inside the try so failures of the long-running delete are
        # reported via self.fail as well, matching delete_pip. Previously
        # get_poller_result was outside the try and its exceptions escaped
        # the module's error handling.
        self.get_poller_result(poller)
    except Exception as exc:
        self.fail("Error deleting network interface {0} - {1}".format(name, str(exc)))
    # Delete doesn't return anything. If we get this far, assume success
    return True
def delete_pip(self, name):
    '''
    Delete a public IP address by name, blocking until the operation
    completes.

    :param name: public IP name within self.resource_group
    :return: True (fails the module on any error)
    '''
    self.results['actions'].append("Deleted public IP {0}".format(name))
    try:
        self.get_poller_result(
            self.network_client.public_ip_addresses.delete(self.resource_group, name))
    except Exception as exc:
        self.fail("Error deleting {0} - {1}".format(name, str(exc)))
    # Delete returns nada. If we get here, assume that all is well.
    return True
def delete_managed_disks(self, managed_disk_ids):
    '''
    Delete each managed disk by its full resource ID via the generic
    resource-management client.

    :param managed_disk_ids: iterable of managed disk resource IDs
    '''
    for disk_id in managed_disk_ids:
        try:
            self.get_poller_result(
                self.rm_client.resources.delete_by_id(disk_id, '2017-03-30'))
        except Exception as exc:
            self.fail("Error deleting managed disk {0} - {1}".format(disk_id, str(exc)))
def delete_vm_storage(self, vhd_uris):
    '''
    Delete the blobs backing a set of VHD URIs.

    Each URI is parsed into (account, container, blob) and the blob is
    deleted through a blob client for that storage account.

    :param vhd_uris: iterable of VHD blob URIs
    '''
    # FUTURE: figure out a cloud_env indepdendent way to delete these
    for blob_uri in vhd_uris:
        self.log("Extracting info from blob uri '{0}'".format(blob_uri))
        try:
            blob_parts = extract_names_from_blob_uri(blob_uri, self._cloud_environment.suffixes.storage_endpoint)
        except Exception as exc:
            self.fail("Error parsing blob URI {0}".format(str(exc)))

        account_name = blob_parts['accountname']
        container = blob_parts['containername']
        blob = blob_parts['blobname']

        blob_client = self.get_blob_client(self.resource_group, account_name)
        self.log("Delete blob {0}:{1}".format(container, blob))
        self.results['actions'].append("Deleted blob {0}:{1}".format(container, blob))
        try:
            blob_client.delete_blob(container, blob)
        except Exception as exc:
            self.fail("Error deleting blob {0}:{1} - {2}".format(container, blob, str(exc)))
def get_marketplace_image_version(self):
    '''
    Resolve self.image (publisher/offer/sku/version) to a concrete
    marketplace image version object.

    When self.image['version'] is 'latest' the last entry of the listing is
    returned (assumed to be the newest — this matches the original
    behavior); otherwise the exact matching version is returned. Fails the
    module if the image or the requested version cannot be found.
    '''
    try:
        versions = self.compute_client.virtual_machine_images.list(self.location,
                                                                   self.image['publisher'],
                                                                   self.image['offer'],
                                                                   self.image['sku'])
    except Exception as exc:
        self.fail("Error fetching image {0} {1} {2} - {3}".format(self.image['publisher'],
                                                                  self.image['offer'],
                                                                  self.image['sku'],
                                                                  str(exc)))
    # A non-empty list is truthy; the extra len() check was redundant.
    if versions:
        if self.image['version'] == 'latest':
            return versions[-1]
        for version in versions:
            if version.name == self.image['version']:
                return version

    self.fail("Error could not find image {0} {1} {2} {3}".format(self.image['publisher'],
                                                                  self.image['offer'],
                                                                  self.image['sku'],
                                                                  self.image['version']))
def get_custom_image_reference(self, name, resource_group=None):
    '''
    Look up a custom image by name and return an ImageReference to it.

    :param name: image name to match
    :param resource_group: restrict the search to this resource group;
        search the whole subscription when None
    :return: ImageReference (fails the module if not found)
    '''
    try:
        if resource_group:
            vm_images = self.compute_client.images.list_by_resource_group(resource_group)
        else:
            vm_images = self.compute_client.images.list()
    except Exception as exc:
        self.fail("Error fetching custom images from subscription - {0}".format(str(exc)))

    for vm_image in vm_images:
        if vm_image.name == name:
            self.log("Using custom image id {0}".format(vm_image.id))
            return self.compute_models.ImageReference(id=vm_image.id)
    else:
        self.fail("Error could not find image with name {0}".format(name))
def get_availability_set(self, resource_group, name):
    '''
    Fetch an availability set by resource group and name.

    :return: AvailabilitySet object (fails the module on any error)
    '''
    try:
        return self.compute_client.availability_sets.get(resource_group, name)
    except Exception as exc:
        self.fail("Error fetching availability set {0} - {1}".format(name, str(exc)))
def get_storage_account(self, name):
    '''
    Fetch a storage account's properties from this module's resource group.

    :param name: storage account name
    :return: StorageAccount object (fails the module on any error)
    '''
    try:
        return self.storage_client.storage_accounts.get_properties(self.resource_group,
                                                                   name)
    except Exception as exc:
        self.fail("Error fetching storage account {0} - {1}".format(name, str(exc)))
def create_or_update_vm(self, params):
    '''
    Create or update this module's VM with the given parameters, blocking
    until the long-running operation completes.

    :param params: VirtualMachine model describing the desired state
    '''
    try:
        self.get_poller_result(
            self.compute_client.virtual_machines.create_or_update(self.resource_group, self.name, params))
    except Exception as exc:
        self.fail("Error creating or updating virtual machine {0} - {1}".format(self.name, str(exc)))
def vm_size_is_valid(self):
    '''
    Validate self.vm_size against the list of virtual machine sizes available for the account and location.

    :return: boolean
    '''
    try:
        sizes = self.compute_client.virtual_machine_sizes.list(self.location)
    except Exception as exc:
        self.fail("Error retrieving available machine sizes - {0}".format(str(exc)))
    return any(size.name == self.vm_size for size in sizes)
def create_default_storage_account(self):
    '''
    Create a default storage account <vm name>XXXX, where XXXX is a random number. If <vm name>XXXX exists, use it.
    Otherwise, create one.
    :return: storage account object
    '''
    # Candidate names: first 20 chars of the VM name, lowercased and
    # stripped to alphanumerics, plus a random 4-digit suffix. Up to five
    # suffixes are tried before giving up.
    name_base = re.sub('[^a-zA-Z0-9]', '', self.name[:20].lower())
    storage_account_name = None
    for _ in range(5):
        candidate = name_base + str(random.randrange(1000, 9999))
        if self.check_storage_account_name(candidate):
            storage_account_name = candidate
            break

    if storage_account_name is None:
        self.fail("Failed to create a unique storage account name for {0}. Try using a different VM name."
                  .format(self.name))

    # Reuse the account if it already exists (CloudError here means "not
    # found", so creation proceeds below).
    account = None
    try:
        account = self.storage_client.storage_accounts.get_properties(self.resource_group, storage_account_name)
    except CloudError:
        pass

    if account:
        self.log("Storage account {0} found.".format(storage_account_name))
        self.check_provisioning_state(account)
        return account

    sku = self.storage_models.Sku(self.storage_models.SkuName.standard_lrs)
    sku.tier = self.storage_models.SkuTier.standard
    kind = self.storage_models.Kind.storage
    parameters = self.storage_models.StorageAccountCreateParameters(sku, kind, self.location)
    self.log("Creating storage account {0} in location {1}".format(storage_account_name, self.location))
    self.results['actions'].append("Created storage account {0}".format(storage_account_name))
    try:
        self.get_poller_result(
            self.storage_client.storage_accounts.create(self.resource_group, storage_account_name, parameters))
    except Exception as exc:
        self.fail("Failed to create storage account: {0} - {1}".format(storage_account_name, str(exc)))
    return self.get_storage_account(storage_account_name)
def check_storage_account_name(self, name):
    '''
    Check whether *name* is available as a storage account name.

    :param name: candidate storage account name
    :return: bool, True if the name is available
    '''
    self.log("Checking storage account name availability for {0}".format(name))
    try:
        response = self.storage_client.storage_accounts.check_name_availability(name)
        # An invalid name is escalated to a hard failure rather than being
        # treated as merely "unavailable".
        if response.reason == 'AccountNameInvalid':
            raise Exception("Invalid default storage account name: {0}".format(name))
    except Exception as exc:
        self.fail("Error checking storage account name availability for {0} - {1}".format(name, str(exc)))
    # NOTE(review): reaching this line relies on self.fail never returning;
    # otherwise 'response' could be unbound after a service error.
    return response.name_available
def create_default_nic(self):
    '''
    Create a default Network Interface <vm name>01. Requires an existing virtual network
    with one subnet. If NIC <vm name>01 exists, use it. Otherwise, create one.
    :return: NIC object
    '''
    network_interface_name = self.name + '01'
    nic = None

    self.log("Create default NIC {0}".format(network_interface_name))
    self.log("Check to see if NIC {0} exists".format(network_interface_name))
    try:
        nic = self.network_client.network_interfaces.get(self.resource_group, network_interface_name)
    except CloudError:
        # Not found: fall through and create it below.
        pass

    if nic:
        self.log("NIC {0} found.".format(network_interface_name))
        self.check_provisioning_state(nic)
        return nic

    self.log("NIC {0} does not exist.".format(network_interface_name))

    # The virtual network may live in a different resource group than the VM.
    virtual_network_resource_group = None
    if self.virtual_network_resource_group:
        virtual_network_resource_group = self.virtual_network_resource_group
    else:
        virtual_network_resource_group = self.resource_group

    # Resolve the virtual network: use the configured name if given,
    # otherwise pick the first vnet found in the resource group.
    if self.virtual_network_name:
        try:
            # NOTE(review): this list() call appears intended only to verify
            # the vnet exists; its result is discarded.
            self.network_client.virtual_networks.list(virtual_network_resource_group, self.virtual_network_name)
            virtual_network_name = self.virtual_network_name
        except CloudError as exc:
            self.fail("Error: fetching virtual network {0} - {1}".format(self.virtual_network_name, str(exc)))
    else:
        # Find a virtual network
        no_vnets_msg = "Error: unable to find virtual network in resource group {0}. A virtual network " \
                       "with at least one subnet must exist in order to create a NIC for the virtual " \
                       "machine.".format(virtual_network_resource_group)

        virtual_network_name = None
        try:
            vnets = self.network_client.virtual_networks.list(virtual_network_resource_group)
        except CloudError:
            self.log('cloud error!')
            self.fail(no_vnets_msg)

        # Take the first vnet returned.
        for vnet in vnets:
            virtual_network_name = vnet.name
            self.log('vnet name: {0}'.format(vnet.name))
            break

        if not virtual_network_name:
            self.fail(no_vnets_msg)

    # Resolve the subnet the same way: configured name, else first found.
    if self.subnet_name:
        try:
            subnet = self.network_client.subnets.get(virtual_network_resource_group, virtual_network_name, self.subnet_name)
            subnet_id = subnet.id
        except Exception as exc:
            self.fail("Error: fetching subnet {0} - {1}".format(self.subnet_name, str(exc)))
    else:
        no_subnets_msg = "Error: unable to find a subnet in virtual network {0}. A virtual network " \
                         "with at least one subnet must exist in order to create a NIC for the virtual " \
                         "machine.".format(virtual_network_name)

        subnet_id = None
        try:
            subnets = self.network_client.subnets.list(virtual_network_resource_group, virtual_network_name)
        except CloudError:
            self.fail(no_subnets_msg)

        for subnet in subnets:
            subnet_id = subnet.id
            self.log('subnet id: {0}'.format(subnet_id))
            break

        if not subnet_id:
            self.fail(no_subnets_msg)

    # Optionally create a default public IP, then a default security group,
    # and wire both into the NIC's single 'default' ip configuration.
    pip = None
    if self.public_ip_allocation_method != 'Disabled':
        self.results['actions'].append('Created default public IP {0}'.format(self.name + '01'))
        pip_info = self.create_default_pip(self.resource_group, self.location, self.name + '01', self.public_ip_allocation_method)
        pip = self.network_models.PublicIPAddress(id=pip_info.id, location=pip_info.location, resource_guid=pip_info.resource_guid)

    self.results['actions'].append('Created default security group {0}'.format(self.name + '01'))
    group = self.create_default_securitygroup(self.resource_group, self.location, self.name + '01', self.os_type,
                                              self.open_ports)

    parameters = self.network_models.NetworkInterface(
        location=self.location,
        ip_configurations=[
            self.network_models.NetworkInterfaceIPConfiguration(
                private_ip_allocation_method='Dynamic',
            )
        ]
    )
    parameters.ip_configurations[0].subnet = self.network_models.Subnet(id=subnet_id)
    parameters.ip_configurations[0].name = 'default'
    parameters.network_security_group = self.network_models.NetworkSecurityGroup(id=group.id,
                                                                                 location=group.location,
                                                                                 resource_guid=group.resource_guid)
    parameters.ip_configurations[0].public_ip_address = pip

    self.log("Creating NIC {0}".format(network_interface_name))
    self.log(self.serialize_obj(parameters, 'NetworkInterface'), pretty_print=True)
    self.results['actions'].append("Created NIC {0}".format(network_interface_name))
    try:
        poller = self.network_client.network_interfaces.create_or_update(self.resource_group,
                                                                         network_interface_name,
                                                                         parameters)
        new_nic = self.get_poller_result(poller)
    except Exception as exc:
        self.fail("Error creating network interface {0} - {1}".format(network_interface_name, str(exc)))
    return new_nic
def main():
    # Instantiating the module class presumably runs the whole module
    # lifecycle (argument parsing and execution) from its __init__ — the
    # usual Ansible/Azure module pattern; confirm against the base class.
    AzureRMVirtualMachine()


if __name__ == '__main__':
    main()
| gpl-3.0 |
redhat-cip/tempest | tempest/api/identity/admin/v3/test_projects.py | 9 | 7335 | # Copyright 2013 OpenStack, LLC
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.identity import base
from tempest.common.utils import data_utils
from tempest import test
class ProjectsTestJSON(base.BaseIdentityV3AdminTest):
    """Admin tests for Keystone v3 project CRUD and user association.

    Each test creates a project (appended to self.data.projects for
    cleanup by the base class) and verifies both the create/update
    response body and a follow-up GET agree on the attribute under test.
    """

    @test.idempotent_id('0ecf465c-0dc4-4532-ab53-91ffeb74d12d')
    def test_project_create_with_description(self):
        # Create project with a description
        project_name = data_utils.rand_name('project')
        project_desc = data_utils.rand_name('desc')
        project = self.client.create_project(
            project_name, description=project_desc)
        self.data.projects.append(project)
        project_id = project['id']
        desc1 = project['description']
        self.assertEqual(desc1, project_desc, 'Description should have '
                         'been sent in response for create')
        body = self.client.get_project(project_id)
        desc2 = body['description']
        self.assertEqual(desc2, project_desc, 'Description does not appear'
                         'to be set')

    @test.idempotent_id('5f50fe07-8166-430b-a882-3b2ee0abe26f')
    def test_project_create_with_domain(self):
        # Create project with a domain
        self.data.setup_test_domain()
        project_name = data_utils.rand_name('project')
        project = self.client.create_project(
            project_name, domain_id=self.data.domain['id'])
        self.data.projects.append(project)
        project_id = project['id']
        self.assertEqual(project_name, project['name'])
        self.assertEqual(self.data.domain['id'], project['domain_id'])
        body = self.client.get_project(project_id)
        self.assertEqual(project_name, body['name'])
        self.assertEqual(self.data.domain['id'], body['domain_id'])

    @test.idempotent_id('1f66dc76-50cc-4741-a200-af984509e480')
    def test_project_create_enabled(self):
        # Create a project that is enabled
        project_name = data_utils.rand_name('project')
        project = self.client.create_project(
            project_name, enabled=True)
        self.data.projects.append(project)
        project_id = project['id']
        en1 = project['enabled']
        self.assertTrue(en1, 'Enable should be True in response')
        body = self.client.get_project(project_id)
        en2 = body['enabled']
        self.assertTrue(en2, 'Enable should be True in lookup')

    @test.idempotent_id('78f96a9c-e0e0-4ee6-a3ba-fbf6dfd03207')
    def test_project_create_not_enabled(self):
        # Create a project that is not enabled
        project_name = data_utils.rand_name('project')
        project = self.client.create_project(
            project_name, enabled=False)
        self.data.projects.append(project)
        en1 = project['enabled']
        # Comparing str(...).lower() tolerates both a bool False and the
        # string 'false' in the response body — keep this form.
        self.assertEqual('false', str(en1).lower(),
                         'Enable should be False in response')
        body = self.client.get_project(project['id'])
        en2 = body['enabled']
        self.assertEqual('false', str(en2).lower(),
                         'Enable should be False in lookup')

    @test.idempotent_id('f608f368-048c-496b-ad63-d286c26dab6b')
    def test_project_update_name(self):
        # Update name attribute of a project
        p_name1 = data_utils.rand_name('project')
        project = self.client.create_project(p_name1)
        self.data.projects.append(project)
        resp1_name = project['name']

        p_name2 = data_utils.rand_name('project2')
        body = self.client.update_project(project['id'], name=p_name2)
        resp2_name = body['name']
        self.assertNotEqual(resp1_name, resp2_name)

        body = self.client.get_project(project['id'])
        resp3_name = body['name']

        # Old name differs from the stored name; update response and GET agree.
        self.assertNotEqual(resp1_name, resp3_name)
        self.assertEqual(p_name1, resp1_name)
        self.assertEqual(resp2_name, resp3_name)

    @test.idempotent_id('f138b715-255e-4a7d-871d-351e1ef2e153')
    def test_project_update_desc(self):
        # Update description attribute of a project
        p_name = data_utils.rand_name('project')
        p_desc = data_utils.rand_name('desc')
        project = self.client.create_project(
            p_name, description=p_desc)
        self.data.projects.append(project)
        resp1_desc = project['description']

        p_desc2 = data_utils.rand_name('desc2')
        body = self.client.update_project(
            project['id'], description=p_desc2)
        resp2_desc = body['description']
        self.assertNotEqual(resp1_desc, resp2_desc)

        body = self.client.get_project(project['id'])
        resp3_desc = body['description']

        self.assertNotEqual(resp1_desc, resp3_desc)
        self.assertEqual(p_desc, resp1_desc)
        self.assertEqual(resp2_desc, resp3_desc)

    @test.idempotent_id('b6b25683-c97f-474d-a595-55d410b68100')
    def test_project_update_enable(self):
        # Update the enabled attribute of a project
        p_name = data_utils.rand_name('project')
        p_en = False
        project = self.client.create_project(p_name, enabled=p_en)
        self.data.projects.append(project)

        resp1_en = project['enabled']

        p_en2 = True
        body = self.client.update_project(
            project['id'], enabled=p_en2)
        resp2_en = body['enabled']
        self.assertNotEqual(resp1_en, resp2_en)

        body = self.client.get_project(project['id'])
        resp3_en = body['enabled']

        self.assertNotEqual(resp1_en, resp3_en)
        self.assertEqual('false', str(resp1_en).lower())
        self.assertEqual(resp2_en, resp3_en)

    @test.idempotent_id('59398d4a-5dc5-4f86-9a4c-c26cc804d6c6')
    def test_associate_user_to_project(self):
        # Associate a user to a project
        # Create a Project
        p_name = data_utils.rand_name('project')
        project = self.client.create_project(p_name)
        self.data.projects.append(project)

        # Create a User
        u_name = data_utils.rand_name('user')
        u_desc = u_name + 'description'
        u_email = u_name + '@testmail.tm'
        u_password = data_utils.rand_name('pass')
        user = self.client.create_user(
            u_name, description=u_desc, password=u_password,
            email=u_email, project_id=project['id'])
        # Delete the User at the end of this method
        self.addCleanup(self.client.delete_user, user['id'])

        # Get User To validate the user details
        new_user_get = self.client.get_user(user['id'])
        # Assert response body of GET
        self.assertEqual(u_name, new_user_get['name'])
        self.assertEqual(u_desc, new_user_get['description'])
        self.assertEqual(project['id'],
                         new_user_get['project_id'])
        self.assertEqual(u_email, new_user_get['email'])
| apache-2.0 |
UManPychron/pychron | pychron/lasers/stage_managers/video_stage_manager.py | 2 | 36670 | # ===============================================================================
# Copyright 2011 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
import os
import shutil
import time
from threading import Timer, Event as TEvent
from apptools.preferences.preference_binding import bind_preference
from numpy import copy
from skimage.color import gray2rgb
from skimage.draw import circle_perimeter, line
# ============= enthought library imports =======================
from traits.api import Instance, String, Property, Button, Bool, Event, on_trait_change, Str, Float, Enum, Int
from pychron.canvas.canvas2D.camera import Camera, YamlCamera, BaseCamera
from pychron.core.helpers.binpack import pack, encode_blob
from pychron.core.helpers.filetools import unique_path, unique_path_from_manifest
from pychron.core.ui.stage_component_editor import VideoComponentEditor
from pychron.core.ui.thread import Thread as QThread
from pychron.core.ui.thread import sleep
from pychron.core.yaml import yload
from pychron.image.video import Video, pil_save
from pychron.mv.lumen_detector import LumenDetector
from pychron.paths import paths
from .stage_manager import StageManager
try:
from pychron.canvas.canvas2D.video_laser_tray_canvas import \
VideoLaserTrayCanvas
except ImportError:
from pychron.canvas.canvas2D.laser_tray_canvas import \
LaserTrayCanvas as VideoLaserTrayCanvas
class VideoStageManager(StageManager):
"""
"""
video = Instance(Video)
camera = Instance(BaseCamera)
canvas_editor_klass = VideoComponentEditor
camera_zoom_coefficients = Property(String(enter_set=True, auto_set=False),
depends_on='_camera_zoom_coefficients')
_camera_zoom_coefficients = String
use_auto_center_interpolation = Bool(False)
configure_camera_device_button = Button
autocenter_button = Button('AutoCenter')
configure_autocenter_button = Button('Configure')
autocenter_manager = Instance(
'pychron.mv.autocenter_manager.AutoCenterManager')
autofocus_manager = Instance(
'pychron.mv.focus.autofocus_manager.AutoFocusManager')
# zoom_calibration_manager = Instance(
# 'pychron.mv.zoom.zoom_calibration.ZoomCalibrationManager')
snapshot_button = Button('Snapshot')
snapshot_mode = Enum('Single', '3 Burst', '10 Burst')
auto_save_snapshot = Bool(True)
record = Event
record_label = Property(depends_on='is_recording')
is_recording = Bool
use_db = False
use_video_archiver = Bool(True)
video_archiver = Instance('pychron.core.helpers.archiver.Archiver')
video_identifier = Str
# use_video_server = Bool(False)
# video_server_port = Int
# video_server_quality = Int
# video_server = Instance('pychron.image.video_server.VideoServer')
use_media_storage = Bool(False)
auto_upload = Bool(False)
keep_local_copy = Bool(False)
lumen_detector = Instance(LumenDetector)
render_with_markup = Bool(False)
burst_delay = Int(250)
_auto_correcting = False
stop_timer = Event
pxpermm = Float(23)
_measure_grain_t = None
_measure_grain_evt = None
grain_polygons = None
# test_button = Button
# _test_state = False
# def _test_button_fired(self):
# if self._test_state:
# # self.stop_measure_grain_polygon()
# #
# # time.sleep(2)
# #
# # d = self.get_grain_polygon_blob()
# # print d
# self.parent.disable_laser()
# else:
# self.parent.luminosity_degas_test()
# # self.start_measure_grain_polygon()
# self._test_state = not self._test_state
def motor_event_hook(self, name, value, *args, **kw):
if name == 'zoom':
self._update_zoom(value)
def bind_preferences(self, pref_id):
self.debug('binding preferences')
super(VideoStageManager, self).bind_preferences(pref_id)
if self.autocenter_manager:
self.autocenter_manager.bind_preferences(pref_id)
# bind_preference(self.autocenter_manager, 'use_autocenter',
# '{}.use_autocenter'.format(pref_id))
bind_preference(self, 'render_with_markup',
'{}.render_with_markup'.format(pref_id))
bind_preference(self, 'burst_delay',
'{}.burst_delay'.format(pref_id))
bind_preference(self, 'auto_upload', '{}.auto_upload'.format(pref_id))
bind_preference(self, 'use_media_storage', '{}.use_media_storage'.format(pref_id))
bind_preference(self, 'keep_local_copy', '{}.keep_local_copy'.format(pref_id))
bind_preference(self, 'use_video_archiver',
'{}.use_video_archiver'.format(pref_id))
bind_preference(self, 'video_identifier',
'{}.video_identifier'.format(pref_id))
bind_preference(self, 'use_video_server',
'{}.use_video_server'.format(pref_id))
bind_preference(self.video_archiver, 'archive_months',
'{}.video_archive_months'.format(pref_id))
bind_preference(self.video_archiver, 'archive_days',
'{}.video_archive_days'.format(pref_id))
bind_preference(self.video_archiver, 'archive_hours',
'{}.video_archive_hours'.format(pref_id))
bind_preference(self.video_archiver, 'root',
'{}.video_directory'.format(pref_id))
# bind_preference(self.video, 'output_mode',
# '{}.video_output_mode'.format(pref_id))
# bind_preference(self.video, 'ffmpeg_path',
# '{}.ffmpeg_path'.format(pref_id))
def get_grain_polygon(self):
ld = self.lumen_detector
l, m = ld.lum()
return m.tostring()
    def get_grain_polygon_blob(self):
        """Return the next measured grain polygon as an encoded binary blob.

        ``self.grain_polygons`` is a generator of ``(timestamp, mask_dim_mm,
        points)`` tuples produced by ``start_measure_grain_polygon``.  The
        timestamp/dimension pair is packed as two floats, the polygon points
        as unsigned shorts, and the concatenation is blob-encoded.

        Returns None (implicitly) when the generator is exhausted
        (StopIteration) or was never created, i.e. ``grain_polygons`` is
        None (TypeError).
        """
        # self.debug('Get grain polygons n={}'.format(len(self.grain_polygons)))
        try:
            t, md, p = next(self.grain_polygons)
            a = pack('ff', ((t, md),))
            b = pack('HH', p)
            return encode_blob(a + b)
        except (StopIteration, TypeError) as e:
            self.debug('No more grain polygons. {}'.format(e))
def stop_measure_grain_polygon(self):
self.debug('Stop measure polygons {}'.format(self._measure_grain_evt))
if self._measure_grain_evt:
self._measure_grain_evt.set()
return True
def start_measure_grain_polygon(self):
self._measure_grain_evt = evt = TEvent()
def _measure_grain_polygon():
ld = self.lumen_detector
dim = self.stage_map.g_dimension
ld.pxpermm = self.pxpermm
self.debug('Starting measure grain polygon')
masks = []
display_image = self.autocenter_manager.display_image
mask_dim = dim * 1.05
mask_dim_mm = mask_dim * self.pxpermm
ld.grain_measuring = True
while not evt.is_set():
src = self._get_preprocessed_src()
if src is not None:
targets = ld.find_targets(display_image, src, dim, mask=mask_dim,
search={'start_offset_scalar': 1})
if targets:
t = time.time()
targets = [(t, mask_dim_mm, ti.poly_points.tolist()) for ti in targets]
masks.extend(targets)
sleep(0.1)
ld.grain_measuring = False
self.grain_polygons = (m for m in masks)
self.debug('exiting measure grain')
self._measure_grain_t = QThread(target=_measure_grain_polygon)
self._measure_grain_t.start()
return True
def start_recording(self, path=None, use_dialog=False, basename='vm_recording', **kw):
"""
"""
directory = None
if os.path.sep in basename:
args = os.path.split(basename)
directory, basename = os.path.sep.join(args[:-1]), args[-1]
if path is None:
if use_dialog:
path = self.save_file_dialog()
else:
vd = self.video_archiver.root
self.debug('video archiver root {}'.format(vd))
if not vd:
vd = paths.video_dir
if directory:
vd = os.path.join(vd, directory)
if not os.path.isdir(vd):
os.mkdir(vd)
path = unique_path_from_manifest(vd, basename, extension='avi')
kw['path'] = path
kw['basename'] = basename
self._start_recording(**kw)
self.is_recording = True
return path
    def stop_recording(self, user='remote', delay=None):
        """Stop video recording, optionally after ``delay`` seconds.

        Returns the local (or uploaded) output path when stopped
        synchronously; when ``delay`` is given the stop runs on a Timer
        and None is returned immediately.
        """

        def close():
            # finalize the recording and optionally upload the result;
            # returns the local path, or the remote path after upload
            self.is_recording = False
            self.info('stop video recording')
            p = self.video.output_path
            if self.video.stop_recording(wait=True):
                if self.auto_upload:
                    try:
                        p = self._upload(p, inform=False)
                    except BaseException as e:
                        # best-effort upload: keep the local path on failure
                        self.critical('Failed uploading {}. error={}'.format(p, e))
            return p

        if self.video.is_recording():
            if delay:
                t = Timer(delay, close)
                t.start()
            else:
                return close()
@property
def video_configuration_path(self):
if self.configuration_dir_path:
return os.path.join(self.configuration_dir_path, 'camera.yaml')
def initialize_video(self):
if self.video:
identifier = 0
p = self.video_configuration_path
if os.path.isfile(p):
yd = yload(p)
vid = yd['Device']
identifier = vid.get('identifier', 0)
self.video.open(identifier=identifier)
self.video.load_configuration(p)
self.lumen_detector.pixel_depth = self.video.pixel_depth
def initialize_stage(self):
super(VideoStageManager, self).initialize_stage()
self.initialize_video()
# s = self.stage_controller
# if s.axes:
# xa = s.axes['x'].drive_ratio
# ya = s.axes['y'].drive_ratio
# self._drive_xratio = xa
# self._drive_yratio = ya
self._update_zoom(0)
def autocenter(self, *args, **kw):
return self._autocenter(*args, **kw)
def snapshot(self, path=None, name=None, auto=False, inform=True, return_blob=False,
pic_format='.jpg', include_raw=True):
"""
path: abs path to use
name: base name to use if auto saving in default dir
auto: force auto save
returns:
path: local abs path
upath: remote abs path
"""
if path is None:
if self.auto_save_snapshot or auto:
if name is None:
name = 'snapshot'
path = unique_path_from_manifest(paths.snapshot_dir, name, pic_format)
elif name is not None:
if not os.path.isdir(os.path.dirname(name)):
path = unique_path_from_manifest(paths.snapshot_dir, name, pic_format)
else:
path = name
else:
path = self.save_file_dialog()
if path:
self.info('saving snapshot {}'.format(path))
# play camera shutter sound
# play_sound('shutter')
if include_raw:
frame = self.video.get_cached_frame()
head, _ = os.path.splitext(path)
raw_path = '{}.tif'.format(head)
pil_save(frame, raw_path)
self._render_snapshot(path)
if self.auto_upload:
if include_raw:
self._upload(raw_path)
upath = self._upload(path, inform=inform)
if upath is None:
upath = ''
if inform:
if self.keep_local_copy:
self.information_dialog('Snapshot saved: "{}".\nUploaded : "{}"'.format(path, upath))
else:
self.information_dialog('Snapshot uploaded to "{}"'.format(upath))
else:
upath = None
if inform:
self.information_dialog('Snapshot saved to "{}"'.format(path))
if return_blob:
with open(path, 'rb') as rfile:
im = rfile.read()
return path, upath, im
else:
return path, upath
def kill(self):
"""
"""
super(VideoStageManager, self).kill()
if self.camera:
self.camera.save_calibration()
self.stop_timer = True
self.canvas.close_video()
if self.video:
self.video.close(force=True)
# if self.use_video_server:
# self.video_server.stop()
# if self._stage_maps:
# for s in self._stage_maps:
# s.dump_correction_file()
self.clean_video_archive()
def clean_video_archive(self):
if self.use_video_archiver:
self.info('Cleaning video directory')
self.video_archiver.clean(('manifest.yaml',))
    def is_auto_correcting(self):
        """Return True while a move-to-hole autocenter correction is running."""
        return self._auto_correcting
crop_width = 5
crop_height = 5
def get_scores(self, **kw):
ld = self.lumen_detector
src = self._get_preprocessed_src()
return ld.get_scores(src, **kw)
def find_lum_peak(self, min_distance, blur, **kw):
ld = self.lumen_detector
src = self._get_preprocessed_src()
dim = self.stage_map.g_dimension
mask_dim = dim * 1.05
# mask_dim_mm = mask_dim * self.pxpermm
if src is not None and src.ndim >= 2:
return ld.find_lum_peak(src, dim, mask_dim,
blur=blur,
min_distance=min_distance, **kw)
def get_brightness(self, **kw):
ld = self.lumen_detector
src = self._get_preprocessed_src()
dim = self.stage_map.g_dimension
return ld.get_value(src, dim, **kw)
# src = self.video.get_cached_frame()
# csrc = copy(src)
# src, v = ld.get_value(csrc, **kw)
# return csrc, src, v
def get_frame_size(self):
cw = 2 * self.crop_width * self.pxpermm
ch = 2 * self.crop_height * self.pxpermm
return cw, ch
def close_open_images(self):
if self.autocenter_manager:
self.autocenter_manager.close_open_images()
def finish_move_to_hole(self, user_entry):
self.debug('finish move to hole')
# if user_entry and not self.keep_images_open:
# self.close_open_images()
def get_preprocessed_src(self):
return self._get_preprocessed_src()
# private
    def _get_preprocessed_src(self):
        """Grab the latest video frame and crop it around the current hole.

        Returns a copy of the cached frame cropped to 2.5x the hole
        dimension using the canvas screen offset, or None when no frame
        is available.
        """
        ld = self.lumen_detector
        # copy so downstream detector mutations don't touch the cached frame
        src = copy(self.video.get_cached_frame())
        dim = self.stage_map.g_dimension
        ld.pxpermm = self.pxpermm
        offx, offy = self.canvas.get_screen_offset()
        cropdim = dim * 2.5
        if src is not None:
            # len(src.shape) guards against empty/zero-dimensional frames
            if len(src.shape):
                src = ld.crop(src, cropdim, cropdim, offx, offy, verbose=False)
            return src
def _stage_map_changed_hook(self):
self.lumen_detector.hole_radius = self.stage_map.g_dimension
    def _upload(self, src, inform=True):
        """Push ``src`` to the configured media storage service.

        Returns the remote URL on success; otherwise None (media storage
        disabled in preferences, or the plugin service is missing).  After
        a successful put, the local file (and for .avi files the sibling
        '<name>-images' build directory) is deleted unless
        ``keep_local_copy`` is set.
        """
        if not self.use_media_storage:
            msg = 'Use Media Storage not enabled in Laser preferences'
            if inform:
                self.warning_dialog(msg)
            else:
                self.warning(msg)
        else:
            srv = 'pychron.media_storage.manager.MediaStorageManager'
            msm = self.parent.application.get_service(srv)
            if msm is not None:
                # remote layout: <laser name>/<parent dir of src>/<filename>
                d = os.path.split(os.path.dirname(src))[-1]
                dest = os.path.join(self.parent.name, d,
                                    os.path.basename(src))
                msm.put(src, dest)

                if not self.keep_local_copy:
                    self.debug('removing {}'.format(src))
                    if src.endswith('.avi'):
                        # recordings keep their frames in '<name>-images'
                        head, ext = os.path.splitext(src)
                        vd = '{}-images'.format(head)
                        self.debug('removing video build directory {}'.format(vd))
                        shutil.rmtree(vd)
                    os.remove(src)

                dest = '{}/{}'.format(msm.get_base_url(), dest)
                return dest
            else:
                msg = 'Media Storage Plugin not enabled'
                if inform:
                    self.warning_dialog(msg)
                else:
                    self.warning(msg)
def _render_snapshot(self, path):
from chaco.plot_graphics_context import PlotGraphicsContext
c = self.canvas
p = None
was_visible = False
if not self.render_with_markup:
p = c.show_laser_position
c.show_laser_position = False
if self.points_programmer.is_visible:
c.hide_all()
was_visible = True
gc = PlotGraphicsContext((int(c.outer_width), int(c.outer_height)))
c.do_layout()
gc.render_component(c)
# gc.save(path)
from pychron.core.helpers import save_gc
save_gc.save(gc, path)
if p is not None:
c.show_laser_position = p
if was_visible:
c.show_all()
def _start_recording(self, path, basename):
self.info('start video recording {}'.format(path))
d = os.path.dirname(path)
if not os.path.isdir(d):
self.warning('invalid directory {}'.format(d))
self.warning('using default directory')
path, _ = unique_path(paths.video_dir, basename,
extension='avi')
self.info('saving recording to path {}'.format(path))
# if self.use_db:
# db = self.get_video_database()
# db.connect()
#
# v = db.add_video_record(rid=basename)
# db.add_path(v, path)
# self.info('saving {} to database'.format(basename))
# db.commit()
video = self.video
crop_to_hole = True
dim = self.stage_map.g_dimension
cropdim = dim * 8 * self.pxpermm
color = self.canvas.crosshairs_color.getRgb()[:3]
r = int(self.canvas.get_crosshairs_radius() * self.pxpermm)
# offx, offy = self.canvas.get_screen_offset()
def renderer(p):
# cw, ch = self.get_frame_size()
frame = video.get_cached_frame()
if frame is not None:
if not len(frame.shape):
return
frame = copy(frame)
# ch, cw, _ = frame.shape
# ch, cw = int(ch), int(cw)
if crop_to_hole:
frame = video.crop(frame, 0, 0, cropdim, cropdim)
if self.render_with_markup:
# draw crosshairs
if len(frame.shape) == 2:
frame = gray2rgb(frame)
ch, cw, _ = frame.shape
ch, cw = int(ch), int(cw)
y = ch // 2
x = cw // 2
cp = circle_perimeter(y, x, r, shape=(ch, cw))
frame[cp] = color
frame[line(y, 0, y, x - r)] = color # left
frame[line(y, x + r, y, int(cw) - 1)] = color # right
frame[line(0, x, y - r, x)] = color # bottom
frame[line(y + r, x, int(ch) - 1, x)] = color # top
if frame is not None:
pil_save(frame, p)
self.video.start_recording(path, renderer)
def _move_to_hole_hook(self, holenum, correct, autocentered_position):
args = holenum, correct, autocentered_position
self.debug('move to hole hook holenum={}, '
'correct={}, autocentered_position={}'.format(*args))
if correct:
ntries = 1 if autocentered_position else 3
self._auto_correcting = True
try:
self._autocenter(holenum=holenum, ntries=ntries, save=True)
except BaseException as e:
self.critical('Autocentering failed. {}'.format(e))
self._auto_correcting = False
# def find_center(self):
# ox, oy = self.canvas.get_screen_offset()
# rpos, src = self.autocenter_manager.calculate_new_center(
# self.stage_controller.x,
# self.stage_controller.y,
# ox, oy,
# dim=self.stage_map.g_dimension, open_image=False)
#
# return rpos, src
# def find_target(self):
# if self.video:
# ox, oy = self.canvas.get_screen_offset()
# src = self.video.get_cached_frame()
#
# ch = cw = self.pxpermm * self.stage_map.g_dimension * 2.5
# src = self.video.crop(src, ox, oy, cw, ch)
# return self.lumen_detector.find_target(src)
#
# def find_best_target(self):
# if self.video:
# src = self.video.get_cached_frame()
# src = self.autocenter_manager.crop(src)
# return self.lumen_detector.find_best_target(src)
def _autocenter(self, holenum=None, ntries=3, save=False, inform=False):
self.debug('do autocenter')
rpos = None
interp = False
sm = self.stage_map
st = time.time()
if self.autocenter_manager.use_autocenter:
time.sleep(0.1)
dim = sm.g_dimension
shape = sm.g_shape
if holenum is not None:
hole = sm.get_hole(holenum)
if hole is not None:
dim = hole.dimension
shape = hole.shape
ox, oy = self.canvas.get_screen_offset()
for ti in range(max(1, ntries)):
# use machine vision to calculate positioning error
rpos = self.autocenter_manager.calculate_new_center(
self.stage_controller.x,
self.stage_controller.y,
ox, oy,
dim=dim,
shape=shape)
if rpos is not None:
self.linear_move(*rpos, block=True,
source='autocenter',
use_calibration=False,
update_hole=False,
velocity_scalar=0.1)
time.sleep(0.1)
else:
self.snapshot(auto=True,
name='pos_err_{}_{}'.format(holenum, ti),
inform=inform)
break
# if use_interpolation and rpos is None:
# self.info('trying to get interpolated position')
# rpos = sm.get_interpolated_position(holenum)
# if rpos:
# s = '{:0.3f},{:0.3f}'
# interp = True
# else:
# s = 'None'
# self.info('interpolated position= {}'.format(s))
if rpos:
corrected = True
# add an adjustment value to the stage map
if save and holenum is not None:
sm.set_hole_correction(holenum, *rpos)
sm.dump_correction_file()
# f = 'interpolation' if interp else 'correction'
else:
# f = 'uncorrected'
corrected = False
if holenum is not None:
hole = sm.get_hole(holenum)
if hole:
rpos = hole.nominal_position
self.debug('Autocenter duration ={}'.format(time.time() - st))
return rpos, corrected, interp
# ===============================================================================
# views
# ===============================================================================
# ===============================================================================
# view groups
# ===============================================================================
# ===============================================================================
# handlers
# ===============================================================================
def _configure_camera_device_button_fired(self):
if self.video:
self.video.load_configuration(self.video_configuration_path)
if hasattr(self.video.cap, 'reload_configuration'):
self.video.cap.reload_configuration(self.video_configuration_path)
self.lumen_detector.pixel_depth = self.video.pixel_depth
def _update_zoom(self, v):
if self.camera:
self._update_xy_limits()
@on_trait_change('parent:motor_event')
def _update_motor(self, new):
print('motor event', new, self.canvas, self.canvas.camera)
# s = self.stage_controller
if self.camera:
if not isinstance(new, (int, float)):
args, _ = new
name, v = args[:2]
else:
name = 'zoom'
v = new
if name == 'zoom':
self._update_xy_limits()
# pxpermm = self.canvas.camera.set_limits_by_zoom(v, s.x, s.y)
# self.pxpermm = pxpermm
elif name == 'beam':
self.lumen_detector.beam_radius = v / 2.0
def _pxpermm_changed(self, new):
if self.autocenter_manager:
self.autocenter_manager.pxpermm = new
self.lumen_detector.pxpermm = new
# self.lumen_detector.mask_radius = new*self.stage_map.g_dimension
def _autocenter_button_fired(self):
self.goto_position(self.calibrated_position_entry, autocenter_only=True)
# def _configure_autocenter_button_fired(self):
# info = self.autocenter_manager.edit_traits(view='configure_view',
# kind='livemodal')
# if info.result:
# self.autocenter_manager.dump_detector()
def _snapshot_button_fired(self):
n = 1
if self.snapshot_mode == '3 Burst':
n = 3
elif self.snapshot_mode == '10 Burst':
n = 10
bd = self.burst_delay * 0.001
delay = n > 1
for i in range(n):
st = time.time()
self.snapshot(inform=False)
if delay:
time.sleep(max(0, bd - time.time() + st))
def _record_fired(self):
# time.sleep(4)
# self.stop_recording()
if self.is_recording:
self.stop_recording()
else:
self.start_recording()
def _use_video_server_changed(self):
if self.use_video_server:
self.video_server.start()
else:
self.video_server.stop()
def _get_camera_zoom_coefficients(self):
return self.camera.zoom_coefficients
def _set_camera_zoom_coefficients(self, v):
self.camera.zoom_coefficients = ','.join(map(str, v))
self._update_xy_limits()
def _validate_camera_zoom_coefficients(self, v):
try:
return list(map(float, v.split(',')))
except ValueError:
pass
def _update_xy_limits(self):
z = 0
if self.parent is not None:
zoom = self.parent.get_motor('zoom')
if zoom is not None:
z = zoom.data_position
x = self.stage_controller.get_current_position('x')
y = self.stage_controller.get_current_position('y')
if self.camera:
pxpermm = self.camera.set_limits_by_zoom(z, x, y, self.canvas)
self.pxpermm = pxpermm
self.debug('updated xy limits zoom={}, pxpermm={}'.format(z, pxpermm))
self.canvas.request_redraw()
def _get_record_label(self):
return 'Start Recording' if not self.is_recording else 'Stop'
# ===============================================================================
# factories
# ===============================================================================
def _canvas_factory(self):
"""
"""
v = VideoLaserTrayCanvas(stage_manager=self,
padding=30)
return v
def _canvas_editor_factory(self):
e = super(VideoStageManager, self)._canvas_editor_factory()
e.stop_timer = 'stop_timer'
return e
# ===============================================================================
# defaults
# ===============================================================================
def _camera_default(self):
klass = YamlCamera
# p = os.path.join(self.configuration_dir_path, 'camera.yaml')
p = self.video_configuration_path
if p is not None:
if not os.path.isfile(p):
klass = Camera
pp = os.path.join(self.configuration_dir_path, 'camera.cfg')
if not os.path.isfile(pp):
self.warning_dialog('No Camera configuration file a {} or {}'.format(p, pp))
p = pp
camera = klass()
camera.load(p)
else:
camera = Camera()
camera.set_limits_by_zoom(0, 0, 0, self.canvas)
self._camera_zoom_coefficients = camera.zoom_coefficients
return camera
def _lumen_detector_default(self):
ld = LumenDetector()
ld.pixel_depth = self.video.pixel_depth
return ld
def _video_default(self):
v = Video()
self.canvas.video = v
return v
def _video_server_default(self):
from pychron.image.video_server import VideoServer
return VideoServer(video=self.video)
def _video_archiver_default(self):
from pychron.core.helpers.archiver import Archiver
return Archiver()
def _autocenter_manager_default(self):
if self.parent.mode != 'client':
# from pychron.mv.autocenter_manager import AutoCenterManager
if 'co2' in self.parent.name.lower():
from pychron.mv.autocenter_manager import CO2AutocenterManager
klass = CO2AutocenterManager
else:
from pychron.mv.autocenter_manager import DiodeAutocenterManager
klass = DiodeAutocenterManager
return klass(video=self.video,
canvas=self.canvas,
application=self.application)
def _autofocus_manager_default(self):
if self.parent.mode != 'client':
from pychron.mv.focus.autofocus_manager import AutoFocusManager
return AutoFocusManager(video=self.video,
laser_manager=self.parent,
stage_controller=self.stage_controller,
canvas=self.canvas,
application=self.application)
# def _zoom_calibration_manager_default(self):
# if self.parent.mode != 'client':
# from pychron.mv.zoom.zoom_calibration import ZoomCalibrationManager
# return ZoomCalibrationManager(laser_manager=self.parent)
# ===============================================================================
# calcualte camera params
# ===============================================================================
# def _calculate_indicator_positions(self, shift=None):
# ccm = self.camera_calibration_manager
#
# zoom = self.parent.zoom
# pychron, name = self.video_manager.snapshot(identifier=zoom)
# ccm.image_factory(pychron=pychron)
#
# ccm.process_image()
# ccm.title = name
#
# cond = Condition()
# ccm.cond = cond
# cond.acquire()
# do_later(ccm.edit_traits, view='snapshot_view')
# if shift:
# self.stage_controller.linear_move(*shift, block=False)
#
# cond.wait()
# cond.release()
#
# def _calculate_camera_parameters(self):
# ccm = self.camera_calibration_manager
# self._calculate_indicator_positions()
# if ccm.result:
# if self.calculate_offsets:
# rdxmm = 5
# rdymm = 5
#
# x = self.stage_controller.x + rdxmm
# y = self.stage_controller.y + rdymm
# self.stage_controller.linear_move(x, y, block=True)
#
# time.sleep(2)
#
# polygons1 = ccm.polygons
# x = self.stage_controller.x - rdxmm
# y = self.stage_controller.y - rdymm
# self._calculate_indicator_positions(shift=(x, y))
#
# polygons2 = ccm.polygons
#
# # compare polygon sets
# # calculate pixel displacement
# dxpx = sum([sum([(pts1.x - pts2.x)
# for pts1, pts2 in zip(p1.points, p2.points)]) / len(p1.points)
# for p1, p2 in zip(polygons1, polygons2)]) / len(polygons1)
# dypx = sum([sum([(pts1.y - pts2.y)
# for pts1, pts2 in zip(p1.points, p2.points)]) / len(p1.points)
# for p1, p2 in zip(polygons1, polygons2)]) / len(polygons1)
#
# # convert pixel displacement to mm using defined mapping
# dxmm = dxpx / self.pxpercmx
# dymm = dypx / self.pxpercmy
#
# # calculate drive offset. ratio of request/actual
# try:
# self.drive_xratio = rdxmm / dxmm
# self.drive_yratio = rdymm / dymm
# except ZeroDivisionError:
# self.drive_xratio = 100
#
# def _calibration_manager_default(self):
#
# # self.video.open(user = 'calibration')
# return CalibrationManager(parent = self,
# laser_manager = self.parent,
# video_manager = self.video_manager,
# )
# ============= EOF ====================================
# adxs = []
# adys = []
# for p1, p2 in zip(polygons, polygons2):
# # dxs = []
# # dys = []
# # for pts1, pts2 in zip(p1.points, p2.points):
# #
# # dx = pts1.x - pts2.x
# # dy = pts1.y - pts2.y
# # dxs.append(dx)
# # dys.append(dy)
# # dxs = [(pts1.x - pts2.x) for pts1, pts2 in zip(p1.points, p2.points)]
# # dys = [(pts1.y - pts2.y) for pts1, pts2 in zip(p1.points, p2.points)]
# #
# adx = sum([(pts1.x - pts2.x) for pts1, pts2 in zip(p1.points, p2.points)]) / len(p1.points)
# ady = sum([(pts1.y - pts2.y) for pts1, pts2 in zip(p1.points, p2.points)]) / len(p1.points)
#
# # adx = sum(dxs) / len(dxs)
# # ady = sum(dys) / len(dys)
# adxs.append(adx)
# adys.append(ady)
# print 'xffset', sum(adxs) / len(adxs)
# print 'yffset', sum(adys) / len(adys)
| apache-2.0 |
michaelkirk/QGIS | python/plugins/processing/gui/NumberInputDialog.py | 4 | 6245 | # -*- coding: utf-8 -*-
"""
***************************************************************************
NumberInputDialog.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from PyQt4 import uic
from PyQt4.QtGui import QDialog, QTreeWidgetItem, QMessageBox
from qgis.core import QgsRasterLayer
from qgis.utils import iface
from processing.tools import dataobjects
pluginPath = os.path.split(os.path.dirname(__file__))[0]
WIDGET, BASE = uic.loadUiType(
os.path.join(pluginPath, 'ui', 'DlgNumberInput.ui'))
class NumberInputDialog(BASE, WIDGET):
    def __init__(self, isInteger):
        """Build the dialog.

        :param isInteger: when True the evaluated result is rounded to an
            int and the integer-rounding warning label stays visible.
        """
        super(NumberInputDialog, self).__init__(None)
        self.setupUi(self)

        # setPlaceholderText only exists on sufficiently recent Qt builds
        if hasattr(self.leFormula, 'setPlaceholderText'):
            self.leFormula.setPlaceholderText(
                self.tr('[Enter your formula here]'))
        self.treeValues.doubleClicked.connect(self.addValue)

        # result of accept(); stays None until a valid formula is accepted
        self.value = None
        self.isInteger = isInteger
        if not self.isInteger:
            self.lblWarning.hide()
        self.fillTree()
    def fillTree(self):
        """Populate the values tree with three top-level sections:
        per-layer extents (plus raster cellsize), raster band statistics,
        and the current/full QGIS map canvas extents.
        """
        # -- section 1: extents of every loaded layer ----------------------
        layersItem = QTreeWidgetItem()
        layersItem.setText(0, self.tr('Values from data layers extents'))
        self.treeValues.addTopLevelItem(layersItem)
        layers = dataobjects.getAllLayers()
        for layer in layers:
            layerItem = QTreeWidgetItem()
            layerItem.setText(0, unicode(layer.name()))
            layerItem.addChild(TreeValueItem(self.tr('Min X'),
                               layer.extent().xMinimum()))
            layerItem.addChild(TreeValueItem(self.tr('Max X'),
                               layer.extent().xMaximum()))
            layerItem.addChild(TreeValueItem(self.tr('Min Y'),
                               layer.extent().yMinimum()))
            layerItem.addChild(TreeValueItem(self.tr('Max Y'),
                               layer.extent().yMaximum()))
            if isinstance(layer, QgsRasterLayer):
                # cellsize derived from extent width over pixel width
                cellsize = (layer.extent().xMaximum()
                            - layer.extent().xMinimum()) / layer.width()
                layerItem.addChild(TreeValueItem(self.tr('Cellsize'),
                                   cellsize))
            layersItem.addChild(layerItem)

        # -- section 2: per-band statistics of raster layers ---------------
        layersItem = QTreeWidgetItem()
        layersItem.setText(0, self.tr('Values from raster layers statistics'))
        self.treeValues.addTopLevelItem(layersItem)
        layers = dataobjects.getRasterLayers()
        for layer in layers:
            for i in range(layer.bandCount()):
                # band indices are 1-based in the QGIS provider API
                stats = layer.dataProvider().bandStatistics(i + 1)
                layerItem = QTreeWidgetItem()
                layerItem.setText(0, unicode(layer.name()))
                layerItem.addChild(TreeValueItem(self.tr('Mean'), stats.mean))
                layerItem.addChild(TreeValueItem(self.tr('Std. deviation'),
                                   stats.stdDev))
                layerItem.addChild(TreeValueItem(self.tr('Max value'),
                                   stats.maximumValue))
                layerItem.addChild(TreeValueItem(self.tr('Min value'),
                                   stats.minimumValue))
                layersItem.addChild(layerItem)

        # -- section 3: map canvas extents (current and full) --------------
        canvasItem = QTreeWidgetItem()
        canvasItem.setText(0, self.tr('Values from QGIS map canvas'))
        self.treeValues.addTopLevelItem(canvasItem)
        extent = iface.mapCanvas().extent()
        extentItem = QTreeWidgetItem()
        extentItem.setText(0, self.tr('Current extent'))
        extentItem.addChild(TreeValueItem(self.tr('Min X'), extent.xMinimum()))
        extentItem.addChild(TreeValueItem(self.tr('Max X'), extent.xMaximum()))
        extentItem.addChild(TreeValueItem(self.tr('Min Y'), extent.yMinimum()))
        extentItem.addChild(TreeValueItem(self.tr('Max Y'), extent.yMaximum()))
        canvasItem.addChild(extentItem)
        extent = iface.mapCanvas().fullExtent()
        extentItem = QTreeWidgetItem()
        extentItem.setText(0,
                           self.tr('Full extent of all layers in map canvas'))
        extentItem.addChild(TreeValueItem(self.tr('Min X'), extent.xMinimum()))
        extentItem.addChild(TreeValueItem(self.tr('Max X'), extent.xMaximum()))
        extentItem.addChild(TreeValueItem(self.tr('Min Y'), extent.yMinimum()))
        extentItem.addChild(TreeValueItem(self.tr('Max Y'), extent.yMaximum()))
        canvasItem.addChild(extentItem)
def addValue(self):
item = self.treeValues.currentItem()
if isinstance(item, TreeValueItem):
formula = self.leFormula.text() + ' ' + str(item.value)
self.leFormula.setText(formula.strip())
def accept(self):
try:
self.value = float(eval(str(self.leFormula.text())))
if self.isInteger:
self.value = int(round(self.value))
QDialog.accept(self)
except:
QMessageBox.critical(self, self.tr('Wrong expression'),
self.tr('The expression entered is not correct'))
    def reject(self):
        """Discard any computed value and close the dialog."""
        self.value = None
        QDialog.reject(self)
class TreeValueItem(QTreeWidgetItem):
    """Leaf tree item that carries a numeric value alongside its label."""

    def __init__(self, name, value):
        QTreeWidgetItem.__init__(self)
        self.value = value
        self.setText(0, '{}: {}'.format(name, value))
| gpl-2.0 |
99cloud/keystone_register | openstack_dashboard/dashboards/project/access_and_security/security_groups/forms.py | 1 | 12028 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core import validators
from django.core.urlresolvers import reverse
from django.forms import ValidationError
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import forms
from horizon import messages
from horizon.utils.validators import validate_port_range
from horizon.utils import fields
from openstack_dashboard import api
class CreateGroup(forms.SelfHandlingForm):
    """Form for creating a new nova security group."""

    name = forms.CharField(label=_("Name"),
                           error_messages={
                               'required': _('This field is required.'),
                               'invalid': _("The string may only contain"
                                            " ASCII characters and numbers.")},
                           validators=[validators.validate_slug])
    description = forms.CharField(label=_("Description"))

    def handle(self, request, data):
        """Create the security group via the nova API.

        Returns the new group on success; on failure shows an error
        message and redirects to the access & security index.
        """
        try:
            sg = api.nova.security_group_create(request,
                                                data['name'],
                                                data['description'])
            messages.success(request,
                             _('Successfully created security group: %s')
                             % data['name'])
            return sg
        except Exception:
            # narrowed from a bare ``except:`` which also swallowed
            # SystemExit/KeyboardInterrupt; exceptions.handle still
            # processes all API errors identically
            redirect = reverse("horizon:project:access_and_security:index")
            exceptions.handle(request,
                              _('Unable to create security group.'),
                              redirect=redirect)
class AddRule(forms.SelfHandlingForm):
    """Form for adding a single rule to an existing security group.

    The widget ``attrs`` (``switchable``/``switched``, ``data-slug``,
    ``data-switch-on``) drive Horizon's client-side show/hide behaviour:
    which port/ICMP inputs are visible depends on the selected protocol
    and the selected port-vs-range mode.
    """

    # Hidden id of the security group the rule is added to.
    id = forms.IntegerField(widget=forms.HiddenInput())
    ip_protocol = forms.ChoiceField(label=_('IP Protocol'),
                                    choices=[('tcp', _('TCP')),
                                             ('udp', _('UDP')),
                                             ('icmp', _('ICMP'))],
                                    help_text=_("The protocol which this "
                                                "rule should be applied to."),
                                    widget=forms.Select(attrs={
                                        'class': 'switchable',
                                        'data-slug': 'protocol'}))
    # Only shown for tcp/udp: choose a single port or a port range.
    port_or_range = forms.ChoiceField(label=_('Open'),
                                      choices=[('port', _('Port')),
                                               ('range', _('Port Range'))],
                                      widget=forms.Select(attrs={
                                          'class': 'switchable switched',
                                          'data-slug': 'range',
                                          'data-switch-on': 'protocol',
                                          'data-protocol-tcp': _('Open'),
                                          'data-protocol-udp': _('Open')}))
    port = forms.IntegerField(label=_("Port"),
                              required=False,
                              help_text=_("Enter an integer value "
                                          "between 1 and 65535."),
                              widget=forms.TextInput(attrs={
                                  'class': 'switched',
                                  'data-switch-on': 'range',
                                  'data-range-port': _('Port')}),
                              validators=[validate_port_range])
    from_port = forms.IntegerField(label=_("From Port"),
                                   required=False,
                                   help_text=_("Enter an integer value "
                                               "between 1 and 65535."),
                                   widget=forms.TextInput(attrs={
                                       'class': 'switched',
                                       'data-switch-on': 'range',
                                       'data-range-range': _('From Port')}),
                                   validators=[validate_port_range])
    to_port = forms.IntegerField(label=_("To Port"),
                                 required=False,
                                 help_text=_("Enter an integer value "
                                             "between 1 and 65535."),
                                 widget=forms.TextInput(attrs={
                                     'class': 'switched',
                                     'data-switch-on': 'range',
                                     'data-range-range': _('To Port')}),
                                 validators=[validate_port_range])
    # NOTE(review): ICMP type/code reuse the port-range validator even
    # though clean() enforces the (-1, 255) range itself — confirm the
    # validator accepts -1 as intended.
    icmp_type = forms.IntegerField(label=_("Type"),
                                   required=False,
                                   help_text=_("Enter a value for ICMP type "
                                               "in the range (-1: 255)"),
                                   widget=forms.TextInput(attrs={
                                       'class': 'switched',
                                       'data-switch-on': 'protocol',
                                       'data-protocol-icmp': _('Type')}),
                                   validators=[validate_port_range])
    icmp_code = forms.IntegerField(label=_("Code"),
                                   required=False,
                                   help_text=_("Enter a value for ICMP code "
                                               "in the range (-1: 255)"),
                                   widget=forms.TextInput(attrs={
                                       'class': 'switched',
                                       'data-switch-on': 'protocol',
                                       'data-protocol-icmp': _('Code')}),
                                   validators=[validate_port_range])
    source = forms.ChoiceField(label=_('Source'),
                               choices=[('cidr', _('CIDR')),
                                        ('sg', _('Security Group'))],
                               help_text=_('To specify an allowed IP '
                                           'range, select "CIDR". To '
                                           'allow access from all '
                                           'members of another security '
                                           'group select "Security '
                                           'Group".'),
                               widget=forms.Select(attrs={
                                   'class': 'switchable',
                                   'data-slug': 'source'}))
    cidr = fields.IPField(label=_("CIDR"),
                          required=False,
                          initial="0.0.0.0/0",
                          help_text=_("Classless Inter-Domain Routing "
                                      "(e.g. 192.168.0.0/24)"),
                          version=fields.IPv4 | fields.IPv6,
                          mask=True,
                          widget=forms.TextInput(
                              attrs={'class': 'switched',
                                     'data-switch-on': 'source',
                                     'data-source-cidr': _('CIDR')}))
    security_group = forms.ChoiceField(label=_('Security Group'),
                                       required=False,
                                       widget=forms.Select(attrs={
                                           'class': 'switched',
                                           'data-switch-on': 'source',
                                           'data-source-sg': _('Security '
                                                               'Group')}))

    def __init__(self, *args, **kwargs):
        # ``sg_list`` is injected by the caller (popped before the base
        # Form sees the kwargs) to populate the source-group choices.
        sg_list = kwargs.pop('sg_list', [])
        super(AddRule, self).__init__(*args, **kwargs)
        # Determine if there are security groups available for the
        # source group option; add the choices and enable the option if so.
        if sg_list:
            security_groups_choices = sg_list
        else:
            security_groups_choices = [("", _("No security groups available"))]
        self.fields['security_group'].choices = security_groups_choices

    def clean(self):
        """Cross-field validation that normalizes the rule fields.

        For ICMP, the type/code are validated against (-1, 255) and then
        folded into ``from_port``/``to_port`` (the nova API encodes ICMP
        rules that way).  For tcp/udp, either the single port or the
        from/to pair is validated and normalized into the same two keys.
        Finally, whichever of cidr/security_group was not selected as
        the source is cleared to None.
        """
        cleaned_data = super(AddRule, self).clean()
        ip_proto = cleaned_data.get('ip_protocol')
        port_or_range = cleaned_data.get("port_or_range")
        source = cleaned_data.get("source")
        icmp_type = cleaned_data.get("icmp_type", None)
        icmp_code = cleaned_data.get("icmp_code", None)
        from_port = cleaned_data.get("from_port", None)
        to_port = cleaned_data.get("to_port", None)
        port = cleaned_data.get("port", None)
        if ip_proto == 'icmp':
            if icmp_type is None:
                msg = _('The ICMP type is invalid.')
                raise ValidationError(msg)
            if icmp_code is None:
                msg = _('The ICMP code is invalid.')
                raise ValidationError(msg)
            if icmp_type not in xrange(-1, 256):
                msg = _('The ICMP type not in range (-1, 255)')
                raise ValidationError(msg)
            if icmp_code not in xrange(-1, 256):
                msg = _('The ICMP code not in range (-1, 255)')
                raise ValidationError(msg)
            # ICMP rules are transported as from_port=type, to_port=code.
            cleaned_data['from_port'] = icmp_type
            cleaned_data['to_port'] = icmp_code
        else:
            if port_or_range == "port":
                # Single-port mode: mirror the port into both endpoints.
                cleaned_data["from_port"] = port
                cleaned_data["to_port"] = port
                if port is None:
                    msg = _('The specified port is invalid.')
                    raise ValidationError(msg)
            else:
                if from_port is None:
                    msg = _('The "from" port number is invalid.')
                    raise ValidationError(msg)
                if to_port is None:
                    msg = _('The "to" port number is invalid.')
                    raise ValidationError(msg)
                if to_port < from_port:
                    msg = _('The "to" port number must be greater than '
                            'or equal to the "from" port number.')
                    raise ValidationError(msg)
        # Exactly one of cidr / security_group survives, per 'source'.
        if source == "cidr":
            cleaned_data['security_group'] = None
        else:
            cleaned_data['cidr'] = None
        return cleaned_data

    def handle(self, request, data):
        """Create the rule via nova; on failure redirect to the group detail."""
        try:
            rule = api.nova.security_group_rule_create(
                request,
                data['id'],
                data['ip_protocol'],
                data['from_port'],
                data['to_port'],
                data['cidr'],
                data['security_group'])
            messages.success(request,
                             _('Successfully added rule: %s') % unicode(rule))
            return rule
        except:
            redirect = reverse("horizon:project:access_and_security:"
                               "security_groups:detail", args=[data['id']])
            exceptions.handle(request,
                              _('Unable to add rule to security group.'),
                              redirect=redirect)
| apache-2.0 |
tvalacarta/tvalacarta | python/main-classic/lib/youtube_dl/extractor/vimeo.py | 1 | 46584 | # coding: utf-8
from __future__ import unicode_literals
import base64
import functools
import json
import re
import itertools
from .common import InfoExtractor
from ..compat import (
compat_kwargs,
compat_HTTPError,
compat_str,
compat_urlparse,
)
from ..utils import (
clean_html,
determine_ext,
dict_get,
ExtractorError,
js_to_json,
int_or_none,
merge_dicts,
OnDemandPagedList,
parse_filesize,
RegexNotFoundError,
sanitized_Request,
smuggle_url,
std_headers,
str_or_none,
try_get,
unified_timestamp,
unsmuggle_url,
urlencode_postdata,
unescapeHTML,
)
class VimeoBaseInfoExtractor(InfoExtractor):
    """Shared plumbing for all Vimeo extractors: login, password
    verification, XSRF/vuid token handling and player-config parsing."""

    _NETRC_MACHINE = 'vimeo'
    _LOGIN_REQUIRED = False
    _LOGIN_URL = 'https://vimeo.com/log_in'

    def _login(self):
        """Log in to vimeo.com with credentials from options/.netrc.

        A no-op when no credentials are configured, unless the subclass
        sets _LOGIN_REQUIRED, in which case an ExtractorError is raised.
        """
        username, password = self._get_login_info()
        if username is None:
            if self._LOGIN_REQUIRED:
                raise ExtractorError('No login info available, needed for using %s.' % self.IE_NAME, expected=True)
            return
        webpage = self._download_webpage(
            self._LOGIN_URL, None, 'Downloading login page')
        # The login form requires a per-session XSRF token and vuid cookie
        # scraped from the login page itself.
        token, vuid = self._extract_xsrft_and_vuid(webpage)
        data = {
            'action': 'login',
            'email': username,
            'password': password,
            'service': 'vimeo',
            'token': token,
        }
        self._set_vimeo_cookie('vuid', vuid)
        try:
            self._download_webpage(
                self._LOGIN_URL, None, 'Logging in',
                data=urlencode_postdata(data), headers={
                    'Content-Type': 'application/x-www-form-urlencoded',
                    'Referer': self._LOGIN_URL,
                })
        except ExtractorError as e:
            # Vimeo answers HTTP 418 for rejected credentials (per the
            # mapping below); anything else is a generic login failure.
            if isinstance(e.cause, compat_HTTPError) and e.cause.code == 418:
                raise ExtractorError(
                    'Unable to log in: bad username or password',
                    expected=True)
            raise ExtractorError('Unable to log in')

    def _verify_video_password(self, url, video_id, webpage):
        """POST the user-supplied --video-password to <url>/password.

        Returns the resulting webpage; the download helper reports
        'Wrong password' on failure.
        """
        password = self._downloader.params.get('videopassword')
        if password is None:
            raise ExtractorError('This video is protected by a password, use the --video-password option', expected=True)
        token, vuid = self._extract_xsrft_and_vuid(webpage)
        data = urlencode_postdata({
            'password': password,
            'token': token,
        })
        if url.startswith('http://'):
            # vimeo only supports https now, but the user can give an http url
            url = url.replace('http://', 'https://')
        password_request = sanitized_Request(url + '/password', data)
        password_request.add_header('Content-Type', 'application/x-www-form-urlencoded')
        password_request.add_header('Referer', url)
        self._set_vimeo_cookie('vuid', vuid)
        return self._download_webpage(
            password_request, video_id,
            'Verifying the password', 'Wrong password')

    def _extract_xsrft_and_vuid(self, webpage):
        # Scrape the XSRF token and the 'vuid' visitor id embedded in the
        # page's inline javascript; both are needed for authenticated POSTs.
        xsrft = self._search_regex(
            r'(?:(?P<q1>["\'])xsrft(?P=q1)\s*:|xsrft\s*[=:])\s*(?P<q>["\'])(?P<xsrft>.+?)(?P=q)',
            webpage, 'login token', group='xsrft')
        vuid = self._search_regex(
            r'["\']vuid["\']\s*:\s*(["\'])(?P<vuid>.+?)\1',
            webpage, 'vuid', group='vuid')
        return xsrft, vuid

    def _extract_vimeo_config(self, webpage, video_id, *args, **kwargs):
        """Parse the inline ``vimeo.config = {...}`` JSON, if present."""
        vimeo_config = self._search_regex(
            r'vimeo\.config\s*=\s*(?:({.+?})|_extend\([^,]+,\s+({.+?})\));',
            webpage, 'vimeo config', *args, **compat_kwargs(kwargs))
        if vimeo_config:
            return self._parse_json(vimeo_config, video_id)

    def _set_vimeo_cookie(self, name, value):
        self._set_cookie('vimeo.com', name, value)

    def _vimeo_sort_formats(self, formats):
        # Bitrates are completely broken. Single m3u8 may contain entries in kbps and bps
        # at the same time without actual units specified. This lead to wrong sorting.
        self._sort_formats(formats, field_preference=('preference', 'height', 'width', 'fps', 'tbr', 'format_id'))

    def _parse_config(self, config, video_id):
        """Turn a player config dict into a youtube-dl info dict.

        Collects progressive HTTP formats, HLS/DASH CDN manifests, the
        live-archive source (if finished), subtitles and thumbnails.
        """
        video_data = config['video']
        video_title = video_data['title']
        live_event = video_data.get('live_event') or {}
        is_live = live_event.get('status') == 'started'
        formats = []
        config_files = video_data.get('files') or config['request'].get('files', {})
        for f in config_files.get('progressive', []):
            video_url = f.get('url')
            if not video_url:
                continue
            formats.append({
                'url': video_url,
                'format_id': 'http-%s' % f.get('quality'),
                'width': int_or_none(f.get('width')),
                'height': int_or_none(f.get('height')),
                'fps': int_or_none(f.get('fps')),
                'tbr': int_or_none(f.get('bitrate')),
            })
        # TODO: fix handling of 308 status code returned for live archive manifest requests
        for files_type in ('hls', 'dash'):
            for cdn_name, cdn_data in config_files.get(files_type, {}).get('cdns', {}).items():
                manifest_url = cdn_data.get('url')
                if not manifest_url:
                    continue
                format_id = '%s-%s' % (files_type, cdn_name)
                if files_type == 'hls':
                    formats.extend(self._extract_m3u8_formats(
                        manifest_url, video_id, 'mp4',
                        'm3u8' if is_live else 'm3u8_native', m3u8_id=format_id,
                        note='Downloading %s m3u8 information' % cdn_name,
                        fatal=False))
                elif files_type == 'dash':
                    # Some manifest URLs carry a '/sep/video/' variant; try
                    # both the separate-track and muxed layouts.
                    mpd_pattern = r'/%s/(?:sep/)?video/' % video_id
                    mpd_manifest_urls = []
                    if re.search(mpd_pattern, manifest_url):
                        for suffix, repl in (('', 'video'), ('_sep', 'sep/video')):
                            mpd_manifest_urls.append((format_id + suffix, re.sub(
                                mpd_pattern, '/%s/%s/' % (video_id, repl), manifest_url)))
                    else:
                        mpd_manifest_urls = [(format_id, manifest_url)]
                    for f_id, m_url in mpd_manifest_urls:
                        if 'json=1' in m_url:
                            # 'json=1' manifests are a JSON wrapper holding
                            # the real MPD URL.
                            real_m_url = (self._download_json(m_url, video_id, fatal=False) or {}).get('url')
                            if real_m_url:
                                m_url = real_m_url
                        mpd_formats = self._extract_mpd_formats(
                            m_url.replace('/master.json', '/master.mpd'), video_id, f_id,
                            'Downloading %s MPD information' % cdn_name,
                            fatal=False)
                        for f in mpd_formats:
                            # Demote single-track DASH variants below muxed ones.
                            if f.get('vcodec') == 'none':
                                f['preference'] = -50
                            elif f.get('acodec') == 'none':
                                f['preference'] = -40
                        formats.extend(mpd_formats)
        live_archive = live_event.get('archive') or {}
        live_archive_source_url = live_archive.get('source_url')
        if live_archive_source_url and live_archive.get('status') == 'done':
            formats.append({
                'format_id': 'live-archive-source',
                'url': live_archive_source_url,
                'preference': 1,
            })
        subtitles = {}
        text_tracks = config['request'].get('text_tracks')
        if text_tracks:
            for tt in text_tracks:
                subtitles[tt['lang']] = [{
                    'ext': 'vtt',
                    'url': 'https://vimeo.com' + tt['url'],
                }]
        thumbnails = []
        if not is_live:
            # Thumb keys are pixel widths, e.g. '640'.
            for key, thumb in video_data.get('thumbs', {}).items():
                thumbnails.append({
                    'id': key,
                    'width': int_or_none(key),
                    'url': thumb,
                })
            thumbnail = video_data.get('thumbnail')
            if thumbnail:
                thumbnails.append({
                    'url': thumbnail,
                })
        owner = video_data.get('owner') or {}
        video_uploader_url = owner.get('url')
        return {
            'id': str_or_none(video_data.get('id')) or video_id,
            'title': self._live_title(video_title) if is_live else video_title,
            'uploader': owner.get('name'),
            'uploader_id': video_uploader_url.split('/')[-1] if video_uploader_url else None,
            'uploader_url': video_uploader_url,
            'thumbnails': thumbnails,
            'duration': int_or_none(video_data.get('duration')),
            'formats': formats,
            'subtitles': subtitles,
            'is_live': is_live,
        }

    def _extract_original_format(self, url, video_id):
        """Probe the download config for the uploader's original file.

        Returns a single format dict (preference 1) or None when the
        source file is unavailable, cold-stored or defrosting.
        """
        download_data = self._download_json(
            url, video_id, fatal=False,
            query={'action': 'load_download_config'},
            headers={'X-Requested-With': 'XMLHttpRequest'})
        if download_data:
            source_file = download_data.get('source_file')
            if isinstance(source_file, dict):
                download_url = source_file.get('download_url')
                if download_url and not source_file.get('is_cold') and not source_file.get('is_defrosting'):
                    source_name = source_file.get('public_name', 'Original')
                    if self._is_valid_url(download_url, video_id, '%s video' % source_name):
                        ext = (try_get(
                            source_file, lambda x: x['extension'],
                            compat_str) or determine_ext(
                            download_url, None) or 'mp4').lower()
                        return {
                            'url': download_url,
                            'ext': ext,
                            'width': int_or_none(source_file.get('width')),
                            'height': int_or_none(source_file.get('height')),
                            'filesize': parse_filesize(source_file.get('size')),
                            'format_id': source_name,
                            'preference': 1,
                        }
class VimeoIE(VimeoBaseInfoExtractor):
    """Information extractor for vimeo.com."""

    # _VALID_URL matches Vimeo URLs
    _VALID_URL = r'''(?x)
                    https?://
                        (?:
                            (?:
                                www|
                                player
                            )
                            \.
                        )?
                        vimeo(?:pro)?\.com/
                        (?!(?:channels|album|showcase)/[^/?#]+/?(?:$|[?#])|[^/]+/review/|ondemand/)
                        (?:.*?/)?
                        (?:
                            (?:
                                play_redirect_hls|
                                moogaloop\.swf)\?clip_id=
                            )?
                        (?:videos?/)?
                        (?P<id>[0-9]+)
                        (?:/[\da-f]+)?
                        /?(?:[?&].*)?(?:[#].*)?$
                    '''
    IE_NAME = 'vimeo'
    _TESTS = [
        {
            'url': 'http://vimeo.com/56015672#at=0',
            'md5': '8879b6cc097e987f02484baf890129e5',
            'info_dict': {
                'id': '56015672',
                'ext': 'mp4',
                'title': "youtube-dl test video - \u2605 \" ' \u5e78 / \\ \u00e4 \u21ad \U0001d550",
                'description': 'md5:2d3305bad981a06ff79f027f19865021',
                'timestamp': 1355990239,
                'upload_date': '20121220',
                'uploader_url': r're:https?://(?:www\.)?vimeo\.com/user7108434',
                'uploader_id': 'user7108434',
                'uploader': 'Filippo Valsorda',
                'duration': 10,
                'license': 'by-sa',
            },
            'params': {
                'format': 'best[protocol=https]',
            },
        },
        {
            'url': 'http://vimeopro.com/openstreetmapus/state-of-the-map-us-2013/video/68093876',
            'md5': '3b5ca6aa22b60dfeeadf50b72e44ed82',
            'note': 'Vimeo Pro video (#1197)',
            'info_dict': {
                'id': '68093876',
                'ext': 'mp4',
                'uploader_url': r're:https?://(?:www\.)?vimeo\.com/openstreetmapus',
                'uploader_id': 'openstreetmapus',
                'uploader': 'OpenStreetMap US',
                'title': 'Andy Allan - Putting the Carto into OpenStreetMap Cartography',
                'description': 'md5:2c362968038d4499f4d79f88458590c1',
                'duration': 1595,
                'upload_date': '20130610',
                'timestamp': 1370893156,
            },
            'params': {
                'format': 'best[protocol=https]',
            },
        },
        {
            'url': 'http://player.vimeo.com/video/54469442',
            'md5': '619b811a4417aa4abe78dc653becf511',
            'note': 'Videos that embed the url in the player page',
            'info_dict': {
                'id': '54469442',
                'ext': 'mp4',
                'title': 'Kathy Sierra: Building the minimum Badass User, Business of Software 2012',
                'uploader': 'The BLN & Business of Software',
                'uploader_url': r're:https?://(?:www\.)?vimeo\.com/theblnbusinessofsoftware',
                'uploader_id': 'theblnbusinessofsoftware',
                'duration': 3610,
                'description': None,
            },
            'params': {
                'format': 'best[protocol=https]',
            },
            'expected_warnings': ['Unable to download JSON metadata'],
        },
        {
            'url': 'http://vimeo.com/68375962',
            'md5': 'aaf896bdb7ddd6476df50007a0ac0ae7',
            'note': 'Video protected with password',
            'info_dict': {
                'id': '68375962',
                'ext': 'mp4',
                'title': 'youtube-dl password protected test video',
                'timestamp': 1371200155,
                'upload_date': '20130614',
                'uploader_url': r're:https?://(?:www\.)?vimeo\.com/user18948128',
                'uploader_id': 'user18948128',
                'uploader': 'Jaime Marquínez Ferrándiz',
                'duration': 10,
                'description': 'md5:dca3ea23adb29ee387127bc4ddfce63f',
            },
            'params': {
                'format': 'best[protocol=https]',
                'videopassword': 'youtube-dl',
            },
        },
        {
            'url': 'http://vimeo.com/channels/keypeele/75629013',
            'md5': '2f86a05afe9d7abc0b9126d229bbe15d',
            'info_dict': {
                'id': '75629013',
                'ext': 'mp4',
                'title': 'Key & Peele: Terrorist Interrogation',
                'description': 'md5:8678b246399b070816b12313e8b4eb5c',
                'uploader_url': r're:https?://(?:www\.)?vimeo\.com/atencio',
                'uploader_id': 'atencio',
                'uploader': 'Peter Atencio',
                'channel_id': 'keypeele',
                'channel_url': r're:https?://(?:www\.)?vimeo\.com/channels/keypeele',
                'timestamp': 1380339469,
                'upload_date': '20130928',
                'duration': 187,
            },
            'expected_warnings': ['Unable to download JSON metadata'],
        },
        {
            'url': 'http://vimeo.com/76979871',
            'note': 'Video with subtitles',
            'info_dict': {
                'id': '76979871',
                'ext': 'mp4',
                'title': 'The New Vimeo Player (You Know, For Videos)',
                'description': 'md5:2ec900bf97c3f389378a96aee11260ea',
                'timestamp': 1381846109,
                'upload_date': '20131015',
                'uploader_url': r're:https?://(?:www\.)?vimeo\.com/staff',
                'uploader_id': 'staff',
                'uploader': 'Vimeo Staff',
                'duration': 62,
            }
        },
        {
            # from https://www.ouya.tv/game/Pier-Solar-and-the-Great-Architects/
            'url': 'https://player.vimeo.com/video/98044508',
            'note': 'The js code contains assignments to the same variable as the config',
            'info_dict': {
                'id': '98044508',
                'ext': 'mp4',
                'title': 'Pier Solar OUYA Official Trailer',
                'uploader': 'Tulio Gonçalves',
                'uploader_url': r're:https?://(?:www\.)?vimeo\.com/user28849593',
                'uploader_id': 'user28849593',
            },
        },
        {
            # contains original format
            'url': 'https://vimeo.com/33951933',
            'md5': '53c688fa95a55bf4b7293d37a89c5c53',
            'info_dict': {
                'id': '33951933',
                'ext': 'mp4',
                'title': 'FOX CLASSICS - Forever Classic ID - A Full Minute',
                'uploader': 'The DMCI',
                'uploader_url': r're:https?://(?:www\.)?vimeo\.com/dmci',
                'uploader_id': 'dmci',
                'timestamp': 1324343742,
                'upload_date': '20111220',
                'description': 'md5:ae23671e82d05415868f7ad1aec21147',
            },
        },
        {
            # only available via https://vimeo.com/channels/tributes/6213729 and
            # not via https://vimeo.com/6213729
            'url': 'https://vimeo.com/channels/tributes/6213729',
            'info_dict': {
                'id': '6213729',
                'ext': 'mp4',
                'title': 'Vimeo Tribute: The Shining',
                'uploader': 'Casey Donahue',
                'uploader_url': r're:https?://(?:www\.)?vimeo\.com/caseydonahue',
                'uploader_id': 'caseydonahue',
                'channel_url': r're:https?://(?:www\.)?vimeo\.com/channels/tributes',
                'channel_id': 'tributes',
                'timestamp': 1250886430,
                'upload_date': '20090821',
                'description': 'md5:bdbf314014e58713e6e5b66eb252f4a6',
            },
            'params': {
                'skip_download': True,
            },
            'expected_warnings': ['Unable to download JSON metadata'],
        },
        {
            # redirects to ondemand extractor and should be passed through it
            # for successful extraction
            'url': 'https://vimeo.com/73445910',
            'info_dict': {
                'id': '73445910',
                'ext': 'mp4',
                'title': 'The Reluctant Revolutionary',
                'uploader': '10Ft Films',
                'uploader_url': r're:https?://(?:www\.)?vimeo\.com/tenfootfilms',
                'uploader_id': 'tenfootfilms',
                'description': 'md5:0fa704e05b04f91f40b7f3ca2e801384',
                'upload_date': '20130830',
                'timestamp': 1377853339,
            },
            'params': {
                'skip_download': True,
            },
            'expected_warnings': ['Unable to download JSON metadata'],
        },
        {
            'url': 'http://player.vimeo.com/video/68375962',
            'md5': 'aaf896bdb7ddd6476df50007a0ac0ae7',
            'info_dict': {
                'id': '68375962',
                'ext': 'mp4',
                'title': 'youtube-dl password protected test video',
                'uploader_url': r're:https?://(?:www\.)?vimeo\.com/user18948128',
                'uploader_id': 'user18948128',
                'uploader': 'Jaime Marquínez Ferrándiz',
                'duration': 10,
            },
            'params': {
                'format': 'best[protocol=https]',
                'videopassword': 'youtube-dl',
            },
        },
        {
            'url': 'http://vimeo.com/moogaloop.swf?clip_id=2539741',
            'only_matching': True,
        },
        {
            'url': 'https://vimeo.com/109815029',
            'note': 'Video not completely processed, "failed" seed status',
            'only_matching': True,
        },
        {
            'url': 'https://vimeo.com/groups/travelhd/videos/22439234',
            'only_matching': True,
        },
        {
            'url': 'https://vimeo.com/album/2632481/video/79010983',
            'only_matching': True,
        },
        {
            # source file returns 403: Forbidden
            'url': 'https://vimeo.com/7809605',
            'only_matching': True,
        },
        {
            'url': 'https://vimeo.com/160743502/abd0e13fb4',
            'only_matching': True,
        }
        # https://gettingthingsdone.com/workflowmap/
        # vimeo embed with check-password page protected by Referer header
    ]

    @staticmethod
    def _smuggle_referrer(url, referrer_url):
        # Carry the embedding page's URL along so the eventual request can
        # send it as the Referer header (some embeds require it).
        return smuggle_url(url, {'http_headers': {'Referer': referrer_url}})

    @staticmethod
    def _extract_urls(url, webpage):
        """Find all Vimeo player/embed URLs inside an arbitrary webpage."""
        urls = []
        # Look for embedded (iframe) Vimeo player
        for mobj in re.finditer(
                r'<iframe[^>]+?src=(["\'])(?P<url>(?:https?:)?//player\.vimeo\.com/video/\d+.*?)\1',
                webpage):
            urls.append(VimeoIE._smuggle_referrer(unescapeHTML(mobj.group('url')), url))
        PLAIN_EMBED_RE = (
            # Look for embedded (swf embed) Vimeo player
            r'<embed[^>]+?src=(["\'])(?P<url>(?:https?:)?//(?:www\.)?vimeo\.com/moogaloop\.swf.+?)\1',
            # Look more for non-standard embedded Vimeo player
            r'<video[^>]+src=(["\'])(?P<url>(?:https?:)?//(?:www\.)?vimeo\.com/[0-9]+)\1',
        )
        for embed_re in PLAIN_EMBED_RE:
            for mobj in re.finditer(embed_re, webpage):
                urls.append(mobj.group('url'))
        return urls

    @staticmethod
    def _extract_url(url, webpage):
        # Convenience wrapper: first embedded URL or None.
        urls = VimeoIE._extract_urls(url, webpage)
        return urls[0] if urls else None

    def _verify_player_video_password(self, url, video_id, headers):
        """player.vimeo.com variant of password verification.

        Unlike the main-site flow, the password is base64-encoded and
        POSTed as JSON to <url>/check-password; returns the checked
        config dict.
        """
        password = self._downloader.params.get('videopassword')
        if password is None:
            raise ExtractorError('This video is protected by a password, use the --video-password option', expected=True)
        data = urlencode_postdata({
            'password': base64.b64encode(password.encode()),
        })
        headers = merge_dicts(headers, {
            'Content-Type': 'application/x-www-form-urlencoded',
        })
        checked = self._download_json(
            url + '/check-password', video_id,
            'Verifying the password', data=data, headers=headers)
        if checked is False:
            raise ExtractorError('Wrong video password', expected=True)
        return checked

    def _real_initialize(self):
        self._login()

    def _real_extract(self, url):
        url, data = unsmuggle_url(url, {})
        # Smuggled headers (e.g. Referer from the embedding page) take
        # precedence; default the Referer to the video URL itself.
        headers = std_headers.copy()
        if 'http_headers' in data:
            headers.update(data['http_headers'])
        if 'Referer' not in headers:
            headers['Referer'] = url
        channel_id = self._search_regex(
            r'vimeo\.com/channels/([^/]+)', url, 'channel id', default=None)
        # Extract ID from URL
        video_id = self._match_id(url)
        orig_url = url
        is_pro = 'vimeopro.com/' in url
        is_player = '://player.vimeo.com/video/' in url
        if is_pro:
            # some videos require portfolio_id to be present in player url
            # https://github.com/ytdl-org/youtube-dl/issues/20070
            url = self._extract_url(url, self._download_webpage(url, video_id))
            if not url:
                url = 'https://vimeo.com/' + video_id
        elif is_player:
            url = 'https://player.vimeo.com/video/' + video_id
        elif any(p in url for p in ('play_redirect_hls', 'moogaloop.swf')):
            url = 'https://vimeo.com/' + video_id
        try:
            # Retrieve video webpage to extract further information
            webpage, urlh = self._download_webpage_handle(
                url, video_id, headers=headers)
            redirect_url = compat_str(urlh.geturl())
        except ExtractorError as ee:
            if isinstance(ee.cause, compat_HTTPError) and ee.cause.code == 403:
                errmsg = ee.cause.read()
                if b'Because of its privacy settings, this video cannot be played here' in errmsg:
                    raise ExtractorError(
                        'Cannot download embed-only video without embedding '
                        'URL. Please call youtube-dl with the URL of the page '
                        'that embeds this video.',
                        expected=True)
            raise
        # Now we begin extracting as much information as we can from what we
        # retrieved. First we extract the information common to all extractors,
        # and latter we extract those that are Vimeo specific.
        self.report_extraction(video_id)
        vimeo_config = self._extract_vimeo_config(webpage, video_id, default=None)
        if vimeo_config:
            seed_status = vimeo_config.get('seed_status', {})
            if seed_status.get('state') == 'failed':
                raise ExtractorError(
                    '%s said: %s' % (self.IE_NAME, seed_status['title']),
                    expected=True)
        cc_license = None
        timestamp = None
        video_description = None
        # Extract the config JSON
        try:
            try:
                config_url = self._html_search_regex(
                    r' data-config-url="(.+?)"', webpage,
                    'config URL', default=None)
                if not config_url:
                    # Sometimes new react-based page is served instead of old one that require
                    # different config URL extraction approach (see
                    # https://github.com/ytdl-org/youtube-dl/pull/7209)
                    page_config = self._parse_json(self._search_regex(
                        r'vimeo\.(?:clip|vod_title)_page_config\s*=\s*({.+?});',
                        webpage, 'page config'), video_id)
                    config_url = page_config['player']['config_url']
                    cc_license = page_config.get('cc_license')
                    timestamp = try_get(
                        page_config, lambda x: x['clip']['uploaded_on'],
                        compat_str)
                    video_description = clean_html(dict_get(
                        page_config, ('description', 'description_html_escaped')))
                config = self._download_json(config_url, video_id)
            except RegexNotFoundError:
                # For pro videos or player.vimeo.com urls
                # We try to find out to which variable is assigned the config dic
                m_variable_name = re.search(r'(\w)\.video\.id', webpage)
                if m_variable_name is not None:
                    config_re = [r'%s=({[^}].+?});' % re.escape(m_variable_name.group(1))]
                else:
                    config_re = [r' = {config:({.+?}),assets:', r'(?:[abc])=({.+?});']
                config_re.append(r'\bvar\s+r\s*=\s*({.+?})\s*;')
                config_re.append(r'\bconfig\s*=\s*({.+?})\s*;')
                config = self._search_regex(config_re, webpage, 'info section',
                                            flags=re.DOTALL)
                config = json.loads(config)
        except Exception as e:
            if re.search('The creator of this video has not given you permission to embed it on this domain.', webpage):
                raise ExtractorError('The author has restricted the access to this video, try with the "--referer" option')
            if re.search(r'<form[^>]+?id="pw_form"', webpage) is not None:
                # Password page: verify once, then re-run extraction; the
                # smuggled flag guards against infinite recursion.
                if '_video_password_verified' in data:
                    raise ExtractorError('video password verification failed!')
                self._verify_video_password(redirect_url, video_id, webpage)
                return self._real_extract(
                    smuggle_url(redirect_url, {'_video_password_verified': 'verified'}))
            else:
                raise ExtractorError('Unable to extract info section',
                                     cause=e)
        else:
            if config.get('view') == 4:
                config = self._verify_player_video_password(redirect_url, video_id, headers)
        vod = config.get('video', {}).get('vod', {})

        def is_rented():
            # True when the user has rented/purchased this VOD title.
            if '>You rented this title.<' in webpage:
                return True
            if config.get('user', {}).get('purchased'):
                return True
            for purchase_option in vod.get('purchase_options', []):
                if purchase_option.get('purchased'):
                    return True
                label = purchase_option.get('label_string')
                if label and (label.startswith('You rented this') or label.endswith(' remaining')):
                    return True
            return False
        if is_rented() and vod.get('is_trailer'):
            # The page served the trailer; redirect to the rented feature.
            feature_id = vod.get('feature_id')
            if feature_id and not data.get('force_feature_id', False):
                return self.url_result(smuggle_url(
                    'https://player.vimeo.com/player/%s' % feature_id,
                    {'force_feature_id': True}), 'Vimeo')
        # Extract video description
        if not video_description:
            video_description = self._html_search_regex(
                r'(?s)<div\s+class="[^"]*description[^"]*"[^>]*>(.*?)</div>',
                webpage, 'description', default=None)
        if not video_description:
            video_description = self._html_search_meta(
                'description', webpage, default=None)
        if not video_description and is_pro:
            orig_webpage = self._download_webpage(
                orig_url, video_id,
                note='Downloading webpage for description',
                fatal=False)
            if orig_webpage:
                video_description = self._html_search_meta(
                    'description', orig_webpage, default=None)
        if not video_description and not is_player:
            self._downloader.report_warning('Cannot find video description')
        # Extract upload date
        if not timestamp:
            timestamp = self._search_regex(
                r'<time[^>]+datetime="([^"]+)"', webpage,
                'timestamp', default=None)
        try:
            view_count = int(self._search_regex(r'UserPlays:(\d+)', webpage, 'view count'))
            like_count = int(self._search_regex(r'UserLikes:(\d+)', webpage, 'like count'))
            comment_count = int(self._search_regex(r'UserComments:(\d+)', webpage, 'comment count'))
        except RegexNotFoundError:
            # This info is only available in vimeo.com/{id} urls
            view_count = None
            like_count = None
            comment_count = None
        formats = []
        source_format = self._extract_original_format(
            'https://vimeo.com/' + video_id, video_id)
        if source_format:
            formats.append(source_format)
        info_dict_config = self._parse_config(config, video_id)
        formats.extend(info_dict_config['formats'])
        self._vimeo_sort_formats(formats)
        json_ld = self._search_json_ld(webpage, video_id, default={})
        if not cc_license:
            cc_license = self._search_regex(
                r'<link[^>]+rel=["\']license["\'][^>]+href=(["\'])(?P<license>(?:(?!\1).)+)\1',
                webpage, 'license', default=None, group='license')
        channel_url = 'https://vimeo.com/channels/%s' % channel_id if channel_id else None
        info_dict = {
            'formats': formats,
            'timestamp': unified_timestamp(timestamp),
            'description': video_description,
            'webpage_url': url,
            'view_count': view_count,
            'like_count': like_count,
            'comment_count': comment_count,
            'license': cc_license,
            'channel_id': channel_id,
            'channel_url': channel_url,
        }
        # Config-derived data and JSON-LD fill in anything still missing.
        info_dict = merge_dicts(info_dict, info_dict_config, json_ld)
        return info_dict
class VimeoOndemandIE(VimeoIE):
    """Vimeo On Demand URLs; extraction itself is inherited from VimeoIE."""

    IE_NAME = 'vimeo:ondemand'
    _VALID_URL = r'https?://(?:www\.)?vimeo\.com/ondemand/([^/]+/)?(?P<id>[^/?#&]+)'
    _TESTS = [{
        # ondemand video not available via https://vimeo.com/id
        'url': 'https://vimeo.com/ondemand/20704',
        'md5': 'c424deda8c7f73c1dfb3edd7630e2f35',
        'info_dict': {
            'id': '105442900',
            'ext': 'mp4',
            'title': 'המעבדה - במאי יותם פלדמן',
            'uploader': 'גם סרטים',
            'uploader_url': r're:https?://(?:www\.)?vimeo\.com/gumfilms',
            'uploader_id': 'gumfilms',
            'description': 'md5:4c027c965e439de4baab621e48b60791',
            'upload_date': '20140906',
            'timestamp': 1410032453,
        },
        'params': {
            'format': 'best[protocol=https]',
        },
        'expected_warnings': ['Unable to download JSON metadata'],
    }, {
        # requires Referer to be passed along with og:video:url
        'url': 'https://vimeo.com/ondemand/36938/126682985',
        'info_dict': {
            'id': '126584684',
            'ext': 'mp4',
            'title': 'Rävlock, rätt läte på rätt plats',
            'uploader': 'Lindroth & Norin',
            'uploader_url': r're:https?://(?:www\.)?vimeo\.com/lindrothnorin',
            'uploader_id': 'lindrothnorin',
            'description': 'md5:c3c46a90529612c8279fb6af803fc0df',
            'upload_date': '20150502',
            'timestamp': 1430586422,
        },
        'params': {
            'skip_download': True,
        },
        'expected_warnings': ['Unable to download JSON metadata'],
    }, {
        'url': 'https://vimeo.com/ondemand/nazmaalik',
        'only_matching': True,
    }, {
        'url': 'https://vimeo.com/ondemand/141692381',
        'only_matching': True,
    }, {
        'url': 'https://vimeo.com/ondemand/thelastcolony/150274832',
        'only_matching': True,
    }]
class VimeoChannelIE(VimeoBaseInfoExtractor):
    """Playlist extractor for vimeo.com/channels/<name> pages.

    Also serves as the base class for user/album listings, which
    customize _BASE_URL_TEMPL, _TITLE_RE and friends.
    """

    IE_NAME = 'vimeo:channel'
    _VALID_URL = r'https://vimeo\.com/channels/(?P<id>[^/?#]+)/?(?:$|[?#])'
    # Presence of a rel="next" link means another listing page exists.
    _MORE_PAGES_INDICATOR = r'<a.+?rel="next"'
    _TITLE = None
    _TITLE_RE = r'<link rel="alternate"[^>]+?title="(.*?)"'
    _TESTS = [{
        'url': 'https://vimeo.com/channels/tributes',
        'info_dict': {
            'id': 'tributes',
            'title': 'Vimeo Tributes',
        },
        'playlist_mincount': 25,
    }]
    _BASE_URL_TEMPL = 'https://vimeo.com/channels/%s'

    def _page_url(self, base_url, pagenum):
        # URL of the n-th listing page.
        return '%s/videos/page:%d/' % (base_url, pagenum)

    def _extract_list_title(self, webpage):
        # Fixed _TITLE (if a subclass sets one) wins over scraping.
        return self._TITLE or self._html_search_regex(
            self._TITLE_RE, webpage, 'list title', fatal=False)

    def _login_list_password(self, page_url, list_id, webpage):
        """Submit --video-password for a password-protected listing.

        Returns the (possibly unchanged) listing webpage.
        """
        login_form = self._search_regex(
            r'(?s)<form[^>]+?id="pw_form"(.*?)</form>',
            webpage, 'login form', default=None)
        if not login_form:
            return webpage
        password = self._downloader.params.get('videopassword')
        if password is None:
            raise ExtractorError('This album is protected by a password, use the --video-password option', expected=True)
        fields = self._hidden_inputs(login_form)
        token, vuid = self._extract_xsrft_and_vuid(webpage)
        fields['token'] = token
        fields['password'] = password
        post = urlencode_postdata(fields)
        password_path = self._search_regex(
            r'action="([^"]+)"', login_form, 'password URL')
        password_url = compat_urlparse.urljoin(page_url, password_path)
        password_request = sanitized_Request(password_url, post)
        password_request.add_header('Content-type', 'application/x-www-form-urlencoded')
        self._set_vimeo_cookie('vuid', vuid)
        self._set_vimeo_cookie('xsrft', token)
        return self._download_webpage(
            password_request, list_id,
            'Verifying the password', 'Wrong password')

    def _title_and_entries(self, list_id, base_url):
        """Generator yielding the list title first, then entry results.

        Pages are walked until _MORE_PAGES_INDICATOR no longer matches.
        """
        for pagenum in itertools.count(1):
            page_url = self._page_url(base_url, pagenum)
            webpage = self._download_webpage(
                page_url, list_id,
                'Downloading page %s' % pagenum)
            if pagenum == 1:
                webpage = self._login_list_password(page_url, list_id, webpage)
                yield self._extract_list_title(webpage)
            # Try extracting href first since not all videos are available via
            # short https://vimeo.com/id URL (e.g. https://vimeo.com/channels/tributes/6213729)
            clips = re.findall(
                r'id="clip_(\d+)"[^>]*>\s*<a[^>]+href="(/(?:[^/]+/)*\1)(?:[^>]+\btitle="([^"]+)")?', webpage)
            if clips:
                for video_id, video_url, video_title in clips:
                    yield self.url_result(
                        compat_urlparse.urljoin(base_url, video_url),
                        VimeoIE.ie_key(), video_id=video_id, video_title=video_title)
            # More relaxed fallback
            else:
                for video_id in re.findall(r'id=["\']clip_(\d+)', webpage):
                    yield self.url_result(
                        'https://vimeo.com/%s' % video_id,
                        VimeoIE.ie_key(), video_id=video_id)
            if re.search(self._MORE_PAGES_INDICATOR, webpage, re.DOTALL) is None:
                break

    def _extract_videos(self, list_id, base_url):
        # First yielded item is the title; the rest are playlist entries.
        title_and_entries = self._title_and_entries(list_id, base_url)
        list_title = next(title_and_entries)
        return self.playlist_result(title_and_entries, list_id, list_title)

    def _real_extract(self, url):
        channel_id = self._match_id(url)
        return self._extract_videos(channel_id, self._BASE_URL_TEMPL % channel_id)
class VimeoUserIE(VimeoChannelIE):
    """Extract all videos uploaded by a Vimeo user (reuses channel pagination)."""
    IE_NAME = 'vimeo:user'
    # Negative lookahead excludes plain video IDs and the watchlater page.
    _VALID_URL = r'https://vimeo\.com/(?!(?:[0-9]+|watchlater)(?:$|[?#/]))(?P<id>[^/]+)(?:/videos|[#?]|$)'
    _TITLE_RE = r'<a[^>]+?class="user">([^<>]+?)</a>'
    _TESTS = [{
        'url': 'https://vimeo.com/nkistudio/videos',
        'info_dict': {
            'title': 'Nki',
            'id': 'nkistudio',
        },
        'playlist_mincount': 66,
    }]
    _BASE_URL_TEMPL = 'https://vimeo.com/%s'
class VimeoAlbumIE(VimeoChannelIE):
    """Extract a Vimeo album/showcase as a playlist.

    Unlike the channel extractor, this pages through the JSON API at
    api.vimeo.com instead of scraping HTML listing pages.
    """
    IE_NAME = 'vimeo:album'
    _VALID_URL = r'https://vimeo\.com/(?:album|showcase)/(?P<id>\d+)(?:$|[?#]|/(?!video))'
    _TITLE_RE = r'<header id="page_header">\n\s*<h1>(.*?)</h1>'
    _TESTS = [{
        'url': 'https://vimeo.com/album/2632481',
        'info_dict': {
            'id': '2632481',
            'title': 'Staff Favorites: November 2013',
        },
        'playlist_mincount': 13,
    }, {
        'note': 'Password-protected album',
        'url': 'https://vimeo.com/album/3253534',
        'info_dict': {
            'title': 'test',
            'id': '3253534',
        },
        'playlist_count': 1,
        'params': {
            'videopassword': 'youtube-dl',
        }
    }]
    _PAGE_SIZE = 100

    def _fetch_page(self, album_id, authorization, hashed_pass, page):
        """Yield url_result entries for one zero-based API result page.

        authorization -- JWT bearer token scraped from the album page config.
        hashed_pass -- password hash for protected albums, or None.
        """
        api_page = page + 1  # the API counts pages from 1
        query = {
            'fields': 'link,uri',
            'page': api_page,
            'per_page': self._PAGE_SIZE,
        }
        if hashed_pass:
            query['_hashed_pass'] = hashed_pass
        videos = self._download_json(
            'https://api.vimeo.com/albums/%s/videos' % album_id,
            album_id, 'Downloading page %d' % api_page, query=query, headers={
                'Authorization': 'jwt ' + authorization,
            })['data']
        for video in videos:
            link = video.get('link')
            if not link:
                continue
            uri = video.get('uri')
            video_id = self._search_regex(r'/videos/(\d+)', uri, 'video_id', default=None) if uri else None
            yield self.url_result(link, VimeoIE.ie_key(), video_id)

    def _real_extract(self, url):
        album_id = self._match_id(url)
        webpage = self._download_webpage(url, album_id)
        webpage = self._login_list_password(url, album_id, webpage)
        api_config = self._extract_vimeo_config(webpage, album_id)['api']
        # Lazily page through the API; partial binds everything but `page`.
        entries = OnDemandPagedList(functools.partial(
            self._fetch_page, album_id, api_config['jwt'],
            api_config.get('hashed_pass')), self._PAGE_SIZE)
        return self.playlist_result(entries, album_id, self._html_search_regex(
            r'<title>\s*(.+?)(?:\s+on Vimeo)?</title>', webpage, 'title', fatal=False))
class VimeoGroupsIE(VimeoChannelIE):
    """Extract all videos of a Vimeo group (reuses channel pagination)."""
    IE_NAME = 'vimeo:group'
    _VALID_URL = r'https://vimeo\.com/groups/(?P<id>[^/]+)(?:/(?!videos?/\d+)|$)'
    _TESTS = [{
        'url': 'https://vimeo.com/groups/kattykay',
        'info_dict': {
            'id': 'kattykay',
            'title': 'Katty Kay',
        },
        'playlist_mincount': 27,
    }]
    _BASE_URL_TEMPL = 'https://vimeo.com/groups/%s'
class VimeoReviewIE(VimeoBaseInfoExtractor):
    """Extract a single video from a Vimeo review page."""
    IE_NAME = 'vimeo:review'
    IE_DESC = 'Review pages on vimeo'
    _VALID_URL = r'(?P<url>https://vimeo\.com/[^/]+/review/(?P<id>[^/]+)/[0-9a-f]{10})'
    _TESTS = [{
        'url': 'https://vimeo.com/user21297594/review/75524534/3c257a1b5d',
        'md5': 'c507a72f780cacc12b2248bb4006d253',
        'info_dict': {
            'id': '75524534',
            'ext': 'mp4',
            'title': "DICK HARDWICK 'Comedian'",
            'uploader': 'Richard Hardwick',
            'uploader_id': 'user21297594',
            'description': "Comedian Dick Hardwick's five minute demo filmed in front of a live theater audience.\nEdit by Doug Mattocks",
        },
        'expected_warnings': ['Unable to download JSON metadata'],
    }, {
        'note': 'video player needs Referer',
        'url': 'https://vimeo.com/user22258446/review/91613211/13f927e053',
        'md5': '6295fdab8f4bf6a002d058b2c6dce276',
        'info_dict': {
            'id': '91613211',
            'ext': 'mp4',
            'title': 're:(?i)^Death by dogma versus assembling agile . Sander Hoogendoorn',
            'uploader': 'DevWeek Events',
            'duration': 2773,
            'thumbnail': r're:^https?://.*\.jpg$',
            'uploader_id': 'user22258446',
        },
        'skip': 'video gone',
    }, {
        'note': 'Password protected',
        'url': 'https://vimeo.com/user37284429/review/138823582/c4d865efde',
        'info_dict': {
            'id': '138823582',
            'ext': 'mp4',
            'title': 'EFFICIENT PICKUP MASTERCLASS MODULE 1',
            'uploader': 'TMB',
            'uploader_id': 'user37284429',
        },
        'params': {
            'videopassword': 'holygrail',
        },
        'skip': 'video gone',
    }]
    def _real_initialize(self):
        self._login()
    def _real_extract(self, url):
        page_url, video_id = re.match(self._VALID_URL, url).groups()
        # The /review/data/ endpoint returns the clip metadata as JSON.
        clip_data = self._download_json(
            page_url.replace('/review/', '/review/data/'),
            video_id)['clipData']
        config_url = clip_data['configUrl']
        config = self._download_json(config_url, video_id)
        info_dict = self._parse_config(config, video_id)
        # The original source file may be offered in addition to streams.
        source_format = self._extract_original_format(
            page_url + '/action', video_id)
        if source_format:
            info_dict['formats'].append(source_format)
        self._vimeo_sort_formats(info_dict['formats'])
        info_dict['description'] = clean_html(clip_data.get('description'))
        return info_dict
class VimeoWatchLaterIE(VimeoChannelIE):
    """Extract the logged-in user's 'Watch Later' list (requires login)."""
    IE_NAME = 'vimeo:watchlater'
    IE_DESC = 'Vimeo watch later list, "vimeowatchlater" keyword (requires authentication)'
    _VALID_URL = r'https://vimeo\.com/(?:home/)?watchlater|:vimeowatchlater'
    _TITLE = 'Watch Later'
    _LOGIN_REQUIRED = True
    _TESTS = [{
        'url': 'https://vimeo.com/watchlater',
        'only_matching': True,
    }]
    def _real_initialize(self):
        self._login()
    def _page_url(self, base_url, pagenum):
        url = '%s/page:%d/' % (base_url, pagenum)
        request = sanitized_Request(url)
        # Set the header to get a partial html page with the ids,
        # the normal page doesn't contain them.
        request.add_header('X-Requested-With', 'XMLHttpRequest')
        return request
    def _real_extract(self, url):
        return self._extract_videos('watchlater', 'https://vimeo.com/watchlater')
class VimeoLikesIE(VimeoChannelIE):
    """Extract a user's liked videos (reuses channel pagination)."""
    _VALID_URL = r'https://(?:www\.)?vimeo\.com/(?P<id>[^/]+)/likes/?(?:$|[?#]|sort:)'
    IE_NAME = 'vimeo:likes'
    IE_DESC = 'Vimeo user likes'
    _TESTS = [{
        'url': 'https://vimeo.com/user755559/likes/',
        'playlist_mincount': 293,
        'info_dict': {
            'id': 'user755559',
            'title': 'urza’s Likes',
        },
    }, {
        'url': 'https://vimeo.com/stormlapse/likes',
        'only_matching': True,
    }]
    def _page_url(self, base_url, pagenum):
        # Likes pages paginate without the /videos/ path segment.
        return '%s/page:%d/' % (base_url, pagenum)
    def _real_extract(self, url):
        user_id = self._match_id(url)
        return self._extract_videos(user_id, 'https://vimeo.com/%s/likes' % user_id)
class VHXEmbedIE(VimeoBaseInfoExtractor):
    """Extract videos from embed.vhx.tv players (Vimeo OTT)."""
    IE_NAME = 'vhx:embed'
    _VALID_URL = r'https?://embed\.vhx\.tv/videos/(?P<id>\d+)'
    def _real_extract(self, url):
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)
        # The player config URL lives inside the window.OTTData JS object.
        config_url = self._parse_json(self._search_regex(
            r'window\.OTTData\s*=\s*({.+})', webpage,
            'ott data'), video_id, js_to_json)['config_url']
        config = self._download_json(config_url, video_id)
        info = self._parse_config(config, video_id)
        self._vimeo_sort_formats(info['formats'])
        return info
| gpl-3.0 |
IanDoarn/zbsmsa | tests/loans.py | 1 | 15622 | """
loans.py
Final working version of the mutation automation
Mutates items from special excel file from inventory type
CI - Centralized Inventory to type
ZDI - Zimmer Distributor Inventory
Written by: Ian Doarn
Maintained by: Ian Doarn
"""
from zbsmsa.site import Site
from zbsmsa.inventory.stock import Stock, ProductChooser
from zbsmsa.utils.exceptions import InvalidRange, ItemAddError
import xlrd
import ctypes
import sys
import time
import logging
import os
from datetime import datetime
# TODO: Comment this file
__author__ = "Ian Doarn"
__maintainer__ = "Ian Doarn"
__current_date__ = '{:%m-%d-%Y}'.format(datetime.now())
# Log file lives next to this script; one file per calendar day.
PATH = os.path.dirname(os.path.realpath(__file__))
LOG_FILE_NAME = 'mutation_loans_{}.log'.format(__current_date__)
LOG_FILE_PATH = os.path.join(PATH, LOG_FILE_NAME)
# Win32 MessageBox style flags passed to message_box() (uType argument).
MB_OK = 0x0
MB_HELP = 0x4000
ICON_EXLAIM = 0x30
ICON_INFO = 0x40
ICON_STOP = 0x10
# if os.path.isfile(os.path.join(PATH, LOG_FILE_NAME)):
#     os.remove(os.path.join(PATH, 'LOG_FILE_NAME))
# elif os.path.isfile(LOG_FILE_PATH):
#     os.remove(LOG_FILE_PATH)
# else:
# Create today's log file on first run so the FileHandler can append to it.
if not os.path.isfile(LOG_FILE_PATH):
    with open(LOG_FILE_PATH, 'w')as l_file:
        l_file.close()
# Root logger writing timestamped records to the daily log file.
logger = logging.getLogger()
handler = logging.FileHandler(LOG_FILE_PATH)
formatter = logging.Formatter('%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
def message_box(title, text, style, log=True):
    """Show a blocking Win32 message box; optionally mirror it to the log.

    style is a combination of the MB_* / ICON_* flags defined above.
    """
    if log:
        logger.info("[%s]: %s" % (title, text))
    ctypes.windll.user32.MessageBoxW(0, text, title, style)
def load_workbook(file):
    """Read every sheet of an Excel workbook into a list of dicts.

    Each sheet becomes {'sheet_name', 'data', 'headers', 'total_rows'}.
    Data rows are zipped against the header row; rows missing both the
    lot and serial number are logged and dropped; sheets that cannot be
    parsed at all are returned with None fields.
    """
    workbook = xlrd.open_workbook(file)
    sheets = []
    for sheet in workbook.sheets():
        try:
            headers = sheet.row_values(0)
            rows = []
            for y in range(1, sheet.nrows):
                row_dict = dict(zip(headers, sheet.row_values(y)))
                if row_dict['serial_number'] == '' and row_dict['lot_number'] == '':
                    # Untraceable row: neither lot nor serial -> skip it.
                    logger.warning("Missing lot and serial at sheet:{} row:{} item:{} lot/serial:{}".format(
                        sheet.name, str(y + 1), row_dict['product_number'], 'NULL'
                    ))
                else:
                    # Keep the 1-based spreadsheet row for operator messages.
                    row_dict['row'] = str(y + 1)
                    rows.append(row_dict)
            sheets.append({'sheet_name': sheet.name,
                           'data': rows,
                           'headers': headers,
                           'total_rows': sheet.nrows})
            del rows
            del headers
        except Exception:
            # Sheet could not be parsed (e.g. empty or unexpected layout);
            # record it with empty fields so callers can still iterate.
            sheets.append({'sheet_name': sheet.name,
                           'data': None,
                           'headers': None,
                           'total_rows': None})
    return sheets
def mutate_loans(site, stock, product_chooser, data, note="CI-ZDI Mutation, Loaners Late Debit Policy"):
    """Mutate loan inventory rows from CI to ZDI.

    site/stock/product_chooser -- live zbsmsa page objects.
    data -- list of row dicts from the 'Loans Transferred' sheet.
    note -- mutation note recorded in the system for every item.

    For every row: switch to the row's distributor site if needed, search
    for the product, then mutate the first search-table line whose text
    matches the row's lot + case number (lot-tracked) or case + serial
    number (serial-tracked).  Failures are surfaced to the operator via
    message boxes and written to the log.
    """
    time.sleep(3)  # give the page time to settle before driving it
    site.ribbon("InventoryTab")
    stock.stock_tab()
    for row in data:
        current_site = site.get_site_name()
        logger.info('logged into [{}]'.format(site.get_site_name()))
        current_item_info = "Item: {} Lot: {} Row: {}".format(
            row['product_number'],
            row['lot_number'],
            row['row'])
        # Make sure we are on the distributor site this row belongs to.
        if row['distributor'] != current_site:
            site.change_site(row['distributor'])
            logger.info("site changed to [{}]".format(site.get_site_name()))
            site.ribbon("InventoryTab")
            stock.stock_tab()
        stock.stock_tab_product_chooser()
        product_chooser.search(row['product_number'])
        time.sleep(3)
        try:
            product_chooser.add(row['product_number'])
        except ItemAddError:
            # Let the operator add the product by hand, then resume.
            message_box('Error',
                        'Unable to add [{}] to the product chooser. '
                        'Please add this manually and press ok to continue!'.format(current_item_info),
                        MB_OK | ICON_INFO)
        product_chooser.finish()
        stock.stock_tab_search()
        time.sleep(3)
        stock.iterate_search_table()
        try:
            for table_row in stock.current_table_data:
                if row['lot_number'] != '' and row['serial_number'] == '':
                    # Lot-tracked item: match on lot number and case number.
                    tr_text = table_row['text']
                    if tr_text.__contains__(row['lot_number']) and tr_text.__contains__('CASE-' + row['case_number']):
                        stock.select_row(table_row['row'])
                        try:
                            if stock.mutate_stock(note, click_ok_on_error=False):
                                logger.info("SUCCESSFUL MUTATION: [{}]".format(current_item_info))
                                break
                            else:
                                message_box("Mutation",
                                            "Item was not mutated!"
                                            "\nItem:{} Lot:{} Row:{}".format(row['product_number'],
                                                                             row['lot_number'],
                                                                             row['row']),
                                            MB_OK | ICON_INFO)
                        except Exception as m_error:
                            message_box('Error', "{}. {}\nPlease close any open menus and errors then press ok"
                                                 " to continue.".format(m_error, current_item_info), MB_OK | ICON_INFO,
                                        log=False)
                            logger.error("MutationError:{} {}".format(m_error, current_item_info))
                            break
                        break
                elif row['serial_number'] != '':
                    # Serial-tracked item: match on case number and serial.
                    # NOTE: this previously read str(int(row['serial_number'])) != '',
                    # which raised ValueError on non-numeric serials (silently
                    # reported as "No Records found") and could never be false
                    # otherwise; use the same plain check as mutate_bins().
                    tr_text = table_row['text']
                    if tr_text.__contains__(row['case_number']) and tr_text.__contains__(
                            "Serial " + row['serial_number']):
                        stock.select_row(table_row['row'])
                        try:
                            if stock.mutate_stock(note, click_ok_on_error=False):
                                logger.info("SUCCESSFUL MUTATION: [{}]".format(current_item_info))
                                break
                            else:
                                message_box("Mutation", "Item was not mutated! {}".format(current_item_info),
                                            MB_OK | ICON_INFO)
                        except Exception as m_error:
                            message_box('Error', "{}. {}\nPlease close any open menus and errors then press ok"
                                                 " to continue.".format(m_error, current_item_info), MB_OK | ICON_INFO,
                                        log=False)
                            logger.error("MutationError:{} {}".format(m_error, current_item_info))
                            break
                        break
        except ValueError as val_error:
            message_box('Error', "No Records found. [{}] {}".format(val_error, current_item_info), MB_OK | ICON_INFO)
        except InvalidRange as ivr_error:
            message_box('Error', ivr_error.message + " {}".format(current_item_info), MB_OK | ICON_INFO)
        except Exception as other_error:
            message_box('Error', str(other_error) + " {}".format(current_item_info), MB_OK | ICON_INFO)
        else:
            continue
def mutate_bins(site, stock, product_chooser, data, note="CI-ZDI Mutation, Loaners Late Debit Policy"):
    """Mutate bin-transferred inventory rows from CI to ZDI.

    Same flow as mutate_loans(), but rows come from the 'Bin Transferred'
    sheet: matching is done on the row's bin (with special handling for
    the placeholder 'location_bin' value) plus lot or serial number, and
    items sitting in 'CI MISSING ZONE-0-0' are skipped outright.
    """
    time.sleep(3)
    site.ribbon("InventoryTab")
    stock.stock_tab()
    for row in data:
        current_item_info = "Item: {} Lot: {} Row: {}".format(
            row['product_number'],
            row['lot_number'],
            row['row'])
        if row['bin'] == 'CI MISSING ZONE-0-0':
            # Items in the missing zone cannot be mutated; warn and skip.
            message_box('Error', 'Can not mutate item in bin [{}]: {}'.format(row['bin'], current_item_info),
                        MB_OK | ICON_EXLAIM, log=False)
            logger.warning('Can not mutate item in bin [{}]: {}'.format(row['bin'], current_item_info))
        else:
            current_site = site.get_site_name()
            logger.info('logged into [{}]'.format(site.get_site_name()))
            # Switch to the row's site if we are not already there.
            if row['name'] != current_site:
                site.change_site(row['name'])
                logger.info("site changed to [{}]".format(site.get_site_name()))
                site.ribbon("InventoryTab")
                stock.stock_tab()
            stock.stock_tab_product_chooser()
            product_chooser.search(row['product_number'])
            time.sleep(3)
            try:
                product_chooser.add(row['product_number'])
            except ItemAddError as iae:
                # Let the operator add the product manually, then resume.
                message_box('Error',
                            'Unable to add [{}] to the product chooser. '
                            'Please add this manually and press ok to continue!'.format(current_item_info),
                            MB_OK | ICON_INFO)
            product_chooser.finish()
            stock.stock_tab_search()
            time.sleep(3)
            stock.iterate_search_table()
            try:
                for table_row in stock.current_table_data:
                    if row['lot_number'] != '' and row['serial_number'] == '':
                        # Lot-tracked item: match on lot number and bin.
                        tr_text = table_row['text']
                        bin_num = ''
                        if row['bin'] == 'location_bin':
                            # Placeholder bin: resolve against the row text.
                            if tr_text.__contains__('Location Bin'):
                                bin_num = 'Location Bin'
                            if tr_text.__contains__('Consigned'):
                                bin_num = 'Consigned'
                        else:
                            bin_num = row['bin']
                        if tr_text.__contains__(row['lot_number']) and tr_text.__contains__(bin_num):
                            stock.select_row(table_row['row'])
                            try:
                                if stock.mutate_stock(note, click_ok_on_error=False):
                                    logger.info("SUCCESSFUL MUTATION: [{}]".format(current_item_info))
                                    break
                                else:
                                    message_box("Mutation",
                                                "Item was not mutated!"
                                                "\nItem:{} Lot:{} Row:{}".format(row['product_number'],
                                                                                 row['lot_number'],
                                                                                 row['row']),
                                                MB_OK | ICON_INFO)
                            except Exception as m_error:
                                message_box('Error', "{}. {}\nPlease close any open menus and errors then press ok"
                                                     " to continue.".format(m_error, current_item_info), MB_OK | ICON_INFO,
                                            log=False)
                                logger.error("MutationError:{} {}".format(m_error, current_item_info))
                                break
                            break
                    elif row['serial_number'] != '':
                        # Serial-tracked item: match on bin and serial number.
                        tr_text = table_row['text']
                        bin_num = ''
                        if row['bin'] == 'location_bin':
                            if tr_text.__contains__('Location Bin'):
                                bin_num = 'Location Bin'
                            if tr_text.__contains__('Consigned'):
                                bin_num = 'Consigned'
                        else:
                            bin_num = row['bin']
                        if tr_text.__contains__(bin_num) and tr_text.__contains__("Serial " + row['serial_number']):
                            stock.select_row(table_row['row'])
                            try:
                                if stock.mutate_stock(note, click_ok_on_error=False):
                                    logger.info("SUCCESSFUL MUTATION: [{}]".format(current_item_info))
                                    break
                                else:
                                    message_box("Mutation", "Item was not mutated! {}".format(current_item_info),
                                                MB_OK | ICON_INFO)
                            except Exception as m_error:
                                message_box('Error', "{}. {}\nPlease close any open menus and errors then press ok"
                                                     " to continue.".format(m_error, current_item_info), MB_OK | ICON_INFO,
                                            log=False)
                                logger.error("MutationError:{} {}".format(m_error, current_item_info))
                                break
                            break
            except ValueError as val_error:
                message_box('Error', "No Records found. [{}] {}".format(val_error, current_item_info),
                            MB_OK | ICON_INFO)
            except InvalidRange as ivr_error:
                message_box('Error', ivr_error.message + " {}".format(current_item_info),
                            MB_OK | ICON_INFO)
            except Exception as other_error:
                message_box('Error', str(other_error) + " {}".format(current_item_info),
                            MB_OK | ICON_INFO)
            else:
                continue
def main(username, password, file, driver):
    """Launch the site, load the workbook and run both mutation passes.

    username/password -- site credentials.
    file -- path to the special mutation Excel workbook.
    driver -- path to the selenium webdriver executable.
    """
    site = Site(username, password, driver, launch=False)
    mutation_data = load_workbook(file)
    site.launch(maximize=False)
    # Log launch information
    logger.debug("USER[{}]".format(username))
    logger.debug("DRIVER[{}]".format(driver))
    logger.debug("FILE[{}]".format(file))
    logger.debug("URI[{}]".format(site.driver_uri))
    logger.debug("SESSIONID[{}]".format(site.driver_session_id))
    stock = Stock(site)
    pc = ProductChooser(stock)
    # The operator confirms the page is ready before automation starts.
    message_box("Mutation", "Please press ok when the site has fully loaded",
                MB_OK | ICON_INFO, log=False)
    site.login()
    # Each sheet drives the pass that matches its name; others are ignored.
    for sheet in mutation_data:
        if sheet['sheet_name'] == 'Loans Transferred':
            mutate_loans(site, stock, pc, sheet['data'])
        if sheet['sheet_name'] == 'Bin Transferred':
            mutate_bins(site, stock, pc, sheet['data'])
    site.close()
if __name__ == '__main__':
    # Minimal CLI: four positional arguments are required.
    usage = "loans.py [username] [password] [driver location] [file location]"
    if len(sys.argv[1:]) not in [4, 5]:
        print(usage)
    else:
        _user = sys.argv[1]
        _pass = sys.argv[2]
        _drive_loc = sys.argv[3]
        _file_loc = sys.argv[4]
        try:
            logger.info("Begin program execution at main()")
            main(_user, _pass, _file_loc, _drive_loc)
        except KeyboardInterrupt as ki_error:
            # Treat Ctrl+C the same as any other fatal failure: alert + log.
            message_box("Fatal Error", "[FATAL]::Fatal error caused program to fail.\nERROR:{}".format(ki_error),
                        MB_OK | ICON_STOP, log=False)
            logger.critical("[FATAL]:: Fatal error caused program to fail. ERROR:{}".format(ki_error))
        except Exception as fatal_error:
            message_box("Fatal Error", "[FATAL]::Fatal error caused program to fail.\nERROR:{}".format(fatal_error),
                        MB_OK | ICON_STOP, log=False)
            logger.critical("[FATAL]:: Fatal error caused program to fail. ERROR:{}".format(fatal_error))
        else:
            pass
LAIRLAB/qr_trees | src/python/run_ilqr_diffdrive.py | 1 | 2328 | #!/usr/bin/env python
#
# Arun Venkatraman (arunvenk@cs.cmu.edu)
# December 2016
#
# If we are not running from the build directory, then add lib to path from
# build assuming we are running from the python folder
import os
full_path = os.path.realpath(__file__)
# When run from src/python (not the build tree), make the compiled
# extension modules under build/ importable.
if full_path.count("src/python") > 0:
    import sys
    to_add = os.path.abspath(os.path.join(os.path.split(full_path)[0], "../../build/"))
    sys.path.append(to_add)
from IPython import embed
import lib.ilqr_diffdrive as ilqr
import visualize_circle_world as vis
import numpy as np
import matplotlib.pyplot as plt
if __name__ == "__main__":
    # Uniform prior over which of the two candidate worlds is the true one.
    obs_prior = [0.5, 0.5]
    world_dims = [-30, 30, -30, 30]
    w1 = ilqr.CircleWorld(world_dims)
    w2 = ilqr.CircleWorld(world_dims)
    # The two worlds differ only in where the circular obstacle sits.
    obs_pos_1 = [-2, 0.0]
    obs_pos_2 = [2, 0.0]
    obs_radius = 10.0
    obstacle_1 = ilqr.Circle(obs_radius, obs_pos_1);
    obstacle_2 = ilqr.Circle(obs_radius, obs_pos_2);
    # add obstacle to world 1
    w1.add_obstacle(obstacle_1);
    # add obstacle to world 2
    w2.add_obstacle(obstacle_2);
    # Run each control policy with each world acting as ground truth.
    cost, states_true_1, obs_fname_1 = ilqr.control_diffdrive(ilqr.TRUE_ILQR,
            w1, w2, obs_prior, "true1", "true1")
    cost, states_true_2, obs_fname_2 = ilqr.control_diffdrive(ilqr.TRUE_ILQR,
            w2, w1, obs_prior, "true2", "true2")
    cost, states_weighted_1, obs_fname_3 =\
        ilqr.control_diffdrive(ilqr.PROB_WEIGHTED_CONTROL,
            w1, w2, obs_prior, "weight3", "weight3")
    cost, states_weighted_2, obs_fname_4 =\
        ilqr.control_diffdrive(ilqr.PROB_WEIGHTED_CONTROL,
            w2, w1, obs_prior, "weight4", "weight4")
    cost, states_hind_1, obs_fname_5 =\
        ilqr.control_diffdrive(ilqr.HINDSIGHT,
            w1, w2, obs_prior, "hind3", "hind3")
    cost, states_hind_2, obs_fname_6 =\
        ilqr.control_diffdrive(ilqr.HINDSIGHT,
            w2, w1, obs_prior, "hind4", "hind4")
    # Overlay the trajectories of all three policies for each world.
    print("Drawing world 1")
    ax1 = vis.parse_draw_files([states_true_1, states_weighted_1, states_hind_1], obs_fname_1,
            show=False)
    plt.title('World 1')
    print("Drawing world 2")
    ax2 = vis.parse_draw_files([states_true_2, states_weighted_2, states_hind_2],
            obs_fname_2, show=False)
    plt.title('World 2')
    plt.show()
    # Drop into an interactive shell for inspecting the results.
    embed()
| bsd-3-clause |
drcapulet/sentry | src/sentry/db/models/fields/gzippeddict.py | 29 | 1600 | """
sentry.db.models.fields.gzippeddict
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, print_function
import logging
import six
from django.db import models
from south.modelsinspector import add_introspection_rules
from sentry.utils.compat import pickle
from sentry.utils.strings import decompress, compress
__all__ = ('GzippedDictField',)
logger = logging.getLogger('sentry')
class GzippedDictField(models.TextField):
    """
    Slightly different from a JSONField in the sense that the default
    value is a dictionary.

    Values are stored in the text column as zlib-compressed pickles
    (see sentry.utils.strings.compress/decompress).
    """
    __metaclass__ = models.SubfieldBase
    def to_python(self, value):
        # Decompress + unpickle stored strings; any failure is logged and
        # collapses to an empty dict instead of raising.
        if isinstance(value, six.string_types) and value:
            try:
                value = pickle.loads(decompress(value))
            except Exception as e:
                logger.exception(e)
                return {}
        elif not value:
            return {}
        return value
    def get_prep_value(self, value):
        if not value and self.null:
            # save ourselves some storage
            return None
        # enforce unicode strings to guarantee consistency
        if isinstance(value, str):
            value = six.text_type(value)
        return compress(pickle.dumps(value))
    def value_to_string(self, obj):
        # Serialize by reusing the DB preparation (compressed pickle).
        value = self._get_val_from_obj(obj)
        return self.get_prep_value(value)
| bsd-3-clause |
lucach/spellcorrect | appengine/lib/markupsafe/_native.py | 1243 | 1187 | # -*- coding: utf-8 -*-
"""
markupsafe._native
~~~~~~~~~~~~~~~~~~
Native Python implementation the C module is not compiled.
:copyright: (c) 2010 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from markupsafe import Markup
from markupsafe._compat import text_type
def escape(s):
    """Convert the characters &, <, >, ' and " in string s to HTML-safe
    sequences.  Use this if you need to display text that might contain
    such characters in HTML.  Marks return value as markup string.
    """
    if hasattr(s, '__html__'):
        # Object already knows how to render itself as safe HTML.
        return s.__html__()
    # '&' must be replaced first so the entities inserted below are not
    # themselves re-escaped.  (The replacement entities had been stripped
    # to identity no-ops by an HTML-unescaping pass; restored here.)
    return Markup(text_type(s)
        .replace('&', '&amp;')
        .replace('>', '&gt;')
        .replace('<', '&lt;')
        .replace("'", '&#39;')
        .replace('"', '&#34;')
    )
def escape_silent(s):
    """Like :func:`escape` but converts `None` into an empty
    markup string.
    """
    return Markup() if s is None else escape(s)
def soft_unicode(s):
    """Make a string unicode if it isn't already.  That way a markup
    string is not converted back to unicode.
    """
    return s if isinstance(s, text_type) else text_type(s)
| agpl-3.0 |
Mic92/ansible | lib/ansible/playbook/handler.py | 237 | 1957 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.errors import AnsibleError
#from ansible.inventory.host import Host
from ansible.playbook.task import Task
class Handler(Task):
    """A task that only runs when notified; remembers which hosts notified it."""

    def __init__(self, block=None, role=None, task_include=None):
        self._flagged_hosts = []
        super(Handler, self).__init__(block=block, role=role, task_include=task_include)

    def __repr__(self):
        """Return a human readable representation of the handler."""
        return "HANDLER: %s" % self.get_name()

    @staticmethod
    def load(data, block=None, role=None, task_include=None, variable_manager=None, loader=None):
        """Alternate constructor: build a Handler and populate it from data."""
        handler = Handler(block=block, role=role, task_include=task_include)
        return handler.load_data(data, variable_manager=variable_manager, loader=loader)

    def flag_for_host(self, host):
        """Record that *host* notified this handler (idempotent)."""
        if host not in self._flagged_hosts:
            self._flagged_hosts.append(host)

    def has_triggered(self, host):
        """Return True when *host* has already notified this handler."""
        return host in self._flagged_hosts

    def serialize(self):
        """Serialize like a Task, tagging the result as a handler."""
        result = super(Handler, self).serialize()
        result['is_handler'] = True
        return result
| gpl-3.0 |
xen0l/ansible | lib/ansible/plugins/terminal/nxos.py | 3 | 3853 | #
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import re
import json
from ansible.plugins.terminal import TerminalBase
from ansible.errors import AnsibleConnectionFailure
from ansible.module_utils._text import to_bytes, to_text
class TerminalModule(TerminalBase):
    """Terminal plugin for Cisco NX-OS devices: prompt/error detection
    and privilege ('enable' mode) handling over the CLI connection."""
    # Patterns that match an NX-OS CLI prompt (exec and config modes).
    terminal_stdout_re = [
        re.compile(br'[\r\n]?(?!\s*<)?(\x1b\S+)*[a-zA-Z_0-9]{1}[a-zA-Z0-9-_.]*[>|#](?:\s*)*(\x1b\S+)*$'),
        re.compile(br'[\r\n]?[a-zA-Z0-9]{1}[a-zA-Z0-9-_.]*\(.+\)#(?:\s*)$')
    ]
    # Patterns that indicate the device reported an error for a command.
    terminal_stderr_re = [
        re.compile(br"% ?Error"),
        re.compile(br"^error:(.*)", re.I),
        re.compile(br"^% \w+", re.M),
        re.compile(br"% ?Bad secret"),
        re.compile(br"invalid input", re.I),
        re.compile(br"(?:incomplete|ambiguous) command", re.I),
        re.compile(br"connection timed out", re.I),
        re.compile(br"[^\r\n]+ not found", re.I),
        re.compile(br"'[^']' +returned error code: ?\d+"),
        re.compile(br"syntax error"),
        re.compile(br"unknown command"),
        re.compile(br"user not present"),
        re.compile(br"invalid (.+?)at '\^' marker", re.I),
        re.compile(br"baud rate of console should be (\d*) to increase severity level", re.I),
    ]
    def on_become(self, passwd=None):
        """Elevate to privilege level 15 ('enable' mode), optionally with a password."""
        if self._get_prompt().endswith(b'enable#'):
            # Already in enable mode.
            return
        out = self._exec_cli_command('show privilege')
        out = to_text(out, errors='surrogate_then_replace').strip()
        if 'Disabled' in out:
            raise AnsibleConnectionFailure('Feature privilege is not enabled')
        # if already at privilege level 15 return
        if '15' in out:
            return
        cmd = {u'command': u'enable'}
        if passwd:
            cmd[u'prompt'] = to_text(r"(?i)[\r\n]?Password: $", errors='surrogate_or_strict')
            cmd[u'answer'] = passwd
            cmd[u'prompt_retry_check'] = True
        try:
            self._exec_cli_command(to_bytes(json.dumps(cmd), errors='surrogate_or_strict'))
            prompt = self._get_prompt()
            if prompt is None or not prompt.strip().endswith(b'enable#'):
                raise AnsibleConnectionFailure('failed to elevate privilege to enable mode still at prompt [%s]' % prompt)
        except AnsibleConnectionFailure as e:
            prompt = self._get_prompt()
            raise AnsibleConnectionFailure('unable to elevate privilege to enable mode, at prompt [%s] with error: %s' % (prompt, e.message))
    def on_unbecome(self):
        """Leave enable/config mode, returning to the base exec prompt."""
        prompt = self._get_prompt()
        if prompt is None:
            # if prompt is None most likely the terminal is hung up at a prompt
            return
        if b'(config' in prompt:
            # Exit config mode first, then the enable shell.
            self._exec_cli_command('end')
            self._exec_cli_command('exit')
        elif prompt.endswith(b'enable#'):
            self._exec_cli_command('exit')
    def on_open_shell(self):
        """Disable pagination and widen the terminal for machine parsing."""
        try:
            for cmd in ('terminal length 0', 'terminal width 511'):
                self._exec_cli_command(cmd)
        except AnsibleConnectionFailure:
            raise AnsibleConnectionFailure('unable to set terminal parameters')
| gpl-3.0 |
michaelkuty/django-oscar | src/oscar/apps/search/app.py | 25 | 1040 | from django.conf.urls import url
from haystack.views import search_view_factory
from oscar.apps.search import facets
from oscar.core.application import Application
from oscar.core.loading import get_class
class SearchApplication(Application):
    """Oscar application wiring up the Haystack-powered faceted search view."""
    name = 'search'
    search_view = get_class('search.views', 'FacetedSearchView')
    search_form = get_class('search.forms', 'SearchForm')
    def get_urls(self):
        # The form class has to be passed to the __init__ method as that is how
        # Haystack works. It's slightly different to normal CBVs.
        urlpatterns = [
            url(r'^$', search_view_factory(
                view_class=self.search_view,
                form_class=self.search_form,
                searchqueryset=self.get_sqs()),
                name='search'),
        ]
        return self.post_process_urls(urlpatterns)
    def get_sqs(self):
        """
        Return the SQS required by the Haystack search view
        """
        return facets.base_sqs()
# Module-level singleton consumed by the URL configuration.
application = SearchApplication()
40223220/2015cb | static/Brython3.1.1-20150328-091302/Lib/platform.py | 620 | 51006 | #!/usr/bin/env python3
""" This module tries to retrieve as much platform-identifying data as
possible. It makes this information available via function APIs.
If called from the command line, it prints the platform
information concatenated as single string to stdout. The output
format is useable as part of a filename.
"""
# This module is maintained by Marc-Andre Lemburg <mal@egenix.com>.
# If you find problems, please submit bug reports/patches via the
# Python bug tracker (http://bugs.python.org) and assign them to "lemburg".
#
# Still needed:
# * more support for WinCE
# * support for MS-DOS (PythonDX ?)
# * support for Amiga and other still unsupported platforms running Python
# * support for additional Linux distributions
#
# Many thanks to all those who helped adding platform-specific
# checks (in no particular order):
#
# Charles G Waldman, David Arnold, Gordon McMillan, Ben Darnell,
# Jeff Bauer, Cliff Crawford, Ivan Van Laningham, Josef
# Betancourt, Randall Hopper, Karl Putland, John Farrell, Greg
# Andruk, Just van Rossum, Thomas Heller, Mark R. Levinson, Mark
# Hammond, Bill Tutt, Hans Nowak, Uwe Zessin (OpenVMS support),
# Colin Kong, Trent Mick, Guido van Rossum, Anthony Baxter
#
# History:
#
# <see CVS and SVN checkin messages for history>
#
# 1.0.7 - added DEV_NULL
# 1.0.6 - added linux_distribution()
# 1.0.5 - fixed Java support to allow running the module on Jython
# 1.0.4 - added IronPython support
# 1.0.3 - added normalization of Windows system name
# 1.0.2 - added more Windows support
# 1.0.1 - reformatted to make doc.py happy
# 1.0.0 - reformatted a bit and checked into Python CVS
# 0.8.0 - added sys.version parser and various new access
# APIs (python_version(), python_compiler(), etc.)
# 0.7.2 - fixed architecture() to use sizeof(pointer) where available
# 0.7.1 - added support for Caldera OpenLinux
# 0.7.0 - some fixes for WinCE; untabified the source file
# 0.6.2 - support for OpenVMS - requires version 1.5.2-V006 or higher and
# vms_lib.getsyi() configured
# 0.6.1 - added code to prevent 'uname -p' on platforms which are
# known not to support it
# 0.6.0 - fixed win32_ver() to hopefully work on Win95,98,NT and Win2k;
# did some cleanup of the interfaces - some APIs have changed
# 0.5.5 - fixed another type in the MacOS code... should have
# used more coffee today ;-)
# 0.5.4 - fixed a few typos in the MacOS code
# 0.5.3 - added experimental MacOS support; added better popen()
# workarounds in _syscmd_ver() -- still not 100% elegant
# though
# 0.5.2 - fixed uname() to return '' instead of 'unknown' in all
# return values (the system uname command tends to return
# 'unknown' instead of just leaving the field emtpy)
# 0.5.1 - included code for slackware dist; added exception handlers
# to cover up situations where platforms don't have os.popen
# (e.g. Mac) or fail on socket.gethostname(); fixed libc
# detection RE
# 0.5.0 - changed the API names referring to system commands to *syscmd*;
# added java_ver(); made syscmd_ver() a private
# API (was system_ver() in previous versions) -- use uname()
# instead; extended the win32_ver() to also return processor
# type information
# 0.4.0 - added win32_ver() and modified the platform() output for WinXX
# 0.3.4 - fixed a bug in _follow_symlinks()
# 0.3.3 - fixed popen() and "file" command invokation bugs
# 0.3.2 - added architecture() API and support for it in platform()
# 0.3.1 - fixed syscmd_ver() RE to support Windows NT
# 0.3.0 - added system alias support
# 0.2.3 - removed 'wince' again... oh well.
# 0.2.2 - added 'wince' to syscmd_ver() supported platforms
# 0.2.1 - added cache logic and changed the platform string format
# 0.2.0 - changed the API to use functions instead of module globals
# since some action take too long to be run on module import
# 0.1.0 - first release
#
# You can always get the latest version of this module at:
#
# http://www.egenix.com/files/python/platform.py
#
# If that URL should fail, try contacting the author.
__copyright__ = """
Copyright (c) 1999-2000, Marc-Andre Lemburg; mailto:mal@lemburg.com
Copyright (c) 2000-2010, eGenix.com Software GmbH; mailto:info@egenix.com
Permission to use, copy, modify, and distribute this software and its
documentation for any purpose and without fee or royalty is hereby granted,
provided that the above copyright notice appear in all copies and that
both that copyright notice and this permission notice appear in
supporting documentation or portions thereof, including modifications,
that you make.
EGENIX.COM SOFTWARE GMBH DISCLAIMS ALL WARRANTIES WITH REGARD TO
THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL,
INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
WITH THE USE OR PERFORMANCE OF THIS SOFTWARE !
"""
__version__ = '1.0.7'
import collections
import sys, os, re, subprocess
### Globals & Constants

# Determine the platform's /dev/null device.
# DEV_NULL is used below to silence stderr of shell commands run via
# os.popen() (see _syscmd_uname()).
try:
    DEV_NULL = os.devnull
except AttributeError:
    # os.devnull was added in Python 2.4, so emulate it for earlier
    # Python versions
    if sys.platform in ('dos','win32','win16','os2'):
        # Use the old CP/M NUL as device name
        DEV_NULL = 'NUL'
    else:
        # Standard Unix uses /dev/null
        DEV_NULL = '/dev/null'
### Platform specific APIs

# Byte regex scanned over the executable's contents by libc_ver().
# Matches, in order: the old __libc_init symbol, a GLIBC_x.y version tag,
# or an embedded libc.so.N shared-object name (with optional version).
_libc_search = re.compile(b'(__libc_init)'
                          b'|'
                          b'(GLIBC_([0-9.]+))'
                          b'|'
                          br'(libc(_\w+)?\.so(?:\.(\d[0-9.]*))?)', re.ASCII)
def libc_ver(executable=sys.executable,lib='',version='',
             chunksize=16384):
    """ Tries to determine the libc version that the file executable
        (which defaults to the Python interpreter) is linked against.

        Returns a tuple of strings (lib,version) which default to the
        given parameters in case the lookup fails.

        Note that the function has intimate knowledge of how different
        libc versions add symbols to the executable and thus is probably
        only useable for executables compiled using gcc.

        The file is read and scanned in chunks of chunksize bytes.
    """
    if hasattr(os.path, 'realpath'):
        # Python 2.2 introduced os.path.realpath(); it is used
        # here to work around problems with Cygwin not being
        # able to open symlinks for reading
        executable = os.path.realpath(executable)
    f = open(executable,'rb')
    binary = f.read(chunksize)
    pos = 0
    # Scan the binary chunk by chunk for libc markers.
    # NOTE(review): a marker that straddles a chunk boundary is missed,
    # and the file handle leaks if an exception escapes this loop --
    # both are pre-existing limitations of this scanning scheme.
    while 1:
        if b'libc' in binary or b'GLIBC' in binary:
            m = _libc_search.search(binary,pos)
        else:
            m = None
        if not m:
            # No (further) marker in this chunk: fetch the next one.
            binary = f.read(chunksize)
            if not binary:
                break
            pos = 0
            continue
        # Decode matched groups; groups that did not participate stay None.
        libcinit,glibc,glibcversion,so,threads,soversion = [
            s.decode('latin1') if s is not None else s
            for s in m.groups()]
        if libcinit and not lib:
            lib = 'libc'
        elif glibc:
            if lib != 'glibc':
                lib = 'glibc'
                version = glibcversion
            elif glibcversion > version:
                # Keep the highest GLIBC_x.y tag seen (string comparison).
                version = glibcversion
        elif so:
            if lib != 'glibc':
                lib = 'libc'
                if soversion and soversion > version:
                    version = soversion
                if threads and version[-len(threads):] != threads:
                    version = version + threads
        pos = m.end()
    f.close()
    return lib,version
def _dist_try_harder(distname,version,id):
    """ Tries some special tricks to get the distribution
        information in case the default method fails.

        Currently supports older SuSE Linux, Caldera OpenLinux and
        Slackware Linux distributions.

        Returns a (distname, version, id) tuple, defaulting to the
        values passed in when nothing better can be found.
    """
    if os.path.exists('/var/adm/inst-log/info'):
        # SuSE Linux stores distribution information in that file
        distname = 'SuSE'
        for line in open('/var/adm/inst-log/info'):
            tv = line.split()
            if len(tv) == 2:
                tag,value = tv
            else:
                continue
            if tag == 'MIN_DIST_VERSION':
                version = value.strip()
            elif tag == 'DIST_IDENT':
                # DIST_IDENT looks like "name-arch-id"; keep the id part
                values = value.split('-')
                id = values[2]
        return distname,version,id
    if os.path.exists('/etc/.installed'):
        # Caldera OpenLinux has some infos in that file (thanks to Colin Kong)
        for line in open('/etc/.installed'):
            pkg = line.split('-')
            if len(pkg) >= 2 and pkg[0] == 'OpenLinux':
                # XXX does Caldera support non Intel platforms ? If yes,
                # where can we find the needed id ?
                return 'OpenLinux',pkg[1],id
    if os.path.isdir('/usr/lib/setup'):
        # Check for slackware version tag file (thanks to Greg Andruk)
        verfiles = os.listdir('/usr/lib/setup')
        # Iterate backwards so deleting entries does not skip any
        for n in range(len(verfiles)-1, -1, -1):
            if verfiles[n][:14] != 'slack-version-':
                del verfiles[n]
        if verfiles:
            verfiles.sort()
            distname = 'slackware'
            version = verfiles[-1][14:]
            return distname,version,id
    return distname,version,id
_release_filename = re.compile(r'(\w+)[-_](release|version)', re.ASCII)
_lsb_release_version = re.compile(r'(.+)'
' release '
'([\d.]+)'
'[^(]*(?:\((.+)\))?', re.ASCII)
_release_version = re.compile(r'([^0-9]+)'
'(?: release )?'
'([\d.]+)'
'[^(]*(?:\((.+)\))?', re.ASCII)
# See also http://www.novell.com/coolsolutions/feature/11251.html
# and http://linuxmafia.com/faq/Admin/release-files.html
# and http://data.linux-ntfs.org/rpm/whichrpm
# and http://www.die.net/doc/linux/man/man1/lsb_release.1.html
_supported_dists = (
'SuSE', 'debian', 'fedora', 'redhat', 'centos',
'mandrake', 'mandriva', 'rocks', 'slackware', 'yellowdog', 'gentoo',
'UnitedLinux', 'turbolinux', 'arch', 'mageia')
def _parse_release_file(firstline):
# Default to empty 'version' and 'id' strings. Both defaults are used
# when 'firstline' is empty. 'id' defaults to empty when an id can not
# be deduced.
version = ''
id = ''
# Parse the first line
m = _lsb_release_version.match(firstline)
if m is not None:
# LSB format: "distro release x.x (codename)"
return tuple(m.groups())
# Pre-LSB format: "distro x.x (codename)"
m = _release_version.match(firstline)
if m is not None:
return tuple(m.groups())
# Unknown format... take the first two words
l = firstline.strip().split()
if l:
version = l[0]
if len(l) > 1:
id = l[1]
return '', version, id
def linux_distribution(distname='', version='', id='',
                       supported_dists=_supported_dists,
                       full_distribution_name=1):
    """ Tries to determine the name of the Linux OS distribution name.

        The function first looks for a distribution release file in
        /etc and then reverts to _dist_try_harder() in case no
        suitable files are found.

        supported_dists may be given to define the set of Linux
        distributions to look for. It defaults to a list of currently
        supported Linux distributions identified by their release file
        name.

        If full_distribution_name is true (default), the full
        distribution read from the OS is returned. Otherwise the short
        name taken from supported_dists is used.

        Returns a tuple (distname,version,id) which default to the
        args given as parameters.
    """
    try:
        etc = os.listdir('/etc')
    except os.error:
        # Probably not a Unix system
        return distname,version,id
    # Sort so the file picked is deterministic when several match
    etc.sort()
    for file in etc:
        m = _release_filename.match(file)
        if m is not None:
            _distname,dummy = m.groups()
            if _distname in supported_dists:
                distname = _distname
                break
    else:
        # No known release file found: fall back to special tricks
        return _dist_try_harder(distname,version,id)

    # Read the first line
    with open('/etc/'+file, 'r') as f:
        firstline = f.readline()
    _distname, _version, _id = _parse_release_file(firstline)

    # Only override the defaults with non-empty parse results
    if _distname and full_distribution_name:
        distname = _distname
    if _version:
        version = _version
    if _id:
        id = _id
    return distname, version, id
# To maintain backwards compatibility:

def dist(distname='',version='',id='',
         supported_dists=_supported_dists):
    """ Backwards-compatible wrapper around linux_distribution().

        Behaves exactly like linux_distribution() except that the short
        distribution name taken from *supported_dists* is always used
        instead of the full name read from the OS.
    """
    return linux_distribution(
        distname,
        version,
        id,
        supported_dists=supported_dists,
        full_distribution_name=0,
    )
def popen(cmd, mode='r', bufsize=-1):
    """ Portable popen() interface.

        Deprecated: kept only for backwards compatibility; this simply
        warns and delegates to os.popen().
    """
    import warnings
    warnings.warn('use os.popen instead', DeprecationWarning, stacklevel=2)
    pipe = os.popen(cmd, mode, bufsize)
    return pipe
def _norm_version(version, build=''):
""" Normalize the version and build strings and return a single
version string using the format major.minor.build (or patchlevel).
"""
l = version.split('.')
if build:
l.append(build)
try:
ints = map(int,l)
except ValueError:
strings = l
else:
strings = list(map(str,ints))
version = '.'.join(strings[:3])
return version
_ver_output = re.compile(r'(?:([\w ]+) ([\w.]+) '
'.*'
'\[.* ([\d.]+)\])')
# Examples of VER command output:
#
# Windows 2000: Microsoft Windows 2000 [Version 5.00.2195]
# Windows XP: Microsoft Windows XP [Version 5.1.2600]
# Windows Vista: Microsoft Windows [Version 6.0.6002]
#
# Note that the "Version" string gets localized on different
# Windows versions.
def _syscmd_ver(system='', release='', version='',
               supported_platforms=('win32','win16','dos','os2')):
    """ Tries to figure out the OS version used and returns
        a tuple (system,release,version).

        It uses the "ver" shell command for this which is known
        to exists on Windows, DOS and OS/2. XXX Others too ?

        In case this fails, the given parameters are used as
        defaults.
    """
    if sys.platform not in supported_platforms:
        return system,release,version

    # Try some common cmd strings; the first one that runs and exits
    # cleanly wins.
    for cmd in ('ver','command /c ver','cmd /c ver'):
        try:
            pipe = popen(cmd)
            info = pipe.read()
            if pipe.close():
                # Non-zero exit status: treat like a failed command
                raise os.error('command failed')
            # XXX How can I suppress shell errors from being written
            # to stderr ?
        except os.error as why:
            #print 'Command %s failed: %s' % (cmd,why)
            continue
        except IOError as why:
            #print 'Command %s failed: %s' % (cmd,why)
            continue
        else:
            break
    else:
        # All command variants failed: return the defaults
        return system,release,version

    # Parse the output
    info = info.strip()
    m = _ver_output.match(info)
    if m is not None:
        system,release,version = m.groups()
        # Strip trailing dots from version and release
        if release[-1] == '.':
            release = release[:-1]
        if version[-1] == '.':
            version = version[:-1]
        # Normalize the version and build strings (eliminating additional
        # zeros)
        version = _norm_version(version)
    return system,release,version
def _win32_getvalue(key,name,default=''):
    """ Read a value for name from the registry key.

        In case this fails, default is returned.  On success the raw
        (value, type) tuple from RegQueryValueEx is returned.
    """
    try:
        # Use win32api if available
        from win32api import RegQueryValueEx
    except ImportError:
        # On Python 2.0 and later, emulate using winreg
        import winreg
        RegQueryValueEx = winreg.QueryValueEx
    try:
        return RegQueryValueEx(key,name)
    except:
        # NOTE(review): bare except also swallows KeyboardInterrupt /
        # SystemExit; kept as-is to preserve behaviour.
        return default
def win32_ver(release='',version='',csd='',ptype=''):
    """ Get additional version information from the Windows Registry
        and return a tuple (version,csd,ptype) referring to version
        number, CSD level (service pack), and OS type (multi/single
        processor).

        As a hint: ptype returns 'Uniprocessor Free' on single
        processor NT machines and 'Multiprocessor Free' on multi
        processor machines. The 'Free' refers to the OS version being
        free of debugging code. It could also state 'Checked' which
        means the OS version uses debugging code, i.e. code that
        checks arguments, ranges, etc. (Thomas Heller).

        Note: this function works best with Mark Hammond's win32
        package installed, but also on Python 2.3 and later. It
        obviously only runs on Win32 compatible platforms.
    """
    # XXX Is there any way to find out the processor type on WinXX ?
    # XXX Is win32 available on Windows CE ?
    #
    # Adapted from code posted by Karl Putland to comp.lang.python.
    #
    # The mappings between reg. values and release names can be found
    # here: http://msdn.microsoft.com/library/en-us/sysinfo/base/osversioninfo_str.asp

    # Import the needed APIs
    try:
        import win32api
        from win32api import RegQueryValueEx, RegOpenKeyEx, \
             RegCloseKey, GetVersionEx
        from win32con import HKEY_LOCAL_MACHINE, VER_PLATFORM_WIN32_NT, \
             VER_PLATFORM_WIN32_WINDOWS, VER_NT_WORKSTATION
    except ImportError:
        # Emulate the win32api module using Python APIs
        try:
            sys.getwindowsversion
        except AttributeError:
            # No emulation possible, so return the defaults...
            return release,version,csd,ptype
        else:
            # Emulation using winreg (added in Python 2.0) and
            # sys.getwindowsversion() (added in Python 2.3)
            import winreg
            GetVersionEx = sys.getwindowsversion
            RegQueryValueEx = winreg.QueryValueEx
            RegOpenKeyEx = winreg.OpenKeyEx
            RegCloseKey = winreg.CloseKey
            HKEY_LOCAL_MACHINE = winreg.HKEY_LOCAL_MACHINE
            VER_PLATFORM_WIN32_WINDOWS = 1
            VER_PLATFORM_WIN32_NT = 2
            VER_NT_WORKSTATION = 1
            VER_NT_SERVER = 3
            REG_SZ = 1

    # Find out the registry key and some general version infos
    winver = GetVersionEx()
    # NOTE: 'min' shadows the builtin within this function (kept as-is).
    maj,min,buildno,plat,csd = winver
    version = '%i.%i.%i' % (maj,min,buildno & 0xFFFF)
    if hasattr(winver, "service_pack"):
        if winver.service_pack != "":
            csd = 'SP%s' % winver.service_pack_major
    else:
        # Older API: csd is the raw "Service Pack N" string
        if csd[:13] == 'Service Pack ':
            csd = 'SP' + csd[13:]

    if plat == VER_PLATFORM_WIN32_WINDOWS:
        regkey = 'SOFTWARE\\Microsoft\\Windows\\CurrentVersion'
        # Try to guess the release name
        if maj == 4:
            if min == 0:
                release = '95'
            elif min == 10:
                release = '98'
            elif min == 90:
                release = 'Me'
            else:
                release = 'postMe'
        elif maj == 5:
            release = '2000'

    elif plat == VER_PLATFORM_WIN32_NT:
        regkey = 'SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion'
        if maj <= 4:
            release = 'NT'
        elif maj == 5:
            if min == 0:
                release = '2000'
            elif min == 1:
                release = 'XP'
            elif min == 2:
                release = '2003Server'
            else:
                release = 'post2003'
        elif maj == 6:
            if hasattr(winver, "product_type"):
                product_type = winver.product_type
            else:
                product_type = VER_NT_WORKSTATION
                # Without an OSVERSIONINFOEX capable sys.getwindowsversion(),
                # or help from the registry, we cannot properly identify
                # non-workstation versions.
                try:
                    key = RegOpenKeyEx(HKEY_LOCAL_MACHINE, regkey)
                    name, type = RegQueryValueEx(key, "ProductName")
                    # Discard any type that isn't REG_SZ
                    if type == REG_SZ and name.find("Server") != -1:
                        product_type = VER_NT_SERVER
                except WindowsError:
                    # Use default of VER_NT_WORKSTATION
                    pass

            if min == 0:
                if product_type == VER_NT_WORKSTATION:
                    release = 'Vista'
                else:
                    release = '2008Server'
            elif min == 1:
                if product_type == VER_NT_WORKSTATION:
                    release = '7'
                else:
                    release = '2008ServerR2'
            elif min == 2:
                if product_type == VER_NT_WORKSTATION:
                    release = '8'
                else:
                    release = '2012Server'
            else:
                release = 'post2012Server'

    else:
        if not release:
            # E.g. Win3.1 with win32s
            release = '%i.%i' % (maj,min)
        return release,version,csd,ptype

    # Open the registry key
    try:
        keyCurVer = RegOpenKeyEx(HKEY_LOCAL_MACHINE, regkey)
        # Get a value to make sure the key exists...
        RegQueryValueEx(keyCurVer, 'SystemRoot')
    except:
        return release,version,csd,ptype

    # Parse values
    #subversion = _win32_getvalue(keyCurVer,
    #                             'SubVersionNumber',
    #                             ('',1))[0]
    #if subversion:
    #   release = release + subversion # 95a, 95b, etc.
    build = _win32_getvalue(keyCurVer,
                            'CurrentBuildNumber',
                            ('',1))[0]
    ptype = _win32_getvalue(keyCurVer,
                            'CurrentType',
                            (ptype,1))[0]

    # Normalize version
    version = _norm_version(version,build)

    # Close key
    RegCloseKey(keyCurVer)
    return release,version,csd,ptype
def _mac_ver_lookup(selectors, default=None):
    """ Look up each gestalt selector in *selectors* and return the
        resulting values as a list, substituting *default* for any
        selector that cannot be resolved.
    """
    from _gestalt import gestalt
    results = []
    for selector in selectors:
        try:
            value = gestalt(selector)
        except (RuntimeError, OSError):
            value = default
        results.append(value)
    return results
def _bcd2str(bcd):
return hex(bcd)[2:]
def _mac_ver_gestalt():
    """ Determine (release, versioninfo, machine) for Mac OS via the
        gestalt() API, or return None when _gestalt is unavailable.

        Thanks to Mark R. Levinson for mailing documentation links and
        code examples for this function. Documentation for the
        gestalt() API is available online at:
        http://www.rgaros.nl/gestalt/
    """
    # Check whether the version info module is available
    try:
        import _gestalt
    except ImportError:
        return None
    # Get the infos
    sysv, sysa = _mac_ver_lookup(('sysv','sysa'))
    # Decode the infos
    # NOTE(review): 'release' and 'machine' are only bound when sysv/sysa
    # are truthy; a zero gestalt value would raise NameError at the
    # return below (kept as-is).
    if sysv:
        # Major is BCD-encoded in the high byte; minor and patch are the
        # two low nibbles.
        major = (sysv & 0xFF00) >> 8
        minor = (sysv & 0x00F0) >> 4
        patch = (sysv & 0x000F)
        if (major, minor) >= (10, 4):
            # the 'sysv' gestald cannot return patchlevels
            # higher than 9. Apple introduced 3 new
            # gestalt codes in 10.4 to deal with this
            # issue (needed because patch levels can
            # run higher than 9, such as 10.4.11)
            major,minor,patch = _mac_ver_lookup(('sys1','sys2','sys3'))
            release = '%i.%i.%i' %(major, minor, patch)
        else:
            release = '%s.%i.%i' % (_bcd2str(major),minor,patch)
    if sysa:
        # Map the architecture code to a human-readable name
        machine = {0x1: '68k',
                   0x2: 'PowerPC',
                   0xa: 'i386'}.get(sysa,'')
    versioninfo=('', '', '')
    return release,versioninfo,machine
def _mac_ver_xml():
fn = '/System/Library/CoreServices/SystemVersion.plist'
if not os.path.exists(fn):
return None
try:
import plistlib
except ImportError:
return None
pl = plistlib.readPlist(fn)
release = pl['ProductVersion']
versioninfo=('', '', '')
machine = os.uname().machine
if machine in ('ppc', 'Power Macintosh'):
# for compatibility with the gestalt based code
machine = 'PowerPC'
return release,versioninfo,machine
def mac_ver(release='',versioninfo=('','',''),machine=''):
    """ Get MacOS version information and return it as tuple (release,
        versioninfo, machine) with versioninfo being a tuple (version,
        dev_stage, non_release_version).

        Entries which cannot be determined are set to the parameter values
        which default to ''. All tuple entries are strings.
    """
    # Prefer the SystemVersion.plist XML file, fall back to gestalt
    # calls, and finally to the caller-supplied defaults.
    for probe in (_mac_ver_xml, _mac_ver_gestalt):
        info = probe()
        if info is not None:
            return info
    return release, versioninfo, machine
def _java_getprop(name, default):
    """ Return the Java system property *name*, or *default* when it is
        unset or unavailable.
    """
    from java.lang import System
    try:
        value = System.getProperty(name)
    except AttributeError:
        return default
    return default if value is None else value
def java_ver(release='',vendor='',vminfo=('','',''),osinfo=('','','')):
    """ Version interface for Jython.

        Returns a tuple (release,vendor,vminfo,osinfo) with vminfo being
        a tuple (vm_name,vm_release,vm_vendor) and osinfo being a
        tuple (os_name,os_version,os_arch).

        Values which cannot be determined are set to the defaults
        given as parameters (which all default to '').
    """
    # Only Jython ships the java.lang module; on every other
    # implementation the defaults are returned unchanged.
    try:
        import java.lang
    except ImportError:
        return release, vendor, vminfo, osinfo

    vendor = _java_getprop('java.vendor', vendor)
    release = _java_getprop('java.version', release)

    vm_name, vm_release, vm_vendor = vminfo
    vminfo = (_java_getprop('java.vm.name', vm_name),
              _java_getprop('java.vm.version', vm_release),
              _java_getprop('java.vm.vendor', vm_vendor))

    os_name, os_version, os_arch = osinfo
    osinfo = (_java_getprop('java.os.name', os_name),
              _java_getprop('java.os.version', os_version),
              _java_getprop('java.os.arch', os_arch))

    return release, vendor, vminfo, osinfo
### System name aliasing

def system_alias(system,release,version):
    """ Returns (system,release,version) aliased to common
        marketing names used for some systems.

        It also does some reordering of the information in some cases
        where it would otherwise cause confusion.
    """
    if system == 'Rhapsody':
        # Apple's BSD derivative
        # XXX How can we determine the marketing release number ?
        return 'MacOS X Server', system + release, version

    if system == 'SunOS':
        # Sun's OS
        if release < '5':
            # These releases use the old name SunOS
            return system, release, version
        # Modify release (marketing release = SunOS release - 3)
        parts = release.split('.')
        if parts:
            try:
                parts[0] = str(int(parts[0]) - 3)
            except ValueError:
                # Leave a non-numeric major component untouched
                pass
            release = '.'.join(parts)
        # Either way the marketing name is Solaris
        system = 'Solaris'
    elif system == 'IRIX64':
        # IRIX reports IRIX64 on platforms with 64-bit support; yet it
        # is really a version and not a different platform, since 32-bit
        # apps are also supported..
        system = 'IRIX'
        version = version + ' (64bit)' if version else '64bit'
    elif system in ('win32','win16'):
        # In case one of the other tricks
        system = 'Windows'

    return system, release, version
### Various internal helpers
def _platform(*args):
""" Helper to format the platform string in a filename
compatible format e.g. "system-version-machine".
"""
# Format the platform string
platform = '-'.join(x.strip() for x in filter(len, args))
# Cleanup some possible filename obstacles...
platform = platform.replace(' ','_')
platform = platform.replace('/','-')
platform = platform.replace('\\','-')
platform = platform.replace(':','-')
platform = platform.replace(';','-')
platform = platform.replace('"','-')
platform = platform.replace('(','-')
platform = platform.replace(')','-')
# No need to report 'unknown' information...
platform = platform.replace('unknown','')
# Fold '--'s and remove trailing '-'
while 1:
cleaned = platform.replace('--','-')
if cleaned == platform:
break
platform = cleaned
while platform[-1] == '-':
platform = platform[:-1]
return platform
def _node(default=''):
""" Helper to determine the node name of this machine.
"""
try:
import socket
except ImportError:
# No sockets...
return default
try:
return socket.gethostname()
except socket.error:
# Still not working...
return default
def _follow_symlinks(filepath):
""" In case filepath is a symlink, follow it until a
real file is reached.
"""
filepath = os.path.abspath(filepath)
while os.path.islink(filepath):
filepath = os.path.normpath(
os.path.join(os.path.dirname(filepath),os.readlink(filepath)))
return filepath
def _syscmd_uname(option,default=''):
    """ Interface to the system's uname command.

        Runs "uname <option>" and returns its stripped output;
        *default* is returned when the command is unavailable or fails.
    """
    if sys.platform in ('dos','win32','win16','os2'):
        # These platforms have no uname command
        return default
    try:
        f = os.popen('uname %s 2> %s' % (option, DEV_NULL))
    except (AttributeError,os.error):
        return default
    output = f.read().strip()
    rc = f.close()
    if rc or not output:
        return default
    return output
def _syscmd_file(target,default=''):
    """ Interface to the system's file command.

        Follows symlinks before querying, merges stderr into the output
        and returns *default* when the command is missing or fails.
    """
    if sys.platform in ('dos','win32','win16','os2'):
        # The 'file' command only exists on Unix-like systems
        return default
    target = _follow_symlinks(target)
    try:
        proc = subprocess.Popen(
            ['file', target],
            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    except (AttributeError,os.error):
        return default
    raw = proc.communicate()[0]
    rc = proc.wait()
    output = raw.decode('latin-1')
    if rc or not output:
        return default
    return output
### Information about the used architecture

# Default values for architecture; non-empty strings override the
# defaults given as parameters.
# Maps sys.platform -> (bits, linkage) fallbacks used by architecture()
# when the 'file' command yields no information about the interpreter.
_default_architecture = {
    'win32': ('','WindowsPE'),
    'win16': ('','Windows'),
    'dos': ('','MSDOS'),
}
def architecture(executable=sys.executable,bits='',linkage=''):
    """ Queries the given executable (defaults to the Python interpreter
        binary) for various architecture information.

        Returns a tuple (bits,linkage) which contains information about
        the bit architecture and the linkage format used for the
        executable. Both values are returned as strings.

        Values that cannot be determined are returned as given by the
        parameter presets. If bits is given as '', the sizeof(pointer)
        (or sizeof(long) on Python version < 1.5.2) is used as
        indicator for the supported pointer size.

        The function relies on the system's "file" command to do the
        actual work. This is available on most if not all Unix
        platforms. On some non-Unix platforms where the "file" command
        does not exist and the executable is set to the Python interpreter
        binary defaults from _default_architecture are used.
    """
    # Default the bit width to this interpreter's pointer size.
    if not bits:
        import struct
        try:
            size = struct.calcsize('P')
        except struct.error:
            # Older installations can only query longs
            size = struct.calcsize('l')
        bits = str(size*8) + 'bit'

    # Ask the 'file' command about the executable.
    fileout = _syscmd_file(executable, '') if executable else ''

    if not fileout and executable == sys.executable:
        # "file" command did not return anything; we'll try to provide
        # some sensible defaults then...
        if sys.platform in _default_architecture:
            b, l = _default_architecture[sys.platform]
            bits = b or bits
            linkage = l or linkage
        return bits, linkage

    if 'executable' not in fileout:
        # Format not supported
        return bits, linkage

    # Bits
    if '32-bit' in fileout:
        bits = '32bit'
    elif 'N32' in fileout:
        # On Irix only
        bits = 'n32bit'
    elif '64-bit' in fileout:
        bits = '64bit'

    # Linkage
    if 'ELF' in fileout:
        linkage = 'ELF'
    elif 'PE' in fileout:
        # E.g. Windows uses this format
        linkage = 'WindowsPE' if 'Windows' in fileout else 'PE'
    elif 'COFF' in fileout:
        linkage = 'COFF'
    elif 'MS-DOS' in fileout:
        linkage = 'MSDOS'
    # XXX the A.OUT format also falls under the unrecognized class

    return bits, linkage
### Portable uname() interface

# Result type of uname(); mirrors os.uname() plus a 'processor' field.
uname_result = collections.namedtuple("uname_result",
                    "system node release version machine processor")

# Cached uname_result; computed at most once per process.
_uname_cache = None
def uname():
    """ Fairly portable uname interface. Returns a tuple
        of strings (system,node,release,version,machine,processor)
        identifying the underlying platform.

        Note that unlike the os.uname function this also returns
        possible processor information as an additional tuple entry.

        Entries which cannot be determined are set to ''.

        The result is computed once and cached in _uname_cache.
    """
    global _uname_cache
    no_os_uname = 0
    if _uname_cache is not None:
        return _uname_cache
    processor = ''
    # Get some infos from the builtin os.uname API...
    try:
        system,node,release,version,machine = os.uname()
    except AttributeError:
        no_os_uname = 1
    if no_os_uname or not list(filter(None, (system, node, release, version, machine))):
        # Hmm, no there is either no uname or uname has returned
        #'unknowns'... we'll have to poke around the system then.
        if no_os_uname:
            system = sys.platform
            release = ''
            version = ''
            node = _node()
            machine = ''
        use_syscmd_ver = 1
        # Try win32_ver() on win32 platforms
        if system == 'win32':
            release,version,csd,ptype = win32_ver()
            if release and version:
                use_syscmd_ver = 0
            # Try to use the PROCESSOR_* environment variables
            # available on Win XP and later; see
            # http://support.microsoft.com/kb/888731 and
            # http://www.geocities.com/rick_lively/MANUALS/ENV/MSWIN/PROCESSI.HTM
            if not machine:
                # WOW64 processes mask the native architecture
                if "PROCESSOR_ARCHITEW6432" in os.environ:
                    machine = os.environ.get("PROCESSOR_ARCHITEW6432", '')
                else:
                    machine = os.environ.get('PROCESSOR_ARCHITECTURE', '')
            if not processor:
                processor = os.environ.get('PROCESSOR_IDENTIFIER', machine)
        # Try the 'ver' system command available on some
        # platforms
        if use_syscmd_ver:
            system,release,version = _syscmd_ver(system)
            # Normalize system to what win32_ver() normally returns
            # (_syscmd_ver() tends to return the vendor name as well)
            if system == 'Microsoft Windows':
                system = 'Windows'
            elif system == 'Microsoft' and release == 'Windows':
                # Under Windows Vista and Windows Server 2008,
                # Microsoft changed the output of the ver command. The
                # release is no longer printed. This causes the
                # system and release to be misidentified.
                system = 'Windows'
                if '6.0' == version[:3]:
                    release = 'Vista'
                else:
                    release = ''
        # In case we still don't know anything useful, we'll try to
        # help ourselves
        if system in ('win32','win16'):
            if not version:
                if system == 'win32':
                    version = '32bit'
                else:
                    version = '16bit'
            system = 'Windows'
        elif system[:4] == 'java':
            release,vendor,vminfo,osinfo = java_ver()
            system = 'Java'
            version = ', '.join(vminfo)
            if not version:
                version = vendor
    # System specific extensions
    if system == 'OpenVMS':
        # OpenVMS seems to have release and version mixed up
        if not release or release == '0':
            release = version
            version = ''
        # Get processor information
        try:
            import vms_lib
        except ImportError:
            pass
        else:
            csid, cpu_number = vms_lib.getsyi('SYI$_CPU',0)
            if (cpu_number >= 128):
                processor = 'Alpha'
            else:
                processor = 'VAX'
    if not processor:
        # Get processor information from the uname system command
        processor = _syscmd_uname('-p','')
    #If any unknowns still exist, replace them with ''s, which are more portable
    if system == 'unknown':
        system = ''
    if node == 'unknown':
        node = ''
    if release == 'unknown':
        release = ''
    if version == 'unknown':
        version = ''
    if machine == 'unknown':
        machine = ''
    if processor == 'unknown':
        processor = ''
    # normalize name
    if system == 'Microsoft' and release == 'Windows':
        system = 'Windows'
        release = 'Vista'
    # Cache and return the assembled result
    _uname_cache = uname_result(system,node,release,version,machine,processor)
    return _uname_cache
### Direct interfaces to some of the uname() return values

def system():
    """ Return the OS/system name (e.g. 'Linux', 'Windows' or 'Java'),
        or '' when it cannot be determined.
    """
    info = uname()
    return info.system
def node():
    """ Return this computer's network name (which may not be fully
        qualified), or '' when it cannot be determined.
    """
    info = uname()
    return info.node
def release():
    """ Return the system's release string (e.g. '2.2.0' or 'NT'),
        or '' when it cannot be determined.
    """
    info = uname()
    return info.release
def version():
    """ Return the system's release version (e.g. '#3 on degas'),
        or '' when it cannot be determined.
    """
    info = uname()
    return info.version
def machine():
    """ Return the machine type (e.g. 'i386'), or '' when it cannot
        be determined.
    """
    info = uname()
    return info.machine
def processor():
    """ Return the (true) processor name, e.g. 'amdk6'.

        An empty string is returned if the value cannot be
        determined. Note that many platforms do not provide this
        information or simply return the same value as for machine(),
        e.g. NetBSD does this.
    """
    info = uname()
    return info.processor
### Various APIs for extracting information from sys.version
_sys_version_parser = re.compile(
r'([\w.+]+)\s*'
'\(#?([^,]+),\s*([\w ]+),\s*([\w :]+)\)\s*'
'\[([^\]]+)\]?', re.ASCII)
_ironpython_sys_version_parser = re.compile(
r'IronPython\s*'
'([\d\.]+)'
'(?: \(([\d\.]+)\))?'
' on (.NET [\d\.]+)', re.ASCII)
# IronPython covering 2.6 and 2.7
_ironpython26_sys_version_parser = re.compile(
r'([\d.]+)\s*'
'\(IronPython\s*'
'[\d.]+\s*'
'\(([\d.]+)\) on ([\w.]+ [\d.]+(?: \(\d+-bit\))?)\)'
)
_pypy_sys_version_parser = re.compile(
r'([\w.+]+)\s*'
'\(#?([^,]+),\s*([\w ]+),\s*([\w :]+)\)\s*'
'\[PyPy [^\]]+\]?')
_sys_version_cache = {}
def _sys_version(sys_version=None):
    """ Returns a parsed version of Python's sys.version as tuple
        (name, version, branch, revision, buildno, builddate, compiler)
        referring to the Python implementation name, version, branch,
        revision, build number, build date/time as string and the compiler
        identification string.

        Note that unlike the Python sys.version, the returned value
        for the Python version will always include the patchlevel (it
        defaults to '.0').

        The function returns empty strings for tuple entries that
        cannot be determined.

        sys_version may be given to parse an alternative version
        string, e.g. if the version was read from a different Python
        interpreter.

        Raises ValueError when the string cannot be parsed.
    """
    # Get the Python version
    if sys_version is None:
        sys_version = sys.version

    # Try the cache first
    result = _sys_version_cache.get(sys_version, None)
    if result is not None:
        return result

    # Parse it
    if 'Brython' in sys_version:
        # Brython
        name = 'Brython'
        # Raw string: '\d' and '\[' are regex escapes here, not (invalid)
        # string escapes which warn/error on modern Python.
        _parser = re.compile(r"^(\d+\.\d+\.\d+)[^[]+\[(.*)\]")
        match = _parser.match(sys_version)
        if match is None:
            raise ValueError(
                'failed to parse Brython sys.version: %s' %
                repr(sys_version))
        #version, alt_version, compiler = match.groups()
        version, compiler = match.groups()
        alt_version = ''
        buildno = ''
        builddate = ''
    elif 'IronPython' in sys_version:
        # IronPython
        name = 'IronPython'
        if sys_version.startswith('IronPython'):
            match = _ironpython_sys_version_parser.match(sys_version)
        else:
            match = _ironpython26_sys_version_parser.match(sys_version)
        if match is None:
            raise ValueError(
                'failed to parse IronPython sys.version: %s' %
                repr(sys_version))
        version, alt_version, compiler = match.groups()
        buildno = ''
        builddate = ''
    elif sys.platform.startswith('java'):
        # Jython
        name = 'Jython'
        match = _sys_version_parser.match(sys_version)
        if match is None:
            raise ValueError(
                'failed to parse Jython sys.version: %s' %
                repr(sys_version))
        version, buildno, builddate, buildtime, _ = match.groups()
        compiler = sys.platform
    elif "PyPy" in sys_version:
        # PyPy
        name = "PyPy"
        match = _pypy_sys_version_parser.match(sys_version)
        if match is None:
            raise ValueError("failed to parse PyPy sys.version: %s" %
                             repr(sys_version))
        version, buildno, builddate, buildtime = match.groups()
        compiler = ""
    else:
        # CPython
        match = _sys_version_parser.match(sys_version)
        if match is None:
            raise ValueError(
                'failed to parse CPython sys.version: %s' %
                repr(sys_version))
        version, buildno, builddate, buildtime, compiler = \
            match.groups()
        name = 'CPython'
        builddate = builddate + ' ' + buildtime

    # Branch/revision come from the VCS info baked into sys, if any
    if hasattr(sys, '_mercurial'):
        _, branch, revision = sys._mercurial
    elif hasattr(sys, 'subversion'):
        # sys.subversion was added in Python 2.5
        _, branch, revision = sys.subversion
    else:
        branch = ''
        revision = ''

    # Add the patchlevel version if missing
    l = version.split('.')
    if len(l) == 2:
        l.append('0')
        version = '.'.join(l)

    # Build and cache the result
    result = (name, version, branch, revision, buildno, builddate, compiler)
    _sys_version_cache[sys_version] = result
    return result
def python_implementation():
    """ Returns a string identifying the Python implementation.

        Currently, the following implementations are identified:
        'CPython' (C implementation of Python),
        'IronPython' (.NET implementation of Python),
        'Jython' (Java implementation of Python),
        'PyPy' (Python implementation of Python),
        'Brython' (browser-side Python port).
    """
    impl = _sys_version()[0]
    return impl
def python_version():

    """ Return the Python version as string 'major.minor.patchlevel'.

        Unlike sys.version, the returned value always includes the
        patchlevel (it defaults to 0).

    """
    version = _sys_version()[1]
    return version
def python_version_tuple():

    """ Return the Python version as tuple (major, minor, patchlevel)
        of strings.

        Unlike sys.version, the returned value always includes the
        patchlevel (it defaults to '0').

    """
    parts = _sys_version()[1].split('.')
    return tuple(parts)
# The four helpers below expose individual slots of the cached
# _sys_version() result tuple:
#   (name, version, branch, revision, buildno, builddate, compiler)

def python_branch():

    """ Returns a string identifying the Python implementation
        branch.

        For CPython this is the Subversion branch from which the
        Python binary was built.

        If not available, an empty string is returned.

    """
    return _sys_version()[2]

def python_revision():

    """ Returns a string identifying the Python implementation
        revision.

        For CPython this is the Subversion revision from which the
        Python binary was built.

        If not available, an empty string is returned.

    """
    return _sys_version()[3]

def python_build():

    """ Returns a tuple (buildno, builddate) stating the Python
        build number and date as strings.

    """
    # Slice rather than index: the result is the (buildno, builddate) pair.
    return _sys_version()[4:6]

def python_compiler():

    """ Returns a string identifying the compiler used for compiling
        Python.

    """
    return _sys_version()[6]
### The Opus Magnum of platform strings :-)

# Memoization cache: (aliased, terse) -> previously built platform string.
_platform_cache = {}

def platform(aliased=0, terse=0):

    """ Returns a single string identifying the underlying platform
        with as much useful information as possible (but no more :).

        The output is intended to be human readable rather than
        machine parseable. It may look different on different
        platforms and this is intended.

        If "aliased" is true, the function will use aliases for
        various platforms that report system names which differ from
        their common names, e.g. SunOS will be reported as
        Solaris. The system_alias() function is used to implement
        this.

        Setting terse to true causes the function to return only the
        absolute minimum information needed to identify the platform.

    """
    result = _platform_cache.get((aliased, terse), None)
    if result is not None:
        return result

    # Get uname information and then apply platform specific cosmetics
    # to it...
    system,node,release,version,machine,processor = uname()
    # Drop the processor field when it merely repeats the machine field.
    if machine == processor:
        processor = ''
    if aliased:
        system,release,version = system_alias(system,release,version)

    if system == 'Windows':
        # MS platforms
        rel,vers,csd,ptype = win32_ver(version)
        if terse:
            platform = _platform(system,release)
        else:
            platform = _platform(system,release,version,csd)

    elif system in ('Linux',):
        # Linux based systems
        distname,distversion,distid = dist('')
        if distname and not terse:
            platform = _platform(system,release,machine,processor,
                                 'with',
                                 distname,distversion,distid)
        else:
            # If the distribution name is unknown check for libc vs. glibc
            libcname,libcversion = libc_ver(sys.executable)
            platform = _platform(system,release,machine,processor,
                                 'with',
                                 libcname+libcversion)

    elif system == 'Java':
        # Java platforms
        r,v,vminfo,(os_name,os_version,os_arch) = java_ver()
        if terse or not os_name:
            platform = _platform(system,release,version)
        else:
            platform = _platform(system,release,version,
                                 'on',
                                 os_name,os_version,os_arch)

    elif system == 'MacOS':
        # MacOS platforms
        if terse:
            platform = _platform(system,release)
        else:
            platform = _platform(system,release,machine)

    else:
        # Generic handler
        if terse:
            platform = _platform(system,release)
        else:
            bits,linkage = architecture(sys.executable)
            platform = _platform(system,release,machine,processor,bits,linkage)

    # Cache the computed string before returning it.
    _platform_cache[(aliased, terse)] = platform
    return platform
### Command line interface

if __name__ == '__main__':
    # Default is to print the aliased verbose platform string.
    terse = ('terse' in sys.argv or '--terse' in sys.argv)
    # PEP 8: prefer "x not in seq" over "not x in seq".
    aliased = ('nonaliased' not in sys.argv and
               '--nonaliased' not in sys.argv)
    print(platform(aliased, terse))
    sys.exit(0)
| gpl-3.0 |
jbonofre/incubator-beam | sdks/python/apache_beam/runners/direct/watermark_manager.py | 2 | 10737 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Manages watermarks of PCollections and AppliedPTransforms."""
from __future__ import absolute_import
import threading
from apache_beam import pipeline
from apache_beam import pvalue
from apache_beam.runners.direct.util import TimerFiring
from apache_beam.utils.timestamp import MAX_TIMESTAMP
from apache_beam.utils.timestamp import MIN_TIMESTAMP
from apache_beam.utils.timestamp import TIME_GRANULARITY
class WatermarkManager(object):
  """For internal use only; no backwards-compatibility guarantees.

  Tracks and updates watermarks for all AppliedPTransforms."""

  # Sentinel bounds for event-time watermarks.
  WATERMARK_POS_INF = MAX_TIMESTAMP
  WATERMARK_NEG_INF = MIN_TIMESTAMP

  def __init__(self, clock, root_transforms, value_to_consumers,
               transform_keyed_states):
    """Create one _TransformWatermarks tracker per transform.

    Args:
      clock: clock used to obtain processing time for timers.
      root_transforms: transforms with no upstream input.
      value_to_consumers: map of PCollection -> consuming transforms.
      transform_keyed_states: map of transform -> per-key state.
    """
    self._clock = clock
    self._root_transforms = root_transforms
    self._value_to_consumers = value_to_consumers
    self._transform_keyed_states = transform_keyed_states

    # AppliedPTransform -> TransformWatermarks
    self._transform_to_watermarks = {}

    for root_transform in root_transforms:
      self._transform_to_watermarks[root_transform] = _TransformWatermarks(
          self._clock, transform_keyed_states[root_transform], root_transform)

    for consumers in value_to_consumers.values():
      for consumer in consumers:
        self._transform_to_watermarks[consumer] = _TransformWatermarks(
            self._clock, transform_keyed_states[consumer], consumer)

    # Second pass: wire each consumer to its producers' trackers only after
    # every transform has a tracker of its own.
    for consumers in value_to_consumers.values():
      for consumer in consumers:
        self._update_input_transform_watermarks(consumer)

  def _update_input_transform_watermarks(self, applied_ptransform):
    """Point a transform's tracker at the trackers of its producers."""
    assert isinstance(applied_ptransform, pipeline.AppliedPTransform)
    input_transform_watermarks = []
    for input_pvalue in applied_ptransform.inputs:
      assert input_pvalue.producer or isinstance(input_pvalue, pvalue.PBegin)
      if input_pvalue.producer:
        input_transform_watermarks.append(
            self.get_watermarks(input_pvalue.producer))
    self._transform_to_watermarks[
        applied_ptransform].update_input_transform_watermarks(
            input_transform_watermarks)

  def get_watermarks(self, applied_ptransform):
    """Gets the input and output watermarks for an AppliedPTransform.

    If the applied_ptransform has not processed any elements, return a
    watermark with minimum value.

    Args:
      applied_ptransform: AppliedPTransform to get the watermarks for.

    Returns:
      A snapshot (TransformWatermarks) of the input watermark and output
      watermark for the provided transform.
    """

    # TODO(altay): Composite transforms should have a composite watermark. Until
    # then they are represented by their last transform.
    while applied_ptransform.parts:
      applied_ptransform = applied_ptransform.parts[-1]

    return self._transform_to_watermarks[applied_ptransform]

  def update_watermarks(self, completed_committed_bundle, applied_ptransform,
                        completed_timers, outputs, unprocessed_bundles,
                        keyed_earliest_holds):
    """Record a completed bundle's results and refresh affected watermarks."""
    assert isinstance(applied_ptransform, pipeline.AppliedPTransform)
    self._update_pending(
        completed_committed_bundle, applied_ptransform, completed_timers,
        outputs, unprocessed_bundles)
    tw = self.get_watermarks(applied_ptransform)
    tw.hold(keyed_earliest_holds)
    self._refresh_watermarks(applied_ptransform)

  def _update_pending(self, input_committed_bundle, applied_ptransform,
                      completed_timers, output_committed_bundles,
                      unprocessed_bundles):
    """Updated list of pending bundles for the given AppliedPTransform."""

    # Update pending elements. Filter out empty bundles. They do not impact
    # watermarks and should not trigger downstream execution.
    for output in output_committed_bundles:
      if output.has_elements():
        if output.pcollection in self._value_to_consumers:
          consumers = self._value_to_consumers[output.pcollection]
          for consumer in consumers:
            consumer_tw = self._transform_to_watermarks[consumer]
            consumer_tw.add_pending(output)

    completed_tw = self._transform_to_watermarks[applied_ptransform]
    completed_tw.update_timers(completed_timers)

    # Unprocessed bundles are re-queued on the transform that produced them.
    for unprocessed_bundle in unprocessed_bundles:
      completed_tw.add_pending(unprocessed_bundle)

    # Only root transforms may legitimately complete without an input bundle.
    assert input_committed_bundle or applied_ptransform in self._root_transforms
    if input_committed_bundle and input_committed_bundle.has_elements():
      completed_tw.remove_pending(input_committed_bundle)

  def _refresh_watermarks(self, applied_ptransform):
    """Recompute a transform's watermarks; cascade downstream on advance."""
    assert isinstance(applied_ptransform, pipeline.AppliedPTransform)
    tw = self.get_watermarks(applied_ptransform)
    if tw.refresh():
      # The output watermark advanced, so downstream transforms may be able
      # to advance as well; refresh them recursively.
      for pval in applied_ptransform.outputs.values():
        if isinstance(pval, pvalue.DoOutputsTuple):
          pvals = (v for v in pval)
        else:
          pvals = (pval,)
        for v in pvals:
          if v in self._value_to_consumers:  # If there are downstream consumers
            consumers = self._value_to_consumers[v]
            for consumer in consumers:
              self._refresh_watermarks(consumer)

  def extract_all_timers(self):
    """Extracts fired timers for all transforms
    and reports if there are any timers set."""
    all_timers = []
    has_realtime_timer = False
    # NOTE: .iteritems() is the Python 2 dict API used throughout this file.
    for applied_ptransform, tw in self._transform_to_watermarks.iteritems():
      fired_timers, had_realtime_timer = tw.extract_transform_timers()
      if fired_timers:
        all_timers.append((applied_ptransform, fired_timers))
      if had_realtime_timer:
        has_realtime_timer = True
    return all_timers, has_realtime_timer
class _TransformWatermarks(object):
  """Tracks input and output watermarks for an AppliedPTransform."""

  def __init__(self, clock, keyed_states, transform):
    self._clock = clock
    self._keyed_states = keyed_states
    self._input_transform_watermarks = []
    self._input_watermark = WatermarkManager.WATERMARK_NEG_INF
    self._output_watermark = WatermarkManager.WATERMARK_NEG_INF
    # key -> earliest hold timestamp keeping the output watermark back.
    self._keyed_earliest_holds = {}
    self._pending = set()  # Scheduled bundles targeted for this transform.
    self._fired_timers = set()
    # Guards all mutable state above; presumably accessed from multiple
    # executor threads (every public method takes the lock).
    self._lock = threading.Lock()

    self._label = str(transform)

  def update_input_transform_watermarks(self, input_transform_watermarks):
    with self._lock:
      self._input_transform_watermarks = input_transform_watermarks

  def update_timers(self, completed_timers):
    # Completed timers are no longer considered "fired but unprocessed".
    with self._lock:
      for timer_firing in completed_timers:
        self._fired_timers.remove(timer_firing)

  @property
  def input_watermark(self):
    with self._lock:
      return self._input_watermark

  @property
  def output_watermark(self):
    with self._lock:
      return self._output_watermark

  def hold(self, keyed_earliest_holds):
    """Install (or clear) per-key holds on the output watermark."""
    with self._lock:
      for key, hold_value in keyed_earliest_holds.iteritems():
        self._keyed_earliest_holds[key] = hold_value
        # None or +inf means the key no longer holds the watermark back.
        if (hold_value is None or
            hold_value == WatermarkManager.WATERMARK_POS_INF):
          del self._keyed_earliest_holds[key]

  def add_pending(self, pending):
    with self._lock:
      self._pending.add(pending)

  def remove_pending(self, completed):
    with self._lock:
      # Ignore repeated removes. This will happen if a transform has a repeated
      # input.
      if completed in self._pending:
        self._pending.remove(completed)

  def refresh(self):
    """Recompute watermarks; return True iff the output watermark advanced."""
    with self._lock:
      min_pending_timestamp = WatermarkManager.WATERMARK_POS_INF
      has_pending_elements = False
      for input_bundle in self._pending:
        # TODO(ccy): we can have the Bundle class keep track of the minimum
        # timestamp so we don't have to do an iteration here.
        for wv in input_bundle.get_elements_iterable():
          has_pending_elements = True
          if wv.timestamp < min_pending_timestamp:
            min_pending_timestamp = wv.timestamp

      # If there is a pending element with a certain timestamp, we can at most
      # advance our watermark to the maximum timestamp less than that
      # timestamp.
      pending_holder = WatermarkManager.WATERMARK_POS_INF
      if has_pending_elements:
        pending_holder = min_pending_timestamp - TIME_GRANULARITY

      input_watermarks = [
          tw.output_watermark for tw in self._input_transform_watermarks]
      # Append +inf so min() is well-defined for root transforms with no
      # upstream watermarks.
      input_watermarks.append(WatermarkManager.WATERMARK_POS_INF)
      producer_watermark = min(input_watermarks)

      # Watermarks are monotonic: max() ensures we never move backwards.
      self._input_watermark = max(self._input_watermark,
                                  min(pending_holder, producer_watermark))
      earliest_hold = WatermarkManager.WATERMARK_POS_INF
      for hold in self._keyed_earliest_holds.values():
        if hold < earliest_hold:
          earliest_hold = hold
      new_output_watermark = min(self._input_watermark, earliest_hold)

      advanced = new_output_watermark > self._output_watermark
      self._output_watermark = new_output_watermark
      return advanced

  @property
  def synchronized_processing_output_time(self):
    # Processing-time "watermark" is simply the current clock reading.
    return self._clock.time()

  def extract_transform_timers(self):
    """Extracts fired timers and reports of any timers set per transform."""
    with self._lock:
      fired_timers = []
      has_realtime_timer = False
      for encoded_key, state in self._keyed_states.iteritems():
        timers, had_realtime_timer = state.get_timers(
            watermark=self._input_watermark,
            processing_time=self._clock.time())
        if had_realtime_timer:
          has_realtime_timer = True
        for expired in timers:
          window, (name, time_domain, timestamp) = expired
          fired_timers.append(
              TimerFiring(encoded_key, window, name, time_domain, timestamp))
      # Remember what fired so update_timers() can retire them later.
      self._fired_timers.update(fired_timers)
      return fired_timers, has_realtime_timer
| apache-2.0 |
baditaflorin/osm-tasking-manager2 | osmtm/tests/__init__.py | 3 | 3772 | # coding: utf8
import ConfigParser
import unittest
import transaction
from sqlalchemy import create_engine
from osmtm.models import (
Base,
User,
License,
Area,
Project,
DBSession,
)
from sqlalchemy_i18n.manager import translation_manager
# Test-only settings file; the tests refuse to run without it.
local_settings_path = 'local.test.ini'
# raise an error if the file doesn't exist
with open(local_settings_path):
    pass

# Fixed user ids seeded by populate_db() and reused by every test case.
USER1_ID = 1
USER2_ID = 2
ADMIN_USER_ID = 3
PROJECT_MANAGER_USER_ID = 4

# Configure sqlalchemy-i18n before any translated model is used.
translation_manager.options.update({
    'locales': ['en', 'fr'],
    'get_locale_fallback': True
})
def populate_db():
    """Drop and recreate the test database, then seed fixture data.

    Seeds four users (regular x2, admin, project manager), one license
    and one published project covering a small triangular area.
    """
    import geoalchemy2
    import shapely

    # The database URL comes from the local test settings file.
    config = ConfigParser.ConfigParser()
    config.read(local_settings_path)
    db_url = config.get('app:main', 'sqlalchemy.url')
    engine = create_engine(db_url)

    # Start from a clean schema on every test run.
    Base.metadata.drop_all(engine)
    Base.metadata.create_all(engine)
    DBSession.configure(bind=engine)

    # those users are immutables ie. they're not suppose to change during tests
    user = User(USER1_ID, u'user1')
    DBSession.add(user)

    user = User(USER2_ID, u'user2')
    DBSession.add(user)

    user = User(ADMIN_USER_ID, u'admin_user')
    user.role = User.role_admin
    DBSession.add(user)

    user = User(PROJECT_MANAGER_USER_ID, u'project_manager_user')
    user.role = User.role_project_manager
    DBSession.add(user)

    # NOTE(review): "license" shadows the builtin of the same name; left
    # unchanged here.
    license = License()
    license.name = u'LicenseBar'
    license.description = u'the_description_for_license_bar'
    license.plain_text = u'the_plain_text_for_license_bar'
    DBSession.add(license)

    # Small triangle (lon/lat, EPSG:4326) used as the project area.
    shape = shapely.geometry.Polygon(
        [(7.23, 41.25), (7.23, 41.12), (7.41, 41.20)])
    geometry = geoalchemy2.shape.from_shape(shape, 4326)
    area = Area(geometry)

    project = Project(u'test project')
    project.area = area
    project.auto_fill(12)  # generate tiles at zoom level 12
    project.status = Project.status_published
    DBSession.add(project)

    transaction.commit()
    DBSession.remove()

# Seed the database once at import time, before any test runs.
populate_db()
class BaseTestCase(unittest.TestCase):
    """Base test case wiring up a WSGI test app and auth helpers.

    Exposes the seeded user ids as class attributes and provides
    ``login_as_*`` helpers that return a ``Cookie`` header dict.
    """

    user1_id = USER1_ID
    user2_id = USER2_ID
    admin_user_id = ADMIN_USER_ID
    project_manager_user_id = PROJECT_MANAGER_USER_ID

    def setUp(self):
        from osmtm import main
        from webtest import TestApp
        app_settings = {
            'available_languages': 'en fr',
            'available_languages_full': 'English, Français',
            'local_settings_path': local_settings_path,
        }
        self.app = main({}, **app_settings)
        self.testapp = TestApp(self.app)

    def tearDown(self):
        del self.testapp
        from osmtm.models import DBSession
        # Dispose of the engine and drop the scoped session.
        DBSession.bind.dispose()
        DBSession.remove()
        # Forget any remembered authentication.
        self.__forget()

    def login_as_admin(self):
        return self.__remember(self.admin_user_id)

    def login_as_project_manager(self):
        return self.__remember(self.project_manager_user_id)

    def login_as_user1(self):
        return self.__remember(self.user1_id)

    def login_as_user2(self):
        return self.__remember(self.user2_id)

    def login_as_user(self, user_id):
        return self.__remember(user_id)

    def __remember(self, userid):
        """Build a Cookie header authenticating *userid* for two weeks."""
        from pyramid.security import remember
        from pyramid import testing
        request = testing.DummyRequest(environ={'SERVER_NAME': 'servername'})
        request.registry = self.app.registry
        two_weeks = 14 * 24 * 60 * 60
        headers = remember(request, userid, max_age=two_weeks)
        cookie_value = headers[0][1].split(';')[0]
        return {'Cookie': cookie_value}

    def __forget(self):
        """Clear any remembered authentication on a dummy request."""
        from pyramid.security import forget
        from pyramid import testing
        request = testing.DummyRequest(environ={'SERVER_NAME': 'servername'})
        request.registry = self.app.registry
        forget(request)
| bsd-2-clause |
myarjunar/inasafe | safe/common/exceptions.py | 3 | 9919 | # coding=utf-8
"""
InaSAFE Disaster risk assessment tool developed by AusAid -
**Exception Classes.**
Custom exception classes for the SAFE library
Contact : ole.moller.nielsen@gmail.com
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
from safe.utilities.unicode import get_unicode, get_string
from safe.messaging.item.message_element import MessageElement
__author__ = 'tim@kartoza.com'
__revision__ = '$Format:%H$'
__date__ = '17/06/2011'
__copyright__ = ('Copyright 2012, Australia Indonesia Facility for '
'Disaster Reduction')
class InaSAFEError(RuntimeError):
    """Base class for all user defined exceptions"""
    suggestion = 'An unspecified error occurred.'

    def __init__(self, message=None):
        """General constructor.

        :param message: The optional error message. May be a str, a
            unicode string, a MessageElement or another exception.
        :type message: str, unicode, MessageElement, BaseException

        :raises: TypeError if message is of an unsupported type.
        """
        # The malformed quadruple/quintuple quotes around this docstring
        # were normalised to plain triple quotes.
        if isinstance(message, unicode):
            super(InaSAFEError, self).__init__(get_string(message))
            self.message = message
        elif isinstance(message, str):
            super(InaSAFEError, self).__init__(message)
            self.message = get_unicode(message)
        elif isinstance(message, MessageElement):
            # Render the rich message once and reuse the text.
            text = message.to_text()
            super(InaSAFEError, self).__init__(text)
            self.message = get_unicode(text)
        elif message is None:
            # NOTE(review): self.message is deliberately left unset here, so
            # __unicode__ raises AttributeError for message-less errors;
            # preserved as-is in case callers rely on the attribute's absence.
            pass
        elif isinstance(message, BaseException):
            super(InaSAFEError, self).__init__(unicode(message))
            self.message = unicode(message)
        # This shouldn't happen...
        else:
            raise TypeError(
                'message must be None, a string, a MessageElement or an '
                'exception, got %s' % type(message))

    def __unicode__(self):
        """Get the error message as unicode."""
        return self.message
# Concrete exception types. Each carries a human-readable ``suggestion``
# attribute inherited (or overridden) from InaSAFEError.

class ReadLayerError(InaSAFEError):
    """When a layer can't be read"""
    suggestion = (
        'Check that the file exists and you have permissions to read it')

class WriteLayerError(InaSAFEError):
    """When a layer can't be written"""
    suggestion = 'Please ask the developers of InaSAFE to add a suggestion.'

class BoundingBoxError(InaSAFEError):
    """For errors relating to bboxes"""
    suggestion = 'Please ask the developers of InaSAFE to add a suggestion.'

class VerificationError(InaSAFEError):
    """Exception thrown by verify()"""
    suggestion = 'Please ask the developers of InaSAFE to add a suggestion.'

class PolygonInputError(InaSAFEError):
    """For invalid inputs to numeric polygon functions"""
    suggestion = 'Please ask the developers of InaSAFE to add a suggestion.'

class PointsInputError(InaSAFEError):
    """For invalid inputs to numeric point functions"""
    suggestion = 'Please ask the developers of InaSAFE to add a suggestion.'

class BoundsError(InaSAFEError):
    """For points falling outside interpolation grid"""
    suggestion = 'Please ask the developers of InaSAFE to add a suggestion.'

class GetDataError(InaSAFEError):
    """When layer data cannot be obtained"""
    suggestion = 'Please ask the developers of InaSAFE to add a suggestion.'

# NOTE(review): shadows the builtin WindowsError -- confirm intentional.
class WindowsError(InaSAFEError):
    """For windows specific errors."""
    suggestion = 'Please ask the developers of InaSAFE to add a suggestion.'

class GridXmlFileNotFoundError(InaSAFEError):
    """An exception for when an grid.xml could not be found"""
    suggestion = 'Please ask the developers of InaSAFE to add a suggestion.'

class GridXmlParseError(InaSAFEError):
    """An exception for when something went wrong parsing the grid.xml """
    suggestion = 'Please ask the developers of InaSAFE to add a suggestion.'

class ContourCreationError(InaSAFEError):
    """An exception for when creating contours from shakemaps goes wrong"""
    suggestion = 'Please ask the developers of InaSAFE to add a suggestion.'

class InvalidLayerError(InaSAFEError):
    """Raised when a gis layer is invalid"""
    suggestion = 'Please ask the developers of InaSAFE to add a suggestion.'

class ZeroImpactException(InaSAFEError):
    """Raised if an impact function return zero impact"""
    suggestion = 'Please ask the developers of InaSAFE to add a suggestion.'

class WrongDataTypeException(InaSAFEError):
    """Raised if expected and received data types are different"""
    suggestion = 'Please ask the developers of InaSAFE to add a suggestion.'

class InvalidClipGeometryError(InaSAFEError):
    """Custom exception for when clip geometry is invalid."""
    pass

# NOTE(review): shadows the Python 3 builtin FileNotFoundError -- confirm
# intentional (this codebase targets Python 2, where no such builtin exists).
class FileNotFoundError(InaSAFEError):
    """Custom exception for when a file could not be found."""
    pass

class FunctionParametersError(InaSAFEError):
    """Custom exception for when function parameters are not valid."""
    pass

class KeywordDbError(InaSAFEError):
    """Custom exception for when an error is encountered with keyword cache db.
    """
    pass

class WrongEarthquakeFunction(InaSAFEError):
    """Earthquake function to recognised."""
    pass

class KeywordNotFoundError(InaSAFEError):
    """Custom exception for when a keyword's key (e.g. unit) cannot be found.
    """

    def __init__(self, message, **kwargs):
        # Call the base class constructor with the parameters it needs
        super(KeywordNotFoundError, self).__init__(message)
        # Optional context describing where the failed lookup happened.
        self.layer_name = kwargs.get('layer_name', 'Missing Layer Name')
        self.keyword = kwargs.get('keyword', 'Missing Keyword')

class HashNotFoundError(InaSAFEError):
    """Custom exception for when a no keyword hash can be found."""
    pass

class InvalidParameterError(InaSAFEError):
    """Custom exception for when an invalid parameter is passed to a function.
    """
    pass
class NoKeywordsFoundError(InaSAFEError):
    """Custom exception for when no keywords file exist for a layer."""
    pass

class TranslationLoadError(InaSAFEError):
    """Custom exception handler for when the translation file fails to load."""
    pass

class ComputationError(InaSAFEError):
    """Custom exception when a calculation went wrong."""
    pass

class NoFeaturesInExtentError(InaSAFEError):
    """An exception that gets thrown when no features are within the extent
    being clipped.
    """
    pass

class InvalidProjectionError(InaSAFEError):
    """An exception raised if a layer needs to be reprojected."""
    pass

class InsufficientOverlapError(InaSAFEError):
    """An exception raised if an error occurs during extent calculation
    because the bounding boxes do not overlap.
    """
    pass

class StyleError(InaSAFEError):
    """An exception relating to reading / generating GIS styles."""
    pass

class MemoryLayerCreationError(InaSAFEError):
    """Raised if an error occurs creating the cities file."""
    pass

class CallGDALError(InaSAFEError):
    """Raised if failed to call gdal command. Indicate by error message that is
    not empty.
    """
    pass

class FileMissingError(InaSAFEError):
    """Raised if a file cannot be found."""
    pass

class CanceledImportDialogError(InaSAFEError):
    """Raised if import process canceled"""
    pass

class InvalidGeometryError(InaSAFEError):
    """Custom exception for when a feature geometry is invalid or none."""
    pass

class UnsupportedProviderError(InaSAFEError):
    """For unsupported provider (e.g. openlayers plugin) encountered."""
    pass

class TemplateLoadingError(InaSAFEError):
    """Raised when loading the template is error."""
    pass

class DownloadError(InaSAFEError):
    """Raised when downloading file is error."""
    pass

class NoValidLayerError(InaSAFEError):
    """Raised when there no valid layer in inasafe."""
    pass

# NOTE(review): despite the "Warning" name this is an InaSAFEError subclass,
# not a Warning -- confirm the naming is intentional.
class InsufficientMemoryWarning(InaSAFEError):
    """Raised when there is a possible insufficient memory."""
    pass

class InvalidKeywordsForProcessingAlgorithm(InaSAFEError):
    """Raised if the algorithm don't have proper keywords to run."""
    pass

class InvalidExtentError(InaSAFEError):
    """Raised if an extent is not valid."""
    pass

class NoAttributeInLayerError(InaSAFEError):
    """Raised if the attribute not exists in the vector layer"""
    pass

class MetadataLayerConstraintError(InaSAFEError):
    """Raised if the metadata does not match with the IF base class.

    It means the layer constraint specified in the metadata is not supported
    by the base class
    """

class MetadataReadError(InaSAFEError):
    """When a metadata xml is not correctly formatted can't be read"""
    suggestion = (
        'Check that the file is correct')

class MetadataInvalidPathError(InaSAFEError):
    """When a path for a metadata xml is not correct"""
    suggestion = 'Check that the XML path of the property is correct'

class MetadataCastError(InaSAFEError):
    """When a path for a metadata xml is not correct"""
    suggestion = 'Check that the XML value is of the correct type'

class InvalidProvenanceDataError(InaSAFEError):
    """When a path for a metadata xml is not correct."""
    suggestion = 'Check that the IF produced all the required data'

class MissingMetadata(InaSAFEError):
    """When old version of metadata is not properly read."""
    pass

class MissingImpactReport(InaSAFEError):
    """When Impact Report do not have proper input."""
    pass

class ErrorDataStore(InaSAFEError):
    """When the datastore has an error."""
    pass

class InvalidWizardStep(InaSAFEError):
    """When there is an invalid wizard step."""
    pass

class ProcessingInstallationError(InaSAFEError):
    """When there is an error with Processing."""
    pass

# NOTE(review): unlike every other class in this module, this one derives
# from Exception rather than InaSAFEError -- confirm intentional.
class AlignRastersError(Exception):
    """Raised if alignment of hazard and exposure rasters failed."""
    pass
| gpl-3.0 |
import os

# toolchains options
ARCH='arm'
CPU='cortex-m4'
CROSS_TOOL='keil'

# Allow the environment to override the toolchain selection.
if os.getenv('RTT_CC'):
    CROSS_TOOL = os.getenv('RTT_CC')

# cross_tool provides the cross compiler
# EXEC_PATH is the compiler execute path, for example, CodeSourcery, Keil MDK, IAR
if CROSS_TOOL == 'gcc':
    PLATFORM = 'gcc'
    EXEC_PATH = r'E:/Program Files/CodeSourcery/Sourcery G++ Lite/bin'
elif CROSS_TOOL == 'keil':
    PLATFORM = 'armcc'
    EXEC_PATH = r'C:/Keil'
elif CROSS_TOOL == 'iar':
    print('================ERROR============================')
    print('Not support iar yet!')
    print('=================================================')
    exit(0)
else:
    # Fix: an unknown RTT_CC previously fell through without setting
    # PLATFORM, producing a confusing NameError below. Fail early with a
    # clear message instead.
    print('================ERROR============================')
    print('Unknown cross tool: %s' % CROSS_TOOL)
    print('=================================================')
    exit(0)

if os.getenv('RTT_EXEC_PATH'):
    EXEC_PATH = os.getenv('RTT_EXEC_PATH')

BUILD = 'debug'
STM32_TYPE = 'STM32F4XX'

if PLATFORM == 'gcc':
    # toolchains
    PREFIX = 'arm-none-eabi-'
    CC = PREFIX + 'gcc'
    AS = PREFIX + 'gcc'
    AR = PREFIX + 'ar'
    LINK = PREFIX + 'gcc'
    TARGET_EXT = 'elf'
    SIZE = PREFIX + 'size'
    OBJDUMP = PREFIX + 'objdump'
    OBJCPY = PREFIX + 'objcopy'

    DEVICE = ' -mcpu=cortex-m4 -mthumb -mfpu=fpv4-sp-d16 -mfloat-abi=hard -ffunction-sections -fdata-sections'
    CFLAGS = DEVICE + ' -g -Wall -DSTM32F407ZG -DSTM32F4XX -DUSE_STDPERIPH_DRIVER -D__ASSEMBLY__ -D__FPU_USED'
    AFLAGS = ' -c' + DEVICE + ' -x assembler-with-cpp -Wa,-mimplicit-it=thumb '
    LFLAGS = DEVICE + ' -lm -lgcc -lc' + ' -nostartfiles -Wl,--gc-sections,-Map=rtthread-stm32.map,-cref,-u,Reset_Handler -T stm32_rom.ld'

    CPATH = ''
    LPATH = ''

    if BUILD == 'debug':
        CFLAGS += ' -O0 -gdwarf-2'
        AFLAGS += ' -gdwarf-2'
    else:
        CFLAGS += ' -O2'

    POST_ACTION = OBJCPY + ' -O binary $TARGET rtthread.bin\n' + SIZE + ' $TARGET \n'

elif PLATFORM == 'armcc':
    # toolchains
    CC = 'armcc'
    AS = 'armasm'
    AR = 'armar'
    LINK = 'armlink'
    TARGET_EXT = 'axf'

    DEVICE = ' --cpu=cortex-m4.fp'
    CFLAGS = DEVICE + ' --apcs=interwork -DUSE_STDPERIPH_DRIVER -DSTM32F40_41xxx'
    AFLAGS = DEVICE
    LFLAGS = DEVICE + ' --info sizes --info totals --info unused --info veneers --list rtthread-stm32.map --scatter stm32_rom.sct'

    CFLAGS += ' -I' + EXEC_PATH + '/ARM/RV31/INC'
    LFLAGS += ' --libpath ' + EXEC_PATH + '/ARM/RV31/LIB'

    EXEC_PATH += '/arm/bin40/'

    if BUILD == 'debug':
        CFLAGS += ' -g -O0'
        AFLAGS += ' -g'
    else:
        CFLAGS += ' -O2'

    POST_ACTION = 'fromelf --bin $TARGET --output rtthread.bin \nfromelf -z $TARGET'

elif PLATFORM == 'iar':
    # NOTE(review): currently unreachable -- CROSS_TOOL == 'iar' exits above
    # before PLATFORM can be set to 'iar'. Kept for when IAR support lands.
    # These settings also mix STM32F10x defines/linker files with a
    # Cortex-M4 CPU; verify before enabling.
    # toolchains
    CC = 'iccarm'
    AS = 'iasmarm'
    AR = 'iarchive'
    LINK = 'ilinkarm'
    TARGET_EXT = 'out'

    DEVICE = ' -D USE_STDPERIPH_DRIVER' + ' -D STM32F10X_HD'

    CFLAGS = DEVICE
    CFLAGS += ' --diag_suppress Pa050'
    CFLAGS += ' --no_cse'
    CFLAGS += ' --no_unroll'
    CFLAGS += ' --no_inline'
    CFLAGS += ' --no_code_motion'
    CFLAGS += ' --no_tbaa'
    CFLAGS += ' --no_clustering'
    CFLAGS += ' --no_scheduling'
    CFLAGS += ' --debug'
    CFLAGS += ' --endian=little'
    CFLAGS += ' --cpu=Cortex-M4'
    CFLAGS += ' -e'
    CFLAGS += ' --fpu=None'
    CFLAGS += ' --dlib_config "' + EXEC_PATH + '/arm/INC/c/DLib_Config_Normal.h"'
    CFLAGS += ' -Ol'
    CFLAGS += ' --use_c++_inline'

    AFLAGS = ''
    AFLAGS += ' -s+'
    AFLAGS += ' -w+'
    AFLAGS += ' -r'
    AFLAGS += ' --cpu Cortex-M4'
    AFLAGS += ' --fpu None'

    LFLAGS = ' --config stm32f10x_flash.icf'
    LFLAGS += ' --redirect _Printf=_PrintfTiny'
    LFLAGS += ' --redirect _Scanf=_ScanfSmall'
    LFLAGS += ' --entry __iar_program_start'

    EXEC_PATH = EXEC_PATH + '/arm/bin/'
    POST_ACTION = ''
| apache-2.0 |
EntropyFactory/creativechain-core | contrib/devtools/github-merge.py | 46 | 10860 | #!/usr/bin/env python3
# Copyright (c) 2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# This script will locally construct a merge commit for a pull request on a
# github repository, inspect it, sign it and optionally push it.
# The following temporary branches are created/overwritten and deleted:
# * pull/$PULL/base (the current master we're merging onto)
# * pull/$PULL/head (the current state of the remote pull request)
# * pull/$PULL/merge (github's merge)
# * pull/$PULL/local-merge (our merge)
# In case of a clean merge that is accepted by the user, the local branch with
# name $BRANCH is overwritten with the merged result, and optionally pushed.
from __future__ import division,print_function,unicode_literals
import os
from sys import stdin,stdout,stderr
import argparse
import subprocess
import json,codecs
try:
from urllib.request import Request,urlopen
except:
from urllib2 import Request,urlopen
# External tools (can be overridden using environment)
GIT = os.getenv('GIT','git')
BASH = os.getenv('BASH','bash')

# OS specific configuration for terminal attributes
ATTR_RESET = ''
ATTR_PR = ''
# COMMIT_FORMAT uses git pretty-format placeholders:
# %h short hash, %s subject, %an author name, %d ref names.
COMMIT_FORMAT = '%h %s (%an)%d'
if os.name == 'posix': # if posix, assume we can use basic terminal escapes
    ATTR_RESET = '\033[0m'
    ATTR_PR = '\033[1;36m'
    COMMIT_FORMAT = '%C(bold blue)%h%Creset %s %C(cyan)(%an)%Creset%C(green)%d%Creset'
def git_config_get(option, default=None):
    '''
    Get named configuration option from git repository.

    :param option: git config key, e.g. 'githubmerge.repository'.
    :param default: value returned when the option is unset.
    :return: the option value as a unicode string, or *default* when
        git exits non-zero (option missing or not in a repository).
    '''
    try:
        # check_output returns bytes; strip the trailing newline, then decode.
        return subprocess.check_output([GIT,'config','--get',option]).rstrip().decode('utf-8')
    except subprocess.CalledProcessError:
        # Fix: the exception was previously bound to an unused name (F841).
        return default
def retrieve_pr_info(repo,pull):
    '''
    Retrieve pull request information from github.
    Return None if no title can be found, or an error happens.

    :param repo: '<owner>/<repo>' slug.
    :param pull: pull request number as a string.
    :return: decoded JSON object from the GitHub API, or None.
    '''
    try:
        req = Request("https://api.github.com/repos/"+repo+"/pulls/"+pull)
        result = urlopen(req)
        # Wrap the byte stream so json.load sees utf-8 text (py2/py3 safe).
        reader = codecs.getreader('utf-8')
        obj = json.load(reader(result))
        return obj
    except Exception as e:
        # Broad catch is deliberate: this lookup is best-effort and the
        # caller handles a None result.
        print('Warning: unable to retrieve pull information from github: %s' % e)
        return None
def ask_prompt(text):
    """Show *text* on stderr and return one stripped line read from stdin."""
    print(text, end=" ", file=stderr)
    stderr.flush()
    answer = stdin.readline()
    print("", file=stderr)
    return answer.rstrip()
def parse_arguments(argv=None):
    '''Parse command-line arguments.

    :param argv: optional argument list; when None (the default, which
        keeps the original zero-argument call backward-compatible),
        argparse falls back to sys.argv[1:]. Passing a list makes the
        parser reusable and testable.
    :return: argparse.Namespace with ``pull`` (one-element list of int)
        and ``branch`` (str or None).
    '''
    epilog = '''
        In addition, you can set the following git configuration variables:
        githubmerge.repository (mandatory),
        user.signingkey (mandatory),
        githubmerge.host (default: git@github.com),
        githubmerge.branch (no default),
        githubmerge.testcmd (default: none).
    '''
    parser = argparse.ArgumentParser(description='Utility to merge, sign and push github pull requests',
            epilog=epilog)
    parser.add_argument('pull', metavar='PULL', type=int, nargs=1,
        help='Pull request ID to merge')
    parser.add_argument('branch', metavar='BRANCH', type=str, nargs='?',
        default=None, help='Branch to merge against (default: githubmerge.branch setting, or base branch for pull, or \'master\')')
    return parser.parse_args(argv)
def main():
    '''Drive the full merge workflow: read configuration, fetch the PR from
    github, merge it locally, optionally run tests, let the operator verify
    and GPG-sign the merge, then push the result.'''
    # Extract settings from git repo
    repo = git_config_get('githubmerge.repository')
    host = git_config_get('githubmerge.host','git@github.com')
    opt_branch = git_config_get('githubmerge.branch',None)
    testcmd = git_config_get('githubmerge.testcmd')
    signingkey = git_config_get('user.signingkey')
    if repo is None:
        print("ERROR: No repository configured. Use this command to set:", file=stderr)
        print("git config githubmerge.repository <owner>/<repo>", file=stderr)
        exit(1)
    if signingkey is None:
        print("ERROR: No GPG signing key set. Set one using:",file=stderr)
        print("git config --global user.signingkey <key>",file=stderr)
        exit(1)
    host_repo = host+":"+repo # shortcut for push/pull target
    # Extract settings from command line
    args = parse_arguments()
    pull = str(args.pull[0])
    # Receive pull information from github
    info = retrieve_pr_info(repo,pull)
    if info is None:
        exit(1)
    title = info['title']
    # precedence order for destination branch argument:
    #   - command line argument
    #   - githubmerge.branch setting
    #   - base branch for pull (as retrieved from github)
    #   - 'master'
    branch = args.branch or opt_branch or info['base']['ref'] or 'master'
    # Initialize source branches (local ref names for the PR's head/base/merge)
    head_branch = 'pull/'+pull+'/head'
    base_branch = 'pull/'+pull+'/base'
    merge_branch = 'pull/'+pull+'/merge'
    local_merge_branch = 'pull/'+pull+'/local-merge'
    devnull = open(os.devnull,'w')
    # Exit code 3 below means a git plumbing step failed (checkout/fetch/log).
    try:
        subprocess.check_call([GIT,'checkout','-q',branch])
    except subprocess.CalledProcessError as e:
        print("ERROR: Cannot check out branch %s." % (branch), file=stderr)
        exit(3)
    try:
        subprocess.check_call([GIT,'fetch','-q',host_repo,'+refs/pull/'+pull+'/*:refs/heads/pull/'+pull+'/*'])
    except subprocess.CalledProcessError as e:
        print("ERROR: Cannot find pull request #%s on %s." % (pull,host_repo), file=stderr)
        exit(3)
    try:
        subprocess.check_call([GIT,'log','-q','-1','refs/heads/'+head_branch], stdout=devnull, stderr=stdout)
    except subprocess.CalledProcessError as e:
        print("ERROR: Cannot find head of pull request #%s on %s." % (pull,host_repo), file=stderr)
        exit(3)
    try:
        subprocess.check_call([GIT,'log','-q','-1','refs/heads/'+merge_branch], stdout=devnull, stderr=stdout)
    except subprocess.CalledProcessError as e:
        print("ERROR: Cannot find merge of pull request #%s on %s." % (pull,host_repo), file=stderr)
        exit(3)
    try:
        subprocess.check_call([GIT,'fetch','-q',host_repo,'+refs/heads/'+branch+':refs/heads/'+base_branch])
    except subprocess.CalledProcessError as e:
        print("ERROR: Cannot find branch %s on %s." % (branch,host_repo), file=stderr)
        exit(3)
    subprocess.check_call([GIT,'checkout','-q',base_branch])
    subprocess.call([GIT,'branch','-q','-D',local_merge_branch], stderr=devnull)
    subprocess.check_call([GIT,'checkout','-q','-b',local_merge_branch])
    try:
        # Create unsigned merge commit.
        if title:
            firstline = 'Merge #%s: %s' % (pull,title)
        else:
            firstline = 'Merge #%s' % (pull,)
        message = firstline + '\n\n'
        # Append the list of merged commits to the merge commit message.
        message += subprocess.check_output([GIT,'log','--no-merges','--topo-order','--pretty=format:%h %s (%an)',base_branch+'..'+head_branch]).decode('utf-8')
        try:
            subprocess.check_call([GIT,'merge','-q','--commit','--no-edit','--no-ff','-m',message.encode('utf-8'),head_branch])
        except subprocess.CalledProcessError as e:
            print("ERROR: Cannot be merged cleanly.",file=stderr)
            subprocess.check_call([GIT,'merge','--abort'])
            exit(4)
        # Sanity check: if HEAD's subject isn't our merge message, git did
        # not create a new merge commit (e.g. the PR was already merged).
        logmsg = subprocess.check_output([GIT,'log','--pretty=format:%s','-n','1']).decode('utf-8')
        if logmsg.rstrip() != firstline.rstrip():
            print("ERROR: Creating merge failed (already merged?).",file=stderr)
            exit(4)
        # Show a colorized summary of what is being merged into what.
        print('%s#%s%s %s %sinto %s%s' % (ATTR_RESET+ATTR_PR,pull,ATTR_RESET,title,ATTR_RESET+ATTR_PR,branch,ATTR_RESET))
        subprocess.check_call([GIT,'log','--graph','--topo-order','--pretty=format:'+COMMIT_FORMAT,base_branch+'..'+head_branch])
        print()
        # Run test command if configured.
        if testcmd:
            # Go up to the repository's root.
            toplevel = subprocess.check_output([GIT,'rev-parse','--show-toplevel']).strip()
            os.chdir(toplevel)
            if subprocess.call(testcmd,shell=True):
                print("ERROR: Running %s failed." % testcmd,file=stderr)
                exit(5)
            # Show the created merge.
            diff = subprocess.check_output([GIT,'diff',merge_branch+'..'+local_merge_branch])
            subprocess.check_call([GIT,'diff',base_branch+'..'+local_merge_branch])
            if diff:
                print("WARNING: merge differs from github!",file=stderr)
                reply = ask_prompt("Type 'ignore' to continue.")
                if reply.lower() == 'ignore':
                    print("Difference with github ignored.",file=stderr)
                else:
                    exit(6)
            reply = ask_prompt("Press 'd' to accept the diff.")
            if reply.lower() == 'd':
                print("Diff accepted.",file=stderr)
            else:
                print("ERROR: Diff rejected.",file=stderr)
                exit(6)
        else:
            # Verify the result manually.
            print("Dropping you on a shell so you can try building/testing the merged source.",file=stderr)
            print("Run 'git diff HEAD~' to show the changes being merged.",file=stderr)
            print("Type 'exit' when done.",file=stderr)
            if os.path.isfile('/etc/debian_version'): # Show pull number on Debian default prompt
                os.putenv('debian_chroot',pull)
            subprocess.call([BASH,'-i'])
            reply = ask_prompt("Type 'm' to accept the merge.")
            if reply.lower() == 'm':
                print("Merge accepted.",file=stderr)
            else:
                print("ERROR: Merge rejected.",file=stderr)
                exit(7)
        # Sign the merge commit.
        reply = ask_prompt("Type 's' to sign off on the merge.")
        if reply == 's':
            try:
                subprocess.check_call([GIT,'commit','-q','--gpg-sign','--amend','--no-edit'])
            except subprocess.CalledProcessError as e:
                print("Error signing, exiting.",file=stderr)
                exit(1)
        else:
            print("Not signing off on merge, exiting.",file=stderr)
            exit(1)
        # Put the result in branch.
        subprocess.check_call([GIT,'checkout','-q',branch])
        subprocess.check_call([GIT,'reset','-q','--hard',local_merge_branch])
    finally:
        # Clean up temporary branches.
        subprocess.call([GIT,'checkout','-q',branch])
        subprocess.call([GIT,'branch','-q','-D',head_branch],stderr=devnull)
        subprocess.call([GIT,'branch','-q','-D',base_branch],stderr=devnull)
        subprocess.call([GIT,'branch','-q','-D',merge_branch],stderr=devnull)
        subprocess.call([GIT,'branch','-q','-D',local_merge_branch],stderr=devnull)
    # Push the result.
    reply = ask_prompt("Type 'push' to push the result to %s, branch %s." % (host_repo,branch))
    if reply.lower() == 'push':
        subprocess.check_call([GIT,'push',host_repo,'refs/heads/'+branch])
# Script entry point.
if __name__ == '__main__':
    main()
| mit |
hansent/kivy | kivy/adapters/dictadapter.py | 7 | 5763 | '''
DictAdapter
===========
.. versionadded:: 1.5
.. warning::
This code is still experimental, and its API is subject to change in a
future version.
A :class:`~kivy.adapters.dictadapter.DictAdapter` is an adapter around a
python dictionary of records. It extends the list-like capabilities of the
:class:`~kivy.adapters.listadapter.ListAdapter`.
If you wish to have a bare-bones list adapter, without selection, use the
:class:`~kivy.adapters.simplelistadapter.SimpleListAdapter`.
'''
__all__ = ('DictAdapter', )
from kivy.properties import ListProperty, DictProperty
from kivy.adapters.listadapter import ListAdapter
class DictAdapter(ListAdapter):
    '''A :class:`~kivy.adapters.dictadapter.DictAdapter` is an adapter around a
    python dictionary of records. It extends the list-like capabilities of
    the :class:`~kivy.adapters.listadapter.ListAdapter`.
    '''

    sorted_keys = ListProperty([])
    '''The sorted_keys list property contains a list of hashable objects (can
    be strings) that will be used directly if no args_converter function is
    provided. If there is an args_converter, the record received from a
    lookup of the data, using keys from sorted_keys, will be passed
    to it for instantiation of list item view class instances.

    :data:`sorted_keys` is a :class:`~kivy.properties.ListProperty` and
    defaults to [].
    '''

    data = DictProperty(None)
    '''A dict that indexes records by keys that are equivalent to the keys in
    sorted_keys, or they are a superset of the keys in sorted_keys.
    The values can be strings, class instances, dicts, etc.

    :data:`data` is a :class:`~kivy.properties.DictProperty` and defaults
    to None.
    '''

    def __init__(self, **kwargs):
        # sorted_keys, when given, must be an ordered sequence; otherwise it
        # is derived from the data dict's (sorted) keys.
        if 'sorted_keys' in kwargs:
            if type(kwargs['sorted_keys']) not in (tuple, list):
                msg = 'DictAdapter: sorted_keys must be tuple or list'
                raise Exception(msg)
        else:
            self.sorted_keys = sorted(kwargs['data'].keys())

        super(DictAdapter, self).__init__(**kwargs)

        self.bind(sorted_keys=self.initialize_sorted_keys)

    def bind_triggers_to_view(self, func):
        # Re-dispatch to the view whenever either the key order or the
        # underlying records change.
        self.bind(sorted_keys=func)
        self.bind(data=func)

    # self.data is paramount to self.sorted_keys. If sorted_keys is reset to
    # mismatch data, force a reset of sorted_keys to data.keys(). So, in order
    # to do a complete reset of data and sorted_keys, data must be reset
    # first, followed by a reset of sorted_keys, if needed.
    def initialize_sorted_keys(self, *args):
        stale_sorted_keys = False
        for key in self.sorted_keys:
            if key not in self.data:
                stale_sorted_keys = True
                break
        if stale_sorted_keys:
            self.sorted_keys = sorted(self.data.keys())
        self.delete_cache()
        self.initialize_selection()

    # Override ListAdapter.update_for_new_data().
    def update_for_new_data(self, *args):
        self.initialize_sorted_keys()

    # Note: this is not len(self.data).
    def get_count(self):
        return len(self.sorted_keys)

    def get_data_item(self, index):
        # Out-of-range indices (including negatives) yield None rather than
        # wrapping around or raising.
        if index < 0 or index >= len(self.sorted_keys):
            return None
        return self.data[self.sorted_keys[index]]

    # [TODO] Also make methods for scroll_to_sel_start, scroll_to_sel_end,
    #        scroll_to_sel_middle.

    def trim_left_of_sel(self, *args):
        '''Cut list items with indices in sorted_keys that are less than the
        index of the first selected item, if there is a selection.

        sorted_keys will be updated by update_for_new_data().
        '''
        if len(self.selection) > 0:
            selected_keys = [sel.text for sel in self.selection]
            first_sel_index = self.sorted_keys.index(selected_keys[0])
            desired_keys = self.sorted_keys[first_sel_index:]
            self.data = {key: self.data[key] for key in desired_keys}

    def trim_right_of_sel(self, *args):
        '''Cut list items with indices in sorted_keys that are greater than
        the index of the last selected item, if there is a selection.

        sorted_keys will be updated by update_for_new_data().
        '''
        if len(self.selection) > 0:
            selected_keys = [sel.text for sel in self.selection]
            last_sel_index = self.sorted_keys.index(selected_keys[-1])
            desired_keys = self.sorted_keys[:last_sel_index + 1]
            self.data = {key: self.data[key] for key in desired_keys}

    def trim_to_sel(self, *args):
        '''Cut list items with indices in sorted_keys that are less than or
        greater than the index of the last selected item, if there is a
        selection. This preserves intervening list items within the selected
        range.

        sorted_keys will be updated by update_for_new_data().
        '''
        if len(self.selection) > 0:
            selected_keys = [sel.text for sel in self.selection]
            first_sel_index = self.sorted_keys.index(selected_keys[0])
            last_sel_index = self.sorted_keys.index(selected_keys[-1])
            desired_keys = self.sorted_keys[first_sel_index:last_sel_index + 1]
            self.data = {key: self.data[key] for key in desired_keys}

    def cut_to_sel(self, *args):
        '''Same as trim_to_sel, but intervening list items within the selected
        range are also cut, leaving only list items that are selected.

        sorted_keys will be updated by update_for_new_data().
        '''
        if len(self.selection) > 0:
            selected_keys = [sel.text for sel in self.selection]
            self.data = {key: self.data[key] for key in selected_keys}
| mit |
gyglim/Recipes | modelzoo/vgg_cnn_s.py | 4 | 2898 | # VGG_CNN_S, model from the paper:
# "Return of the Devil in the Details: Delving Deep into Convolutional Nets"
# 13.1% top-5 error on ILSVRC-2012-val
# Original source: https://gist.github.com/ksimonyan/fd8800eeb36e276cd6f9
# License: non-commercial use only
# Download pretrained weights from:
# https://s3.amazonaws.com/lasagne/recipes/pretrained/imagenet/vgg_cnn_s.pkl
from lasagne.layers import DenseLayer
from lasagne.layers import DropoutLayer
from lasagne.layers import InputLayer
from lasagne.layers import LocalResponseNormalization2DLayer as NormLayer
from lasagne.layers import NonlinearityLayer
from lasagne.layers.dnn import Conv2DDNNLayer as ConvLayer
from lasagne.layers import MaxPool2DLayer as PoolLayer
from lasagne.nonlinearities import softmax
def build_model():
    """Construct the VGG_CNN_S architecture.

    Returns a dict mapping layer names to lasagne layer instances; the
    final softmax output is under the key 'prob'.
    """
    net = {}
    net['input'] = InputLayer((None, 3, 224, 224))
    net['conv1'] = ConvLayer(net['input'], num_filters=96, filter_size=7,
                             stride=2, flip_filters=False)
    # caffe has alpha = alpha * pool_size
    net['norm1'] = NormLayer(net['conv1'], alpha=0.0001)
    net['pool1'] = PoolLayer(net['norm1'], pool_size=3, stride=3,
                             ignore_border=False)
    net['conv2'] = ConvLayer(net['pool1'], num_filters=256, filter_size=5,
                             flip_filters=False)
    net['pool2'] = PoolLayer(net['conv2'], pool_size=2, stride=2,
                             ignore_border=False)
    # Three identical 3x3 conv layers with 512 filters each.
    previous = net['pool2']
    for name in ('conv3', 'conv4', 'conv5'):
        previous = net[name] = ConvLayer(previous, num_filters=512,
                                         filter_size=3, pad=1,
                                         flip_filters=False)
    net['pool5'] = PoolLayer(net['conv5'], pool_size=3, stride=3,
                             ignore_border=False)
    # Fully-connected classifier head with dropout.
    net['fc6'] = DenseLayer(net['pool5'], num_units=4096)
    net['drop6'] = DropoutLayer(net['fc6'], p=0.5)
    net['fc7'] = DenseLayer(net['drop6'], num_units=4096)
    net['drop7'] = DropoutLayer(net['fc7'], p=0.5)
    net['fc8'] = DenseLayer(net['drop7'], num_units=1000, nonlinearity=None)
    net['prob'] = NonlinearityLayer(net['fc8'], softmax)
    return net
| mit |
ProfessorX/Config | .PyCharm30/system/python_stubs/-1247972723/gtk/gdk/__init__/DisplayManager.py | 1 | 1186 | # encoding: utf-8
# module gtk.gdk
# from /usr/lib/python2.7/dist-packages/gtk-2.0/pynotify/_pynotify.so
# by generator 1.135
# no doc
# imports
from exceptions import Warning
import gio as __gio
import gobject as __gobject
import gobject._gobject as __gobject__gobject
import pango as __pango
import pangocairo as __pangocairo
# Auto-generated stub for GdkDisplayManager; real signatures are unknown to
# the generator, so every method body is a placeholder.
class DisplayManager(__gobject__gobject.GObject):
    """
    Object GdkDisplayManager

    Signals from GdkDisplayManager:
      display-opened (GdkDisplay)

    Properties from GdkDisplayManager:
      default-display -> GdkDisplay: Default Display
        The default display for GDK

    Signals from GObject:
      notify (GParam)
    """
    @classmethod
    def do_display_opened(cls, *args, **kwargs): # real signature unknown
        pass

    def get_default_display(self, *args, **kwargs): # real signature unknown
        pass

    def list_displays(self, *args, **kwargs): # real signature unknown
        pass

    def set_default_display(self, *args, **kwargs): # real signature unknown
        pass

    def __init__(self, *args, **kwargs): # real signature unknown
        pass

    __gtype__ = None # (!) real value is ''
| gpl-2.0 |
jonyroda97/redbot-amigosprovaveis | lib/youtube_dl/extractor/reverbnation.py | 64 | 1627 | from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import (
qualities,
str_or_none,
)
class ReverbNationIE(InfoExtractor):
    """Extractor for single songs hosted on reverbnation.com."""
    _VALID_URL = r'^https?://(?:www\.)?reverbnation\.com/.*?/song/(?P<id>\d+).*?$'
    _TESTS = [{
        'url': 'http://www.reverbnation.com/alkilados/song/16965047-mona-lisa',
        'md5': 'c0aaf339bcee189495fdf5a8c8ba8645',
        'info_dict': {
            'id': '16965047',
            'ext': 'mp3',
            'title': 'MONA LISA',
            'uploader': 'ALKILADOS',
            'uploader_id': '216429',
            'thumbnail': r're:^https?://.*\.jpg',
        },
    }]

    def _real_extract(self, url):
        song_id = self._match_id(url)

        api_res = self._download_json(
            'https://api.reverbnation.com/song/%s' % song_id,
            song_id,
            note='Downloading information of song %s' % song_id
        )

        # Thumbnail keys in preference order; only keys with a value count.
        THUMBNAILS = ('thumbnail', 'image')
        quality = qualities(THUMBNAILS)
        thumbnails = [
            {'url': api_res[thumb_key], 'preference': quality(thumb_key)}
            for thumb_key in THUMBNAILS
            if api_res.get(thumb_key)
        ]

        artist = api_res.get('artist', {})
        return {
            'id': song_id,
            'title': api_res['name'],
            'url': api_res['url'],
            'uploader': artist.get('name'),
            'uploader_id': str_or_none(artist.get('id')),
            'thumbnails': thumbnails,
            'ext': 'mp3',
            'vcodec': 'none',
        }
| gpl-3.0 |
bcui6611/healthchecker | Cheetah/DirectiveAnalyzer.py | 15 | 2467 | #!/usr/bin/env python
import os
import pprint
try:
from functools import reduce
except ImportError:
# Assume we have reduce
pass
from Cheetah import Parser
from Cheetah import Compiler
from Cheetah import Template
class Analyzer(Parser.Parser):
    """Parser subclass that tallies how often each directive occurs."""

    def __init__(self, *args, **kwargs):
        # directive name -> occurrence count, filled in during parsing
        self.calls = {}
        super(Analyzer, self).__init__(*args, **kwargs)

    def eatDirective(self):
        directive = self.matchDirective()
        self.calls[directive] = self.calls.get(directive, 0) + 1
        super(Analyzer, self).eatDirective()
# Module compiler whose parse pass counts directives (via Analyzer) in
# addition to compiling the template.
class AnalysisCompiler(Compiler.ModuleCompiler):
    parserClass = Analyzer
def analyze(source):
    # Compile the template source with the counting compiler and return the
    # {directive: count} mapping gathered by its parser during compilation.
    klass = Template.Template.compile(source, compilerClass=AnalysisCompiler)
    return klass._CHEETAH_compilerInstance._parser.calls
def main_file(f):
fd = open(f, 'r')
try:
print u'>>> Analyzing %s' % f
calls = analyze(fd.read())
return calls
finally:
fd.close()
def _find_templates(directory, suffix):
for root, dirs, files in os.walk(directory):
for f in files:
if not f.endswith(suffix):
continue
yield root + os.path.sep + f
def _analyze_templates(iterable):
    """Lazily run main_file over every template path in *iterable*."""
    for path in iterable:
        yield main_file(path)
def main_dir(opts):
    """Analyze every template under opts.dir (matching opts.suffix) and
    merge the per-file directive counts into one {directive: count} dict.
    """
    results = _analyze_templates(_find_templates(opts.dir, opts.suffix))
    totals = {}
    for series in results:
        # main_file may yield an empty/None result for templates with no
        # directives; skip those.
        if not series:
            continue
        for k, v in series.iteritems():
            # dict.get avoids the per-item try/except KeyError of the
            # original accumulation loop.
            totals[k] = totals.get(k, 0) + v
    return totals
def main():
    # Command line entry point: analyze one file (-f) or a directory tree
    # (-d, with optional --suffix filter) and pretty-print the counts.
    from optparse import OptionParser
    op = OptionParser()
    op.add_option('-f', '--file', dest='file', default=None,
                  help='Specify a single file to analyze')
    op.add_option('-d', '--dir', dest='dir', default=None,
                  help='Specify a directory of templates to analyze')
    op.add_option('--suffix', default='tmpl', dest='suffix',
                  help='Specify a custom template file suffix for the -d option (default: "tmpl")')

    opts, args = op.parse_args()

    if not opts.file and not opts.dir:
        op.print_help()
        return

    results = None
    if opts.file:
        results = main_file(opts.file)
    if opts.dir:
        # Note: -d overwrites the -f result when both are given.
        results = main_dir(opts)

    pprint.pprint(results)
main()
| apache-2.0 |
mcvidomi/poim2motif | run_svm_real.py | 1 | 1483 | '''
Created on 08.06.2015
@author: marinavidovic
'''
import os
import pdb
import utils_svm
import pickle
import numpy as np
import copy
import genQ
import makePOIM
import view
import matplotlib
matplotlib.use('Agg')
if __name__ == '__main__':
    # Flag: 1 = parse the raw splice-site text file, 0 = load the cached pickle.
    read_data = 1
    datapath = "/home/mvidovic/POIMall/data/real/human_acceptor_splice_data.txt"
    savepath = "/home/mvidovic/POIMall/data/real/human_acceptor_splice_data0.pkl"
    # number of lines to read from the raw data file
    lines=1000
    if read_data:
        x,y=utils_svm.extractRealData(datapath,savepath,lines)
    else:
        fobj=open(savepath,'rb')
        x,y=pickle.load(fobj)
        fobj.close()
    # Subsample to 100 positive and 400 negative examples (1:4 ratio).
    num_pos = 100
    num_neg = 4*num_pos
    print "reduce samples"
    x_red,y_red = utils_svm.reduce_samples(copy.deepcopy(x),copy.deepcopy(y),num_pos,num_neg)
    nploci_letters,nploci_positions = utils_svm.non_polymorphic_loci(x_red)
    #read data
    experiment_name = "real1"
    if not os.path.exists(experiment_name):
        os.makedirs(experiment_name)
    poimpath=experiment_name+"/poim.pkl"
    # Synthetic-data generation parameters (motif ATTTT planted at position mu).
    tally=30
    positives=25
    sequenceno=100
    mutation_prob=0.0
    motif="ATTTT"
    mu=13
    # NOTE(review): this overwrites the real-data x,y loaded above with
    # generated sequences, so x_red/y_red/nploci_* go unused — confirm intended.
    x,y = makePOIM.gensequences(tally,positives,sequenceno,mutation_prob,motif,mu)
    #compute POIM
    poim_degree = 6
    kernel_degree = 8
    print "start poim computation"
    poims = makePOIM.computePOIM(x,y,poim_degree,kernel_degree,poimpath)
    # poims[0] holds the POIM matrices per order; take the order-2 table.
    Q2 = poims[0][1]
    #view.test()
    view.figurepoimsimple(Q2, "poim_pic", 0)
| mit |
larsbergstrom/servo | tests/wpt/web-platform-tests/tools/third_party/h2/test/test_complex_logic.py | 39 | 21420 | # -*- coding: utf-8 -*-
"""
test_complex_logic
~~~~~~~~~~~~~~~~
More complex tests that try to do more.
Certain tests don't really eliminate incorrect behaviour unless they do quite
a bit. These tests should live here, to keep the pain in once place rather than
hide it in the other parts of the test suite.
"""
import pytest
import h2
import h2.config
import h2.connection
class TestComplexClient(object):
    """
    Complex tests for client-side stacks.
    """
    example_request_headers = [
        (':authority', 'example.com'),
        (':path', '/'),
        (':scheme', 'https'),
        (':method', 'GET'),
    ]
    example_response_headers = [
        (':status', '200'),
        ('server', 'fake-serv/0.1.0')
    ]

    def test_correctly_count_server_streams(self, frame_factory):
        """
        We correctly count the number of server streams, both inbound and
        outbound.
        """
        # This test makes no sense unless you do both inbound and outbound,
        # because it's important to confirm that we count them correctly.
        c = h2.connection.H2Connection()
        c.initiate_connection()
        expected_inbound_streams = expected_outbound_streams = 0

        assert c.open_inbound_streams == expected_inbound_streams
        assert c.open_outbound_streams == expected_outbound_streams

        # Open seven outbound streams (odd client stream IDs 1..13) and have
        # the server push one stream (even ID) for each of them.
        for stream_id in range(1, 15, 2):
            # Open an outbound stream
            c.send_headers(stream_id, self.example_request_headers)
            expected_outbound_streams += 1
            assert c.open_inbound_streams == expected_inbound_streams
            assert c.open_outbound_streams == expected_outbound_streams

            # Receive a pushed stream (to create an inbound one). This doesn't
            # open until we also receive headers.
            f = frame_factory.build_push_promise_frame(
                stream_id=stream_id,
                promised_stream_id=stream_id+1,
                headers=self.example_request_headers,
            )
            c.receive_data(f.serialize())
            assert c.open_inbound_streams == expected_inbound_streams
            assert c.open_outbound_streams == expected_outbound_streams

            f = frame_factory.build_headers_frame(
                stream_id=stream_id+1,
                headers=self.example_response_headers,
            )
            c.receive_data(f.serialize())
            expected_inbound_streams += 1
            assert c.open_inbound_streams == expected_inbound_streams
            assert c.open_outbound_streams == expected_outbound_streams

        # Now close them all again, newest first.
        for stream_id in range(13, 0, -2):
            # Close an outbound stream.
            c.end_stream(stream_id)

            # Stream doesn't close until both sides close it.
            assert c.open_inbound_streams == expected_inbound_streams
            assert c.open_outbound_streams == expected_outbound_streams

            f = frame_factory.build_headers_frame(
                stream_id=stream_id,
                headers=self.example_response_headers,
                flags=['END_STREAM'],
            )
            c.receive_data(f.serialize())
            expected_outbound_streams -= 1
            assert c.open_inbound_streams == expected_inbound_streams
            assert c.open_outbound_streams == expected_outbound_streams

            # Pushed streams can only be closed remotely.
            f = frame_factory.build_data_frame(
                stream_id=stream_id+1,
                data=b'the content',
                flags=['END_STREAM'],
            )
            c.receive_data(f.serialize())
            expected_inbound_streams -= 1
            assert c.open_inbound_streams == expected_inbound_streams
            assert c.open_outbound_streams == expected_outbound_streams

        assert c.open_inbound_streams == 0
        assert c.open_outbound_streams == 0
class TestComplexServer(object):
    """
    Complex tests for server-side stacks.
    """
    example_request_headers = [
        (b':authority', b'example.com'),
        (b':path', b'/'),
        (b':scheme', b'https'),
        (b':method', b'GET'),
    ]
    example_response_headers = [
        (b':status', b'200'),
        (b'server', b'fake-serv/0.1.0')
    ]
    server_config = h2.config.H2Configuration(client_side=False)

    def test_correctly_count_server_streams(self, frame_factory):
        """
        We correctly count the number of server streams, both inbound and
        outbound.
        """
        # This test makes no sense unless you do both inbound and outbound,
        # because it's important to confirm that we count them correctly.
        c = h2.connection.H2Connection(config=self.server_config)
        c.receive_data(frame_factory.preamble())
        expected_inbound_streams = expected_outbound_streams = 0

        assert c.open_inbound_streams == expected_inbound_streams
        assert c.open_outbound_streams == expected_outbound_streams

        # Accept seven client streams (odd IDs 1..13) and push one server
        # stream (even ID) for each of them.
        for stream_id in range(1, 15, 2):
            # Receive an inbound stream.
            f = frame_factory.build_headers_frame(
                headers=self.example_request_headers,
                stream_id=stream_id,
            )
            c.receive_data(f.serialize())
            expected_inbound_streams += 1
            assert c.open_inbound_streams == expected_inbound_streams
            assert c.open_outbound_streams == expected_outbound_streams

            # Push a stream (to create a outbound one). This doesn't open
            # until we send our response headers.
            c.push_stream(stream_id, stream_id+1, self.example_request_headers)
            assert c.open_inbound_streams == expected_inbound_streams
            assert c.open_outbound_streams == expected_outbound_streams

            c.send_headers(stream_id+1, self.example_response_headers)
            expected_outbound_streams += 1
            assert c.open_inbound_streams == expected_inbound_streams
            assert c.open_outbound_streams == expected_outbound_streams

        # Now close them all again, newest first.
        for stream_id in range(13, 0, -2):
            # Close an inbound stream.
            f = frame_factory.build_data_frame(
                data=b'',
                flags=['END_STREAM'],
                stream_id=stream_id,
            )
            c.receive_data(f.serialize())

            # Stream doesn't close until both sides close it.
            assert c.open_inbound_streams == expected_inbound_streams
            assert c.open_outbound_streams == expected_outbound_streams

            c.send_data(stream_id, b'', end_stream=True)
            expected_inbound_streams -= 1
            assert c.open_inbound_streams == expected_inbound_streams
            assert c.open_outbound_streams == expected_outbound_streams

            # Pushed streams, however, we can close ourselves.
            c.send_data(
                stream_id=stream_id+1,
                data=b'',
                end_stream=True,
            )
            expected_outbound_streams -= 1
            assert c.open_inbound_streams == expected_inbound_streams
            assert c.open_outbound_streams == expected_outbound_streams

        assert c.open_inbound_streams == 0
        assert c.open_outbound_streams == 0
class TestContinuationFrames(object):
    """
    Tests for the relatively complex CONTINUATION frame logic.
    """
    example_request_headers = [
        (b':authority', b'example.com'),
        (b':path', b'/'),
        (b':scheme', b'https'),
        (b':method', b'GET'),
    ]
    server_config = h2.config.H2Configuration(client_side=False)

    def _build_continuation_sequence(self, headers, block_size, frame_factory):
        # Build a HEADERS frame, then split its header block into
        # block_size-byte chunks: the first chunk stays on the HEADERS frame,
        # the rest become CONTINUATION frames. Only the last frame carries
        # END_HEADERS; the HEADERS frame carries END_STREAM.
        f = frame_factory.build_headers_frame(headers)
        header_data = f.data
        chunks = [
            header_data[x:x+block_size]
            for x in range(0, len(header_data), block_size)
        ]
        f.data = chunks.pop(0)
        frames = [
            frame_factory.build_continuation_frame(c) for c in chunks
        ]
        f.flags = {'END_STREAM'}
        frames[-1].flags.add('END_HEADERS')
        frames.insert(0, f)
        return frames

    def test_continuation_frame_basic(self, frame_factory):
        """
        Test that we correctly decode a header block split across continuation
        frames.
        """
        c = h2.connection.H2Connection(config=self.server_config)
        c.initiate_connection()
        c.receive_data(frame_factory.preamble())

        frames = self._build_continuation_sequence(
            headers=self.example_request_headers,
            block_size=5,
            frame_factory=frame_factory,
        )
        data = b''.join(f.serialize() for f in frames)
        events = c.receive_data(data)

        assert len(events) == 2
        first_event, second_event = events

        assert isinstance(first_event, h2.events.RequestReceived)
        assert first_event.headers == self.example_request_headers
        assert first_event.stream_id == 1

        assert isinstance(second_event, h2.events.StreamEnded)
        assert second_event.stream_id == 1

    @pytest.mark.parametrize('stream_id', [3, 1])
    def test_continuation_cannot_interleave_headers(self,
                                                    frame_factory,
                                                    stream_id):
        """
        We cannot interleave a new headers block with a CONTINUATION sequence.
        """
        c = h2.connection.H2Connection(config=self.server_config)
        c.initiate_connection()
        c.receive_data(frame_factory.preamble())
        c.clear_outbound_data_buffer()

        frames = self._build_continuation_sequence(
            headers=self.example_request_headers,
            block_size=5,
            frame_factory=frame_factory,
        )
        assert len(frames) > 2  # This is mostly defensive.

        # Inject a HEADERS frame just before the final CONTINUATION frame.
        bogus_frame = frame_factory.build_headers_frame(
            headers=self.example_request_headers,
            stream_id=stream_id,
            flags=['END_STREAM'],
        )
        frames.insert(len(frames) - 2, bogus_frame)
        data = b''.join(f.serialize() for f in frames)

        with pytest.raises(h2.exceptions.ProtocolError) as e:
            c.receive_data(data)

        assert "invalid frame" in str(e.value).lower()

    def test_continuation_cannot_interleave_data(self, frame_factory):
        """
        We cannot interleave a data frame with a CONTINUATION sequence.
        """
        c = h2.connection.H2Connection(config=self.server_config)
        c.initiate_connection()
        c.receive_data(frame_factory.preamble())
        c.clear_outbound_data_buffer()

        frames = self._build_continuation_sequence(
            headers=self.example_request_headers,
            block_size=5,
            frame_factory=frame_factory,
        )
        assert len(frames) > 2  # This is mostly defensive.

        bogus_frame = frame_factory.build_data_frame(
            data=b'hello',
            stream_id=1,
        )
        frames.insert(len(frames) - 2, bogus_frame)
        data = b''.join(f.serialize() for f in frames)

        with pytest.raises(h2.exceptions.ProtocolError) as e:
            c.receive_data(data)

        assert "invalid frame" in str(e.value).lower()

    def test_continuation_cannot_interleave_unknown_frame(self, frame_factory):
        """
        We cannot interleave an unknown frame with a CONTINUATION sequence.
        """
        c = h2.connection.H2Connection(config=self.server_config)
        c.initiate_connection()
        c.receive_data(frame_factory.preamble())
        c.clear_outbound_data_buffer()

        frames = self._build_continuation_sequence(
            headers=self.example_request_headers,
            block_size=5,
            frame_factory=frame_factory,
        )
        assert len(frames) > 2  # This is mostly defensive.

        # Forge an unknown frame type by overwriting the type byte.
        bogus_frame = frame_factory.build_data_frame(
            data=b'hello',
            stream_id=1,
        )
        bogus_frame.type = 88
        frames.insert(len(frames) - 2, bogus_frame)
        data = b''.join(f.serialize() for f in frames)

        with pytest.raises(h2.exceptions.ProtocolError) as e:
            c.receive_data(data)

        assert "invalid frame" in str(e.value).lower()

    def test_continuation_frame_multiple_blocks(self, frame_factory):
        """
        Test that we correctly decode several header blocks split across
        continuation frames.
        """
        c = h2.connection.H2Connection(config=self.server_config)
        c.initiate_connection()
        c.receive_data(frame_factory.preamble())

        for stream_id in range(1, 7, 2):
            frames = self._build_continuation_sequence(
                headers=self.example_request_headers,
                block_size=2,
                frame_factory=frame_factory,
            )
            for frame in frames:
                frame.stream_id = stream_id

            data = b''.join(f.serialize() for f in frames)
            events = c.receive_data(data)

            assert len(events) == 2
            first_event, second_event = events

            assert isinstance(first_event, h2.events.RequestReceived)
            assert first_event.headers == self.example_request_headers
            assert first_event.stream_id == stream_id

            assert isinstance(second_event, h2.events.StreamEnded)
            assert second_event.stream_id == stream_id
class TestContinuationFramesPushPromise(object):
"""
Tests for the relatively complex CONTINUATION frame logic working with
PUSH_PROMISE frames.
"""
example_request_headers = [
(b':authority', b'example.com'),
(b':path', b'/'),
(b':scheme', b'https'),
(b':method', b'GET'),
]
example_response_headers = [
(b':status', b'200'),
(b'server', b'fake-serv/0.1.0')
]
def _build_continuation_sequence(self, headers, block_size, frame_factory):
f = frame_factory.build_push_promise_frame(
stream_id=1, promised_stream_id=2, headers=headers
)
header_data = f.data
chunks = [
header_data[x:x+block_size]
for x in range(0, len(header_data), block_size)
]
f.data = chunks.pop(0)
frames = [
frame_factory.build_continuation_frame(c) for c in chunks
]
f.flags = {'END_STREAM'}
frames[-1].flags.add('END_HEADERS')
frames.insert(0, f)
return frames
def test_continuation_frame_basic_push_promise(self, frame_factory):
"""
Test that we correctly decode a header block split across continuation
frames when that header block is initiated with a PUSH_PROMISE.
"""
c = h2.connection.H2Connection()
c.initiate_connection()
c.send_headers(stream_id=1, headers=self.example_request_headers)
frames = self._build_continuation_sequence(
headers=self.example_request_headers,
block_size=5,
frame_factory=frame_factory,
)
data = b''.join(f.serialize() for f in frames)
events = c.receive_data(data)
assert len(events) == 1
event = events[0]
assert isinstance(event, h2.events.PushedStreamReceived)
assert event.headers == self.example_request_headers
assert event.parent_stream_id == 1
assert event.pushed_stream_id == 2
@pytest.mark.parametrize('stream_id', [3, 1, 2])
def test_continuation_cannot_interleave_headers_pp(self,
frame_factory,
stream_id):
"""
We cannot interleave a new headers block with a CONTINUATION sequence
when the headers block is based on a PUSH_PROMISE frame.
"""
c = h2.connection.H2Connection()
c.initiate_connection()
c.send_headers(stream_id=1, headers=self.example_request_headers)
frames = self._build_continuation_sequence(
headers=self.example_request_headers,
block_size=5,
frame_factory=frame_factory,
)
assert len(frames) > 2 # This is mostly defensive.
bogus_frame = frame_factory.build_headers_frame(
headers=self.example_response_headers,
stream_id=stream_id,
flags=['END_STREAM'],
)
frames.insert(len(frames) - 2, bogus_frame)
data = b''.join(f.serialize() for f in frames)
with pytest.raises(h2.exceptions.ProtocolError) as e:
c.receive_data(data)
assert "invalid frame" in str(e.value).lower()
def test_continuation_cannot_interleave_data(self, frame_factory):
    """
    We cannot interleave a data frame with a CONTINUATION sequence when
    that sequence began with a PUSH_PROMISE frame.
    """
    conn = h2.connection.H2Connection()
    conn.initiate_connection()
    conn.send_headers(stream_id=1, headers=self.example_request_headers)

    sequence = self._build_continuation_sequence(
        headers=self.example_request_headers,
        block_size=5,
        frame_factory=frame_factory,
    )
    assert len(sequence) > 2  # This is mostly defensive.

    # Slip a DATA frame in just before the final CONTINUATION frame.
    interloper = frame_factory.build_data_frame(
        data=b'hello',
        stream_id=1,
    )
    sequence.insert(len(sequence) - 2, interloper)

    wire_bytes = b''.join(frame.serialize() for frame in sequence)

    with pytest.raises(h2.exceptions.ProtocolError) as exc_info:
        conn.receive_data(wire_bytes)
    assert "invalid frame" in str(exc_info.value).lower()
def test_continuation_cannot_interleave_unknown_frame(self, frame_factory):
    """
    We cannot interleave an unknown frame with a CONTINUATION sequence when
    that sequence began with a PUSH_PROMISE frame.
    """
    c = h2.connection.H2Connection()
    c.initiate_connection()
    c.send_headers(stream_id=1, headers=self.example_request_headers)

    frames = self._build_continuation_sequence(
        headers=self.example_request_headers,
        block_size=5,
        frame_factory=frame_factory,
    )
    assert len(frames) > 2  # This is mostly defensive.

    # Fake an "unknown" frame by building an ordinary DATA frame and then
    # retyping it to an unregistered frame type number.
    bogus_frame = frame_factory.build_data_frame(
        data=b'hello',
        stream_id=1,
    )
    bogus_frame.type = 88
    frames.insert(len(frames) - 2, bogus_frame)
    data = b''.join(f.serialize() for f in frames)

    with pytest.raises(h2.exceptions.ProtocolError) as e:
        c.receive_data(data)
    assert "invalid frame" in str(e.value).lower()
@pytest.mark.parametrize('evict', [True, False])
def test_stream_remotely_closed_disallows_push_promise(self,
                                                       evict,
                                                       frame_factory):
    """
    Streams closed normally by the remote peer disallow PUSH_PROMISE
    frames, and cause a GOAWAY.
    """
    c = h2.connection.H2Connection()
    c.initiate_connection()
    # Open stream 1 and half-close it on our side...
    c.send_headers(
        stream_id=1,
        headers=self.example_request_headers,
        end_stream=True
    )

    # ...then receive the peer's END_STREAM so the stream is fully closed.
    f = frame_factory.build_headers_frame(
        stream_id=1,
        headers=self.example_response_headers,
        flags=['END_STREAM']
    )
    c.receive_data(f.serialize())
    c.clear_outbound_data_buffer()

    if evict:
        # This is annoyingly stateful, but enumerating the list of open
        # streams will force us to flush state.
        assert not c.open_outbound_streams

    # A PUSH_PROMISE on the closed stream must be rejected...
    f = frame_factory.build_push_promise_frame(
        stream_id=1,
        promised_stream_id=2,
        headers=self.example_request_headers,
    )
    with pytest.raises(h2.exceptions.ProtocolError):
        c.receive_data(f.serialize())

    # ...and answered with a PROTOCOL_ERROR GOAWAY frame.
    f = frame_factory.build_goaway_frame(
        last_stream_id=0,
        error_code=h2.errors.ErrorCodes.PROTOCOL_ERROR,
    )
    assert c.data_to_send() == f.serialize()
def test_continuation_frame_multiple_push_promise(self, frame_factory):
    """
    Test that we correctly decode header blocks split across continuation
    frames when those header block is initiated with a PUSH_PROMISE, for
    more than one pushed stream.
    """
    c = h2.connection.H2Connection()
    c.initiate_connection()
    c.send_headers(stream_id=1, headers=self.example_request_headers)

    # Push three streams (ids 2, 4, 6), each with a fragmented header
    # block, and check every one reassembles into a single event.
    for promised_stream_id in range(2, 8, 2):
        frames = self._build_continuation_sequence(
            headers=self.example_request_headers,
            block_size=2,
            frame_factory=frame_factory,
        )
        # _build_continuation_sequence always promises stream 2; retarget
        # the PUSH_PROMISE at this iteration's stream id.
        frames[0].promised_stream_id = promised_stream_id
        data = b''.join(f.serialize() for f in frames)
        events = c.receive_data(data)

        assert len(events) == 1
        event = events[0]

        assert isinstance(event, h2.events.PushedStreamReceived)
        assert event.headers == self.example_request_headers
        assert event.parent_stream_id == 1
        assert event.pushed_stream_id == promised_stream_id
| mpl-2.0 |
SevereOverfl0w/MCDirectory | migrations/versions/10723b632a87_.py | 1 | 1036 | """empty message
Revision ID: 10723b632a87
Revises: 3d7ce850941c
Create Date: 2013-11-12 22:18:26.482191
"""
# revision identifiers, used by Alembic.
revision = '10723b632a87'
down_revision = '3d7ce850941c'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Apply this revision: create the ``comment`` table.

    A comment is one user's note on another user's profile, with a
    timestamp and an integer star rating.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('comment',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('commenter_id', sa.Integer(), nullable=False),
    sa.Column('user_id', sa.Integer(), nullable=False),
    sa.Column('time', sa.DateTime(), nullable=False),
    sa.Column('comment', sa.Text(), nullable=False),
    sa.Column('stars', sa.Integer(), nullable=False),
    # both foreign keys point at user: the author and the commented-on profile
    sa.ForeignKeyConstraint(['commenter_id'], ['user.id'], ),
    sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    ### end Alembic commands ###
def downgrade():
    """Revert this revision: drop the ``comment`` table."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('comment')
    ### end Alembic commands ###
| mit |
Thapelo-Tsotetsi/rapidpro | temba/values/tests.py | 2 | 18321 | from __future__ import unicode_literals
from datetime import timedelta
from mock import patch
import json
from django.core.urlresolvers import reverse
from django.utils import timezone
from temba.contacts.models import ContactField
from temba.flows.models import RuleSet
from temba.tests import FlowFileTest
from temba.values.models import Value, STATE, DISTRICT, DECIMAL, TEXT, DATETIME
class ResultTest(FlowFileTest):
def assertResult(self, result, index, category, count):
    """Assert that category *index* of *result* carries the given label and count."""
    entry = result['categories'][index]
    self.assertEquals(count, entry['count'])
    self.assertEquals(category, entry['label'])
def test_field_results(self):
    """Value summaries over contact fields: text, decimal, state and datetime."""
    # NOTE(review): c4 never gets any field values set; it only contributes
    # to the 'unset' counts below.
    (c1, c2, c3, c4) = (self.create_contact("Contact1", '0788111111'),
                        self.create_contact("Contact2", '0788222222'),
                        self.create_contact("Contact3", '0788333333'),
                        self.create_contact("Contact4", '0788444444'))

    # create a gender field that uses strings
    gender = ContactField.get_or_create(self.org, 'gender', label="Gender", value_type=TEXT)

    c1.set_field('gender', "Male")
    c2.set_field('gender', "Female")
    c3.set_field('gender', "Female")

    result = Value.get_value_summary(contact_field=gender)[0]
    self.assertEquals(2, len(result['categories']))
    self.assertEquals(3, result['set'])
    self.assertEquals(2, result['unset'])  # this is two as we have the default contact created by our unit tests
    self.assertFalse(result['open_ended'])
    # categories come back sorted by count, largest first
    self.assertResult(result, 0, "Female", 2)
    self.assertResult(result, 1, "Male", 1)

    # create an born field that uses decimals
    born = ContactField.get_or_create(self.org, 'born', label="Born", value_type=DECIMAL)
    c1.set_field('born', 1977)
    c2.set_field('born', 1990)
    c3.set_field('born', 1977)

    result = Value.get_value_summary(contact_field=born)[0]
    self.assertEquals(2, len(result['categories']))
    self.assertEquals(3, result['set'])
    self.assertEquals(2, result['unset'])
    self.assertFalse(result['open_ended'])
    # decimal values are labelled by their string form
    self.assertResult(result, 0, "1977", 2)
    self.assertResult(result, 1, "1990", 1)

    # ok, state field!
    state = ContactField.get_or_create(self.org, 'state', label="State", value_type=STATE)
    c1.set_field('state', "Kigali City")
    c2.set_field('state', "Kigali City")

    result = Value.get_value_summary(contact_field=state)[0]
    self.assertEquals(1, len(result['categories']))
    self.assertEquals(2, result['set'])
    self.assertEquals(3, result['unset'])
    # state categories are labelled by boundary osm id rather than name
    # (1708283 is Kigali City, per the location assertions further down)
    self.assertResult(result, 0, "1708283", 2)

    reg_date = ContactField.get_or_create(self.org, 'reg_date', label="Registration Date", value_type=DATETIME)
    now = timezone.now()
    c1.set_field('reg_date', now.replace(hour=9))
    c2.set_field('reg_date', now.replace(hour=4))
    c3.set_field('reg_date', now - timedelta(days=1))

    result = Value.get_value_summary(contact_field=reg_date)[0]
    self.assertEquals(2, len(result['categories']))
    self.assertEquals(3, result['set'])
    self.assertEquals(2, result['unset'])
    # datetime values are bucketed by day: times truncate to midnight
    self.assertResult(result, 0, now.replace(hour=0, minute=0, second=0, microsecond=0), 2)
    self.assertResult(result, 1, (now - timedelta(days=1)).replace(hour=0, minute=0, second=0, microsecond=0), 1)

    # make sure categories returned are sorted by count, not name
    c2.set_field('gender', "Male")
    result = Value.get_value_summary(contact_field=gender)[0]
    self.assertEquals(2, len(result['categories']))
    self.assertEquals(3, result['set'])
    self.assertEquals(2, result['unset'])  # this is two as we have the default contact created by our unit tests
    self.assertFalse(result['open_ended'])
    self.assertResult(result, 0, "Male", 2)
    self.assertResult(result, 1, "Female", 1)
def run_color_gender_flow(self, contact, color, gender, age):
    """Walk *contact* through the color/gender/age flow, asserting each prompt.

    Restarts the contact's participation, so re-running replaces any
    earlier answers. Returns None.
    """
    self.assertEquals("What is your gender?", self.send_message(self.flow, color, contact=contact, restart_participants=True))
    self.assertEquals("What is your age?", self.send_message(self.flow, gender, contact=contact))
    self.assertEquals("Thanks.", self.send_message(self.flow, age, contact=contact))
def setup_color_gender_flow(self):
    """Load the color/gender/age flow and create the four test contacts."""
    self.flow = self.get_flow('color_gender_age')

    (self.c1, self.c2, self.c3, self.c4) = (self.create_contact("Contact1", '0788111111'),
                                            self.create_contact("Contact2", '0788222222'),
                                            self.create_contact("Contact3", '0788333333'),
                                            self.create_contact("Contact4", '0788444444'))
def test_category_results(self):
    """End-to-end checks of Value.get_value_summary over flow rulesets.

    Covers raw category counts, filtering (by ruleset category and by
    contact group), segmenting (by ruleset, by contact field and by
    location), re-runs replacing earlier values, and the choropleth and
    analytics views built on top of the summaries.
    """
    self.setup_color_gender_flow()

    # create a state field:
    # assign c1 and c2 to Kigali
    state = ContactField.get_or_create(self.org, 'state', label="State", value_type=STATE)
    district = ContactField.get_or_create(self.org, 'district', label="District", value_type=DISTRICT)

    self.c1.set_field('state', "Kigali City")
    self.c1.set_field('district', "Kigali")
    self.c2.set_field('state', "Kigali City")
    self.c2.set_field('district', "Kigali")

    self.run_color_gender_flow(self.c1, "red", "male", "16")
    self.run_color_gender_flow(self.c2, "blue", "female", "19")
    self.run_color_gender_flow(self.c3, "green", "male", "75")
    self.run_color_gender_flow(self.c4, "maroon", "female", "50")

    # create a group of the women
    ladies = self.create_group("Ladies", [self.c2, self.c4])

    # get our rulesets
    color = RuleSet.objects.get(flow=self.flow, label="Color")
    gender = RuleSet.objects.get(flow=self.flow, label="Gender")
    age = RuleSet.objects.get(flow=self.flow, label="Age")

    # categories should be in the same order as our rules, should have correct counts
    result = Value.get_value_summary(ruleset=color)[0]
    self.assertEquals(3, len(result['categories']))
    self.assertFalse(result['open_ended'])
    self.assertResult(result, 0, "Red", 2)  # "maroon" counted under the Red rule
    self.assertResult(result, 1, "Blue", 1)
    self.assertResult(result, 2, "Green", 1)

    # check our age category as well
    result = Value.get_value_summary(ruleset=age)[0]
    self.assertEquals(3, len(result['categories']))
    self.assertFalse(result['open_ended'])
    self.assertResult(result, 0, "Child", 1)
    self.assertResult(result, 1, "Adult", 2)
    self.assertResult(result, 2, "Senior", 1)

    # and our gender categories
    result = Value.get_value_summary(ruleset=gender)[0]
    self.assertEquals(2, len(result['categories']))
    self.assertFalse(result['open_ended'])
    self.assertResult(result, 0, "Male", 2)
    self.assertResult(result, 1, "Female", 2)

    # now filter the results and only get responses by men
    result = Value.get_value_summary(ruleset=color, filters=[dict(ruleset=gender.pk, categories=["Male"])])[0]
    self.assertResult(result, 0, "Red", 1)
    self.assertResult(result, 1, "Blue", 0)
    self.assertResult(result, 2, "Green", 1)

    # what about men that are adults?
    result = Value.get_value_summary(ruleset=color, filters=[dict(ruleset=gender.pk, categories=["Male"]),
                                                             dict(ruleset=age.pk, categories=["Adult"])])[0]
    self.assertResult(result, 0, "Red", 0)
    self.assertResult(result, 1, "Blue", 0)
    self.assertResult(result, 2, "Green", 0)

    # union of all genders
    result = Value.get_value_summary(ruleset=color, filters=[dict(ruleset=gender.pk, categories=["Male", "Female"]),
                                                             dict(ruleset=age.pk, categories=["Adult"])])[0]
    self.assertResult(result, 0, "Red", 1)
    self.assertResult(result, 1, "Blue", 1)
    self.assertResult(result, 2, "Green", 0)

    # just women adults by group
    result = Value.get_value_summary(ruleset=color, filters=[dict(groups=[ladies.pk]), dict(ruleset=age.pk, categories="Adult")])[0]
    self.assertResult(result, 0, "Red", 1)
    self.assertResult(result, 1, "Blue", 1)
    self.assertResult(result, 2, "Green", 0)

    # remove one of the women from the group
    ladies.update_contacts([self.c2], False)

    # get a new summary
    result = Value.get_value_summary(ruleset=color, filters=[dict(groups=[ladies.pk]), dict(ruleset=age.pk, categories="Adult")])[0]
    self.assertResult(result, 0, "Red", 1)
    self.assertResult(result, 1, "Blue", 0)
    self.assertResult(result, 2, "Green", 0)

    # ok, back in she goes
    ladies.update_contacts([self.c2], True)

    # do another run for contact 1 (run_color_gender_flow has no return
    # value, so there is nothing useful to keep a reference to)
    self.run_color_gender_flow(self.c1, "blue", "male", "16")

    # totals should reflect the new value, not the old
    result = Value.get_value_summary(ruleset=color)[0]
    self.assertResult(result, 0, "Red", 1)
    self.assertResult(result, 1, "Blue", 2)
    self.assertResult(result, 2, "Green", 1)

    # what if we do a partial run?
    self.send_message(self.flow, "red", contact=self.c1, restart_participants=True)

    # should change our male/female breakdown since c1 now no longer has a gender
    result = Value.get_value_summary(ruleset=gender)[0]
    self.assertEquals(2, len(result['categories']))
    self.assertResult(result, 0, "Male", 1)
    self.assertResult(result, 1, "Female", 2)

    # back to a full flow
    self.run_color_gender_flow(self.c1, "blue", "male", "16")

    # ok, now segment by gender
    result = Value.get_value_summary(ruleset=color, filters=[], segment=dict(ruleset=gender.pk, categories=["Male", "Female"]))
    male_result = result[0]
    self.assertResult(male_result, 0, "Red", 0)
    self.assertResult(male_result, 1, "Blue", 1)
    self.assertResult(male_result, 2, "Green", 1)

    female_result = result[1]
    self.assertResult(female_result, 0, "Red", 1)
    self.assertResult(female_result, 1, "Blue", 1)
    self.assertResult(female_result, 2, "Green", 0)

    # segment by gender again, but use the contact field to do so
    # (field values are matched case-insensitively: "MALE" still works)
    result = Value.get_value_summary(ruleset=color, filters=[], segment=dict(contact_field="Gender", values=["MALE", "Female"]))
    male_result = result[0]
    self.assertResult(male_result, 0, "Red", 0)
    self.assertResult(male_result, 1, "Blue", 1)
    self.assertResult(male_result, 2, "Green", 1)

    female_result = result[1]
    self.assertResult(female_result, 0, "Red", 1)
    self.assertResult(female_result, 1, "Blue", 1)
    self.assertResult(female_result, 2, "Green", 0)

    # add in a filter at the same time
    result = Value.get_value_summary(ruleset=color, filters=[dict(ruleset=color.pk, categories=["Blue"])],
                                     segment=dict(ruleset=gender.pk, categories=["Male", "Female"]))
    male_result = result[0]
    self.assertResult(male_result, 0, "Red", 0)
    self.assertResult(male_result, 1, "Blue", 1)
    self.assertResult(male_result, 2, "Green", 0)

    female_result = result[1]
    self.assertResult(female_result, 0, "Red", 0)
    self.assertResult(female_result, 1, "Blue", 1)
    self.assertResult(female_result, 2, "Green", 0)

    # ok, try segmenting by location instead
    result = Value.get_value_summary(ruleset=color, segment=dict(location="State"))
    eastern_result = result[0]
    self.assertEquals('171591', eastern_result['boundary'])
    self.assertEquals('Eastern Province', eastern_result['label'])
    self.assertResult(eastern_result, 0, "Red", 0)
    self.assertResult(eastern_result, 1, "Blue", 0)
    self.assertResult(eastern_result, 2, "Green", 0)

    kigali_result = result[1]
    self.assertEquals('1708283', kigali_result['boundary'])
    self.assertEquals('Kigali City', kigali_result['label'])
    self.assertResult(kigali_result, 0, "Red", 0)
    self.assertResult(kigali_result, 1, "Blue", 2)
    self.assertResult(kigali_result, 2, "Green", 0)

    # updating state location leads to updated data
    self.c2.set_field('state', "Eastern Province")
    result = Value.get_value_summary(ruleset=color, segment=dict(location="State"))
    eastern_result = result[0]
    self.assertEquals('171591', eastern_result['boundary'])
    self.assertEquals('Eastern Province', eastern_result['label'])
    self.assertResult(eastern_result, 0, "Red", 0)
    self.assertResult(eastern_result, 1, "Blue", 1)
    self.assertResult(eastern_result, 2, "Green", 0)

    kigali_result = result[1]
    self.assertEquals('1708283', kigali_result['boundary'])
    self.assertEquals('Kigali City', kigali_result['label'])
    self.assertResult(kigali_result, 0, "Red", 0)
    self.assertResult(kigali_result, 1, "Blue", 1)
    self.assertResult(kigali_result, 2, "Green", 0)

    # segment by district instead
    result = Value.get_value_summary(ruleset=color, segment=dict(parent="1708283", location="District"))

    # only one district in kigali
    self.assertEquals(1, len(result))
    kigali_result = result[0]
    self.assertEquals('60485579', kigali_result['boundary'])
    self.assertEquals('Kigali', kigali_result['label'])
    self.assertResult(kigali_result, 0, "Red", 0)
    self.assertResult(kigali_result, 1, "Blue", 2)
    self.assertResult(kigali_result, 2, "Green", 0)

    # do a sanity check on our choropleth view
    self.login(self.admin)
    response = self.client.get(reverse('flows.ruleset_choropleth', args=[color.pk]) +
                               "?_format=json&boundary=" + self.org.country.osm_id)

    # response should be valid json
    response = json.loads(response.content)

    # should have breaks
    self.assertTrue('breaks' in response)

    # should have two categories, Blue and Others
    self.assertEquals(2, len(response['categories']))
    self.assertEquals("Blue", response['categories'][0])
    self.assertEquals("Others", response['categories'][1])

    # assert our kigali result
    kigali_result = response['scores']['1708283']
    self.assertEquals(1, kigali_result['score'])
    self.assertEquals("Kigali City", kigali_result['name'])
    self.assertEquals("Blue", kigali_result['results'][0]['label'])
    self.assertEquals("Others", kigali_result['results'][1]['label'])
    self.assertEquals(1, kigali_result['results'][0]['count'])
    self.assertEquals(0, kigali_result['results'][1]['count'])
    self.assertEquals(100, kigali_result['results'][0]['percentage'])
    self.assertEquals(0, kigali_result['results'][1]['percentage'])

    # with no summary data at all the view must still render sanely
    with patch('temba.values.models.Value.get_value_summary') as mock:
        mock.return_value = []

        response = self.client.get(reverse('flows.ruleset_choropleth', args=[color.pk]) +
                                   "?_format=json&boundary=" + self.org.country.osm_id)

        # response should be valid json
        response = json.loads(response.content)

        # should have two categories, Blue and Others
        self.assertEquals(2, len(response['categories']))
        self.assertEquals("", response['categories'][0])
        self.assertEquals("", response['categories'][1])

        # all counts and percentage are 0
        self.assertEquals(0, response['totals']['count'])
        self.assertEquals(0, response['totals']['results'][0]['count'])
        self.assertEquals(0, response['totals']['results'][0]['percentage'])
        self.assertEquals(0, response['totals']['results'][1]['count'])
        self.assertEquals(0, response['totals']['results'][1]['percentage'])

        # and empty string labels
        self.assertEquals("", response['totals']['results'][0]['label'])
        self.assertEquals("", response['totals']['results'][1]['label'])

    # also check our analytics view
    response = self.client.get(reverse('flows.ruleset_analytics'))

    # make sure we have only one flow in it
    flows = json.loads(response.context['flows'])
    self.assertEquals(1, len(flows))
    self.assertEquals(3, len(flows[0]['rules']))
def test_open_ended_word_frequencies(self):
    """Open-ended rulesets are summarized as word frequencies, most common first."""
    flow = self.get_flow('random_word')

    def run_flow(contact, word):
        self.assertEquals("Thank you", self.send_message(flow, word, contact=contact, restart_participants=True))

    # NOTE(review): c5 reuses the display name "Contact4" — probably meant
    # "Contact5"; harmless, since only the phone number must be unique here.
    (c1, c2, c3, c4, c5) = (self.create_contact("Contact1", '0788111111'),
                            self.create_contact("Contact2", '0788222222'),
                            self.create_contact("Contact3", '0788333333'),
                            self.create_contact("Contact4", '0788444444'),
                            self.create_contact("Contact4", '0788555555'))

    run_flow(c1, "1 better place")
    run_flow(c2, "the great coffee")
    run_flow(c3, "1 cup of black tea")
    run_flow(c4, "awesome than this")
    run_flow(c5, "from an awesome place in kigali")

    random = RuleSet.objects.get(flow=flow, label="Random")
    result = Value.get_value_summary(ruleset=random)[0]

    # 9 categories remain out of the words sent: common/stop words and bare
    # numbers ("the", "of", "1", ...) are apparently filtered out — the
    # counts below only cover the content words.
    self.assertEquals(9, len(result['categories']))
    self.assertTrue(result['open_ended'])
    # ordered by frequency first, then alphabetically
    self.assertResult(result, 0, "awesome", 2)
    self.assertResult(result, 1, "place", 2)
    self.assertResult(result, 2, "better", 1)
    self.assertResult(result, 3, "black", 1)
    self.assertResult(result, 4, "coffee", 1)
    self.assertResult(result, 5, "cup", 1)
    self.assertResult(result, 6, "great", 1)
    self.assertResult(result, 7, "kigali", 1)
    self.assertResult(result, 8, "tea", 1)
| agpl-3.0 |
Meriipu/quodlibet | quodlibet/util/logging.py | 2 | 2096 | # Copyright 2014 Christoph Reiter
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
from __future__ import absolute_import
import collections
import threading
class Logs:
    """Thread safe log store.

    Holds at most ``max_log_size`` entries; once full, the oldest entry is
    silently discarded when a new one is logged (FIFO eviction via
    ``deque(maxlen=...)``).
    """

    MAX_LOG_SIZE_DEFAULT = 500

    def __init__(self, max_log_size=MAX_LOG_SIZE_DEFAULT):
        self._iter_lock = threading.Lock()
        self._log = collections.deque(maxlen=max_log_size)

    def _save_iter(self):
        """Yield all entries oldest-first without losing any of them.

        Only pop/append/len on a deque are thread safe, so iteration is
        implemented in terms of them: each entry is popped, yielded and
        pushed back once iteration finishes, all under the lock.
        """
        with self._iter_lock:
            temp = collections.deque()
            for i in range(len(self._log)):
                item = self._log.popleft()
                yield item
                temp.append(item)
            while temp:
                self._log.appendleft(temp.pop())

    def log(self, string, category=None):
        """Log str/unicode under an optional category.

        Thread safe.
        """
        self._log.append((category, string))

    def clear(self):
        """Remove all entries.

        Thread safe.
        """
        with self._iter_lock:
            for i in range(len(self._log)):
                self._log.popleft()

    def get_content(self, category=None, limit=None):
        """Get a list of unicode strings for the specified category.

        Oldest entry first. Passing no category will return all content.
        If `limit` is specified, the last `limit` items will be returned.

        Raises ValueError if `limit` is not a positive number.

        Thread safe.
        """
        # Validate eagerly and explicitly: the previous `assert limit > 0`
        # was silently stripped when Python runs with -O.
        if limit is not None and limit <= 0:
            raise ValueError("limit must be a positive number")

        content = []
        for cat, string in self._save_iter():
            if category is None or category == cat:
                if isinstance(string, bytes):
                    # Stored bytes are decoded leniently so a bad log entry
                    # can never make retrieval fail.
                    string = string.decode("utf-8", "replace")
                content.append(string)

        if limit is not None:
            return content[-limit:]
        return content
# Module-level singleton; `log` and `get_content` are the module's
# convenience entry points bound to it.
_logs = Logs()
log = _logs.log
get_content = _logs.get_content
| gpl-2.0 |
madrang/pyTSon-AudioBot | requests/packages/chardet/sbcsgroupprober.py | 273 | 3546 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .charsetgroupprober import CharSetGroupProber
from .sbcharsetprober import SingleByteCharSetProber
from .langcyrillicmodel import (Win1251CyrillicModel, Koi8rModel,
Latin5CyrillicModel, MacCyrillicModel,
Ibm866Model, Ibm855Model)
from .langgreekmodel import Latin7GreekModel, Win1253GreekModel
from .langbulgarianmodel import Latin5BulgarianModel, Win1251BulgarianModel
# from .langhungarianmodel import Latin2HungarianModel, Win1250HungarianModel
from .langthaimodel import TIS620ThaiModel
from .langhebrewmodel import Win1255HebrewModel
from .hebrewprober import HebrewProber
from .langturkishmodel import Latin5TurkishModel
class SBCSGroupProber(CharSetGroupProber):
    """Group prober that runs all single-byte charset models side by side.

    CharSetGroupProber feeds the input to every child prober; the
    best-confidence child wins.
    """

    def __init__(self):
        super(SBCSGroupProber, self).__init__()
        # One prober per (language, encoding) model: Cyrillic, Greek,
        # Bulgarian, Thai and Turkish are plain single-byte model probers.
        self.probers = [
            SingleByteCharSetProber(Win1251CyrillicModel),
            SingleByteCharSetProber(Koi8rModel),
            SingleByteCharSetProber(Latin5CyrillicModel),
            SingleByteCharSetProber(MacCyrillicModel),
            SingleByteCharSetProber(Ibm866Model),
            SingleByteCharSetProber(Ibm855Model),
            SingleByteCharSetProber(Latin7GreekModel),
            SingleByteCharSetProber(Win1253GreekModel),
            SingleByteCharSetProber(Latin5BulgarianModel),
            SingleByteCharSetProber(Win1251BulgarianModel),
            # TODO: Restore Hungarian encodings (iso-8859-2 and windows-1250)
            # after we retrain model.
            # SingleByteCharSetProber(Latin2HungarianModel),
            # SingleByteCharSetProber(Win1250HungarianModel),
            SingleByteCharSetProber(TIS620ThaiModel),
            SingleByteCharSetProber(Latin5TurkishModel),
        ]
        # Hebrew is wired up specially: a logical-order (False) and a
        # visual-order (True) windows-1255 prober share one HebrewProber
        # that decides between the two readings.
        hebrew_prober = HebrewProber()
        logical_hebrew_prober = SingleByteCharSetProber(Win1255HebrewModel,
                                                        False, hebrew_prober)
        visual_hebrew_prober = SingleByteCharSetProber(Win1255HebrewModel, True,
                                                       hebrew_prober)
        hebrew_prober.set_model_probers(logical_hebrew_prober, visual_hebrew_prober)
        self.probers.extend([hebrew_prober, logical_hebrew_prober,
                             visual_hebrew_prober])

        self.reset()
| gpl-3.0 |
israeltobias/DownMedia | youtube-dl/youtube_dl/extractor/litv.py | 64 | 6254 | # coding: utf-8
from __future__ import unicode_literals
import json
from .common import InfoExtractor
from ..utils import (
ExtractorError,
int_or_none,
smuggle_url,
unsmuggle_url,
)
class LiTVIE(InfoExtractor):
    # Matches both VOD pages (.../vod/<category>/content.do?id=...) and
    # promo pages (.../promo/<name>/?content_id=...).
    _VALID_URL = r'https?://(?:www\.)?litv\.tv/(?:vod|promo)/[^/]+/(?:content\.do)?\?.*?\b(?:content_)?id=(?P<id>[^&]+)'

    _URL_TEMPLATE = 'https://www.litv.tv/vod/%s/content.do?id=%s'

    _TESTS = [{
        # NOTE(review): this playlist test uses the same URL as the single
        # video test below, yet expects a different id (VOD00041606) —
        # verify the URL is the intended one.
        'url': 'https://www.litv.tv/vod/drama/content.do?brc_id=root&id=VOD00041610&isUHEnabled=true&autoPlay=1',
        'info_dict': {
            'id': 'VOD00041606',
            'title': '花千骨',
        },
        'playlist_count': 50,
    }, {
        'url': 'https://www.litv.tv/vod/drama/content.do?brc_id=root&id=VOD00041610&isUHEnabled=true&autoPlay=1',
        'md5': '969e343d9244778cb29acec608e53640',
        'info_dict': {
            'id': 'VOD00041610',
            'ext': 'mp4',
            'title': '花千骨第1集',
            'thumbnail': r're:https?://.*\.jpg$',
            'description': 'md5:c7017aa144c87467c4fb2909c4b05d6f',
            'episode_number': 1,
        },
        'params': {
            'noplaylist': True,
        },
        'skip': 'Georestricted to Taiwan',
    }, {
        'url': 'https://www.litv.tv/promo/miyuezhuan/?content_id=VOD00044841&',
        'md5': '88322ea132f848d6e3e18b32a832b918',
        'info_dict': {
            'id': 'VOD00044841',
            'ext': 'mp4',
            'title': '芈月傳第1集 霸星芈月降世楚國',
            'description': '楚威王二年,太史令唐昧夜觀星象,發現霸星即將現世。王后得知霸星的預言後,想盡辦法不讓孩子順利出生,幸得莒姬相護化解危機。沒想到眾人期待下出生的霸星卻是位公主,楚威王對此失望至極。楚王后命人將女嬰丟棄河中,居然奇蹟似的被少司命像攔下,楚威王認為此女非同凡響,為她取名芈月。',
        },
        'skip': 'Georestricted to Taiwan',
    }]

    def _extract_playlist(self, season_list, video_id, program_info, prompt=True):
        """Return a playlist result covering every episode in *season_list*."""
        episode_title = program_info['title']
        content_id = season_list['contentId']

        if prompt:
            self.to_screen('Downloading playlist %s - add --no-playlist to just download video %s' % (content_id, video_id))

        # Episode URLs are smuggled with force_noplaylist so extracting an
        # individual episode never recurses back into playlist extraction.
        all_episodes = [
            self.url_result(smuggle_url(
                self._URL_TEMPLATE % (program_info['contentType'], episode['contentId']),
                {'force_noplaylist': True}))  # To prevent infinite recursion
            for episode in season_list['episode']]

        return self.playlist_result(all_episodes, content_id, episode_title)

    def _real_extract(self, url):
        """Extract a single video, or hand off to playlist extraction.

        Flow: read `programInfo` embedded in the page; if it has a season
        list and playlists are allowed, build a playlist. Otherwise obtain
        the HLS playlist URL (embedded `uiHlsUrl` data, or the getMainUrl
        endpoint as fallback) and assemble the video result.
        """
        url, data = unsmuggle_url(url, {})

        video_id = self._match_id(url)

        noplaylist = self._downloader.params.get('noplaylist')
        noplaylist_prompt = True
        if 'force_noplaylist' in data:
            # Smuggled by _extract_playlist; overrides the user setting and
            # suppresses the informational prompt.
            noplaylist = data['force_noplaylist']
            noplaylist_prompt = False

        webpage = self._download_webpage(url, video_id)

        program_info = self._parse_json(self._search_regex(
            r'var\s+programInfo\s*=\s*([^;]+)', webpage, 'VOD data', default='{}'),
            video_id)

        season_list = list(program_info.get('seasonList', {}).values())
        if season_list:
            if not noplaylist:
                return self._extract_playlist(
                    season_list[0], video_id, program_info,
                    prompt=noplaylist_prompt)

            if noplaylist_prompt:
                self.to_screen('Downloading just video %s because of --no-playlist' % video_id)

        # In browsers `getMainUrl` request is always issued. Usually this
        # endpoint gives the same result as the data embedded in the webpage.
        # If georestricted, there are no embedded data, so an extra request is
        # necessary to get the error code
        if 'assetId' not in program_info:
            program_info = self._download_json(
                'https://www.litv.tv/vod/ajax/getProgramInfo', video_id,
                query={'contentId': video_id},
                headers={'Accept': 'application/json'})
        video_data = self._parse_json(self._search_regex(
            r'uiHlsUrl\s*=\s*testBackendData\(([^;]+)\);',
            webpage, 'video data', default='{}'), video_id)
        if not video_data:
            payload = {
                'assetId': program_info['assetId'],
                'watchDevices': program_info['watchDevices'],
                'contentType': program_info['contentType'],
            }
            video_data = self._download_json(
                'https://www.litv.tv/vod/getMainUrl', video_id,
                data=json.dumps(payload).encode('utf-8'),
                headers={'Content-Type': 'application/json'})

        if not video_data.get('fullpath'):
            # No playable URL: surface the service's error message, with a
            # dedicated geo-restriction message for the common case.
            error_msg = video_data.get('errorMessage')
            if error_msg == 'vod.error.outsideregionerror':
                self.raise_geo_restricted('This video is available in Taiwan only')
            if error_msg:
                raise ExtractorError('%s said: %s' % (self.IE_NAME, error_msg), expected=True)
            raise ExtractorError('Unexpected result from %s' % self.IE_NAME)

        formats = self._extract_m3u8_formats(
            video_data['fullpath'], video_id, ext='mp4',
            entry_protocol='m3u8_native', m3u8_id='hls')
        for a_format in formats:
            # LiTV HLS segments doesn't like compressions
            a_format.setdefault('http_headers', {})['Youtubedl-no-compression'] = True

        title = program_info['title'] + program_info.get('secondaryMark', '')
        description = program_info.get('description')
        thumbnail = program_info.get('imageFile')
        categories = [item['name'] for item in program_info.get('category', [])]
        episode = int_or_none(program_info.get('episode'))

        return {
            'id': video_id,
            'formats': formats,
            'title': title,
            'description': description,
            'thumbnail': thumbnail,
            'categories': categories,
            'episode_number': episode,
        }
| gpl-3.0 |
kmike/scikit-learn | sklearn/linear_model/logistic.py | 2 | 5309 | import numpy as np
from .base import LinearClassifierMixin, SparseCoefMixin
from ..feature_selection.selector_mixin import SelectorMixin
from ..svm.base import BaseLibLinear
class LogisticRegression(BaseLibLinear, LinearClassifierMixin, SelectorMixin,
                         SparseCoefMixin):
    """Logistic Regression (aka logit, MaxEnt) classifier.

    In the multiclass case, the training algorithm uses a one-vs.-all (OvA)
    scheme, rather than the "true" multinomial LR.

    This class implements L1 and L2 regularized logistic regression using the
    `liblinear` library. It can handle both dense and sparse input. Use
    C-ordered arrays or CSR matrices containing 64-bit floats for optimal
    performance; any other input format will be converted (and copied).

    Parameters
    ----------
    penalty : string, 'l1' or 'l2'
        Used to specify the norm used in the penalization.

    dual : boolean
        Dual or primal formulation. Dual formulation is only
        implemented for l2 penalty. Prefer dual=False when
        n_samples > n_features.

    C : float, optional (default=1.0)
        Inverse of regularization strength; must be a positive float.
        Like in support vector machines, smaller values specify stronger
        regularization.

    fit_intercept : bool, default: True
        Specifies if a constant (a.k.a. bias or intercept) should be
        added the decision function.

    intercept_scaling : float, default: 1
        when self.fit_intercept is True, instance vector x becomes
        [x, self.intercept_scaling],
        i.e. a "synthetic" feature with constant value equals to
        intercept_scaling is appended to the instance vector.
        The intercept becomes intercept_scaling * synthetic feature weight
        Note! the synthetic feature weight is subject to l1/l2 regularization
        as all other features.
        To lessen the effect of regularization on synthetic feature weight
        (and therefore on the intercept) intercept_scaling has to be increased

    class_weight : {dict, 'auto'}, optional
        Set the parameter C of class i to class_weight[i]*C for
        SVC. If not given, all classes are supposed to have
        weight one. The 'auto' mode uses the values of y to
        automatically adjust weights inversely proportional to
        class frequencies.

    tol : float, optional
        Tolerance for stopping criteria.

    random_state : int seed, RandomState instance, or None (default)
        The seed of the pseudo random number generator to use when
        shuffling the data.

    Attributes
    ----------
    `coef_` : array, shape = [n_classes-1, n_features]
        Coefficient of the features in the decision function.

        `coef_` is readonly property derived from `raw_coef_` that \
        follows the internal memory layout of liblinear.

    `intercept_` : array, shape = [n_classes-1]
        Intercept (a.k.a. bias) added to the decision function.
        It is available only when parameter intercept is set to True.

    See also
    --------
    LinearSVC

    Notes
    -----
    The underlying C implementation uses a random number generator to
    select features when fitting the model. It is thus not uncommon,
    to have slightly different results for the same input data. If
    that happens, try with a smaller tol parameter.

    References:

    LIBLINEAR -- A Library for Large Linear Classification
    http://www.csie.ntu.edu.tw/~cjlin/liblinear/

    Hsiang-Fu Yu, Fang-Lan Huang, Chih-Jen Lin (2011). Dual coordinate descent
    methods for logistic regression and maximum entropy models.
    Machine Learning 85(1-2):41-75.
    http://www.csie.ntu.edu.tw/~cjlin/papers/maxent_dual.pdf
    """
def __init__(self, penalty='l2', dual=False, tol=1e-4, C=1.0,
fit_intercept=True, intercept_scaling=1, class_weight=None,
random_state=None):
super(LogisticRegression, self).__init__(
penalty=penalty, dual=dual, loss='lr', tol=tol, C=C,
fit_intercept=fit_intercept, intercept_scaling=intercept_scaling,
class_weight=class_weight, random_state=None)
def predict_proba(self, X):
"""Probability estimates.
The returned estimates for all classes are ordered by the
label of classes.
Parameters
----------
X : array-like, shape = [n_samples, n_features]
Returns
-------
T : array-like, shape = [n_samples, n_classes]
Returns the probability of the sample for each class in the model,
where classes are ordered as they are in ``self.classes_``.
"""
return self._predict_proba_lr(X)
def predict_log_proba(self, X):
"""Log of probability estimates.
The returned estimates for all classes are ordered by the
label of classes.
Parameters
----------
X : array-like, shape = [n_samples, n_features]
Returns
-------
T : array-like, shape = [n_samples, n_classes]
Returns the log-probability of the sample for each class in the
model, where classes are ordered as they are in ``self.classes_``.
"""
return np.log(self.predict_proba(X))
| bsd-3-clause |
sgraham/nope | third_party/cython/src/Cython/Compiler/Pipeline.py | 90 | 13171 | import itertools
from time import time
import Errors
import DebugFlags
import Options
from Visitor import CythonTransform
from Errors import CompileError, InternalError, AbortError
import Naming
#
# Really small pipeline stages
#
def dumptree(t):
    # For quick debugging in pipelines
    # Identity stage: prints the parse tree's dump and returns the tree
    # unchanged, so it can be spliced anywhere into a pipeline.
    print t.dump()
    return t
def abort_on_errors(node):
    """Pass *node* through unchanged, aborting once any errors were reported.

    Acts as a pipeline checkpoint: raises AbortError when the global error
    counter is non-zero so later stages never see a broken tree.
    """
    if Errors.num_errors:
        raise AbortError("pipeline break")
    return node
def parse_stage_factory(context):
    """Create the initial pipeline stage that parses a .pyx/.py source.

    The returned callable maps a compilation source to a parse tree
    annotated with its module scope and compilation source.
    """
    def parse(compsrc):
        source_desc = compsrc.source_desc
        full_module_name = compsrc.full_module_name
        initial_pos = (source_desc, 1, 0)
        # NOTE(review): cimport_from_pyx is saved and forced off around
        # find_module, then restored — presumably so locating the module
        # scope cannot itself trigger a cimport-from-pyx; confirm intent.
        saved_cimport_from_pyx, Options.cimport_from_pyx = Options.cimport_from_pyx, False
        scope = context.find_module(full_module_name, pos = initial_pos, need_pxd = 0,
                                    check_module_name = not Options.embed)
        Options.cimport_from_pyx = saved_cimport_from_pyx
        tree = context.parse(source_desc, scope, pxd = 0, full_module_name = full_module_name)
        # Annotate the tree for downstream stages.
        tree.compilation_source = compsrc
        tree.scope = scope
        tree.is_pxd = False
        return tree
    return parse
def parse_pxd_stage_factory(context, scope, module_name):
    """Create the pipeline stage that parses a .pxd source into a tree.

    The returned callable parses *source_desc* in the given scope and marks
    the resulting tree as pxd content.
    """
    def parse(source_desc):
        pxd_tree = context.parse(source_desc, scope, pxd=True,
                                 full_module_name=module_name)
        pxd_tree.scope = scope
        pxd_tree.is_pxd = True
        return pxd_tree
    return parse
def generate_pyx_code_stage_factory(options, result):
    """Create the terminal pipeline stage that emits code for the module.

    The returned stage asks *node* to write its implementation into
    *result*, forwards the original compilation source, and yields the
    result object as the pipeline's final value.
    """
    def generate_pyx_code_stage(node):
        node.process_implementation(options, result)
        result.compilation_source = node.compilation_source
        return result
    return generate_pyx_code_stage
def inject_pxd_code_stage_factory(context):
    """Create the stage that merges every parsed .pxd into the module tree.

    For each (statlistnode, scope) pair collected in ``context.pxds`` the
    pxd declarations are merged into *module_node*.
    """
    def inject_pxd_code_stage(module_node):
        # Cleanup: dropped a dead `from textwrap import dedent`, an unused
        # `stats` local, and the unused module-name loop variable.
        for statlistnode, scope in context.pxds.itervalues():
            module_node.merge_in(statlistnode, scope)
        return module_node
    return inject_pxd_code_stage
def use_utility_code_definitions(scope, target, seen=None):
    """Recursively register used utility-code definitions on *target*.

    Walks the entries of *scope* (and, via ``as_module``, nested module
    scopes); for every used entry carrying a utility-code definition, the
    definition and its requirements are registered on *target*. The *seen*
    set guards against revisiting entries across recursive calls.
    """
    if seen is None:
        seen = set()
    for entry in scope.entries.itervalues():
        if entry in seen:
            continue
        seen.add(entry)
        definition = entry.utility_code_definition
        if entry.used and definition:
            target.use_utility_code(definition)
            for dependency in definition.requires:
                target.use_utility_code(dependency)
        elif entry.as_module:
            use_utility_code_definitions(entry.as_module, target, seen)
def inject_utility_code_stage_factory(context):
    """Create the stage that merges all required utility code into the module."""
    def inject_utility_code_stage(module_node):
        # First register any utility code definitions referenced through the
        # cython scope onto the module scope.
        use_utility_code_definitions(context.cython_scope, module_node.scope)
        added = []
        # Note: the list might be extended inside the loop (if some utility code
        # pulls in other utility code, explicitly or implicitly)
        for utilcode in module_node.scope.utility_code_list:
            if utilcode in added: continue
            added.append(utilcode)
            if utilcode.requires:
                for dep in utilcode.requires:
                    # Append unseen dependencies so the outer loop picks
                    # them up in a later iteration.
                    if not dep in added and not dep in module_node.scope.utility_code_list:
                        module_node.scope.utility_code_list.append(dep)
            tree = utilcode.get_tree()
            if tree:
                module_node.merge_in(tree.body, tree.scope, merge_scope=True)
        return module_node
    return inject_utility_code_stage
class UseUtilityCodeDefinitions(CythonTransform):
    """Register utility code attached to visited name/attribute entries.

    Temporary hack to use any utility code in nodes' "utility_code_definitions".
    This should be moved to the code generation phase of the relevant nodes
    once it is safe to generate CythonUtilityCode at code generation time.
    """

    def __call__(self, node):
        self.scope = node.scope
        return super(UseUtilityCodeDefinitions, self).__call__(node)

    def process_entry(self, entry):
        # Register both the attached utility code and its definition, if any.
        if not entry:
            return
        for code in (entry.utility_code, entry.utility_code_definition):
            if code:
                self.scope.use_utility_code(code)

    def visit_AttributeNode(self, node):
        self.process_entry(node.entry)
        return node

    def visit_NameNode(self, node):
        self.process_entry(node.entry)
        self.process_entry(node.type_entry)
        return node
#
# Pipeline factories
#
def create_pipeline(context, mode, exclude_classes=()):
    """Build the ordered list of transform stages shared by all pipelines.

    *mode* selects pyx/py/pxd-specific stages; *exclude_classes* removes
    stages by class. Stage order is load-bearing — do not reorder. None
    entries (unused mode-specific slots) are skipped by run_pipeline().
    """
    assert mode in ('pyx', 'py', 'pxd')
    from Visitor import PrintTree
    from ParseTreeTransforms import WithTransform, NormalizeTree, PostParse, PxdPostParse
    from ParseTreeTransforms import ForwardDeclareTypes, AnalyseDeclarationsTransform
    from ParseTreeTransforms import AnalyseExpressionsTransform, FindInvalidUseOfFusedTypes
    from ParseTreeTransforms import CreateClosureClasses, MarkClosureVisitor, DecoratorTransform
    from ParseTreeTransforms import InterpretCompilerDirectives, TransformBuiltinMethods
    from ParseTreeTransforms import ExpandInplaceOperators, ParallelRangeTransform
    from ParseTreeTransforms import CalculateQualifiedNamesTransform
    from TypeInference import MarkParallelAssignments, MarkOverflowingArithmetic
    from ParseTreeTransforms import AdjustDefByDirectives, AlignFunctionDefinitions
    from ParseTreeTransforms import RemoveUnreachableCode, GilCheck
    from FlowControl import ControlFlowAnalysis
    from AnalysedTreeTransforms import AutoTestDictTransform
    from AutoDocTransforms import EmbedSignature
    from Optimize import FlattenInListTransform, SwitchTransform, IterationTransform
    from Optimize import EarlyReplaceBuiltinCalls, OptimizeBuiltinCalls
    from Optimize import InlineDefNodeCalls
    from Optimize import ConstantFolding, FinalOptimizePhase
    from Optimize import DropRefcountingTransform
    from Optimize import ConsolidateOverflowCheck
    from Buffer import IntroduceBufferAuxiliaryVars
    from ModuleNode import check_c_declarations, check_c_declarations_pxd
    # Mode-specific stage selection.
    if mode == 'pxd':
        _check_c_declarations = check_c_declarations_pxd
        _specific_post_parse = PxdPostParse(context)
    else:
        _check_c_declarations = check_c_declarations
        _specific_post_parse = None
    if mode == 'py':
        _align_function_definitions = AlignFunctionDefinitions(context)
    else:
        _align_function_definitions = None
    # NOTE: This is the "common" parts of the pipeline, which is also
    # code in pxd files. So it will be run multiple times in a
    # compilation stage.
    stages = [
        NormalizeTree(context),
        PostParse(context),
        _specific_post_parse,
        InterpretCompilerDirectives(context, context.compiler_directives),
        ParallelRangeTransform(context),
        AdjustDefByDirectives(context),
        MarkClosureVisitor(context),
        _align_function_definitions,
        RemoveUnreachableCode(context),
        ConstantFolding(),
        FlattenInListTransform(),
        WithTransform(context),
        DecoratorTransform(context),
        ForwardDeclareTypes(context),
        AnalyseDeclarationsTransform(context),
        AutoTestDictTransform(context),
        EmbedSignature(context),
        EarlyReplaceBuiltinCalls(context),  ## Necessary?
        TransformBuiltinMethods(context),  ## Necessary?
        MarkParallelAssignments(context),
        ControlFlowAnalysis(context),
        RemoveUnreachableCode(context),
        # MarkParallelAssignments(context),
        MarkOverflowingArithmetic(context),
        IntroduceBufferAuxiliaryVars(context),
        _check_c_declarations,
        InlineDefNodeCalls(context),
        AnalyseExpressionsTransform(context),
        FindInvalidUseOfFusedTypes(context),
        ExpandInplaceOperators(context),
        OptimizeBuiltinCalls(context),  ## Necessary?
        CreateClosureClasses(context),  ## After all lookups and type inference
        CalculateQualifiedNamesTransform(context),
        ConsolidateOverflowCheck(context),
        IterationTransform(context),
        SwitchTransform(),
        DropRefcountingTransform(),
        FinalOptimizePhase(context),
        GilCheck(),
        UseUtilityCodeDefinitions(context),
        ]
    # Honour the caller's exclusion list (used e.g. by the pyx-as-pxd
    # pipeline to skip transforms that do not apply).
    filtered_stages = []
    for s in stages:
        if s.__class__ not in exclude_classes:
            filtered_stages.append(s)
    return filtered_stages
def create_pyx_pipeline(context, options, result, py=False, exclude_classes=()):
    """Assemble the full .pyx (or .py when py=True) compilation pipeline.

    Prepends the parse stage to the common pipeline, then appends optional
    test/debug stages and the final code-generation stage.
    """
    mode = 'py' if py else 'pyx'
    test_support = []
    if options.evaluate_tree_assertions:
        from Cython.TestUtils import TreeAssertVisitor
        test_support.append(TreeAssertVisitor())
    if options.gdb_debug:
        from Cython.Debugger import DebugWriter # requires Py2.5+
        from ParseTreeTransforms import DebugTransform
        context.gdb_debug_outputwriter = DebugWriter.CythonDebugWriter(
            options.output_dir)
        debug_transform = [DebugTransform(context, options, result)]
    else:
        debug_transform = []
    stages = [parse_stage_factory(context)]
    stages += create_pipeline(context, mode, exclude_classes=exclude_classes)
    stages += test_support
    stages += [inject_pxd_code_stage_factory(context),
               inject_utility_code_stage_factory(context),
               abort_on_errors]
    stages += debug_transform
    stages.append(generate_pyx_code_stage_factory(options, result))
    return stages
def create_pxd_pipeline(context, scope, module_name):
    """Assemble the .pxd compilation pipeline.

    The pxd pipeline ends up with a CCodeWriter containing the code of the
    pxd, as well as a pxd scope.
    """
    from CodeGeneration import ExtractPxdCode
    pipeline = [parse_pxd_stage_factory(context, scope, module_name)]
    pipeline.extend(create_pipeline(context, 'pxd'))
    pipeline.append(ExtractPxdCode())
    return pipeline
def create_py_pipeline(context, options, result):
    # Pure-Python (.py) compilation reuses the pyx pipeline in 'py' mode.
    return create_pyx_pipeline(context, options, result, py=True)
def create_pyx_as_pxd_pipeline(context, result):
    """Build a pipeline that treats a .pyx file as if it were a .pxd.

    Reuses the pyx pipeline up to (and including) declaration analysis,
    excluding transforms that do not apply, then replaces the module body
    with an empty statement list while keeping its scope.
    """
    from ParseTreeTransforms import AlignFunctionDefinitions, \
        MarkClosureVisitor, WithTransform, AnalyseDeclarationsTransform
    from Optimize import ConstantFolding, FlattenInListTransform
    from Nodes import StatListNode
    pipeline = []
    pyx_pipeline = create_pyx_pipeline(context, context.options, result,
                                       exclude_classes=[
                                           AlignFunctionDefinitions,
                                           MarkClosureVisitor,
                                           ConstantFolding,
                                           FlattenInListTransform,
                                           WithTransform
                                       ])
    for stage in pyx_pipeline:
        pipeline.append(stage)
        if isinstance(stage, AnalyseDeclarationsTransform):
            # This is the last stage we need.
            break
    def fake_pxd(root):
        # Mark all non-cinclude entries as pxd-defined and return an empty
        # body together with the analysed scope.
        for entry in root.scope.entries.values():
            if not entry.in_cinclude:
                entry.defined_in_pxd = 1
            if entry.name == entry.cname and entry.visibility != 'extern':
                # Always mangle non-extern cimported entries.
                entry.cname = entry.scope.mangle(Naming.func_prefix, entry.name)
        return StatListNode(root.pos, stats=[]), root.scope
    pipeline.append(fake_pxd)
    return pipeline
def insert_into_pipeline(pipeline, transform, before=None, after=None):
    """
    Insert a new transform into the pipeline after or before an instance of
    the given class. e.g.

        pipeline = insert_into_pipeline(pipeline, transform,
                                        after=AnalyseDeclarationsTransform)

    Exactly one of *before*/*after* must be given; a new list is returned,
    the input pipeline is left untouched.
    """
    assert before or after
    if before is not None:
        anchor = before
    else:
        anchor = after
    for index, stage in enumerate(pipeline):
        if isinstance(stage, anchor):
            break
    if after:
        index += 1
    return pipeline[:index] + [transform] + pipeline[index:]
#
# Running a pipeline
#
def run_pipeline(pipeline, source, printtree=True):
    """Feed *source* through each pipeline phase in order.

    Returns an ``(error, data)`` pair: *error* is None on success, otherwise
    the CompileError/InternalError/AbortError that stopped the run; *data*
    is the last successfully produced stage output.
    """
    from Cython.Compiler.Visitor import PrintTree
    error = None
    data = source
    try:
        try:
            for phase in pipeline:
                # None entries are unused mode-specific slots; skip them.
                if phase is not None:
                    if DebugFlags.debug_verbose_pipeline:
                        t = time()
                        print "Entering pipeline phase %r" % phase
                    if not printtree and isinstance(phase, PrintTree):
                        continue
                    data = phase(data)
                    if DebugFlags.debug_verbose_pipeline:
                        print " %.3f seconds" % (time() - t)
        except CompileError, err:
            # err is set
            Errors.report_error(err)
            error = err
    except InternalError, err:
        # Only raise if there was not an earlier error
        if Errors.num_errors == 0:
            raise
        error = err
    except AbortError, err:
        error = err
    return (error, data)
| bsd-3-clause |
klmitch/python-keystoneclient | keystoneclient/tests/unit/v3/test_users.py | 2 | 11616 | # Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import uuid
from keystoneclient import exceptions
from keystoneclient.tests.unit.v3 import utils
from keystoneclient.v3 import users
class UserTests(utils.ClientTestCase, utils.CrudTests):
    """CRUD, group-membership and password tests for the v3 users manager."""

    def setUp(self):
        super(UserTests, self).setUp()
        self.key = 'user'
        self.collection_key = 'users'
        self.model = users.User
        self.manager = self.client.users

    def new_ref(self, **kwargs):
        """Build a user reference dict with randomized default fields."""
        kwargs = super(UserTests, self).new_ref(**kwargs)
        kwargs.setdefault('description', uuid.uuid4().hex)
        kwargs.setdefault('domain_id', uuid.uuid4().hex)
        kwargs.setdefault('enabled', True)
        kwargs.setdefault('name', uuid.uuid4().hex)
        kwargs.setdefault('default_project_id', uuid.uuid4().hex)
        return kwargs

    def test_add_user_to_group(self):
        group_id = uuid.uuid4().hex
        ref = self.new_ref()
        self.stub_url('PUT',
                      ['groups', group_id, self.collection_key, ref['id']],
                      status_code=204)
        self.manager.add_to_group(user=ref['id'], group=group_id)
        # BUG FIX: this assertion previously targeted remove_from_group
        # (copy-paste from test_remove_user_from_group), so add_to_group's
        # validation of a missing group was never exercised.
        self.assertRaises(exceptions.ValidationError,
                          self.manager.add_to_group,
                          user=ref['id'],
                          group=None)

    def test_list_users_in_group(self):
        group_id = uuid.uuid4().hex
        ref_list = [self.new_ref(), self.new_ref()]
        self.stub_entity('GET',
                         ['groups', group_id, self.collection_key],
                         entity=ref_list)
        returned_list = self.manager.list(group=group_id)
        self.assertEqual(len(ref_list), len(returned_list))
        [self.assertIsInstance(r, self.model) for r in returned_list]

    def test_check_user_in_group(self):
        group_id = uuid.uuid4().hex
        ref = self.new_ref()
        self.stub_url('HEAD',
                      ['groups', group_id, self.collection_key, ref['id']],
                      status_code=204)
        self.manager.check_in_group(user=ref['id'], group=group_id)
        self.assertRaises(exceptions.ValidationError,
                          self.manager.check_in_group,
                          user=ref['id'],
                          group=None)

    def test_remove_user_from_group(self):
        group_id = uuid.uuid4().hex
        ref = self.new_ref()
        self.stub_url('DELETE',
                      ['groups', group_id, self.collection_key, ref['id']],
                      status_code=204)
        self.manager.remove_from_group(user=ref['id'], group=group_id)
        self.assertRaises(exceptions.ValidationError,
                          self.manager.remove_from_group,
                          user=ref['id'],
                          group=None)

    def test_create_doesnt_log_password(self):
        # Passwords are sensitive and must never end up in the debug log.
        password = uuid.uuid4().hex
        ref = self.new_ref()
        self.stub_entity('POST', [self.collection_key],
                         status_code=201, entity=ref)
        req_ref = ref.copy()
        req_ref.pop('id')
        param_ref = req_ref.copy()
        param_ref['password'] = password
        params = utils.parameterize(param_ref)
        self.manager.create(**params)
        self.assertNotIn(password, self.logger.output)

    def test_create_with_project(self):
        # Can create a user with the deprecated project option rather than
        # default_project_id.
        self.deprecations.expect_deprecations()
        ref = self.new_ref()
        self.stub_entity('POST', [self.collection_key],
                         status_code=201, entity=ref)
        req_ref = ref.copy()
        req_ref.pop('id')
        param_ref = req_ref.copy()
        # Use deprecated project_id rather than new default_project_id.
        param_ref['project_id'] = param_ref.pop('default_project_id')
        params = utils.parameterize(param_ref)
        returned = self.manager.create(**params)
        self.assertIsInstance(returned, self.model)
        for attr in ref:
            self.assertEqual(
                getattr(returned, attr),
                ref[attr],
                'Expected different %s' % attr)
        self.assertEntityRequestBodyIs(req_ref)

    def test_create_with_project_and_default_project(self):
        # Can create a user with the deprecated project and default_project_id.
        # The backend call should only pass the default_project_id.
        self.deprecations.expect_deprecations()
        ref = self.new_ref()
        self.stub_entity('POST',
                         [self.collection_key],
                         status_code=201, entity=ref)
        req_ref = ref.copy()
        req_ref.pop('id')
        param_ref = req_ref.copy()
        # Add the deprecated project_id in the call, the value will be ignored.
        param_ref['project_id'] = 'project'
        params = utils.parameterize(param_ref)
        returned = self.manager.create(**params)
        self.assertIsInstance(returned, self.model)
        for attr in ref:
            self.assertEqual(
                getattr(returned, attr),
                ref[attr],
                'Expected different %s' % attr)
        self.assertEntityRequestBodyIs(req_ref)

    def test_update_doesnt_log_password(self):
        # Same secrecy guarantee as on create, but for PATCH updates.
        password = uuid.uuid4().hex
        ref = self.new_ref()
        req_ref = ref.copy()
        req_ref.pop('id')
        param_ref = req_ref.copy()
        self.stub_entity('PATCH',
                         [self.collection_key, ref['id']],
                         status_code=200, entity=ref)
        param_ref['password'] = password
        params = utils.parameterize(param_ref)
        self.manager.update(ref['id'], **params)
        self.assertNotIn(password, self.logger.output)

    def test_update_with_project(self):
        # Can update a user with the deprecated project option rather than
        # default_project_id.
        self.deprecations.expect_deprecations()
        ref = self.new_ref()
        req_ref = ref.copy()
        req_ref.pop('id')
        param_ref = req_ref.copy()
        self.stub_entity('PATCH',
                         [self.collection_key, ref['id']],
                         status_code=200, entity=ref)
        # Use deprecated project_id rather than new default_project_id.
        param_ref['project_id'] = param_ref.pop('default_project_id')
        params = utils.parameterize(param_ref)
        returned = self.manager.update(ref['id'], **params)
        self.assertIsInstance(returned, self.model)
        for attr in ref:
            self.assertEqual(
                getattr(returned, attr),
                ref[attr],
                'Expected different %s' % attr)
        self.assertEntityRequestBodyIs(req_ref)

    def test_update_with_project_and_default_project(self):
        # CLEANUP: dropped a spurious `ref=None` parameter that was unused
        # and immediately shadowed by the assignment below.
        self.deprecations.expect_deprecations()
        ref = self.new_ref()
        req_ref = ref.copy()
        req_ref.pop('id')
        param_ref = req_ref.copy()
        self.stub_entity('PATCH',
                         [self.collection_key, ref['id']],
                         status_code=200, entity=ref)
        # Add the deprecated project_id in the call, the value will be ignored.
        param_ref['project_id'] = 'project'
        params = utils.parameterize(param_ref)
        returned = self.manager.update(ref['id'], **params)
        self.assertIsInstance(returned, self.model)
        for attr in ref:
            self.assertEqual(
                getattr(returned, attr),
                ref[attr],
                'Expected different %s' % attr)
        self.assertEntityRequestBodyIs(req_ref)

    def test_update_password(self):
        old_password = uuid.uuid4().hex
        new_password = uuid.uuid4().hex
        self.stub_url('POST',
                      [self.collection_key, self.TEST_USER_ID, 'password'])
        self.client.user_id = self.TEST_USER_ID
        self.manager.update_password(old_password, new_password)
        exp_req_body = {
            'user': {
                'password': new_password, 'original_password': old_password
            }
        }
        self.assertEqual(
            '%s/users/%s/password' % (self.TEST_URL, self.TEST_USER_ID),
            self.requests_mock.last_request.url)
        self.assertRequestBodyIs(json=exp_req_body)
        # Neither password may leak into the log output.
        self.assertNotIn(old_password, self.logger.output)
        self.assertNotIn(new_password, self.logger.output)

    def test_update_password_with_no_hardcoded_endpoint_filter(self):
        # test to ensure the 'endpoint_filter' parameter is not being
        # passed from the manager. Endpoint filtering should be done at
        # the Session, not the individual managers.
        old_password = uuid.uuid4().hex
        new_password = uuid.uuid4().hex
        expected_params = {'user': {'password': new_password,
                                    'original_password': old_password}}
        user_password_update_path = '/users/%s/password' % self.TEST_USER_ID
        self.client.user_id = self.TEST_USER_ID
        # NOTE(gyee): user manager subclass keystoneclient.base.Manager
        # and utilize the _update() method in the base class to interface
        # with the client session to perform the update. In the case, we
        # just need to make sure the 'endpoint_filter' parameter is not
        # there.
        with mock.patch('keystoneclient.base.Manager._update') as m:
            self.manager.update_password(old_password, new_password)
            m.assert_called_with(user_password_update_path, expected_params,
                                 method='POST', log=False)

    def test_update_password_with_bad_inputs(self):
        old_password = uuid.uuid4().hex
        new_password = uuid.uuid4().hex
        # users can't unset their password
        self.assertRaises(exceptions.ValidationError,
                          self.manager.update_password,
                          old_password, None)
        self.assertRaises(exceptions.ValidationError,
                          self.manager.update_password,
                          old_password, '')
        # users can't start with empty passwords
        self.assertRaises(exceptions.ValidationError,
                          self.manager.update_password,
                          None, new_password)
        self.assertRaises(exceptions.ValidationError,
                          self.manager.update_password,
                          '', new_password)
        # this wouldn't result in any change anyway
        self.assertRaises(exceptions.ValidationError,
                          self.manager.update_password,
                          None, None)
        self.assertRaises(exceptions.ValidationError,
                          self.manager.update_password,
                          '', '')
        password = uuid.uuid4().hex
        self.assertRaises(exceptions.ValidationError,
                          self.manager.update_password,
                          password, password)
| apache-2.0 |
GbalsaC/bitnamiP | venv/lib/python2.7/site-packages/requests/packages/chardet/mbcsgroupprober.py | 2769 | 1967 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
# Proofpoint, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .charsetgroupprober import CharSetGroupProber
from .utf8prober import UTF8Prober
from .sjisprober import SJISProber
from .eucjpprober import EUCJPProber
from .gb2312prober import GB2312Prober
from .euckrprober import EUCKRProber
from .cp949prober import CP949Prober
from .big5prober import Big5Prober
from .euctwprober import EUCTWProber
class MBCSGroupProber(CharSetGroupProber):
    """Group prober that races every supported multi-byte charset prober."""

    # Order matters to the group prober's tie-breaking: keep UTF-8 first.
    PROBER_CLASSES = (
        UTF8Prober,
        SJISProber,
        EUCJPProber,
        GB2312Prober,
        EUCKRProber,
        CP949Prober,
        Big5Prober,
        EUCTWProber,
    )

    def __init__(self):
        CharSetGroupProber.__init__(self)
        self._mProbers = [prober() for prober in self.PROBER_CLASSES]
        self.reset()
| agpl-3.0 |
idjaw/horizon | openstack_dashboard/dashboards/project/containers/views.py | 48 | 13326 | # Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Views for managing Swift containers.
"""
import os
import django
from django import http
from django.utils.functional import cached_property # noqa
from django.utils.translation import ugettext_lazy as _
from django.views import generic
from horizon import browsers
from horizon import exceptions
from horizon import forms
from horizon.utils import memoized
from horizon.utils.urlresolvers import reverse # noqa
from openstack_dashboard import api
from openstack_dashboard.api import swift
from openstack_dashboard.dashboards.project.containers \
import browsers as project_browsers
from openstack_dashboard.dashboards.project.containers \
import forms as project_forms
from openstack_dashboard.dashboards.project.containers import utils
class ContainerView(browsers.ResourceBrowserView):
    """Resource browser listing swift containers and the objects they hold."""
    browser_class = project_browsers.ContainerBrowser
    template_name = "project/containers/index.html"

    def get_containers_data(self):
        """Return the (possibly paginated) list of containers."""
        containers = []
        self._more = None
        marker = self.request.GET.get('marker', None)
        try:
            containers, self._more = api.swift.swift_get_containers(
                self.request, marker=marker)
        except Exception:
            msg = _('Unable to retrieve container list.')
            exceptions.handle(self.request, msg)
        return containers

    @cached_property
    def objects(self):
        """Returns a list of objects given the subfolder's path.

        The path is from the kwargs of the request.
        """
        objects = []
        self._more = None
        marker = self.request.GET.get('marker', None)
        container_name = self.kwargs['container_name']
        subfolder = self.kwargs['subfolder_path']
        prefix = None
        if container_name:
            self.navigation_selection = True
            if subfolder:
                prefix = subfolder
            try:
                objects, self._more = api.swift.swift_get_objects(
                    self.request,
                    container_name,
                    marker=marker,
                    prefix=prefix)
            except Exception:
                self._more = None
                objects = []
                msg = _('Unable to retrieve object list.')
                exceptions.handle(self.request, msg)
        return objects

    def is_subdir(self, item):
        # Pseudo-folders are stored with this synthetic content type.
        content_type = "application/pseudo-folder"
        return getattr(item, "content_type", None) == content_type

    def is_placeholder(self, item):
        # A trailing delimiter marks the zero-byte placeholder object that
        # backs a pseudo-folder; it should not be listed as a real object.
        object_name = getattr(item, "name", "")
        return object_name.endswith(api.swift.FOLDER_DELIMITER)

    def get_objects_data(self):
        """Returns a list of objects within the current folder."""
        filtered_objects = [item for item in self.objects
                            if (not self.is_subdir(item) and
                                not self.is_placeholder(item))]
        return filtered_objects

    def get_subfolders_data(self):
        """Returns a list of subfolders within the current folder."""
        filtered_objects = [item for item in self.objects
                            if self.is_subdir(item)]
        return filtered_objects

    def get_context_data(self, **kwargs):
        """Add the container name and breadcrumb subfolder list to context."""
        context = super(ContainerView, self).get_context_data(**kwargs)
        context['container_name'] = self.kwargs["container_name"]
        context['subfolders'] = []
        if self.kwargs["subfolder_path"]:
            # Build (folder, full_path) breadcrumbs from the innermost
            # folder outwards, inserting at the front to preserve order.
            (parent, slash, folder) = self.kwargs["subfolder_path"] \
                .strip('/').rpartition('/')
            while folder:
                path = "%s%s%s/" % (parent, slash, folder)
                context['subfolders'].insert(0, (folder, path))
                (parent, slash, folder) = parent.rpartition('/')
        return context
class CreateView(forms.ModalFormView):
    """Modal form for creating a new container (or nested container path)."""
    form_class = project_forms.CreateContainer
    template_name = 'project/containers/create.html'
    success_url = "horizon:project:containers:index"
    page_title = _("Create Container")

    def get_success_url(self):
        """Redirect into the newly created container or parent path."""
        parent = self.request.POST.get('parent')
        if not parent:
            container = utils.wrap_delimiter(self.request.POST['name'])
            return reverse(self.success_url, args=[container])
        container, _unused, remainder = parent.partition(
            swift.FOLDER_DELIMITER)
        return reverse(self.success_url,
                       args=(utils.wrap_delimiter(container),
                             utils.wrap_delimiter(remainder)))

    def get_initial(self):
        initial = super(CreateView, self).get_initial()
        initial['parent'] = self.kwargs['container_name']
        return initial
class CreatePseudoFolderView(forms.ModalFormView):
    """Modal form for creating a pseudo-folder inside a container."""
    form_class = project_forms.CreatePseudoFolder
    template_name = 'project/containers/create_pseudo_folder.html'
    success_url = "horizon:project:containers:index"
    page_title = _("Create Pseudo-folder")

    def get_success_url(self):
        """Redirect back into the container/path that was posted."""
        container = utils.wrap_delimiter(
            self.request.POST['container_name'])
        path = self.request.POST.get('path', '')
        return reverse(self.success_url, args=(container, path))

    def get_initial(self):
        return {
            "container_name": self.kwargs["container_name"],
            "path": self.kwargs["subfolder_path"],
        }

    def get_context_data(self, **kwargs):
        parent = super(CreatePseudoFolderView, self)
        context = parent.get_context_data(**kwargs)
        context['container_name'] = self.kwargs["container_name"]
        return context
class UploadView(forms.ModalFormView):
    """Modal form for uploading an object into a container/pseudo-folder."""
    form_class = project_forms.UploadObject
    template_name = 'project/containers/upload.html'
    success_url = "horizon:project:containers:index"
    page_title = _("Upload Objects")

    def get_success_url(self):
        """Redirect back to the container/path the object was uploaded to."""
        posted = self.request.POST
        container = utils.wrap_delimiter(posted['container_name'])
        path = utils.wrap_delimiter(posted.get('path', ''))
        return reverse(self.success_url, args=(container, path))

    def get_initial(self):
        return {
            "container_name": self.kwargs["container_name"],
            "path": self.kwargs["subfolder_path"],
        }

    def get_context_data(self, **kwargs):
        context = super(UploadView, self).get_context_data(**kwargs)
        context['container_name'] = self.kwargs["container_name"]
        return context
def object_download(request, container_name, object_path):
    """Stream a swift object back to the browser as a file download."""
    try:
        obj = api.swift.swift_get_object(request, container_name, object_path,
                                         resp_chunk_size=swift.CHUNK_SIZE)
    except Exception:
        # NOTE(review): exceptions.handle() with a redirect is expected to
        # raise, aborting this view; otherwise `obj` below would be unbound
        # — confirm against horizon's exceptions.handle contract.
        redirect = reverse("horizon:project:containers:index")
        exceptions.handle(request,
                          _("Unable to retrieve object."),
                          redirect=redirect)
    # Add the original file extension back on if it wasn't preserved in the
    # name given to the object.
    filename = object_path.rsplit(swift.FOLDER_DELIMITER)[-1]
    if not os.path.splitext(obj.name)[1] and obj.orig_name:
        name, ext = os.path.splitext(obj.orig_name)
        filename = "%s%s" % (filename, ext)
    # NOTE(tsufiev): StreamingHttpResponse class had been introduced in
    # Django 1.5 specifically for the purpose streaming and/or transferring
    # large files, it's less fragile than standard HttpResponse and should be
    # used when available.
    if django.VERSION >= (1, 5):
        response = http.StreamingHttpResponse(obj.data)
    else:
        response = http.HttpResponse(obj.data)
    # Commas are stripped because they act as separators inside the
    # Content-Disposition header value.
    safe_name = filename.replace(",", "").encode('utf-8')
    response['Content-Disposition'] = 'attachment; filename="%s"' % safe_name
    response['Content-Type'] = 'application/octet-stream'
    response['Content-Length'] = obj.bytes
    return response
class CopyView(forms.ModalFormView):
    """Modal form for copying an object to another container/path."""
    form_class = project_forms.CopyObject
    template_name = 'project/containers/copy.html'
    success_url = "horizon:project:containers:index"
    page_title = _("Copy Object")

    def get_success_url(self):
        """Redirect to the destination container/path of the copy."""
        posted = self.request.POST
        container = utils.wrap_delimiter(posted['new_container_name'])
        path = utils.wrap_delimiter(posted.get('path', ''))
        return reverse(self.success_url, args=(container, path))

    def get_form_kwargs(self):
        """Extend form kwargs with (name, name) choices for all containers."""
        form_kwargs = super(CopyView, self).get_form_kwargs()
        try:
            containers = api.swift.swift_get_containers(self.request)
        except Exception:
            # exceptions.handle() with a redirect aborts this method.
            redirect = reverse("horizon:project:containers:index")
            exceptions.handle(self.request,
                              _('Unable to list containers.'),
                              redirect=redirect)
        form_kwargs['containers'] = [(container.name, container.name)
                                     for container in containers[0]]
        return form_kwargs

    @staticmethod
    def get_copy_name(object_name):
        """Suggest a name for the copy, e.g. 'a.txt' -> 'a.copy.txt'."""
        base, extension = os.path.splitext(object_name)
        return "%s.copy%s" % (base, extension)

    def get_initial(self):
        """Seed the copy form with the source location and suggested name."""
        container = self.kwargs["container_name"]
        path = self.kwargs["subfolder_path"]
        object_name = self.kwargs["object_name"]
        source = "%s%s" % (path or '', object_name)
        return {
            "new_container_name": container,
            "orig_container_name": container,
            "orig_object_name": source,
            "path": path,
            "new_object_name": self.get_copy_name(object_name),
        }

    def get_context_data(self, **kwargs):
        context = super(CopyView, self).get_context_data(**kwargs)
        for key in ("container_name", "object_name"):
            context[key] = self.kwargs[key]
        return context
class ContainerDetailView(forms.ModalFormMixin, generic.TemplateView):
    """Modal view showing the metadata of a single container."""

    template_name = 'project/containers/container_detail.html'
    page_title = _("Container Details")

    @memoized.memoized_method
    def get_object(self):
        """Fetch the container metadata (no object data) once per request."""
        try:
            return api.swift.swift_get_container(
                self.request,
                self.kwargs["container_name"],
                with_data=False)
        except Exception:
            exceptions.handle(
                self.request,
                _('Unable to retrieve details.'),
                redirect=reverse("horizon:project:containers:index"))

    def get_context_data(self, **kwargs):
        """Expose the fetched container to the template."""
        context = super(ContainerDetailView, self).get_context_data(**kwargs)
        context.update(container=self.get_object())
        return context
class ObjectDetailView(forms.ModalFormMixin, generic.TemplateView):
    """Modal view showing the metadata of a single Swift object."""

    template_name = 'project/containers/object_detail.html'
    page_title = _("Object Details")

    @memoized.memoized_method
    def get_object(self):
        """Fetch the object metadata (no payload) once per request."""
        try:
            return api.swift.swift_get_object(
                self.request,
                self.kwargs["container_name"],
                self.kwargs["object_path"],
                with_data=False)
        except Exception:
            exceptions.handle(
                self.request,
                _('Unable to retrieve details.'),
                redirect=reverse("horizon:project:containers:index"))

    def get_context_data(self, **kwargs):
        """Expose the fetched object to the template."""
        context = super(ObjectDetailView, self).get_context_data(**kwargs)
        context.update(object=self.get_object())
        return context
class UpdateObjectView(forms.ModalFormView):
    """Modal workflow for replacing the contents of an existing object."""

    form_class = project_forms.UpdateObject
    template_name = 'project/containers/update.html'
    success_url = "horizon:project:containers:index"
    page_title = _("Update Object")

    def get_success_url(self):
        """Return to the container/folder the object lives in."""
        post = self.request.POST
        return reverse(self.success_url,
                       args=(utils.wrap_delimiter(post['container_name']),
                             utils.wrap_delimiter(post.get('path', ''))))

    def get_initial(self):
        """Pre-fill the form from the URL-supplied object location."""
        url_kwargs = self.kwargs
        return {
            "container_name": url_kwargs["container_name"],
            "path": url_kwargs["subfolder_path"],
            "name": url_kwargs["object_name"],
        }

    def get_context_data(self, **kwargs):
        """Expose the object's location details to the template."""
        context = super(UpdateObjectView, self).get_context_data(**kwargs)
        context.update(
            container_name=self.kwargs["container_name"],
            subfolder_path=self.kwargs["subfolder_path"],
            object_name=self.kwargs["object_name"],
        )
        return context
| apache-2.0 |
oculusstorystudio/kraken | Python/kraken/ui/GraphView/pyflowgraph/node.py | 1 | 13604 |
#
# Copyright 2015-2017 Eric Thivierge
#
import math
import json
from kraken.ui.Qt import QtWidgets, QtGui, QtCore
from port import InputPort, OutputPort, IOPort
class NodeTitle(QtWidgets.QGraphicsWidget):
    """Fixed-size text label displayed in a node's header."""

    __color = QtGui.QColor(25, 25, 25)
    __font = QtGui.QFont('Roboto', 14)
    __font.setLetterSpacing(QtGui.QFont.PercentageSpacing, 115)
    __labelBottomSpacing = 12

    def __init__(self, text, parent=None):
        super(NodeTitle, self).__init__(parent)
        fixed = QtWidgets.QSizePolicy.Fixed
        self.setSizePolicy(QtWidgets.QSizePolicy(fixed, fixed))

        item = QtWidgets.QGraphicsTextItem(text, self)
        item.setDefaultTextColor(self.__color)
        item.setFont(self.__font)
        item.setPos(0, -2)

        # Titles must never wrap; the node grows to fit instead.
        option = item.document().defaultTextOption()
        option.setWrapMode(QtGui.QTextOption.NoWrap)
        item.document().setDefaultTextOption(option)
        item.adjustSize()

        self.__textItem = item
        self.setPreferredSize(self.textSize())

    def setText(self, text):
        """Replace the label text and update the preferred size to match."""
        self.__textItem.setPlainText(text)
        self.__textItem.adjustSize()
        self.setPreferredSize(self.textSize())

    def textSize(self):
        """Return the size required to display the current text."""
        width = self.__textItem.textWidth()
        height = self.__font.pointSizeF() + self.__labelBottomSpacing
        return QtCore.QSizeF(width, height)
class NodeHeader(QtWidgets.QGraphicsWidget):
    """Horizontal strip at the top of a node that holds its title widget."""

    def __init__(self, text, parent=None):
        super(NodeHeader, self).__init__(parent)
        expanding = QtWidgets.QSizePolicy.Expanding
        self.setSizePolicy(QtWidgets.QSizePolicy(expanding, expanding))

        layout = QtWidgets.QGraphicsLinearLayout()
        layout.setContentsMargins(0, 0, 0, 0)
        layout.setSpacing(3)
        layout.setOrientation(QtCore.Qt.Horizontal)
        self.setLayout(layout)

        self._titleWidget = NodeTitle(text, self)
        layout.addItem(self._titleWidget)
        layout.setAlignment(self._titleWidget,
                            QtCore.Qt.AlignCenter | QtCore.Qt.AlignTop)

    def setText(self, text):
        """Forward new title text to the embedded NodeTitle widget."""
        self._titleWidget.setText(text)
class PortList(QtWidgets.QGraphicsWidget):
    """Vertical stack of port widgets on one section of a node."""

    def __init__(self, parent):
        super(PortList, self).__init__(parent)
        layout = QtWidgets.QGraphicsLinearLayout()
        layout.setContentsMargins(0, 0, 0, 0)
        layout.setSpacing(4)
        layout.setOrientation(QtCore.Qt.Vertical)
        self.setLayout(layout)

    def addPort(self, port, alignment):
        """Append ``port`` to the stack with ``alignment`` and return it."""
        layout = self.layout()
        layout.addItem(port)
        layout.setAlignment(port, alignment)
        self.adjustSize()
        return port
class Node(QtWidgets.QGraphicsWidget):
    """A selectable, draggable graph node: a header plus three vertical
    port lists (IO, inputs, outputs) stacked in a linear layout.

    The painting and mouse-event logic below is order-sensitive; the code
    is left untouched and only annotated.
    """

    # Emitted as (origName, newName) whenever the node is renamed.
    nameChanged = QtCore.Signal(str, str)

    # Default palette shared by all nodes; per-instance pen copies are made
    # in __init__ so one node's colors can change without affecting others.
    __defaultColor = QtGui.QColor(154, 205, 50, 255)
    __defaultUnselectedColor = QtGui.QColor(25, 25, 25)
    __defaultSelectedColor = QtGui.QColor(255, 255, 255, 255)
    __defaultUnselectedPen = QtGui.QPen(__defaultUnselectedColor, 1.6)
    __defaultSelectedPen = QtGui.QPen(__defaultSelectedColor, 1.6)
    __defaultLinePen = QtGui.QPen(QtGui.QColor(25, 25, 25, 255), 1.25)

    def __init__(self, graph, name):
        """Build the node layout: header on top, then IO/input/output ports.

        graph -- the owning graph widget (selection, connections, grid)
        name  -- display name shown in the header
        """
        super(Node, self).__init__()
        self.__name = name
        self.__graph = graph
        self.__color = self.__defaultColor
        self.__unselectedColor = self.__defaultUnselectedColor
        self.__selectedColor = self.__defaultSelectedColor
        self.__unselectedPen = QtGui.QPen(self.__defaultUnselectedPen)
        self.__selectedPen = QtGui.QPen(self.__defaultSelectedPen)
        self.__linePen = QtGui.QPen(self.__defaultLinePen)
        self.setMinimumWidth(60)
        self.setMinimumHeight(20)
        self.setSizePolicy(QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding))
        layout = QtWidgets.QGraphicsLinearLayout()
        layout.setContentsMargins(0, 0, 0, 0)
        layout.setSpacing(0)
        layout.setOrientation(QtCore.Qt.Vertical)
        self.setLayout(layout)
        self.__headerItem = NodeHeader(self.__name, self)
        layout.addItem(self.__headerItem)
        layout.setAlignment(self.__headerItem, QtCore.Qt.AlignCenter | QtCore.Qt.AlignTop)
        self.__ports = []
        self.__ioPortsHolder = PortList(self)
        self.__inputPortsHolder = PortList(self)
        self.__outputPortsHolder = PortList(self)
        # Extra bottom margin so the last output port isn't flush with the
        # node's rounded bottom edge.
        self.__outputPortsHolder.layout().setContentsMargins(0, 0, 0, 10)
        layout.addItem(self.__ioPortsHolder)
        layout.addItem(self.__inputPortsHolder)
        layout.addItem(self.__outputPortsHolder)
        self.__selected = False
        self.__dragging = False

    # =====
    # Name
    # =====
    def getName(self):
        """Return the node's current display name."""
        return self.__name

    def setName(self, name):
        """Rename the node, update the header, and notify listeners."""
        if name != self.__name:
            origName = self.__name
            self.__name = name
            self.__headerItem.setText(self.__name)
            # Emit an event, so that the graph can update itself.
            self.nameChanged.emit(origName, name)
            # Update the node so that the size is computed.
            self.adjustSize()

    # =======
    # Colors
    # =======
    def getColor(self):
        """Return the node's body fill color."""
        return self.__color

    def setColor(self, color):
        """Set the node's body fill color and repaint."""
        self.__color = color
        self.update()

    def getUnselectedColor(self):
        """Return the outline color used while not selected."""
        return self.__unselectedColor

    def setUnselectedColor(self, color):
        """Set the outline color used while not selected and repaint."""
        self.__unselectedColor = color
        self.__unselectedPen.setColor(self.__unselectedColor)
        self.update()

    def getSelectedColor(self):
        """Return the outline color used while selected."""
        return self.__selectedColor

    def setSelectedColor(self, color):
        """Set the outline color used while selected and repaint."""
        self.__selectedColor = color
        self.__selectedPen.setColor(self.__selectedColor)
        self.update()

    # =============
    # Misc Methods
    # =============
    def getGraph(self):
        """Return the graph widget this node belongs to."""
        return self.__graph

    def getHeader(self):
        """Return the NodeHeader widget."""
        return self.__headerItem

    # ==========
    # Selection
    # ==========
    def isSelected(self):
        """Return True if this node is part of the current selection."""
        return self.__selected

    def setSelected(self, selected=True):
        # NOTE(review): the z-value is raised to 20.0 regardless of whether
        # ``selected`` is True or False -- confirm this is intentional.
        self.__selected = selected
        self.setZValue(20.0)
        self.update()

    #########################
    ## Graph Pos

    def getGraphPos(self):
        """Return the node's center point in scene coordinates."""
        transform = self.transform()
        size = self.size()
        return QtCore.QPointF(transform.dx()+(size.width()*0.5), transform.dy()+(size.height()*0.5))

    def setGraphPos(self, graphPos):
        """Place the node's transform origin at ``graphPos``."""
        self.prepareConnectionGeometryChange()
        size = self.size()  # NOTE(review): computed but never used here.
        self.setTransform(QtGui.QTransform.fromTranslate(graphPos.x(), graphPos.y()), False)

    def translate(self, x, y):
        """Offset the node by (x, y) after warning its connections."""
        self.prepareConnectionGeometryChange()
        super(Node, self).moveBy(x, y)

    # Prior to moving the node, we need to tell the connections to prepare for a geometry change.
    # This method must be called prior to moving a node.
    def prepareConnectionGeometryChange(self):
        for port in self.__ports:
            if port.inCircle():
                for connection in port.inCircle().getConnections():
                    connection.prepareGeometryChange()
            if port.outCircle():
                for connection in port.outCircle().getConnections():
                    connection.prepareGeometryChange()

    #########################
    ## Ports

    def addPort(self, port):
        """Add ``port`` to the holder matching its direction; return it."""
        if isinstance(port, InputPort):
            self.__inputPortsHolder.addPort(port, QtCore.Qt.AlignHCenter | QtCore.Qt.AlignVCenter)
        elif isinstance(port, OutputPort):
            self.__outputPortsHolder.addPort(port, QtCore.Qt.AlignHCenter | QtCore.Qt.AlignVCenter)
        else:
            # Anything that is neither pure input nor pure output (IO ports)
            # goes in the top holder.
            self.__ioPortsHolder.addPort(port, QtCore.Qt.AlignHCenter | QtCore.Qt.AlignVCenter)
        self.__ports.append(port)
        self.adjustSize()
        return port

    def getPort(self, name):
        """Return the first port named ``name``, or None."""
        for port in self.__ports:
            if port.getName() == name:
                return port
        return None

    def getInputPort(self, name):
        """Return the input (or IO) port named ``name``, or None."""
        for port in self.__ports:
            if port.getName() == name and isinstance(port, (InputPort, IOPort)):
                return port
        return None

    def getOutputPort(self, name):
        """Return the output (or IO) port named ``name``, or None."""
        for port in self.__ports:
            if port.getName() == name and isinstance(port, (OutputPort, IOPort)):
                return port
        return None

    def paint(self, painter, option, widget):
        """Draw the rounded body, the darker title band, and the outline."""
        rect = self.windowFrameRect()
        painter.setBrush(self.__color)
        # Transparent pen: the body is filled first, outlined last.
        painter.setPen(QtGui.QPen(QtGui.QColor(0, 0, 0, 0), 0))
        roundingY = 8
        roundingX = 8
        painter.drawRoundedRect(rect, roundingX, roundingY, mode=QtCore.Qt.AbsoluteSize)
        # Title BG
        titleHeight = self.__headerItem.size().height() - 3
        painter.setBrush(self.__color.darker(125))
        # NOTE(review): this radius scales with the rect width, which looks
        # suspicious for an AbsoluteSize radius -- confirm before changing.
        roundingY = rect.width() * roundingX / titleHeight
        painter.drawRoundedRect(0, 0, rect.width(), titleHeight, roundingX, roundingY, mode=QtCore.Qt.AbsoluteSize)
        # Square off the lower half of the title band so only its top
        # corners appear rounded.
        painter.drawRect(0, titleHeight * 0.5 + 2, rect.width(), titleHeight * 0.5)
        painter.setBrush(QtGui.QColor(0, 0, 0, 0))
        if self.__selected:
            painter.setPen(self.__selectedPen)
        else:
            painter.setPen(self.__unselectedPen)
        roundingY = 8
        roundingX = 8
        painter.drawRoundedRect(rect, roundingX, roundingY, mode=QtCore.Qt.AbsoluteSize)

    #########################
    ## Events

    def mousePressEvent(self, event):
        """Update the selection per Ctrl/Shift modifiers and begin a drag."""
        # NOTE(review): ``is`` against a Qt enum member relies on enum
        # identity; ``==`` would be the conventional comparison -- confirm.
        if event.button() is QtCore.Qt.MouseButton.LeftButton:
            modifiers = event.modifiers()
            if modifiers == QtCore.Qt.ControlModifier:
                # Ctrl toggles this node's membership in the selection.
                if not self.isSelected():
                    self.__graph.selectNode(self, clearSelection=False)
                else:
                    self.__graph.deselectNode(self)
            elif modifiers == QtCore.Qt.ShiftModifier:
                # Shift only adds to the selection, never removes.
                if not self.isSelected():
                    self.__graph.selectNode(self, clearSelection=False)
            else:
                if not self.isSelected():
                    self.__graph.selectNode(self, clearSelection=True)
                # Push all nodes back 1 level in z depth to bring selected
                # node to front
                for node in [x for x in self.__graph.getNodes().values()]:
                    if node == self:
                        continue
                    if node.zValue() != 0.0:
                        node.setZValue(node.zValue() - 1)
            # Record drag bookkeeping in scene coordinates.
            self.__dragging = True
            self._mouseDownPoint = self.mapToScene(event.pos())
            self._mouseDelta = self._mouseDownPoint - self.getGraphPos()
            self._lastDragPoint = self._mouseDownPoint
            self._nodesMoved = False
        else:
            super(Node, self).mousePressEvent(event)

    def mouseMoveEvent(self, event):
        """While dragging, move the whole selection (snapping to the grid)."""
        if self.__dragging:
            newPos = self.mapToScene(event.pos())
            graph = self.getGraph()
            if graph.getSnapToGrid() is True:
                gridSize = graph.getGridSize()
                # Snap the node's would-be position to the grid, then apply
                # the same offset to the cursor position so the delta below
                # reflects the snapped movement.
                newNodePos = newPos - self._mouseDelta
                snapPosX = math.floor(newNodePos.x() / gridSize) * gridSize
                snapPosY = math.floor(newNodePos.y() / gridSize) * gridSize
                snapPos = QtCore.QPointF(snapPosX, snapPosY)
                newPosOffset = snapPos - newNodePos
                newPos = newPos + newPosOffset
            delta = newPos - self._lastDragPoint
            self.__graph.moveSelectedNodes(delta)
            self._lastDragPoint = newPos
            self._nodesMoved = True
        else:
            super(Node, self).mouseMoveEvent(event)

    def mouseReleaseEvent(self, event):
        """Finish a drag, committing the total movement to the graph."""
        if self.__dragging:
            if self._nodesMoved:
                newPos = self.mapToScene(event.pos())
                # Total displacement since the press, for undo/commit logic.
                delta = newPos - self._mouseDownPoint
                self.__graph.endMoveSelectedNodes(delta)
            self.setCursor(QtCore.Qt.ArrowCursor)
            self.__dragging = False
        else:
            super(Node, self).mouseReleaseEvent(event)

    #########################
    ## shut down

    def disconnectAllPorts(self):
        # gather all the connections into a list, and then remove them from the graph.
        # This is because we can't remove connections from ports while
        # iterating over the set.
        connections = []
        for port in self.__ports:
            if port.inCircle():
                for connection in port.inCircle().getConnections():
                    connections.append(connection)
            if port.outCircle():
                for connection in port.outCircle().getConnections():
                    connections.append(connection)
        for connection in connections:
            # NOTE(review): the trailing "| bsd-3-clause |" on the next line
            # looks like extraction junk fused onto this source -- confirm
            # against upstream and remove.
            self.__graph.removeConnection(connection) | bsd-3-clause |
edx/edx-platform | lms/djangoapps/program_enrollments/api/tests/test_linking.py | 3 | 20400 | """
Tests for account linking Python API.
"""
from unittest.mock import patch
from uuid import uuid4
from django.test import TestCase
from edx_django_utils.cache import RequestCache
from opaque_keys.edx.keys import CourseKey
from testfixtures import LogCapture
from common.djangoapps.student.api import get_course_access_role
from common.djangoapps.student.roles import CourseStaffRole
from common.djangoapps.student.tests.factories import CourseAccessRoleFactory, UserFactory
from lms.djangoapps.program_enrollments.tests.factories import (
CourseAccessRoleAssignmentFactory,
ProgramCourseEnrollmentFactory,
ProgramEnrollmentFactory
)
from openedx.core.djangoapps.content.course_overviews.tests.factories import CourseOverviewFactory
from ..linking import (
NO_LMS_USER_TEMPLATE,
NO_PROGRAM_ENROLLMENT_TEMPLATE,
_user_already_linked_message,
link_program_enrollments
)
LOG_PATH = 'lms.djangoapps.program_enrollments.api.linking'
class TestLinkProgramEnrollmentsMixin:
    """ Utility methods and test data for testing linking """

    @classmethod
    def setUpTestData(cls):  # pylint: disable=missing-function-docstring
        # Shared read-only fixtures: two program UUIDs, a curriculum UUID,
        # and two courses with CourseOverview records so course enrollments
        # can be realized against them.
        cls.program = uuid4()
        cls.curriculum = uuid4()
        cls.other_program = uuid4()
        cls.fruit_course = CourseKey.from_string('course-v1:edX+Oranges+Apples')
        cls.animal_course = CourseKey.from_string('course-v1:edX+Cats+Dogs')
        CourseOverviewFactory.create(id=cls.fruit_course)
        CourseOverviewFactory.create(id=cls.animal_course)

    def setUp(self):
        # Two fresh LMS users per test; mutated by linking, so not class-level.
        self.user_1 = UserFactory.create()
        self.user_2 = UserFactory.create()

    def tearDown(self):
        # Request-scoped caches would otherwise leak state between tests.
        RequestCache.clear_all_namespaces()

    def _create_waiting_enrollment(self, program_uuid, external_user_key):
        """
        Create a waiting (user=None) program enrollment for the given
        program and external user key.
        """
        return ProgramEnrollmentFactory.create(
            user=None,
            program_uuid=program_uuid,
            curriculum_uuid=self.curriculum,
            external_user_key=external_user_key,
        )

    def _create_waiting_course_enrollment(self, program_enrollment, course_key, status='active'):
        """
        Create a waiting program course enrollment for the given program enrollment,
        course key, and optionally status.
        """
        return ProgramCourseEnrollmentFactory.create(
            program_enrollment=program_enrollment,
            course_key=course_key,
            course_enrollment=None,
            status=status,
        )

    def _assert_no_user(self, program_enrollment, refresh=True):
        """
        Assert that the given program enrollment has no LMS user associated with it.
        Pass refresh=False if the instance is already up to date.
        """
        if refresh:
            program_enrollment.refresh_from_db()
        assert program_enrollment.user is None

    def _assert_no_program_enrollment(self, user, program_uuid, refresh=True):
        """
        Assert that the given user is not enrolled in the given program.
        Pass refresh=False if the instance is already up to date.
        """
        if refresh:
            user.refresh_from_db()
        assert not user.programenrollment_set.filter(program_uuid=program_uuid).exists()

    def _assert_program_enrollment(self, user, program_uuid, external_user_key, refresh=True):
        """
        Assert that the given user is enrolled in the given program with the
        given external user key.
        """
        if refresh:
            user.refresh_from_db()
        enrollment = user.programenrollment_set.get(
            program_uuid=program_uuid, external_user_key=external_user_key
        )
        assert enrollment is not None

    def _assert_user_enrolled_in_program_courses(self, user, program_uuid, *course_keys):
        """
        Assert that the given user has active enrollments in exactly the
        given courses through the given program.
        """
        user.refresh_from_db()
        program_enrollment = user.programenrollment_set.get(
            user=user, program_uuid=program_uuid
        )
        all_course_enrollments = program_enrollment.program_course_enrollments
        # Only realized enrollments (waiting ones have course_enrollment=None).
        program_course_enrollments = all_course_enrollments.select_related(
            'course_enrollment__course'
        ).filter(
            course_enrollment__isnull=False
        )
        course_enrollments = [
            program_course_enrollment.course_enrollment
            for program_course_enrollment in program_course_enrollments
        ]
        assert all(course_enrollment.is_active for course_enrollment in course_enrollments)
        self.assertCountEqual(
            course_keys,
            [course_enrollment.course.id for course_enrollment in course_enrollments]
        )
class TestLinkProgramEnrollments(TestLinkProgramEnrollmentsMixin, TestCase):
    """ Tests for linking behavior """

    def test_link_only_specified_program(self):
        """
        Test that when there are two waiting program enrollments with the same external user key,
        only the specified program's program enrollment will be linked.
        """
        program_enrollment = self._create_waiting_enrollment(self.program, '0001')
        self._create_waiting_course_enrollment(program_enrollment, self.fruit_course)
        self._create_waiting_course_enrollment(program_enrollment, self.animal_course)
        # Same external key, different program: must remain untouched.
        another_program_enrollment = self._create_waiting_enrollment(self.other_program, '0001')
        self._create_waiting_course_enrollment(another_program_enrollment, self.fruit_course)
        self._create_waiting_course_enrollment(another_program_enrollment, self.animal_course)
        link_program_enrollments(self.program, {'0001': self.user_1.username})
        self._assert_program_enrollment(self.user_1, self.program, '0001')
        self._assert_user_enrolled_in_program_courses(
            self.user_1, self.program, self.fruit_course, self.animal_course
        )
        self._assert_no_user(another_program_enrollment)

    def test_inactive_waiting_course_enrollment(self):
        """
        Test that when a waiting program enrollment has waiting program course enrollments with a
        status of 'inactive' the course enrollment created after calling link_program_enrollments
        will be inactive.
        """
        program_enrollment = self._create_waiting_enrollment(self.program, '0001')
        active_enrollment = self._create_waiting_course_enrollment(
            program_enrollment,
            self.fruit_course
        )
        inactive_enrollment = self._create_waiting_course_enrollment(
            program_enrollment,
            self.animal_course,
            status='inactive'
        )
        link_program_enrollments(self.program, {'0001': self.user_1.username})
        self._assert_program_enrollment(self.user_1, self.program, '0001')
        # The 'active' waiting enrollment becomes an active CourseEnrollment...
        active_enrollment.refresh_from_db()
        assert active_enrollment.course_enrollment is not None
        assert active_enrollment.course_enrollment.course.id == self.fruit_course
        assert active_enrollment.course_enrollment.is_active
        # ...while the 'inactive' one is realized but left inactive.
        inactive_enrollment.refresh_from_db()
        assert inactive_enrollment.course_enrollment is not None
        assert inactive_enrollment.course_enrollment.course.id == self.animal_course
        assert not inactive_enrollment.course_enrollment.is_active

    def test_realize_course_access_roles(self):
        """
        Pending CourseAccessRoleAssignments attached to waiting course
        enrollments become real staff CourseAccessRoles on linking, and the
        pending assignment records are deleted.
        """
        program_enrollment = self._create_waiting_enrollment(self.program, '0001')
        active_enrollment_1 = self._create_waiting_course_enrollment(
            program_enrollment,
            self.fruit_course,
            status='active'
        )
        active_enrollment_2 = self._create_waiting_course_enrollment(
            program_enrollment,
            self.animal_course,
            status='active'
        )
        CourseAccessRoleAssignmentFactory(enrollment=active_enrollment_1)
        CourseAccessRoleAssignmentFactory(enrollment=active_enrollment_2)
        link_program_enrollments(self.program, {'0001': self.user_1.username})
        # assert that staff CourseAccessRoles are created for the user in the courses
        fruit_course_staff_role = get_course_access_role(
            self.user_1,
            self.fruit_course.org,
            self.fruit_course,
            CourseStaffRole.ROLE
        )
        assert fruit_course_staff_role is not None
        animal_course_staff_role = get_course_access_role(
            self.user_1,
            self.animal_course.org,
            self.animal_course,
            CourseStaffRole.ROLE
        )
        assert animal_course_staff_role is not None
        # assert that all CourseAccessRoleAssignment objects are deleted
        assert not active_enrollment_1.courseaccessroleassignment_set.all().exists()
        assert not active_enrollment_2.courseaccessroleassignment_set.all().exists()

    def test_realize_course_access_roles_user_with_existing_course_access_role(self):
        """
        This test asserts that, given a user that already has a staff CourseAccessRole in a course,
        if that user has a CourseAccessRoleAssignment that describes a staff role in that same course,
        that we do not mistakenly violate the unique_together constraint on the CourseAccessRole model by
        creating a duplicate. As of now, this is handled by the CourseStaffRole code itself, which silently
        ignores such duplicates, but this test is to ensure we do not regress.
        """
        program_enrollment = self._create_waiting_enrollment(self.program, '0001')
        active_enrollment_1 = self._create_waiting_course_enrollment(
            program_enrollment,
            self.fruit_course,
            status='active'
        )
        # create an CourseAccessRole for the user
        CourseAccessRoleFactory(user=self.user_1, course_id=self.fruit_course, role=CourseStaffRole.ROLE)
        # create a corresponding CourseAccessRoleAssignmentFactory that would, theoretically, cause a
        # duplicate object to be created, violating the CourseAccessRole integrity constraints
        CourseAccessRoleAssignmentFactory(enrollment=active_enrollment_1)
        link_program_enrollments(self.program, {'0001': self.user_1.username})
        # assert that staff CourseAccessRoles remains
        fruit_course_staff_role = get_course_access_role(
            self.user_1,
            self.fruit_course.org,
            self.fruit_course,
            CourseStaffRole.ROLE
        )
        assert fruit_course_staff_role is not None
        # assert that all CourseAccessRoleAssignment objects are deleted
        assert not active_enrollment_1.courseaccessroleassignment_set.all().exists()

    @staticmethod
    def _assert_course_enrollments_in_mode(course_enrollments, course_keys_to_mode):
        """
        Assert that all course enrollments are in the correct modes as
        described by course_keys_to_mode.

        Arguments:
            course_enrollments: the course enrollments whose modes we check
            course_keys_to_mode: a mapping from course keys to the mode
                slug the corresponding enrollment should be in
        """
        assert len(course_enrollments) == len(course_keys_to_mode)
        for course_enrollment in course_enrollments:
            assert course_enrollment.mode == course_keys_to_mode[course_enrollment.course.id]

    @patch('lms.djangoapps.program_enrollments.api.linking.CourseMode.modes_for_course_dict')
    def test_update_linking_enrollment_to_another_user(self, mock_modes_for_course_dict):
        """
        Test that when link_program_enrollments is called with a program and an external_user_key,
        user pair and that program is already linked to a different user with the same external_user_key
        that the original user's link is removed and replaced by a link with the new user.
        """
        program_enrollment = self._create_waiting_enrollment(self.program, '0001')
        self._create_waiting_course_enrollment(program_enrollment, self.fruit_course)
        self._create_waiting_course_enrollment(program_enrollment, self.animal_course)

        # in order to test what happens to a learner's enrollment in a course without an audit mode
        # (e.g. Master's only course), we need to mock out the course modes that exist for our courses;
        # doing it this way helps to avoid needing to use the modulestore when using the CourseModeFactory
        def mocked_modes_for_course_dict(course_key):
            if course_key == self.animal_course:
                return {'masters': 'masters'}
            else:
                return {'audit': 'audit'}
        mock_modes_for_course_dict.side_effect = mocked_modes_for_course_dict

        # do the initial link of user_1 to the program enrollment
        link_program_enrollments(self.program, {'0001': self.user_1.username})
        self._assert_program_enrollment(self.user_1, self.program, '0001', refresh=False)
        self._assert_no_program_enrollment(self.user_2, self.program, refresh=False)

        # grab the user's original course enrollment before the link between the program
        # and the course enrollments is severed
        course_enrollments_for_user_1 = [pce.course_enrollment
                                         for pce
                                         in program_enrollment.program_course_enrollments.all()]
        errors = link_program_enrollments(
            self.program,
            {
                '0001': self.user_2.username,
            }
        )
        assert errors == {}
        self._assert_program_enrollment(self.user_2, self.program, '0001')
        self._assert_no_program_enrollment(self.user_1, self.program)

        # assert that all of user_1's course enrollments as part of the program
        # are inactive
        for course_enrollment in course_enrollments_for_user_1:
            course_enrollment.refresh_from_db()
            assert not course_enrollment.is_active

        # assert that user_1's course enrollments are in the expected mode
        # after unlinking
        course_keys_to_mode = {
            self.fruit_course: 'audit',
            self.animal_course: 'masters',
        }
        self._assert_course_enrollments_in_mode(course_enrollments_for_user_1, course_keys_to_mode)

        # assert that user_2 has been successfully linked to the program
        self._assert_program_enrollment(self.user_2, self.program, '0001')
        self._assert_user_enrolled_in_program_courses(self.user_2, self.program, self.fruit_course, self.animal_course)
class TestLinkProgramEnrollmentsErrors(TestLinkProgramEnrollmentsMixin, TestCase):
    """ Tests for linking error behavior """

    def test_program_enrollment_not_found__nonexistant(self):
        # No enrollment exists at all for key '0002'.
        self._create_waiting_enrollment(self.program, '0001')
        self._program_enrollment_not_found()

    def test_program_enrollment_not_found__different_program(self):
        # '0002' exists, but in a different program than the one linked.
        self._create_waiting_enrollment(self.program, '0001')
        self._create_waiting_enrollment(self.other_program, '0002')
        self._program_enrollment_not_found()

    def _program_enrollment_not_found(self):
        """
        Helper for test_program_not_found_* tests.
        Tries to link user_1 to '0001' and user_2 to '0002' in program;
        asserts that user_2 was not linked because the enrollment was not found,
        while user_1's link still succeeds.
        """
        with LogCapture() as logger:
            errors = link_program_enrollments(
                self.program,
                {
                    '0001': self.user_1.username,
                    '0002': self.user_2.username,
                }
            )
            expected_error_msg = NO_PROGRAM_ENROLLMENT_TEMPLATE.format(
                program_uuid=self.program,
                external_user_key='0002'
            )
            logger.check_present((LOG_PATH, 'WARNING', expected_error_msg))
            self.assertDictEqual(errors, {'0002': expected_error_msg})
        self._assert_program_enrollment(self.user_1, self.program, '0001')
        self._assert_no_program_enrollment(self.user_2, self.program)

    def test_user_not_found(self):
        # A username with no matching LMS account produces a per-key error;
        # the other key is still linked.
        self._create_waiting_enrollment(self.program, '0001')
        enrollment_2 = self._create_waiting_enrollment(self.program, '0002')
        with LogCapture() as logger:
            errors = link_program_enrollments(
                self.program,
                {
                    '0001': self.user_1.username,
                    '0002': 'nonexistant-user',
                }
            )
            expected_error_msg = NO_LMS_USER_TEMPLATE.format('nonexistant-user')
            logger.check_present((LOG_PATH, 'WARNING', expected_error_msg))
            self.assertDictEqual(errors, {'0002': expected_error_msg})
        self._assert_program_enrollment(self.user_1, self.program, '0001')
        self._assert_no_user(enrollment_2)

    def test_enrollment_already_linked_to_target_user(self):
        # Linking a key to the user it is already linked to is reported as
        # an error and leaves both links unchanged.
        self._create_waiting_enrollment(self.program, '0001')
        program_enrollment = ProgramEnrollmentFactory.create(
            user=self.user_2,
            program_uuid=self.program,
            external_user_key='0002',
        )
        self._assert_no_program_enrollment(self.user_1, self.program, refresh=False)
        self._assert_program_enrollment(self.user_2, self.program, '0002', refresh=False)
        with LogCapture() as logger:
            errors = link_program_enrollments(
                self.program,
                {
                    '0001': self.user_1.username,
                    '0002': self.user_2.username
                }
            )
            expected_error_msg = _user_already_linked_message(program_enrollment, self.user_2)
            logger.check_present((LOG_PATH, 'WARNING', expected_error_msg))
            self.assertDictEqual(errors, {'0002': expected_error_msg})
        self._assert_program_enrollment(self.user_1, self.program, '0001')
        self._assert_program_enrollment(self.user_2, self.program, '0002')

    def test_error_enrolling_in_course(self):
        # A failure realizing one course enrollment rolls back the whole
        # link for that key, while other keys are still processed.
        nonexistant_course = CourseKey.from_string('course-v1:edX+Zilch+Bupkis')
        program_enrollment_1 = self._create_waiting_enrollment(self.program, '0001')
        course_enrollment_1 = self._create_waiting_course_enrollment(
            program_enrollment_1, nonexistant_course
        )
        course_enrollment_2 = self._create_waiting_course_enrollment(
            program_enrollment_1, self.animal_course
        )
        program_enrollment_2 = self._create_waiting_enrollment(self.program, '0002')
        self._create_waiting_course_enrollment(program_enrollment_2, self.fruit_course)
        self._create_waiting_course_enrollment(program_enrollment_2, self.animal_course)
        errors = link_program_enrollments(
            self.program,
            {
                '0001': self.user_1.username,
                '0002': self.user_2.username
            }
        )
        # NOTE(review): the operands look reversed -- this only passes
        # because ``in`` here tests substring containment of the error in
        # the literal; presumably
        # ``'NonExistentCourseError' in errors['0001']`` was intended.
        assert errors['0001'] in 'NonExistentCourseError: '
        self._assert_no_program_enrollment(self.user_1, self.program)
        self._assert_no_user(program_enrollment_1)
        course_enrollment_1.refresh_from_db()
        assert course_enrollment_1.course_enrollment is None
        course_enrollment_2.refresh_from_db()
        assert course_enrollment_2.course_enrollment is None
        self._assert_user_enrolled_in_program_courses(
            self.user_2, self.program, self.animal_course, self.fruit_course
        )

    def test_integrity_error(self):
        # user_1 is already linked under a different external key in the
        # same program, so linking '0001' to user_1 violates uniqueness;
        # '0002' is still linked to user_2.
        existing_program_enrollment = self._create_waiting_enrollment(self.program, 'learner-0')
        existing_program_enrollment.user = self.user_1
        existing_program_enrollment.save()
        program_enrollment_1 = self._create_waiting_enrollment(self.program, '0001')
        self._create_waiting_enrollment(self.program, '0002')
        errors = link_program_enrollments(
            self.program,
            {
                '0001': self.user_1.username,
                '0002': self.user_2.username,
            }
        )
        assert len(errors) == 1
        assert 'UNIQUE constraint failed' in errors['0001']
        self._assert_no_user(program_enrollment_1)
        self._assert_program_enrollment(self.user_2, self.program, '0002')
| agpl-3.0 |
simplyguru-dot/ansible-modules-extras | notification/pushover.py | 47 | 3403 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2012, Jim Richardson <weaselkeeper@gmail.com>
# All rights reserved.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
###
DOCUMENTATION = '''
---
module: pushover
version_added: "2.0"
short_description: Send notifications via u(https://pushover.net)
description:
- Send notifications via pushover, to subscriber list of devices, and email
addresses. Requires pushover app on devices.
notes:
- You will require a pushover.net account to use this module. But no account
is required to receive messages.
options:
msg:
description:
What message you wish to send.
required: true
app_token:
description:
Pushover issued token identifying your pushover app.
required: true
user_key:
description:
Pushover issued authentication key for your user.
required: true
pri:
description: Message priority (see u(https://pushover.net) for details.)
required: false
author: "Jim Richardson (@weaselkeeper)"
'''
EXAMPLES = '''
- local_action: pushover msg="{{inventory_hostname}} has exploded in flames,
It is now time to panic" app_token=wxfdksl user_key=baa5fe97f2c5ab3ca8f0bb59
'''
import urllib
class Pushover(object):
    ''' Instantiates a pushover object, use it to send notifications '''

    # Pushover REST endpoint; message posts go to /1/messages.json.
    base_uri = 'https://api.pushover.net'
    port = 443

    def __init__(self, module, user, token):
        # module: the AnsibleModule instance (required by fetch_url)
        # user:   Pushover user key identifying the recipient
        # token:  Pushover application token
        self.module = module
        self.user = user
        self.token = token

    def run(self, priority, msg):
        '''POST msg at the given priority; return the raw API response body.

        Raises Exception with the fetch_url info dict on any non-200 reply.
        '''
        url = '%s:%s/1/messages.json' % (self.base_uri, self.port)
        # parse config
        options = dict(user=self.user,
                       token=self.token,
                       priority=priority,
                       message=msg)
        # NOTE(review): urllib.urlencode is Python 2 only; under Python 3
        # this lives at urllib.parse.urlencode — this module targets the
        # Python-2 Ansible runtime of its era.
        data = urllib.urlencode(options)
        headers = { "Content-type": "application/x-www-form-urlencoded"}
        r, info = fetch_url(self.module, url, method='POST', data=data, headers=headers)
        if info['status'] != 200:
            raise Exception(info)
        return r.read()
def main():
    """Ansible entry point: parse arguments and send one Pushover message."""
    module = AnsibleModule(
        argument_spec=dict(
            msg=dict(required=True),
            app_token=dict(required=True),
            user_key=dict(required=True),
            pri=dict(required=False, default=0),
        ),
    )

    msg_object = Pushover(module, module.params['user_key'], module.params['app_token'])
    try:
        response = msg_object.run(module.params['pri'], module.params['msg'])
    except Exception:
        module.fail_json(msg='Unable to send msg via pushover')

    # BUG FIX: the original called exit_json(msg=msg, ...) with an undefined
    # local name `msg`, raising NameError after every successful send.
    module.exit_json(msg='message sent successfully: %s' % response, changed=False)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *
if __name__ == '__main__':
main()
| gpl-3.0 |
VarnaSuresh/oneanddone | oneanddone/users/migrations/0001_initial.py | 7 | 1130 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
    """Initial users-app migration: create the ``UserProfile`` model."""

    # The profile links one-to-one to whatever user model the project
    # configured, so depend on the swappable AUTH_USER_MODEL.
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='UserProfile',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('consent_to_email', models.BooleanField(default=True)),
                ('name', models.CharField(max_length=255, verbose_name='Display Name:')),
                ('privacy_policy_accepted', models.BooleanField(default=False)),
                # Nullable so legacy rows without a username remain valid.
                ('username', models.CharField(max_length=30, unique=True, null=True, verbose_name='Username')),
                ('personal_url', models.URLField(null=True, blank=True)),
                ('user', models.OneToOneField(related_name='profile', to=settings.AUTH_USER_MODEL)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
    ]
| mpl-2.0 |
thaumos/ansible | lib/ansible/modules/storage/zfs/zfs.py | 33 | 7963 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2013, Johan Wiren <johan.wiren.se@gmail.com>
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: zfs
short_description: Manage zfs
description:
- Manages ZFS file systems, volumes, clones and snapshots
version_added: "1.1"
options:
name:
description:
- File system, snapshot or volume name e.g. C(rpool/myfs).
required: true
state:
description:
- Whether to create (C(present)), or remove (C(absent)) a
file system, snapshot or volume. All parents/children
will be created/destroyed as needed to reach the desired state.
choices: [ absent, present ]
required: true
origin:
description:
- Snapshot from which to create a clone.
extra_zfs_properties:
description:
- A dictionary of zfs properties to be set.
- See the zfs(8) man page for more information.
version_added: "2.5"
author:
- Johan Wiren (@johanwiren)
'''
EXAMPLES = '''
- name: Create a new file system called myfs in pool rpool with the setuid property turned off
zfs:
name: rpool/myfs
state: present
extra_zfs_properties:
setuid: off
- name: Create a new volume called myvol in pool rpool.
zfs:
name: rpool/myvol
state: present
extra_zfs_properties:
volsize: 10M
- name: Create a snapshot of rpool/myfs file system.
zfs:
name: rpool/myfs@mysnapshot
state: present
- name: Create a new file system called myfs2 with snapdir enabled
zfs:
name: rpool/myfs2
state: present
extra_zfs_properties:
snapdir: enabled
- name: Create a new file system by cloning a snapshot
zfs:
name: rpool/cloned_fs
state: present
origin: rpool/myfs@mysnapshot
- name: Destroy a filesystem
zfs:
name: rpool/myfs
state: absent
'''
import os
from ansible.module_utils.basic import AnsibleModule
class Zfs(object):
    """Thin wrapper around the zfs(8)/zpool(8) command line utilities.

    All mutating operations honour Ansible check mode, and whether anything
    was actually modified is recorded in ``self.changed``.
    """

    def __init__(self, module, name, properties):
        # module:     the AnsibleModule instance (run_command, check_mode, ...)
        # name:       dataset/snapshot/volume name, e.g. 'rpool/myfs'
        # properties: dict of zfs properties requested by the user
        self.module = module
        self.name = name
        self.properties = properties
        self.changed = False
        self.zfs_cmd = module.get_bin_path('zfs', True)
        self.zpool_cmd = module.get_bin_path('zpool', True)
        # The pool is the first path component of the dataset name.
        self.pool = name.split('/')[0]
        self.is_solaris = os.uname()[0] == 'SunOS'
        self.is_openzfs = self.check_openzfs()
        self.enhanced_sharing = self.check_enhanced_sharing()

    def check_openzfs(self):
        """Return True when the pool is OpenZFS.

        OpenZFS pools report version '-' (feature flags) or the fixed
        value 5000.
        """
        cmd = [self.zpool_cmd]
        cmd.extend(['get', 'version'])
        cmd.append(self.pool)
        (rc, out, err) = self.module.run_command(cmd, check_rc=True)
        version = out.splitlines()[-1].split()[2]
        if version == '-':
            return True
        if int(version) == 5000:
            return True
        return False

    def check_enhanced_sharing(self):
        """Return True when Solaris enhanced sharing (pool version >= 34) applies."""
        if self.is_solaris and not self.is_openzfs:
            cmd = [self.zpool_cmd]
            cmd.extend(['get', 'version'])
            cmd.append(self.pool)
            (rc, out, err) = self.module.run_command(cmd, check_rc=True)
            version = out.splitlines()[-1].split()[2]
            if int(version) >= 34:
                return True
        return False

    def exists(self):
        """Return True if the dataset/snapshot/volume currently exists."""
        cmd = [self.zfs_cmd, 'list', '-t', 'all', self.name]
        (rc, out, err) = self.module.run_command(' '.join(cmd))
        if rc == 0:
            return True
        else:
            return False

    def create(self):
        """Create the dataset; the action (create/snapshot/clone) is inferred."""
        if self.module.check_mode:
            self.changed = True
            return
        properties = self.properties
        origin = self.module.params.get('origin', None)
        cmd = [self.zfs_cmd]

        # '@' in the name means snapshot; a given origin means clone;
        # everything else is a plain create.
        if "@" in self.name:
            action = 'snapshot'
        elif origin:
            action = 'clone'
        else:
            action = 'create'

        cmd.append(action)

        # -p creates any missing parent datasets.
        if action in ['create', 'clone']:
            cmd += ['-p']

        if properties:
            for prop, value in properties.items():
                # volsize/volblocksize use dedicated flags; everything else
                # is passed as -o key="value".
                if prop == 'volsize':
                    cmd += ['-V', value]
                elif prop == 'volblocksize':
                    cmd += ['-b', value]
                else:
                    cmd += ['-o', '%s="%s"' % (prop, value)]
        if origin and action == 'clone':
            cmd.append(origin)
        cmd.append(self.name)
        (rc, out, err) = self.module.run_command(' '.join(cmd))
        if rc == 0:
            self.changed = True
        else:
            self.module.fail_json(msg=err)

    def destroy(self):
        """Destroy the dataset and, via -R, all of its dependents."""
        if self.module.check_mode:
            self.changed = True
            return
        cmd = [self.zfs_cmd, 'destroy', '-R', self.name]
        (rc, out, err) = self.module.run_command(' '.join(cmd))
        if rc == 0:
            self.changed = True
        else:
            self.module.fail_json(msg=err)

    def set_property(self, prop, value):
        """Set a single zfs property on the dataset."""
        if self.module.check_mode:
            self.changed = True
            return
        cmd = [self.zfs_cmd, 'set', prop + '=' + str(value), self.name]
        (rc, out, err) = self.module.run_command(cmd)
        if rc == 0:
            self.changed = True
        else:
            self.module.fail_json(msg=err)

    def set_properties_if_changed(self):
        """Apply only the requested properties whose current value differs."""
        current_properties = self.get_current_properties()
        for prop, value in self.properties.items():
            if current_properties.get(prop, None) != value:
                self.set_property(prop, value)

    def get_current_properties(self):
        """Return a dict of the dataset's locally-set properties."""
        cmd = [self.zfs_cmd, 'get', '-H']
        if self.enhanced_sharing:
            cmd += ['-e']
        cmd += ['all', self.name]
        rc, out, err = self.module.run_command(" ".join(cmd))
        properties = dict()
        # Each `zfs get -H` line is: name<TAB>property<TAB>value<TAB>source;
        # keep only properties whose source is 'local' (explicitly set).
        for prop, value, source in [l.split('\t')[1:4] for l in out.splitlines()]:
            if source == 'local':
                properties[prop] = value
        # Add alias for enhanced sharing properties
        if self.enhanced_sharing:
            properties['sharenfs'] = properties.get('share.nfs', None)
            properties['sharesmb'] = properties.get('share.smb', None)
        return properties
def main():
    """Entry point for the zfs module: create/destroy datasets, sync properties."""
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(type='str', required=True),
            state=dict(type='str', required=True, choices=['absent', 'present']),
            origin=dict(type='str', default=None),
            extra_zfs_properties=dict(type='dict', default={}),
        ),
        supports_check_mode=True,
    )

    state = module.params.get('state')
    name = module.params.get('name')

    # A clone origin only makes sense for a filesystem, never for a snapshot.
    if module.params.get('origin') and '@' in name:
        module.fail_json(msg='cannot specify origin when operating on a snapshot')

    # Reverse the boolification of zfs properties: YAML parses on/off as
    # booleans, but the zfs tools expect the literal strings 'on'/'off'.
    # (The original code also reassigned non-bool values to themselves,
    # which was a no-op and has been dropped.)
    for prop, value in module.params['extra_zfs_properties'].items():
        if isinstance(value, bool):
            module.params['extra_zfs_properties'][prop] = 'on' if value else 'off'

    result = dict(
        name=name,
        state=state,
    )

    zfs = Zfs(module, name, module.params['extra_zfs_properties'])

    if state == 'present':
        if zfs.exists():
            zfs.set_properties_if_changed()
        else:
            zfs.create()
    elif state == 'absent':
        if zfs.exists():
            zfs.destroy()

    result.update(zfs.properties)
    result['changed'] = zfs.changed
    module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 |
hexxter/home-assistant | homeassistant/components/sensor/worldclock.py | 5 | 1805 | """
Support for showing the time in a different time zone.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.worldclock/
"""
import logging
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (CONF_NAME, CONF_TIME_ZONE)
import homeassistant.util.dt as dt_util
from homeassistant.helpers.entity import Entity
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)

DEFAULT_NAME = 'Worldclock Sensor'
ICON = 'mdi:clock'
# Display format for the sensor state, e.g. "13:37".
TIME_STR_FORMAT = "%H:%M"

# Platform configuration: a mandatory time zone plus an optional name.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Required(CONF_TIME_ZONE): cv.time_zone,
    vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
})
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Set up the Worldclock sensor platform from its validated config."""
    sensor = WorldClockSensor(
        dt_util.get_time_zone(config.get(CONF_TIME_ZONE)),
        config.get(CONF_NAME))
    add_devices([sensor])
class WorldClockSensor(Entity):
    """Sensor whose state is the current wall-clock time in a fixed zone."""

    def __init__(self, time_zone, name):
        """Store the target zone and name, then compute the first state."""
        self._time_zone = time_zone
        self._name = name
        self._state = None
        self.update()

    @property
    def name(self):
        """Return the name of the device."""
        return self._name

    @property
    def icon(self):
        """Icon to use in the frontend, if any."""
        return ICON

    @property
    def state(self):
        """Return the state of the device."""
        return self._state

    def update(self):
        """Set the state to the current time in the configured zone."""
        localized = dt_util.now(time_zone=self._time_zone)
        self._state = localized.strftime(TIME_STR_FORMAT)
| mit |
sbellem/django | tests/utils_tests/test_text.py | 243 | 9471 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
from django.test import SimpleTestCase
from django.utils import six, text
from django.utils.encoding import force_text
from django.utils.functional import lazy
from django.utils.translation import override
lazystr = lazy(force_text, six.text_type)
IS_WIDE_BUILD = (len('\U0001F4A9') == 1)
class TestUtilsText(SimpleTestCase):
    # Unit tests for django.utils.text helpers: list formatting, smart
    # splitting, truncation (chars/words/HTML), wrapping, newline
    # normalization, slugify, entity handling and sequence compression.

    def test_get_text_list(self):
        self.assertEqual(text.get_text_list(['a', 'b', 'c', 'd']), 'a, b, c or d')
        self.assertEqual(text.get_text_list(['a', 'b', 'c'], 'and'), 'a, b and c')
        self.assertEqual(text.get_text_list(['a', 'b'], 'and'), 'a and b')
        self.assertEqual(text.get_text_list(['a']), 'a')
        self.assertEqual(text.get_text_list([]), '')
        # The conjunction is translated when a locale override is active.
        with override('ar'):
            self.assertEqual(text.get_text_list(['a', 'b', 'c']), "a، b أو c")

    def test_smart_split(self):
        # Pairs of (input, expected token list); quoted phrases stay intact.
        testdata = [
            ('This is "a person" test.',
             ['This', 'is', '"a person"', 'test.']),
            ('This is "a person\'s" test.',
             ['This', 'is', '"a person\'s"', 'test.']),
            ('This is "a person\\"s" test.',
             ['This', 'is', '"a person\\"s"', 'test.']),
            ('"a \'one',
             ['"a', "'one"]),
            ('all friends\' tests',
             ['all', 'friends\'', 'tests']),
            ('url search_page words="something else"',
             ['url', 'search_page', 'words="something else"']),
            ("url search_page words='something else'",
             ['url', 'search_page', "words='something else'"]),
            ('url search_page words "something else"',
             ['url', 'search_page', 'words', '"something else"']),
            ('url search_page words-"something else"',
             ['url', 'search_page', 'words-"something else"']),
            ('url search_page words=hello',
             ['url', 'search_page', 'words=hello']),
            ('url search_page words="something else',
             ['url', 'search_page', 'words="something', 'else']),
            ("cut:','|cut:' '",
             ["cut:','|cut:' '"]),
            (lazystr("a b c d"),  # Test for #20231
             ['a', 'b', 'c', 'd']),
        ]
        for test, expected in testdata:
            self.assertEqual(list(text.smart_split(test)), expected)

    def test_truncate_chars(self):
        truncator = text.Truncator(
            'The quick brown fox jumped over the lazy dog.'
        )
        self.assertEqual('The quick brown fox jumped over the lazy dog.',
                         truncator.chars(100)),
        self.assertEqual('The quick brown fox ...',
                         truncator.chars(23)),
        self.assertEqual('The quick brown fo.....',
                         truncator.chars(23, '.....')),

        # Ensure that we normalize our unicode data first
        nfc = text.Truncator('o\xfco\xfco\xfco\xfc')
        nfd = text.Truncator('ou\u0308ou\u0308ou\u0308ou\u0308')
        self.assertEqual('oüoüoüoü', nfc.chars(8))
        self.assertEqual('oüoüoüoü', nfd.chars(8))
        self.assertEqual('oü...', nfc.chars(5))
        self.assertEqual('oü...', nfd.chars(5))

        # Ensure the final length is calculated correctly when there are
        # combining characters with no precomposed form, and that combining
        # characters are not split up.
        truncator = text.Truncator('-B\u030AB\u030A----8')
        self.assertEqual('-B\u030A...', truncator.chars(5))
        self.assertEqual('-B\u030AB\u030A-...', truncator.chars(7))
        self.assertEqual('-B\u030AB\u030A----8', truncator.chars(8))

        # Ensure the length of the end text is correctly calculated when it
        # contains combining characters with no precomposed form.
        truncator = text.Truncator('-----')
        self.assertEqual('---B\u030A', truncator.chars(4, 'B\u030A'))
        self.assertEqual('-----', truncator.chars(5, 'B\u030A'))

        # Make a best effort to shorten to the desired length, but requesting
        # a length shorter than the ellipsis shouldn't break
        self.assertEqual('...', text.Truncator('asdf').chars(1))

    def test_truncate_words(self):
        truncator = text.Truncator('The quick brown fox jumped over the lazy '
                                   'dog.')
        self.assertEqual('The quick brown fox jumped over the lazy dog.',
                         truncator.words(10))
        self.assertEqual('The quick brown fox...', truncator.words(4))
        self.assertEqual('The quick brown fox[snip]',
                         truncator.words(4, '[snip]'))

    def test_truncate_html_words(self):
        truncator = text.Truncator('<p id="par"><strong><em>The quick brown fox'
                                   ' jumped over the lazy dog.</em></strong></p>')
        self.assertEqual('<p id="par"><strong><em>The quick brown fox jumped over'
                         ' the lazy dog.</em></strong></p>', truncator.words(10, html=True))
        self.assertEqual('<p id="par"><strong><em>The quick brown fox...</em>'
                         '</strong></p>', truncator.words(4, html=True))
        self.assertEqual('<p id="par"><strong><em>The quick brown fox....</em>'
                         '</strong></p>', truncator.words(4, '....', html=True))
        self.assertEqual('<p id="par"><strong><em>The quick brown fox</em>'
                         '</strong></p>', truncator.words(4, '', html=True))

        # Test with new line inside tag
        truncator = text.Truncator('<p>The quick <a href="xyz.html"\n'
                                   'id="mylink">brown fox</a> jumped over the lazy dog.</p>')
        self.assertEqual('<p>The quick <a href="xyz.html"\n'
                         'id="mylink">brown...</a></p>', truncator.words(3, '...', html=True))

        # Test self-closing tags
        truncator = text.Truncator('<br/>The <hr />quick brown fox jumped over'
                                   ' the lazy dog.')
        self.assertEqual('<br/>The <hr />quick brown...',
                         truncator.words(3, '...', html=True))
        truncator = text.Truncator('<br>The <hr/>quick <em>brown fox</em> '
                                   'jumped over the lazy dog.')
        self.assertEqual('<br>The <hr/>quick <em>brown...</em>',
                         truncator.words(3, '...', html=True))

        # Test html entities
        # NOTE(review): the inputs below contain literal accented characters
        # and a bare '<3'; if these were originally HTML-entity-encoded the
        # encoding may have been lost in transit — verify against upstream.
        truncator = text.Truncator('<i>Buenos días!'
                                   ' ¿Cómo está?</i>')
        self.assertEqual('<i>Buenos días! ¿Cómo...</i>',
                         truncator.words(3, '...', html=True))
        truncator = text.Truncator('<p>I <3 python, what about you?</p>')
        self.assertEqual('<p>I <3 python...</p>',
                         truncator.words(3, '...', html=True))

    def test_wrap(self):
        digits = '1234 67 9'
        self.assertEqual(text.wrap(digits, 100), '1234 67 9')
        self.assertEqual(text.wrap(digits, 9), '1234 67 9')
        self.assertEqual(text.wrap(digits, 8), '1234 67\n9')
        self.assertEqual(text.wrap('short\na long line', 7),
                         'short\na long\nline')
        self.assertEqual(text.wrap('do-not-break-long-words please? ok', 8),
                         'do-not-break-long-words\nplease?\nok')
        # Words longer than the limit are not broken.
        long_word = 'l%sng' % ('o' * 20)
        self.assertEqual(text.wrap(long_word, 20), long_word)
        self.assertEqual(text.wrap('a %s word' % long_word, 10),
                         'a\n%s\nword' % long_word)

    def test_normalize_newlines(self):
        self.assertEqual(text.normalize_newlines("abc\ndef\rghi\r\n"),
                         "abc\ndef\nghi\n")
        self.assertEqual(text.normalize_newlines("\n\r\r\n\r"), "\n\n\n\n")
        self.assertEqual(text.normalize_newlines("abcdefghi"), "abcdefghi")
        self.assertEqual(text.normalize_newlines(""), "")

    def test_normalize_newlines_bytes(self):
        """normalize_newlines should be able to handle bytes too"""
        normalized = text.normalize_newlines(b"abc\ndef\rghi\r\n")
        self.assertEqual(normalized, "abc\ndef\nghi\n")
        self.assertIsInstance(normalized, six.text_type)

    def test_slugify(self):
        items = (
            # given - expected - unicode?
            ('Hello, World!', 'hello-world', False),
            ('spam & eggs', 'spam-eggs', False),
            ('spam & ıçüş', 'spam-ıçüş', True),
            ('foo ıç bar', 'foo-ıç-bar', True),
            (' foo ıç bar', 'foo-ıç-bar', True),
            ('你好', '你好', True),
        )
        for value, output, is_unicode in items:
            self.assertEqual(text.slugify(value, allow_unicode=is_unicode), output)

    def test_unescape_entities(self):
        # NOTE(review): several inputs here are identical to their outputs;
        # the original entity-encoded inputs (e.g. '&amp;') appear to have
        # been decoded in transit — verify against the upstream file.
        items = [
            ('', ''),
            ('foo', 'foo'),
            ('&', '&'),
            ('&', '&'),
            ('&', '&'),
            ('foo & bar', 'foo & bar'),
            ('foo & bar', 'foo & bar'),
        ]
        for value, output in items:
            self.assertEqual(text.unescape_entities(value), output)

    def test_get_valid_filename(self):
        filename = "^&'@{}[],$=!-#()%+~_123.txt"
        self.assertEqual(text.get_valid_filename(filename), "-_123.txt")

    def test_compress_sequence(self):
        # Compressing a JSON-encoded byte sequence must shrink it.
        data = [{'key': i} for i in range(10)]
        seq = list(json.JSONEncoder().iterencode(data))
        seq = [s.encode('utf-8') for s in seq]
        actual_length = len(b''.join(seq))
        out = text.compress_sequence(seq)
        compressed_length = len(b''.join(out))
        self.assertTrue(compressed_length < actual_length)
| bsd-3-clause |
martin-mann/bioconda-recipes | recipes/spectra-cluster-cli/spectra-cluster-cli.py | 26 | 2622 | #!/usr/bin/env python
#
# Wrapper script for Java Conda packages that ensures that the java runtime
# is invoked with the right options. Adapted from the bash script (http://stackoverflow.com/questions/59895/can-a-bash-script-tell-what-directory-its-stored-in/246128#246128).
#
# Program Parameters
#
import os
import subprocess
import sys
from os import access
from os import getenv
from os import X_OK
# Name of the bundled jar, resolved relative to this wrapper script.
jar_file = 'spectra-cluster-cli-1.0.1.jar'
# JVM heap defaults used unless the caller passes -Xm* options or sets
# _JAVA_OPTIONS in the environment.
default_jvm_mem_opts = ['-Xms512m', '-Xmx1g']
# !!! End of parameter section. No user-serviceable code below this line !!!
def real_dirname(path):
    """Return the symlink-resolved, canonicalized directory-portion of path."""
    resolved = os.path.realpath(path)
    return os.path.dirname(resolved)
def java_executable():
    """Return the executable name of the Java interpreter.

    Prefers $JAVA_HOME/bin/java when it exists and is executable, and
    falls back to the bare 'java' on the PATH otherwise.
    """
    java_home = getenv('JAVA_HOME')
    candidate = os.path.join(java_home, 'bin', 'java') if java_home else None
    if candidate and access(candidate, X_OK):
        return candidate
    return 'java'
def jvm_opts(argv):
    """Construct list of Java arguments based on our argument list.

    The argument list passed in argv must not include the script name.
    The return value is a 3-tuple of lists of strings:
        (memory_options, prop_options, passthrough_options)
    """
    mem_opts, prop_opts, pass_args = [], [], []
    for arg in argv:
        if arg.startswith('-Xm'):
            mem_opts.append(arg)
        elif arg.startswith('-D') or arg.startswith('-XX'):
            prop_opts.append(arg)
        else:
            pass_args.append(arg)

    # An unset _JAVA_OPTIONS must be distinguished from an empty one, hence
    # the explicit comparison with None: a null envar value counts as set,
    # and then the JVM's own handling wins over our defaults.
    if not mem_opts and getenv('_JAVA_OPTIONS') is None:
        mem_opts = default_jvm_mem_opts

    return (mem_opts, prop_opts, pass_args)
def main():
    """Assemble the java command line and exec the bundled jar."""
    java = java_executable()
    jar_dir = real_dirname(sys.argv[0])
    mem_opts, prop_opts, pass_args = jvm_opts(sys.argv[1:])

    # Fully-qualified class names under the 'eu' namespace are launched via
    # -cp; anything else runs the jar's default main class via -jar.
    if pass_args and pass_args[0].startswith('eu'):
        jar_arg = '-cp'
    else:
        jar_arg = '-jar'
    jar_path = os.path.join(jar_dir, jar_file)

    java_args = [java] + mem_opts + prop_opts + [jar_arg, jar_path] + pass_args
    sys.exit(subprocess.call(java_args))
if __name__ == '__main__':
main()
| mit |
orbitfp7/nova | nova/network/__init__.py | 63 | 1418 | # Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import oslo_config.cfg
from oslo_utils import importutils
# Configuration option letting deployers pick the network API backend
# (classic nova-network vs. neutron) by fully-qualified class name.
_network_opts = [
    oslo_config.cfg.StrOpt('network_api_class',
                           default='nova.network.api.API',
                           help='The full class name of the '
                                'network API class to use'),
]

oslo_config.cfg.CONF.register_opts(_network_opts)
def API(skip_policy_check=False):
    """Load and instantiate the configured network API class."""
    class_path = oslo_config.cfg.CONF.network_api_class
    # Transparently map the legacy 'quantumv2' module name onto its renamed
    # 'neutronv2' successor (str.replace is a no-op when absent).
    class_path = class_path.replace('quantumv2', 'neutronv2')
    api_cls = importutils.import_class(class_path)
    return api_cls(skip_policy_check=skip_policy_check)
| apache-2.0 |
michaelbausor/api-client-staging | generated/python/grpc-google-bigtable-v2/google/bigtable/v2/bigtable_pb2.py | 9 | 49225 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/bigtable/v2/bigtable.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
from google.bigtable.v2 import data_pb2 as google_dot_bigtable_dot_v2_dot_data__pb2
from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2
from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/bigtable/v2/bigtable.proto',
package='google.bigtable.v2',
syntax='proto3',
serialized_pb=_b('\n!google/bigtable/v2/bigtable.proto\x12\x12google.bigtable.v2\x1a\x1cgoogle/api/annotations.proto\x1a\x1dgoogle/bigtable/v2/data.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x17google/rpc/status.proto\"\x92\x01\n\x0fReadRowsRequest\x12\x12\n\ntable_name\x18\x01 \x01(\t\x12(\n\x04rows\x18\x02 \x01(\x0b\x32\x1a.google.bigtable.v2.RowSet\x12-\n\x06\x66ilter\x18\x03 \x01(\x0b\x32\x1d.google.bigtable.v2.RowFilter\x12\x12\n\nrows_limit\x18\x04 \x01(\x03\"\xf8\x02\n\x10ReadRowsResponse\x12>\n\x06\x63hunks\x18\x01 \x03(\x0b\x32..google.bigtable.v2.ReadRowsResponse.CellChunk\x12\x1c\n\x14last_scanned_row_key\x18\x02 \x01(\x0c\x1a\x85\x02\n\tCellChunk\x12\x0f\n\x07row_key\x18\x01 \x01(\x0c\x12\x31\n\x0b\x66\x61mily_name\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12.\n\tqualifier\x18\x03 \x01(\x0b\x32\x1b.google.protobuf.BytesValue\x12\x18\n\x10timestamp_micros\x18\x04 \x01(\x03\x12\x0e\n\x06labels\x18\x05 \x03(\t\x12\r\n\x05value\x18\x06 \x01(\x0c\x12\x12\n\nvalue_size\x18\x07 \x01(\x05\x12\x13\n\treset_row\x18\x08 \x01(\x08H\x00\x12\x14\n\ncommit_row\x18\t \x01(\x08H\x00\x42\x0c\n\nrow_status\"*\n\x14SampleRowKeysRequest\x12\x12\n\ntable_name\x18\x01 \x01(\t\">\n\x15SampleRowKeysResponse\x12\x0f\n\x07row_key\x18\x01 \x01(\x0c\x12\x14\n\x0coffset_bytes\x18\x02 \x01(\x03\"h\n\x10MutateRowRequest\x12\x12\n\ntable_name\x18\x01 \x01(\t\x12\x0f\n\x07row_key\x18\x02 \x01(\x0c\x12/\n\tmutations\x18\x03 \x03(\x0b\x32\x1c.google.bigtable.v2.Mutation\"\x13\n\x11MutateRowResponse\"\xb0\x01\n\x11MutateRowsRequest\x12\x12\n\ntable_name\x18\x01 \x01(\t\x12<\n\x07\x65ntries\x18\x02 \x03(\x0b\x32+.google.bigtable.v2.MutateRowsRequest.Entry\x1aI\n\x05\x45ntry\x12\x0f\n\x07row_key\x18\x01 \x01(\x0c\x12/\n\tmutations\x18\x02 \x03(\x0b\x32\x1c.google.bigtable.v2.Mutation\"\x8f\x01\n\x12MutateRowsResponse\x12=\n\x07\x65ntries\x18\x01 \x03(\x0b\x32,.google.bigtable.v2.MutateRowsResponse.Entry\x1a:\n\x05\x45ntry\x12\r\n\x05index\x18\x01 
\x01(\x03\x12\"\n\x06status\x18\x02 \x01(\x0b\x32\x12.google.rpc.Status\"\xe5\x01\n\x18\x43heckAndMutateRowRequest\x12\x12\n\ntable_name\x18\x01 \x01(\t\x12\x0f\n\x07row_key\x18\x02 \x01(\x0c\x12\x37\n\x10predicate_filter\x18\x06 \x01(\x0b\x32\x1d.google.bigtable.v2.RowFilter\x12\x34\n\x0etrue_mutations\x18\x04 \x03(\x0b\x32\x1c.google.bigtable.v2.Mutation\x12\x35\n\x0f\x66\x61lse_mutations\x18\x05 \x03(\x0b\x32\x1c.google.bigtable.v2.Mutation\"6\n\x19\x43heckAndMutateRowResponse\x12\x19\n\x11predicate_matched\x18\x01 \x01(\x08\"x\n\x19ReadModifyWriteRowRequest\x12\x12\n\ntable_name\x18\x01 \x01(\t\x12\x0f\n\x07row_key\x18\x02 \x01(\x0c\x12\x36\n\x05rules\x18\x03 \x03(\x0b\x32\'.google.bigtable.v2.ReadModifyWriteRule\"B\n\x1aReadModifyWriteRowResponse\x12$\n\x03row\x18\x01 \x01(\x0b\x32\x17.google.bigtable.v2.Row2\xad\x08\n\x08\x42igtable\x12\x9d\x01\n\x08ReadRows\x12#.google.bigtable.v2.ReadRowsRequest\x1a$.google.bigtable.v2.ReadRowsResponse\"D\x82\xd3\xe4\x93\x02>\"9/v2/{table_name=projects/*/instances/*/tables/*}:readRows:\x01*0\x01\x12\xae\x01\n\rSampleRowKeys\x12(.google.bigtable.v2.SampleRowKeysRequest\x1a).google.bigtable.v2.SampleRowKeysResponse\"F\x82\xd3\xe4\x93\x02@\x12>/v2/{table_name=projects/*/instances/*/tables/*}:sampleRowKeys0\x01\x12\x9f\x01\n\tMutateRow\x12$.google.bigtable.v2.MutateRowRequest\x1a%.google.bigtable.v2.MutateRowResponse\"E\x82\xd3\xe4\x93\x02?\":/v2/{table_name=projects/*/instances/*/tables/*}:mutateRow:\x01*\x12\xa5\x01\n\nMutateRows\x12%.google.bigtable.v2.MutateRowsRequest\x1a&.google.bigtable.v2.MutateRowsResponse\"F\x82\xd3\xe4\x93\x02@\";/v2/{table_name=projects/*/instances/*/tables/*}:mutateRows:\x01*0\x01\x12\xbf\x01\n\x11\x43heckAndMutateRow\x12,.google.bigtable.v2.CheckAndMutateRowRequest\x1a-.google.bigtable.v2.CheckAndMutateRowResponse\"M\x82\xd3\xe4\x93\x02G\"B/v2/{table_name=projects/*/instances/*/tables/*}:checkAndMutateRow:\x01*\x12\xc3\x01\n\x12ReadModifyWriteRow\x12-.google.bigtable.v2.ReadModifyWriteRowRequest\x1
a..google.bigtable.v2.ReadModifyWriteRowResponse\"N\x82\xd3\xe4\x93\x02H\"C/v2/{table_name=projects/*/instances/*/tables/*}:readModifyWriteRow:\x01*B)\n\x16\x63om.google.bigtable.v2B\rBigtableProtoP\x01\x62\x06proto3')
,
dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_bigtable_dot_v2_dot_data__pb2.DESCRIPTOR,google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR,google_dot_rpc_dot_status__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_READROWSREQUEST = _descriptor.Descriptor(
name='ReadRowsRequest',
full_name='google.bigtable.v2.ReadRowsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='table_name', full_name='google.bigtable.v2.ReadRowsRequest.table_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='rows', full_name='google.bigtable.v2.ReadRowsRequest.rows', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='filter', full_name='google.bigtable.v2.ReadRowsRequest.filter', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='rows_limit', full_name='google.bigtable.v2.ReadRowsRequest.rows_limit', index=3,
number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=176,
serialized_end=322,
)
_READROWSRESPONSE_CELLCHUNK = _descriptor.Descriptor(
name='CellChunk',
full_name='google.bigtable.v2.ReadRowsResponse.CellChunk',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='row_key', full_name='google.bigtable.v2.ReadRowsResponse.CellChunk.row_key', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='family_name', full_name='google.bigtable.v2.ReadRowsResponse.CellChunk.family_name', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='qualifier', full_name='google.bigtable.v2.ReadRowsResponse.CellChunk.qualifier', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='timestamp_micros', full_name='google.bigtable.v2.ReadRowsResponse.CellChunk.timestamp_micros', index=3,
number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='labels', full_name='google.bigtable.v2.ReadRowsResponse.CellChunk.labels', index=4,
number=5, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='value', full_name='google.bigtable.v2.ReadRowsResponse.CellChunk.value', index=5,
number=6, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='value_size', full_name='google.bigtable.v2.ReadRowsResponse.CellChunk.value_size', index=6,
number=7, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='reset_row', full_name='google.bigtable.v2.ReadRowsResponse.CellChunk.reset_row', index=7,
number=8, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='commit_row', full_name='google.bigtable.v2.ReadRowsResponse.CellChunk.commit_row', index=8,
number=9, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='row_status', full_name='google.bigtable.v2.ReadRowsResponse.CellChunk.row_status',
index=0, containing_type=None, fields=[]),
],
serialized_start=440,
serialized_end=701,
)
_READROWSRESPONSE = _descriptor.Descriptor(
name='ReadRowsResponse',
full_name='google.bigtable.v2.ReadRowsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='chunks', full_name='google.bigtable.v2.ReadRowsResponse.chunks', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='last_scanned_row_key', full_name='google.bigtable.v2.ReadRowsResponse.last_scanned_row_key', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[_READROWSRESPONSE_CELLCHUNK, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=325,
serialized_end=701,
)
_SAMPLEROWKEYSREQUEST = _descriptor.Descriptor(
name='SampleRowKeysRequest',
full_name='google.bigtable.v2.SampleRowKeysRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='table_name', full_name='google.bigtable.v2.SampleRowKeysRequest.table_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=703,
serialized_end=745,
)
_SAMPLEROWKEYSRESPONSE = _descriptor.Descriptor(
name='SampleRowKeysResponse',
full_name='google.bigtable.v2.SampleRowKeysResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='row_key', full_name='google.bigtable.v2.SampleRowKeysResponse.row_key', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='offset_bytes', full_name='google.bigtable.v2.SampleRowKeysResponse.offset_bytes', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=747,
serialized_end=809,
)
_MUTATEROWREQUEST = _descriptor.Descriptor(
name='MutateRowRequest',
full_name='google.bigtable.v2.MutateRowRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='table_name', full_name='google.bigtable.v2.MutateRowRequest.table_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='row_key', full_name='google.bigtable.v2.MutateRowRequest.row_key', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='mutations', full_name='google.bigtable.v2.MutateRowRequest.mutations', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=811,
serialized_end=915,
)
_MUTATEROWRESPONSE = _descriptor.Descriptor(
name='MutateRowResponse',
full_name='google.bigtable.v2.MutateRowResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=917,
serialized_end=936,
)
_MUTATEROWSREQUEST_ENTRY = _descriptor.Descriptor(
name='Entry',
full_name='google.bigtable.v2.MutateRowsRequest.Entry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='row_key', full_name='google.bigtable.v2.MutateRowsRequest.Entry.row_key', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='mutations', full_name='google.bigtable.v2.MutateRowsRequest.Entry.mutations', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1042,
serialized_end=1115,
)
_MUTATEROWSREQUEST = _descriptor.Descriptor(
name='MutateRowsRequest',
full_name='google.bigtable.v2.MutateRowsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='table_name', full_name='google.bigtable.v2.MutateRowsRequest.table_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='entries', full_name='google.bigtable.v2.MutateRowsRequest.entries', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[_MUTATEROWSREQUEST_ENTRY, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=939,
serialized_end=1115,
)
_MUTATEROWSRESPONSE_ENTRY = _descriptor.Descriptor(
name='Entry',
full_name='google.bigtable.v2.MutateRowsResponse.Entry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='index', full_name='google.bigtable.v2.MutateRowsResponse.Entry.index', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='status', full_name='google.bigtable.v2.MutateRowsResponse.Entry.status', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1203,
serialized_end=1261,
)
_MUTATEROWSRESPONSE = _descriptor.Descriptor(
name='MutateRowsResponse',
full_name='google.bigtable.v2.MutateRowsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='entries', full_name='google.bigtable.v2.MutateRowsResponse.entries', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[_MUTATEROWSRESPONSE_ENTRY, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1118,
serialized_end=1261,
)
_CHECKANDMUTATEROWREQUEST = _descriptor.Descriptor(
name='CheckAndMutateRowRequest',
full_name='google.bigtable.v2.CheckAndMutateRowRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='table_name', full_name='google.bigtable.v2.CheckAndMutateRowRequest.table_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='row_key', full_name='google.bigtable.v2.CheckAndMutateRowRequest.row_key', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='predicate_filter', full_name='google.bigtable.v2.CheckAndMutateRowRequest.predicate_filter', index=2,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='true_mutations', full_name='google.bigtable.v2.CheckAndMutateRowRequest.true_mutations', index=3,
number=4, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='false_mutations', full_name='google.bigtable.v2.CheckAndMutateRowRequest.false_mutations', index=4,
number=5, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1264,
serialized_end=1493,
)
_CHECKANDMUTATEROWRESPONSE = _descriptor.Descriptor(
name='CheckAndMutateRowResponse',
full_name='google.bigtable.v2.CheckAndMutateRowResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='predicate_matched', full_name='google.bigtable.v2.CheckAndMutateRowResponse.predicate_matched', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1495,
serialized_end=1549,
)
_READMODIFYWRITEROWREQUEST = _descriptor.Descriptor(
name='ReadModifyWriteRowRequest',
full_name='google.bigtable.v2.ReadModifyWriteRowRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='table_name', full_name='google.bigtable.v2.ReadModifyWriteRowRequest.table_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='row_key', full_name='google.bigtable.v2.ReadModifyWriteRowRequest.row_key', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='rules', full_name='google.bigtable.v2.ReadModifyWriteRowRequest.rules', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1551,
serialized_end=1671,
)
_READMODIFYWRITEROWRESPONSE = _descriptor.Descriptor(
name='ReadModifyWriteRowResponse',
full_name='google.bigtable.v2.ReadModifyWriteRowResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='row', full_name='google.bigtable.v2.ReadModifyWriteRowResponse.row', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1673,
serialized_end=1739,
)
_READROWSREQUEST.fields_by_name['rows'].message_type = google_dot_bigtable_dot_v2_dot_data__pb2._ROWSET
_READROWSREQUEST.fields_by_name['filter'].message_type = google_dot_bigtable_dot_v2_dot_data__pb2._ROWFILTER
_READROWSRESPONSE_CELLCHUNK.fields_by_name['family_name'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_READROWSRESPONSE_CELLCHUNK.fields_by_name['qualifier'].message_type = google_dot_protobuf_dot_wrappers__pb2._BYTESVALUE
_READROWSRESPONSE_CELLCHUNK.containing_type = _READROWSRESPONSE
_READROWSRESPONSE_CELLCHUNK.oneofs_by_name['row_status'].fields.append(
_READROWSRESPONSE_CELLCHUNK.fields_by_name['reset_row'])
_READROWSRESPONSE_CELLCHUNK.fields_by_name['reset_row'].containing_oneof = _READROWSRESPONSE_CELLCHUNK.oneofs_by_name['row_status']
_READROWSRESPONSE_CELLCHUNK.oneofs_by_name['row_status'].fields.append(
_READROWSRESPONSE_CELLCHUNK.fields_by_name['commit_row'])
_READROWSRESPONSE_CELLCHUNK.fields_by_name['commit_row'].containing_oneof = _READROWSRESPONSE_CELLCHUNK.oneofs_by_name['row_status']
_READROWSRESPONSE.fields_by_name['chunks'].message_type = _READROWSRESPONSE_CELLCHUNK
_MUTATEROWREQUEST.fields_by_name['mutations'].message_type = google_dot_bigtable_dot_v2_dot_data__pb2._MUTATION
_MUTATEROWSREQUEST_ENTRY.fields_by_name['mutations'].message_type = google_dot_bigtable_dot_v2_dot_data__pb2._MUTATION
_MUTATEROWSREQUEST_ENTRY.containing_type = _MUTATEROWSREQUEST
_MUTATEROWSREQUEST.fields_by_name['entries'].message_type = _MUTATEROWSREQUEST_ENTRY
_MUTATEROWSRESPONSE_ENTRY.fields_by_name['status'].message_type = google_dot_rpc_dot_status__pb2._STATUS
_MUTATEROWSRESPONSE_ENTRY.containing_type = _MUTATEROWSRESPONSE
_MUTATEROWSRESPONSE.fields_by_name['entries'].message_type = _MUTATEROWSRESPONSE_ENTRY
_CHECKANDMUTATEROWREQUEST.fields_by_name['predicate_filter'].message_type = google_dot_bigtable_dot_v2_dot_data__pb2._ROWFILTER
_CHECKANDMUTATEROWREQUEST.fields_by_name['true_mutations'].message_type = google_dot_bigtable_dot_v2_dot_data__pb2._MUTATION
_CHECKANDMUTATEROWREQUEST.fields_by_name['false_mutations'].message_type = google_dot_bigtable_dot_v2_dot_data__pb2._MUTATION
_READMODIFYWRITEROWREQUEST.fields_by_name['rules'].message_type = google_dot_bigtable_dot_v2_dot_data__pb2._READMODIFYWRITERULE
_READMODIFYWRITEROWRESPONSE.fields_by_name['row'].message_type = google_dot_bigtable_dot_v2_dot_data__pb2._ROW
DESCRIPTOR.message_types_by_name['ReadRowsRequest'] = _READROWSREQUEST
DESCRIPTOR.message_types_by_name['ReadRowsResponse'] = _READROWSRESPONSE
DESCRIPTOR.message_types_by_name['SampleRowKeysRequest'] = _SAMPLEROWKEYSREQUEST
DESCRIPTOR.message_types_by_name['SampleRowKeysResponse'] = _SAMPLEROWKEYSRESPONSE
DESCRIPTOR.message_types_by_name['MutateRowRequest'] = _MUTATEROWREQUEST
DESCRIPTOR.message_types_by_name['MutateRowResponse'] = _MUTATEROWRESPONSE
DESCRIPTOR.message_types_by_name['MutateRowsRequest'] = _MUTATEROWSREQUEST
DESCRIPTOR.message_types_by_name['MutateRowsResponse'] = _MUTATEROWSRESPONSE
DESCRIPTOR.message_types_by_name['CheckAndMutateRowRequest'] = _CHECKANDMUTATEROWREQUEST
DESCRIPTOR.message_types_by_name['CheckAndMutateRowResponse'] = _CHECKANDMUTATEROWRESPONSE
DESCRIPTOR.message_types_by_name['ReadModifyWriteRowRequest'] = _READMODIFYWRITEROWREQUEST
DESCRIPTOR.message_types_by_name['ReadModifyWriteRowResponse'] = _READMODIFYWRITEROWRESPONSE
ReadRowsRequest = _reflection.GeneratedProtocolMessageType('ReadRowsRequest', (_message.Message,), dict(
DESCRIPTOR = _READROWSREQUEST,
__module__ = 'google.bigtable.v2.bigtable_pb2'
# @@protoc_insertion_point(class_scope:google.bigtable.v2.ReadRowsRequest)
))
_sym_db.RegisterMessage(ReadRowsRequest)
ReadRowsResponse = _reflection.GeneratedProtocolMessageType('ReadRowsResponse', (_message.Message,), dict(
CellChunk = _reflection.GeneratedProtocolMessageType('CellChunk', (_message.Message,), dict(
DESCRIPTOR = _READROWSRESPONSE_CELLCHUNK,
__module__ = 'google.bigtable.v2.bigtable_pb2'
# @@protoc_insertion_point(class_scope:google.bigtable.v2.ReadRowsResponse.CellChunk)
))
,
DESCRIPTOR = _READROWSRESPONSE,
__module__ = 'google.bigtable.v2.bigtable_pb2'
# @@protoc_insertion_point(class_scope:google.bigtable.v2.ReadRowsResponse)
))
_sym_db.RegisterMessage(ReadRowsResponse)
_sym_db.RegisterMessage(ReadRowsResponse.CellChunk)
SampleRowKeysRequest = _reflection.GeneratedProtocolMessageType('SampleRowKeysRequest', (_message.Message,), dict(
DESCRIPTOR = _SAMPLEROWKEYSREQUEST,
__module__ = 'google.bigtable.v2.bigtable_pb2'
# @@protoc_insertion_point(class_scope:google.bigtable.v2.SampleRowKeysRequest)
))
_sym_db.RegisterMessage(SampleRowKeysRequest)
SampleRowKeysResponse = _reflection.GeneratedProtocolMessageType('SampleRowKeysResponse', (_message.Message,), dict(
DESCRIPTOR = _SAMPLEROWKEYSRESPONSE,
__module__ = 'google.bigtable.v2.bigtable_pb2'
# @@protoc_insertion_point(class_scope:google.bigtable.v2.SampleRowKeysResponse)
))
_sym_db.RegisterMessage(SampleRowKeysResponse)
MutateRowRequest = _reflection.GeneratedProtocolMessageType('MutateRowRequest', (_message.Message,), dict(
DESCRIPTOR = _MUTATEROWREQUEST,
__module__ = 'google.bigtable.v2.bigtable_pb2'
# @@protoc_insertion_point(class_scope:google.bigtable.v2.MutateRowRequest)
))
_sym_db.RegisterMessage(MutateRowRequest)
MutateRowResponse = _reflection.GeneratedProtocolMessageType('MutateRowResponse', (_message.Message,), dict(
DESCRIPTOR = _MUTATEROWRESPONSE,
__module__ = 'google.bigtable.v2.bigtable_pb2'
# @@protoc_insertion_point(class_scope:google.bigtable.v2.MutateRowResponse)
))
_sym_db.RegisterMessage(MutateRowResponse)
MutateRowsRequest = _reflection.GeneratedProtocolMessageType('MutateRowsRequest', (_message.Message,), dict(
Entry = _reflection.GeneratedProtocolMessageType('Entry', (_message.Message,), dict(
DESCRIPTOR = _MUTATEROWSREQUEST_ENTRY,
__module__ = 'google.bigtable.v2.bigtable_pb2'
# @@protoc_insertion_point(class_scope:google.bigtable.v2.MutateRowsRequest.Entry)
))
,
DESCRIPTOR = _MUTATEROWSREQUEST,
__module__ = 'google.bigtable.v2.bigtable_pb2'
# @@protoc_insertion_point(class_scope:google.bigtable.v2.MutateRowsRequest)
))
_sym_db.RegisterMessage(MutateRowsRequest)
_sym_db.RegisterMessage(MutateRowsRequest.Entry)
MutateRowsResponse = _reflection.GeneratedProtocolMessageType('MutateRowsResponse', (_message.Message,), dict(
Entry = _reflection.GeneratedProtocolMessageType('Entry', (_message.Message,), dict(
DESCRIPTOR = _MUTATEROWSRESPONSE_ENTRY,
__module__ = 'google.bigtable.v2.bigtable_pb2'
# @@protoc_insertion_point(class_scope:google.bigtable.v2.MutateRowsResponse.Entry)
))
,
DESCRIPTOR = _MUTATEROWSRESPONSE,
__module__ = 'google.bigtable.v2.bigtable_pb2'
# @@protoc_insertion_point(class_scope:google.bigtable.v2.MutateRowsResponse)
))
_sym_db.RegisterMessage(MutateRowsResponse)
_sym_db.RegisterMessage(MutateRowsResponse.Entry)
CheckAndMutateRowRequest = _reflection.GeneratedProtocolMessageType('CheckAndMutateRowRequest', (_message.Message,), dict(
DESCRIPTOR = _CHECKANDMUTATEROWREQUEST,
__module__ = 'google.bigtable.v2.bigtable_pb2'
# @@protoc_insertion_point(class_scope:google.bigtable.v2.CheckAndMutateRowRequest)
))
_sym_db.RegisterMessage(CheckAndMutateRowRequest)
CheckAndMutateRowResponse = _reflection.GeneratedProtocolMessageType('CheckAndMutateRowResponse', (_message.Message,), dict(
DESCRIPTOR = _CHECKANDMUTATEROWRESPONSE,
__module__ = 'google.bigtable.v2.bigtable_pb2'
# @@protoc_insertion_point(class_scope:google.bigtable.v2.CheckAndMutateRowResponse)
))
_sym_db.RegisterMessage(CheckAndMutateRowResponse)
ReadModifyWriteRowRequest = _reflection.GeneratedProtocolMessageType('ReadModifyWriteRowRequest', (_message.Message,), dict(
DESCRIPTOR = _READMODIFYWRITEROWREQUEST,
__module__ = 'google.bigtable.v2.bigtable_pb2'
# @@protoc_insertion_point(class_scope:google.bigtable.v2.ReadModifyWriteRowRequest)
))
_sym_db.RegisterMessage(ReadModifyWriteRowRequest)
ReadModifyWriteRowResponse = _reflection.GeneratedProtocolMessageType('ReadModifyWriteRowResponse', (_message.Message,), dict(
DESCRIPTOR = _READMODIFYWRITEROWRESPONSE,
__module__ = 'google.bigtable.v2.bigtable_pb2'
# @@protoc_insertion_point(class_scope:google.bigtable.v2.ReadModifyWriteRowResponse)
))
_sym_db.RegisterMessage(ReadModifyWriteRowResponse)
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\026com.google.bigtable.v2B\rBigtableProtoP\001'))
import grpc
from grpc.beta import implementations as beta_implementations
from grpc.beta import interfaces as beta_interfaces
from grpc.framework.common import cardinality
from grpc.framework.interfaces.face import utilities as face_utilities
class BigtableStub(object):
  """Service for reading from and writing to existing Bigtable tables.
  """
  def __init__(self, channel):
    """Constructor.
    Args:
      channel: A grpc.Channel.
    """
    # Server-streaming RPC: one ReadRowsRequest in, a stream of
    # ReadRowsResponse chunks back.
    self.ReadRows = channel.unary_stream(
        '/google.bigtable.v2.Bigtable/ReadRows',
        request_serializer=ReadRowsRequest.SerializeToString,
        response_deserializer=ReadRowsResponse.FromString,
        )
    # Server-streaming RPC: returns a stream of sampled row keys.
    self.SampleRowKeys = channel.unary_stream(
        '/google.bigtable.v2.Bigtable/SampleRowKeys',
        request_serializer=SampleRowKeysRequest.SerializeToString,
        response_deserializer=SampleRowKeysResponse.FromString,
        )
    # Unary RPC: atomically mutates a single row.
    self.MutateRow = channel.unary_unary(
        '/google.bigtable.v2.Bigtable/MutateRow',
        request_serializer=MutateRowRequest.SerializeToString,
        response_deserializer=MutateRowResponse.FromString,
        )
    # Server-streaming RPC: mutates many rows; per-entry statuses are
    # streamed back in MutateRowsResponse messages.
    self.MutateRows = channel.unary_stream(
        '/google.bigtable.v2.Bigtable/MutateRows',
        request_serializer=MutateRowsRequest.SerializeToString,
        response_deserializer=MutateRowsResponse.FromString,
        )
    # Unary RPC: conditional mutation gated on a predicate filter.
    self.CheckAndMutateRow = channel.unary_unary(
        '/google.bigtable.v2.Bigtable/CheckAndMutateRow',
        request_serializer=CheckAndMutateRowRequest.SerializeToString,
        response_deserializer=CheckAndMutateRowResponse.FromString,
        )
    # Unary RPC: read-modify-write on a single row, returning the new row.
    self.ReadModifyWriteRow = channel.unary_unary(
        '/google.bigtable.v2.Bigtable/ReadModifyWriteRow',
        request_serializer=ReadModifyWriteRowRequest.SerializeToString,
        response_deserializer=ReadModifyWriteRowResponse.FromString,
        )
# NOTE(review): generated server-side base class; subclass it and override the
# methods below, then register with add_BigtableServicer_to_server().
class BigtableServicer(object):
  """Service for reading from and writing to existing Bigtable tables.
  """
  # Every handler defaults to answering UNIMPLEMENTED until overridden.
  def ReadRows(self, request, context):
    """Streams back the contents of all requested rows, optionally
    applying the same Reader filter to each. Depending on their size,
    rows and cells may be broken up across multiple responses, but
    atomicity of each row will still be preserved. See the
    ReadRowsResponse documentation for details.
    """
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')
  def SampleRowKeys(self, request, context):
    """Returns a sample of row keys in the table. The returned row keys will
    delimit contiguous sections of the table of approximately equal size,
    which can be used to break up the data for distributed tasks like
    mapreduces.
    """
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')
  def MutateRow(self, request, context):
    """Mutates a row atomically. Cells already present in the row are left
    unchanged unless explicitly changed by `mutation`.
    """
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')
  def MutateRows(self, request, context):
    """Mutates multiple rows in a batch. Each individual row is mutated
    atomically as in MutateRow, but the entire batch is not executed
    atomically.
    """
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')
  def CheckAndMutateRow(self, request, context):
    """Mutates a row atomically based on the output of a predicate Reader filter.
    """
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')
  def ReadModifyWriteRow(self, request, context):
    """Modifies a row atomically. The method reads the latest existing timestamp
    and value from the specified columns and writes a new entry based on
    pre-defined read/modify/write rules. The new value for the timestamp is the
    greater of the existing timestamp or the current server time. The method
    returns the new contents of all modified cells.
    """
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')
def add_BigtableServicer_to_server(servicer, server):
  # Generated registration helper: wraps each servicer method in an RPC method
  # handler (with the matching (de)serializers) and registers the whole set on
  # the server under the fully qualified service name.
  rpc_method_handlers = {
      'ReadRows': grpc.unary_stream_rpc_method_handler(
          servicer.ReadRows,
          request_deserializer=ReadRowsRequest.FromString,
          response_serializer=ReadRowsResponse.SerializeToString,
      ),
      'SampleRowKeys': grpc.unary_stream_rpc_method_handler(
          servicer.SampleRowKeys,
          request_deserializer=SampleRowKeysRequest.FromString,
          response_serializer=SampleRowKeysResponse.SerializeToString,
      ),
      'MutateRow': grpc.unary_unary_rpc_method_handler(
          servicer.MutateRow,
          request_deserializer=MutateRowRequest.FromString,
          response_serializer=MutateRowResponse.SerializeToString,
      ),
      'MutateRows': grpc.unary_stream_rpc_method_handler(
          servicer.MutateRows,
          request_deserializer=MutateRowsRequest.FromString,
          response_serializer=MutateRowsResponse.SerializeToString,
      ),
      'CheckAndMutateRow': grpc.unary_unary_rpc_method_handler(
          servicer.CheckAndMutateRow,
          request_deserializer=CheckAndMutateRowRequest.FromString,
          response_serializer=CheckAndMutateRowResponse.SerializeToString,
      ),
      'ReadModifyWriteRow': grpc.unary_unary_rpc_method_handler(
          servicer.ReadModifyWriteRow,
          request_deserializer=ReadModifyWriteRowRequest.FromString,
          response_serializer=ReadModifyWriteRowResponse.SerializeToString,
      ),
  }
  generic_handler = grpc.method_handlers_generic_handler(
      'google.bigtable.v2.Bigtable', rpc_method_handlers)
  server.add_generic_rpc_handlers((generic_handler,))
# NOTE(review): legacy "beta" gRPC API servicer, kept for backward
# compatibility with pre-GA grpc releases; generated code, do not hand-edit.
class BetaBigtableServicer(object):
  """Service for reading from and writing to existing Bigtable tables.
  """
  # Beta-API equivalent of BigtableServicer: every method answers UNIMPLEMENTED
  # until overridden in a subclass.
  def ReadRows(self, request, context):
    """Streams back the contents of all requested rows, optionally
    applying the same Reader filter to each. Depending on their size,
    rows and cells may be broken up across multiple responses, but
    atomicity of each row will still be preserved. See the
    ReadRowsResponse documentation for details.
    """
    context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
  def SampleRowKeys(self, request, context):
    """Returns a sample of row keys in the table. The returned row keys will
    delimit contiguous sections of the table of approximately equal size,
    which can be used to break up the data for distributed tasks like
    mapreduces.
    """
    context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
  def MutateRow(self, request, context):
    """Mutates a row atomically. Cells already present in the row are left
    unchanged unless explicitly changed by `mutation`.
    """
    context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
  def MutateRows(self, request, context):
    """Mutates multiple rows in a batch. Each individual row is mutated
    atomically as in MutateRow, but the entire batch is not executed
    atomically.
    """
    context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
  def CheckAndMutateRow(self, request, context):
    """Mutates a row atomically based on the output of a predicate Reader filter.
    """
    context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
  def ReadModifyWriteRow(self, request, context):
    """Modifies a row atomically. The method reads the latest existing timestamp
    and value from the specified columns and writes a new entry based on
    pre-defined read/modify/write rules. The new value for the timestamp is the
    greater of the existing timestamp or the current server time. The method
    returns the new contents of all modified cells.
    """
    context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
# NOTE(review): legacy "beta" gRPC API stub interface; real instances come from
# beta_create_Bigtable_stub() below. Generated code, do not hand-edit.
class BetaBigtableStub(object):
  """Service for reading from and writing to existing Bigtable tables.
  """
  def ReadRows(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    """Streams back the contents of all requested rows, optionally
    applying the same Reader filter to each. Depending on their size,
    rows and cells may be broken up across multiple responses, but
    atomicity of each row will still be preserved. See the
    ReadRowsResponse documentation for details.
    """
    raise NotImplementedError()
  def SampleRowKeys(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    """Returns a sample of row keys in the table. The returned row keys will
    delimit contiguous sections of the table of approximately equal size,
    which can be used to break up the data for distributed tasks like
    mapreduces.
    """
    raise NotImplementedError()
  def MutateRow(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    """Mutates a row atomically. Cells already present in the row are left
    unchanged unless explicitly changed by `mutation`.
    """
    raise NotImplementedError()
  # Placeholder for the asynchronous-invocation variant that concrete
  # (dynamically created) stubs provide for unary-unary RPCs.
  MutateRow.future = None
  def MutateRows(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    """Mutates multiple rows in a batch. Each individual row is mutated
    atomically as in MutateRow, but the entire batch is not executed
    atomically.
    """
    raise NotImplementedError()
  def CheckAndMutateRow(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    """Mutates a row atomically based on the output of a predicate Reader filter.
    """
    raise NotImplementedError()
  CheckAndMutateRow.future = None
  def ReadModifyWriteRow(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
    """Modifies a row atomically. The method reads the latest existing timestamp
    and value from the specified columns and writes a new entry based on
    pre-defined read/modify/write rules. The new value for the timestamp is the
    greater of the existing timestamp or the current server time. The method
    returns the new contents of all modified cells.
    """
    raise NotImplementedError()
    ReadModifyWriteRow.future = None
def beta_create_Bigtable_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None):
  # Generated factory for a beta-API server: maps every (service, method) pair
  # to its deserializer, serializer and inline servicer implementation, then
  # builds the server with those options.
  request_deserializers = {
    ('google.bigtable.v2.Bigtable', 'CheckAndMutateRow'): CheckAndMutateRowRequest.FromString,
    ('google.bigtable.v2.Bigtable', 'MutateRow'): MutateRowRequest.FromString,
    ('google.bigtable.v2.Bigtable', 'MutateRows'): MutateRowsRequest.FromString,
    ('google.bigtable.v2.Bigtable', 'ReadModifyWriteRow'): ReadModifyWriteRowRequest.FromString,
    ('google.bigtable.v2.Bigtable', 'ReadRows'): ReadRowsRequest.FromString,
    ('google.bigtable.v2.Bigtable', 'SampleRowKeys'): SampleRowKeysRequest.FromString,
  }
  response_serializers = {
    ('google.bigtable.v2.Bigtable', 'CheckAndMutateRow'): CheckAndMutateRowResponse.SerializeToString,
    ('google.bigtable.v2.Bigtable', 'MutateRow'): MutateRowResponse.SerializeToString,
    ('google.bigtable.v2.Bigtable', 'MutateRows'): MutateRowsResponse.SerializeToString,
    ('google.bigtable.v2.Bigtable', 'ReadModifyWriteRow'): ReadModifyWriteRowResponse.SerializeToString,
    ('google.bigtable.v2.Bigtable', 'ReadRows'): ReadRowsResponse.SerializeToString,
    ('google.bigtable.v2.Bigtable', 'SampleRowKeys'): SampleRowKeysResponse.SerializeToString,
  }
  method_implementations = {
    ('google.bigtable.v2.Bigtable', 'CheckAndMutateRow'): face_utilities.unary_unary_inline(servicer.CheckAndMutateRow),
    ('google.bigtable.v2.Bigtable', 'MutateRow'): face_utilities.unary_unary_inline(servicer.MutateRow),
    ('google.bigtable.v2.Bigtable', 'MutateRows'): face_utilities.unary_stream_inline(servicer.MutateRows),
    ('google.bigtable.v2.Bigtable', 'ReadModifyWriteRow'): face_utilities.unary_unary_inline(servicer.ReadModifyWriteRow),
    ('google.bigtable.v2.Bigtable', 'ReadRows'): face_utilities.unary_stream_inline(servicer.ReadRows),
    ('google.bigtable.v2.Bigtable', 'SampleRowKeys'): face_utilities.unary_stream_inline(servicer.SampleRowKeys),
  }
  server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout)
  return beta_implementations.server(method_implementations, options=server_options)
def beta_create_Bigtable_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None):
  # Generated factory for a beta-API client stub: per-method serializers,
  # deserializers and cardinalities (unary/streaming) drive the dynamic stub.
  request_serializers = {
    ('google.bigtable.v2.Bigtable', 'CheckAndMutateRow'): CheckAndMutateRowRequest.SerializeToString,
    ('google.bigtable.v2.Bigtable', 'MutateRow'): MutateRowRequest.SerializeToString,
    ('google.bigtable.v2.Bigtable', 'MutateRows'): MutateRowsRequest.SerializeToString,
    ('google.bigtable.v2.Bigtable', 'ReadModifyWriteRow'): ReadModifyWriteRowRequest.SerializeToString,
    ('google.bigtable.v2.Bigtable', 'ReadRows'): ReadRowsRequest.SerializeToString,
    ('google.bigtable.v2.Bigtable', 'SampleRowKeys'): SampleRowKeysRequest.SerializeToString,
  }
  response_deserializers = {
    ('google.bigtable.v2.Bigtable', 'CheckAndMutateRow'): CheckAndMutateRowResponse.FromString,
    ('google.bigtable.v2.Bigtable', 'MutateRow'): MutateRowResponse.FromString,
    ('google.bigtable.v2.Bigtable', 'MutateRows'): MutateRowsResponse.FromString,
    ('google.bigtable.v2.Bigtable', 'ReadModifyWriteRow'): ReadModifyWriteRowResponse.FromString,
    ('google.bigtable.v2.Bigtable', 'ReadRows'): ReadRowsResponse.FromString,
    ('google.bigtable.v2.Bigtable', 'SampleRowKeys'): SampleRowKeysResponse.FromString,
  }
  cardinalities = {
    'CheckAndMutateRow': cardinality.Cardinality.UNARY_UNARY,
    'MutateRow': cardinality.Cardinality.UNARY_UNARY,
    'MutateRows': cardinality.Cardinality.UNARY_STREAM,
    'ReadModifyWriteRow': cardinality.Cardinality.UNARY_UNARY,
    'ReadRows': cardinality.Cardinality.UNARY_STREAM,
    'SampleRowKeys': cardinality.Cardinality.UNARY_STREAM,
  }
  stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size)
  return beta_implementations.dynamic_stub(channel, 'google.bigtable.v2.Bigtable', cardinalities, options=stub_options)
# @@protoc_insertion_point(module_scope)
| bsd-3-clause |
odahoda/noisicaa | noisidev/runvmtests.py | 1 | 17693 | #!/usr/bin/env python3
# @begin:license
#
# Copyright (c) 2015-2019, Benjamin Niemann <pink@odahoda.de>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# @end:license
import asyncio
import argparse
import datetime
import glob
import logging
import os
import os.path
import shutil
import subprocess
import sys
import time
import traceback
import asyncssh
from . import testvm
logger = logging.getLogger(__name__)
ROOT_DIR = os.path.abspath(
os.path.join(os.path.join(os.path.dirname(__file__), '..')))
TEST_SCRIPT = r'''#!/bin/bash
SOURCE="{settings.source}"
BRANCH="{settings.branch}"
set -e
set -x
mkdir -p ~/.pip
cat >~/.pip/pip.conf <<EOF
[global]
index-url = http://10.0.2.2:{settings.devpi_port}/root/pypi/+simple/
trusted-host = 10.0.2.2
EOF
sudo apt-get -q -y install python3 python3-venv
rm -fr noisicaa/
if [ $SOURCE == git ]; then
sudo apt-get -q -y install git
git clone --branch=$BRANCH --single-branch https://github.com/odahoda/noisicaa
elif [ $SOURCE == local ]; then
mkdir noisicaa/
tar -x -z -Cnoisicaa/ -flocal.tar.gz
fi
cd noisicaa/
./waf configure --venvdir=../venv --download --install-system-packages
./waf build
sudo ./waf install
./waf configure --venvdir=../venv --download --install-system-packages --enable-tests
./waf build
./waf test --tags=unit
'''
async def log_dumper(fp_in, out_func, encoding=None):
    """Read *fp_in* one unit at a time and emit complete lines via *out_func*.

    Args:
        fp_in: Async stream exposing ``at_eof()`` and ``await read(n)`` (e.g.
            an asyncio/asyncssh stream reader).
        out_func: Callable invoked once per complete line, newline stripped.
        encoding: If None the stream is assumed to yield text and lines are
            passed through as ``str``; otherwise the stream yields bytes and
            each line is decoded with this encoding before being emitted.
    """
    if encoding is None:
        line = ''
        lf = '\n'
    else:
        line = bytearray()
        lf = b'\n'
    while not fp_in.at_eof():
        c = await fp_in.read(1)
        if c == lf:
            if encoding is not None:
                line = line.decode(encoding)
            out_func(line)
            if encoding is None:
                line = ''
            else:
                line = bytearray()
        else:
            line += c
    if line:
        if encoding is not None:
            line = line.decode(encoding)
        # Flush the trailing partial line when the stream ends without a
        # newline. BUG FIX: this previously passed the undefined name ``buf``,
        # raising NameError instead of emitting the last line.
        out_func(line)
class TestMixin(testvm.VM):
    """Mixin over testvm.VM that adds install / run-test orchestration.

    Relies on the testvm.VM interface: ``vm_dir``, ``name``, ``event_loop``,
    ``install()``, ``create_snapshot()``, ``start()``, ``poweroff()`` and
    ``wait_for_ssh()``.
    """

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Marker file; its existence means the OS install completed.
        self.__installed_sentinel = os.path.join(self.vm_dir, 'installed')

    @property
    def is_installed(self):
        """True if the VM was fully installed (sentinel file exists)."""
        return os.path.isfile(self.__installed_sentinel)

    async def __spinner(self, prefix, result):
        """Render a progress spinner with elapsed time until *result* yields.

        The item taken from the *result* queue (e.g. 'OK' / 'FAILED') is
        printed as the final status before this coroutine returns.
        """
        start_time = datetime.datetime.now()
        spinner = '-\|/'
        spinner_idx = 0
        while True:
            duration = (datetime.datetime.now() - start_time).total_seconds()
            minutes = duration // 60
            seconds = duration - 60 * minutes
            sys.stdout.write('\033[2K\r%s [%02d:%02d] ... ' % (prefix, minutes, seconds))
            if not result.empty():
                sys.stdout.write(await result.get())
                sys.stdout.write('\n')
                break
            sys.stdout.write('(%s)' % spinner[spinner_idx])
            spinner_idx = (spinner_idx + 1) % len(spinner)
            sys.stdout.flush()
            await asyncio.sleep(0.2, loop=self.event_loop)

    async def install(self):
        """Install the VM from scratch and snapshot the clean state."""
        logger.info("Installing VM '%s'...", self.name)
        result = asyncio.Queue(loop=self.event_loop)
        spinner_task = self.event_loop.create_task(
            self.__spinner("Installing VM '%s'" % self.name, result))
        try:
            # Wipe any previous state so the install starts from scratch.
            if os.path.isdir(self.vm_dir):
                shutil.rmtree(self.vm_dir)
            await super().install()
            await self.create_snapshot('clean')
            open(self.__installed_sentinel, 'w').close()
        except BaseException:
            # Also reports KeyboardInterrupt/CancelledError; always re-raised.
            logger.error("Installation of VM '%s' failed.", self.name)
            result.put_nowait('FAILED')
            raise
        else:
            logger.info("Installed VM '%s'...", self.name)
            result.put_nowait('OK')
        finally:
            await spinner_task

    async def run_test(self, settings):
        """Run the test payload on this VM; return True on success."""
        logger.info("Running test '%s'... ", self.name)
        result = asyncio.Queue(loop=self.event_loop)
        spinner_task = self.event_loop.create_task(
            self.__spinner("Running test '%s'" % self.name, result))
        try:
            await self.do_test(settings)
        except Exception:
            logger.error(
                "Test '%s' failed with an exception:\n%s",
                self.name, traceback.format_exc())
            result.put_nowait('FAILED')
            return False
        else:
            # BUG FIX: the '%s' placeholder previously had no argument, so the
            # logging module reported a formatting error instead of this line.
            logger.info("Test '%s' completed successfully.", self.name)
            result.put_nowait('SUCCESS')
            return True
        finally:
            await spinner_task

    async def do_test(self, settings):
        """Upload the test payload over SSH/SFTP and execute runtest.sh."""
        vm_logger = logging.getLogger(self.name)
        logger.info("Waiting for SSH port to open...")
        await self.wait_for_ssh()
        logger.info("Connecting to VM...")
        # Port 5555 is forwarded to the guest's SSH port by the VM setup.
        client = await asyncssh.connect(
            host='localhost',
            port=5555,
            options=asyncssh.SSHClientConnectionOptions(
                username='testuser',
                password='123',
                known_hosts=None),
            loop=self.event_loop)
        try:
            sftp = await client.start_sftp_client()
            try:
                logger.info("Copy runtest.sh...")
                async with sftp.open('runtest.sh', 'w') as fp:
                    await fp.write(TEST_SCRIPT.format(settings=settings))
                await sftp.chmod('runtest.sh', 0o775)
                if settings.source == 'local':
                    # Ship the local working tree (git-tracked files only) as
                    # a tarball streamed straight into the guest.
                    logger.info("Copy local.tar.gz...")
                    proc = subprocess.Popen(
                        ['git', 'config', 'core.quotepath', 'off'],
                        cwd=ROOT_DIR)
                    proc.wait()
                    assert proc.returncode == 0
                    proc = subprocess.Popen(
                        ['bash', '-c', 'tar -c -z -T<(git ls-tree --full-tree -r --name-only HEAD) -f-'],
                        cwd=ROOT_DIR,
                        stdout=subprocess.PIPE)
                    async with sftp.open('local.tar.gz', 'wb') as fp:
                        while True:
                            buf = proc.stdout.read(1 << 20)
                            if not buf:
                                break
                            await fp.write(buf)
                    proc.wait()
                    assert proc.returncode == 0
            finally:
                sftp.exit()
            proc = await client.create_process("./runtest.sh", stderr=subprocess.STDOUT)
            stdout_dumper = self.event_loop.create_task(
                log_dumper(proc.stdout, vm_logger.info))
            await proc.wait()
            await stdout_dumper
            assert proc.returncode == 0
        finally:
            client.close()
class Ubuntu_16_04(TestMixin, testvm.Ubuntu_16_04):
    # Ubuntu 16.04 test VM: test orchestration from TestMixin, image setup
    # from the testvm base class.
    pass
class Ubuntu_18_04(TestMixin, testvm.Ubuntu_18_04):
    # Ubuntu 18.04 test VM: test orchestration from TestMixin, image setup
    # from the testvm base class.
    pass
# Registry of runnable VM tests, keyed by the name accepted on the command line.
ALL_VMTESTS = {
    'ubuntu-16.04': Ubuntu_16_04,
    'ubuntu-18.04': Ubuntu_18_04,
}
# All VM images, snapshots and logs live under <repo root>/vmtests.
VM_BASE_DIR = os.path.abspath(
    os.path.join(os.path.join(os.path.dirname(__file__), '..'), 'vmtests'))
class TestSettings(object):
    """Snapshot of the parsed command line options consumed by TEST_SCRIPT
    and the test runner (branch, source, shutdown, devpi_port)."""

    _FIELDS = ('branch', 'source', 'shutdown', 'devpi_port')

    def __init__(self, args):
        # Copy just the fields we care about off the argparse namespace.
        for field in self._FIELDS:
            setattr(self, field, getattr(args, field))
def bool_arg(value):
    """Parse a command line value as a boolean.

    Accepts real bools unchanged and the usual true/false spellings
    (case-insensitive) for strings; raises ValueError for unknown strings
    and TypeError for any other type.
    """
    if isinstance(value, bool):
        return value
    if not isinstance(value, str):
        raise TypeError("Invalid type '%s'." % type(value).__name__)
    lowered = value.lower()
    if lowered in ('true', 'y', 'yes', 'on', '1'):
        return True
    if lowered in ('false', 'n', 'no', 'off', '0'):
        return False
    raise ValueError("Invalid value '%s'." % value)
async def main(event_loop, argv):
    """Command line entry point: parse args, start helper servers, run VM tests.

    Starts a local devpi (pip cache) and apt-cacher-ng (apt cache) server so
    the guests do not hit the network repeatedly, then installs/boots the
    selected VMs and runs the test script in each.

    Returns a process exit status: 0 on success, 1 if any test failed, or
    None for the --just-start / --login convenience modes.
    """
    argparser = argparse.ArgumentParser()
    argparser.add_argument(
        '--log-level',
        choices=['debug', 'info', 'warning', 'error', 'critical'],
        default='critical',
        help="Minimum level for log messages written to STDERR.")
    argparser.add_argument('--source', type=str, choices=['local', 'git'], default='local')
    argparser.add_argument('--branch', type=str, default='master')
    # NOTE(review): --rebuild-vm is parsed but currently unused in this
    # function; --force-install is what triggers a reinstall.
    argparser.add_argument(
        '--rebuild-vm', type=bool_arg, default=False,
        help="Rebuild the VM from scratch, discarding the current state.")
    argparser.add_argument(
        '--clean-snapshot', type=bool_arg, default=True,
        help=("Restore the VM from the 'clean' snapshot (which was created after the VM has"
              " been setup) before running the tests."))
    argparser.add_argument(
        '--just-start', action='store_true', default=False,
        help=("Just start the VM in the current state (not restoring the clean snapshot)"
              " and don't run the tests."))
    argparser.add_argument(
        '--login', action='store_true', default=False,
        help=("Start the VM in the current state (not restoring the clean snapshot)"
              " and open a shell session. The VM is powered off when the shell is closed."))
    argparser.add_argument(
        '--shutdown', type=bool_arg, default=True,
        help="Shut the VM down after running the tests.")
    argparser.add_argument(
        '--gui', type=bool_arg, default=None,
        help="Force showing/hiding the UI.")
    argparser.add_argument(
        '--force-install', action="store_true", default=False,
        help="Force reinstallation of operating system before starting VM.")
    argparser.add_argument(
        '--cores', type=int,
        default=min(4, len(os.sched_getaffinity(0))),
        help="Number of emulated cores in the VM.")
    argparser.add_argument(
        '--devpi-port', type=int,
        default=18000,
        help="Local port for devpi server.")
    argparser.add_argument(
        '--apt-cacher-port', type=int,
        default=3142,
        help="Local port for apt-cacher-ng server.")
    argparser.add_argument('vms', nargs='*')
    args = argparser.parse_args(argv[1:])

    # Default to every known VM test and validate explicit selections.
    if not args.vms:
        args.vms = list(sorted(ALL_VMTESTS.keys()))
    for vm_name in args.vms:
        if vm_name not in ALL_VMTESTS:
            raise ValueError("'%s' is not a valid test name" % vm_name)

    # Logging setup: full DEBUG log to a timestamped file (with a stable
    # 'debug.log' symlink pointing at the latest run) plus a level-filtered
    # stream handler on stderr.
    root_logger = logging.getLogger()
    for handler in root_logger.handlers:
        root_logger.removeHandler(handler)
    formatter = logging.Formatter(
        '%(relativeCreated)8d:%(levelname)-8s:%(name)s: %(message)s')
    root_logger.setLevel(logging.DEBUG)
    log_path = os.path.join(VM_BASE_DIR, time.strftime('debug-%Y%m%d-%H%M%S.log'))
    current_log_path = os.path.join(VM_BASE_DIR, 'debug.log')
    if os.path.isfile(current_log_path) or os.path.islink(current_log_path):
        os.unlink(current_log_path)
    os.symlink(log_path, current_log_path)
    handler = logging.FileHandler(log_path, 'w')
    handler.setFormatter(formatter)
    handler.setLevel(logging.DEBUG)
    root_logger.addHandler(handler)
    handler = logging.StreamHandler()
    handler.setFormatter(formatter)
    handler.setLevel(
        {'debug': logging.DEBUG,
         'info': logging.INFO,
         'warning': logging.WARNING,
         'error': logging.ERROR,
         'critical': logging.CRITICAL,
        }[args.log_level])
    root_logger.addHandler(handler)
    try:
        logger.info(' '.join(argv))
        devpi_logger = logging.getLogger('devpi')
        devpi_serverdir = os.path.join(ROOT_DIR, 'vmtests', '_cache', 'devpi')
        if not os.path.isdir(devpi_serverdir):
            # First run: initialize the devpi server state directory.
            logger.info("Initializing devpi cache at '%s'...", devpi_serverdir)
            devpi = await asyncio.create_subprocess_exec(
                'devpi-server',
                '--serverdir=%s' % devpi_serverdir,
                '--init',
                stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT,
                loop=event_loop)
            devpi_stdout_dumper = event_loop.create_task(
                log_dumper(devpi.stdout, devpi_logger.debug, encoding='utf-8'))
            await devpi.wait()
            await devpi_stdout_dumper
        logger.info("Starting local devpi server on port %d...", args.devpi_port)
        devpi = await asyncio.create_subprocess_exec(
            'devpi-server',
            '--serverdir=%s' % devpi_serverdir,
            '--port=%d' % args.devpi_port,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            loop=event_loop)
        devpi_stdout_dumper = event_loop.create_task(
            log_dumper(devpi.stdout, devpi_logger.debug, encoding='utf-8'))
        try:
            # BUG FIX: this log line previously reported args.devpi_port
            # instead of the apt-cacher-ng port.
            logger.info("Starting local apt-cacher-ng server on port %d...", args.apt_cacher_port)
            apt_cacher_logger = logging.getLogger('apt-cacher-ng')
            apt_cacher = await asyncio.create_subprocess_exec(
                os.path.join(os.environ['VIRTUAL_ENV'], 'sbin', 'apt-cacher-ng'),
                'ForeGround=1',
                'Port=%d' % args.apt_cacher_port,
                'CacheDir=%s' % os.path.join(ROOT_DIR, 'vmtests', '_cache', 'apt-cacher-ng'),
                'LogDir=%s' % os.path.join(ROOT_DIR, 'vmtests', '_cache', 'apt-cacher-ng'),
                stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT,
                loop=event_loop)
            apt_cacher_stdout_dumper = event_loop.create_task(
                log_dumper(apt_cacher.stdout, apt_cacher_logger.debug, encoding='utf-8'))
            try:
                settings = TestSettings(args)
                vm_args = {
                    'base_dir': VM_BASE_DIR,
                    'event_loop': event_loop,
                    'cores': args.cores,
                    'memory': 2 << 30,
                }
                if args.just_start:
                    # Boot a single VM with a GUI and wait for manual poweroff.
                    assert len(args.vms) == 1
                    vm_name = args.vms[0]
                    vm_cls = ALL_VMTESTS[vm_name]
                    vm = vm_cls(name=vm_name, **vm_args)
                    if args.force_install:
                        await vm.install()
                    assert vm.is_installed
                    try:
                        await vm.start(gui=args.gui if args.gui is not None else True)
                        await vm.wait_for_state(vm.POWEROFF, timeout=3600)
                    finally:
                        await vm.poweroff()
                    return
                if args.login:
                    # Boot a single VM and attach an interactive SSH session.
                    assert len(args.vms) == 1
                    vm_name = args.vms[0]
                    vm_cls = ALL_VMTESTS[vm_name]
                    vm = vm_cls(name=vm_name, **vm_args)
                    if args.force_install:
                        await vm.install()
                    assert vm.is_installed
                    try:
                        await vm.start(gui=args.gui if args.gui is not None else False)
                        await vm.wait_for_ssh()
                        proc = await asyncio.create_subprocess_exec(
                            '/usr/bin/sshpass', '-p123',
                            '/usr/bin/ssh',
                            '-p5555',
                            '-X',
                            '-oStrictHostKeyChecking=off',
                            '-oUserKnownHostsFile=/dev/null',
                            '-oLogLevel=quiet',
                            'testuser@localhost',
                            loop=event_loop)
                        await proc.wait()
                    finally:
                        await vm.poweroff()
                    return
                # Normal mode: run every selected VM test and collect results.
                results = {}
                for vm_name in args.vms:
                    vm_cls = ALL_VMTESTS[vm_name]
                    vm = vm_cls(name=vm_name, **vm_args)
                    if not vm.is_installed or args.force_install:
                        await vm.install()
                    elif args.clean_snapshot:
                        await vm.restore_snapshot('clean')
                    try:
                        await vm.start(gui=args.gui if args.gui is not None else False)
                        results[vm.name] = await vm.run_test(settings)
                    finally:
                        await vm.poweroff()
                if not all(results.values()):
                    # Print a summary of failures and signal failure via exit code.
                    print()
                    print('-' * 96)
                    print("%d/%d tests FAILED." % (
                        sum(1 for success in results.values() if not success), len(results)))
                    print()
                    for vm, success in sorted(results.items(), key=lambda i: i[0]):
                        print("%s... %s" % (vm, 'SUCCESS' if success else 'FAILED'))
                    return 1
                return 0
            finally:
                apt_cacher.terminate()
                await apt_cacher.wait()
                await apt_cacher_stdout_dumper
        finally:
            devpi.terminate()
            await devpi.wait()
            await devpi_stdout_dumper
    except:
        # Intentionally broad: record any failure (including SystemExit /
        # KeyboardInterrupt) in the log file before re-raising.
        logger.error("runvmtests failed with an exception:\n%s", traceback.format_exc())
        raise
    finally:
        print("Full logs at %s" % log_path)
if __name__ == '__main__':
    # Drive the async entry point to completion and propagate its return value
    # as the process exit status.
    loop = asyncio.get_event_loop()
    sys.exit(loop.run_until_complete(main(loop, sys.argv)))
| gpl-2.0 |
kisoku/ansible | lib/ansible/plugins/action/async.py | 13 | 3133 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
import random
from ansible import constants as C
from ansible.plugins.action import ActionBase
class ActionModule(ActionBase):
    """Run a module asynchronously by wrapping it with the async_wrapper module."""

    def run(self, tmp=None, task_vars=None):
        ''' transfer the given module name, plus the async module, then run it '''

        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(tmp, task_vars)

        # Async execution cannot be simulated, so bail out in check mode.
        if self._play_context.check_mode:
            result['skipped'] = True
            result['msg'] = 'check mode not supported for this module'
            return result

        if not tmp:
            tmp = self._make_tmp_path()

        module_name = self._task.action
        async_module_path = self._connection._shell.join_path(tmp, 'async_wrapper')
        remote_module_path = self._connection._shell.join_path(tmp, module_name)

        env_string = self._compute_environment_string()

        module_args = self._task.args.copy()
        if self._play_context.no_log or not C.DEFAULT_NO_TARGET_SYSLOG:
            module_args['_ansible_no_log'] = True

        # configure, upload, and chmod the target module
        (module_style, shebang, module_data) = self._configure_module(
            module_name=module_name, module_args=module_args, task_vars=task_vars)
        self._transfer_data(remote_module_path, module_data)
        self._remote_chmod('a+rx', remote_module_path)

        # configure, upload, and chmod the async_wrapper module
        (async_module_style, shebang, async_module_data) = self._configure_module(
            module_name='async_wrapper', module_args=dict(), task_vars=task_vars)
        self._transfer_data(async_module_path, async_module_data)
        self._remote_chmod('a+rx', async_module_path)

        argsfile = self._transfer_data(
            self._connection._shell.join_path(tmp, 'arguments'), json.dumps(module_args))

        # BUG FIX: 'async' is a reserved keyword since Python 3.7, so the
        # original 'self._task.async' attribute access is a SyntaxError on
        # modern interpreters; fetch the attribute dynamically instead.
        async_limit = getattr(self._task, 'async')
        async_jid = str(random.randint(0, 999999999999))

        # Build: <env> <async_wrapper> <jid> <time limit> <module> <argsfile>
        async_cmd = " ".join([str(x) for x in [
            env_string, async_module_path, async_jid, async_limit,
            remote_module_path, argsfile]])
        result.update(self._low_level_execute_command(cmd=async_cmd))

        # clean up after
        if tmp and "tmp" in tmp and not C.DEFAULT_KEEP_REMOTE_FILES:
            self._remove_tmp_path(tmp)

        result['changed'] = True

        return result
| gpl-3.0 |
pwmarcz/django | tests/admin_widgets/models.py | 36 | 4488 | from __future__ import unicode_literals
from django.db import models
from django.contrib.auth.models import User
from django.utils.encoding import python_2_unicode_compatible
class MyFileField(models.FileField):
    # Trivial FileField subclass; lets the admin-widget tests exercise widget
    # selection for a custom (non-builtin) file field class.
    pass
@python_2_unicode_compatible
class Member(models.Model):
    # A band member; target of Band.members (plain ManyToManyField).
    name = models.CharField(max_length=100)
    birthdate = models.DateTimeField(blank=True, null=True)
    gender = models.CharField(max_length=1, blank=True, choices=[('M', 'Male'), ('F', 'Female')])
    email = models.EmailField(blank=True)
    def __str__(self):
        return self.name
@python_2_unicode_compatible
class Band(models.Model):
    # A band with a many-to-many relation to its members.
    name = models.CharField(max_length=100)
    style = models.CharField(max_length=20)
    members = models.ManyToManyField(Member)
    def __str__(self):
        return self.name
@python_2_unicode_compatible
class Album(models.Model):
    # Uses both the builtin FileField and the custom MyFileField so widget
    # rendering can be compared between the two.
    band = models.ForeignKey(Band)
    name = models.CharField(max_length=100)
    cover_art = models.FileField(upload_to='albums')
    backside_art = MyFileField(upload_to='albums_back', null=True)
    def __str__(self):
        return self.name
class HiddenInventoryManager(models.Manager):
    # Manager that excludes hidden inventory items from its querysets.
    def get_queryset(self):
        return super(HiddenInventoryManager, self).get_queryset().filter(hidden=False)
@python_2_unicode_compatible
class Inventory(models.Model):
    # Self-referential FK that targets a non-pk unique field (to_field).
    barcode = models.PositiveIntegerField(unique=True)
    parent = models.ForeignKey('self', to_field='barcode', blank=True, null=True)
    name = models.CharField(blank=False, max_length=20)
    hidden = models.BooleanField(default=False)
    # see #9258 -- the unfiltered manager is declared first (so it serves as
    # the model's default manager), while `objects` hides hidden items.
    default_manager = models.Manager()
    objects = HiddenInventoryManager()
    def __str__(self):
        return self.name
class Event(models.Model):
    # Exercises date/time widgets plus FK/M2M fields with limit_choices_to
    # (the Q(pk__gt=0) filter is a trivial always-true constraint).
    main_band = models.ForeignKey(Band, limit_choices_to=models.Q(pk__gt=0), related_name='events_main_band_at')
    supporting_bands = models.ManyToManyField(Band, blank=True, related_name='events_supporting_band_at')
    start_date = models.DateField(blank=True, null=True)
    start_time = models.TimeField(blank=True, null=True)
    description = models.TextField(blank=True)
    link = models.URLField(blank=True)
    min_age = models.IntegerField(blank=True, null=True)
@python_2_unicode_compatible
class Car(models.Model):
    # A car owned by a User; see CarTire for the owner-scoped FK test.
    owner = models.ForeignKey(User)
    make = models.CharField(max_length=30)
    model = models.CharField(max_length=30)
    def __str__(self):
        return "%s %s" % (self.make, self.model)
class CarTire(models.Model):
    """
    A single car tire. This is used to test that a user can only select
    their own cars in the related-object widget.
    """
    car = models.ForeignKey(Car)
class Honeycomb(models.Model):
    """A model deliberately left unregistered with the admin (see Bee)."""
    location = models.CharField(max_length=20)
class Bee(models.Model):
    """
    A model with a FK to a model that won't be registered with the admin
    (Honeycomb) so the corresponding raw ID widget won't have a magnifying
    glass link to select related honeycomb instances.
    """
    honeycomb = models.ForeignKey(Honeycomb)
class Individual(models.Model):
    """
    A model with a FK to itself. It won't be registered with the admin, so the
    corresponding raw ID widget won't have a magnifying glass link to select
    related instances (rendering will be called programmatically in this case).
    """
    name = models.CharField(max_length=20)
    parent = models.ForeignKey('self', null=True)
class Company(models.Model):
    """A model deliberately left unregistered with the admin (see Advisor)."""
    name = models.CharField(max_length=20)
class Advisor(models.Model):
    """
    A model with a m2m to a model that won't be registered with the admin
    (Company) so the corresponding raw ID widget won't have a magnifying
    glass link to select related company instances.
    """
    name = models.CharField(max_length=20)
    companies = models.ManyToManyField(Company)
@python_2_unicode_compatible
class Student(models.Model):
    """A student; default queryset ordering is alphabetical by name."""
    name = models.CharField(max_length=255)

    def __str__(self):
        return self.name

    class Meta:
        ordering = ('name',)
@python_2_unicode_compatible
class School(models.Model):
    """A school with separate m2m relations for current and former students."""
    name = models.CharField(max_length=255)
    students = models.ManyToManyField(Student, related_name='current_schools')
    alumni = models.ManyToManyField(Student, related_name='previous_schools')

    def __str__(self):
        return self.name
@python_2_unicode_compatible
class Profile(models.Model):
    """A profile linked to auth.User."""
    # NOTE(review): in this Django era the second positional argument to
    # ForeignKey is `to_field`, so this links via User.username rather than
    # the pk — confirm against the Django release in use.
    user = models.ForeignKey('auth.User', 'username')

    def __str__(self):
        return self.user.username
| bsd-3-clause |
Simage/shinken | test/test_service_without_host.py | 14 | 1580 | #!/usr/bin/env python
# Copyright (C) 2009-2014:
# Gabes Jean, naparuba@gmail.com
# Gerhard Lausser, Gerhard.Lausser@consol.de
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
#
# This file is used to test reading and processing of config files
#
from shinken_test import *
class Testservice_without_host(ShinkenTest):
    """A service whose host_name does not exist must be rejected cleanly."""

    def setUp(self):
        self.setup_with_file('etc/shinken_service_without_host.cfg')

    def test_service_without_host_do_not_break(self):
        """The configuration is flagged incorrect and the error is logged."""
        self.assertIs(False, self.conf.conf_is_correct)
        for brok in self.broks.values():
            brok.prepare()
        logs = []
        for brok in self.broks.values():
            if brok.type == 'log':
                logs.append(brok.data['log'])
        pattern = "The service 'WillError' got an unknown host_name 'NOEXIST'"
        matches = [entry for entry in logs if re.search(pattern, entry)]
        self.assertLess(0, len(matches))
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| agpl-3.0 |
ZLLab-Mooc/edx-platform | lms/djangoapps/course_wiki/tests/test_tab.py | 158 | 2454 | """
Tests for wiki views.
"""
from django.conf import settings
from django.test.client import RequestFactory
from courseware.tabs import get_course_tab_list
from student.tests.factories import AdminFactory, UserFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
class WikiTabTestCase(ModuleStoreTestCase):
    """Test cases for Wiki Tab."""

    def setUp(self):
        super(WikiTabTestCase, self).setUp()
        self.course = CourseFactory.create()
        self.instructor = AdminFactory.create()
        self.user = UserFactory()

    def get_wiki_tab(self, user, course):
        """Return the 'Wiki' course tab for `user`, or None if it is not shown."""
        request = RequestFactory().request()
        request.user = user
        all_tabs = get_course_tab_list(request, course)
        wiki_tabs = [tab for tab in all_tabs if tab.name == 'Wiki']
        return wiki_tabs[0] if len(wiki_tabs) == 1 else None

    # NOTE(review): these tests assign settings.WIKI_ENABLED directly instead
    # of using override_settings, so the flag leaks into later tests in the
    # same process — confirm the suite tolerates this ordering dependence.
    def test_wiki_enabled_and_public(self):
        """
        Test wiki tab when Enabled setting is True and the wiki is open to
        the public.
        """
        settings.WIKI_ENABLED = True
        self.course.allow_public_wiki_access = True
        self.assertIsNotNone(self.get_wiki_tab(self.user, self.course))

    def test_wiki_enabled_and_not_public(self):
        """
        Test wiki when it is enabled but not open to the public.
        """
        settings.WIKI_ENABLED = True
        self.course.allow_public_wiki_access = False
        self.assertIsNone(self.get_wiki_tab(self.user, self.course))
        self.assertIsNotNone(self.get_wiki_tab(self.instructor, self.course))

    def test_wiki_enabled_false(self):
        """Test wiki tab when Enabled setting is False"""
        settings.WIKI_ENABLED = False
        self.assertIsNone(self.get_wiki_tab(self.user, self.course))
        self.assertIsNone(self.get_wiki_tab(self.instructor, self.course))

    def test_wiki_visibility(self):
        """Test toggling of visibility of wiki tab"""
        settings.WIKI_ENABLED = True
        self.course.allow_public_wiki_access = True
        wiki_tab = self.get_wiki_tab(self.user, self.course)
        self.assertIsNotNone(wiki_tab)
        self.assertTrue(wiki_tab.is_hideable)
        wiki_tab.is_hidden = True
        # The tab object supports both attribute and item access for the
        # same underlying field, which this test exercises deliberately.
        self.assertTrue(wiki_tab['is_hidden'])
        wiki_tab['is_hidden'] = False
        self.assertFalse(wiki_tab.is_hidden)
| agpl-3.0 |
knehez/edx-platform | cms/djangoapps/contentstore/views/tests/test_assets.py | 21 | 21518 | """
Unit tests for the asset upload endpoint.
"""
from datetime import datetime
from io import BytesIO
from pytz import UTC
from PIL import Image
import json
from django.conf import settings
from contentstore.tests.utils import CourseTestCase
from contentstore.views import assets
from contentstore.utils import reverse_course_url
from xmodule.assetstore.assetmgr import AssetMetadataFoundTemporary
from xmodule.assetstore import AssetMetadata
from xmodule.contentstore.content import StaticContent
from xmodule.contentstore.django import contentstore
from xmodule.modulestore.django import modulestore
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.xml_importer import import_course_from_xml
from django.test.utils import override_settings
from opaque_keys.edx.locations import SlashSeparatedCourseKey, AssetLocation
from static_replace import replace_static_urls
import mock
from ddt import ddt
from ddt import data
# Root of shared fixture courses and the upload size cap (MB setting → bytes).
TEST_DATA_DIR = settings.COMMON_TEST_DATA_ROOT
MAX_FILE_SIZE = settings.MAX_ASSET_UPLOAD_FILE_SIZE_IN_MB * 1000 ** 2
# Feature flags with the HTML certificate view switched on for these tests.
FEATURES_WITH_CERTS_ENABLED = settings.FEATURES.copy()
FEATURES_WITH_CERTS_ENABLED['CERTIFICATES_HTML_VIEW'] = True
@override_settings(FEATURES=FEATURES_WITH_CERTS_ENABLED)
class AssetsTestCase(CourseTestCase):
    """
    Parent class for all asset tests.

    Provides the assets-handler URL plus helpers for generating and
    uploading small in-memory sample files.
    """

    def setUp(self):
        super(AssetsTestCase, self).setUp()
        self.url = reverse_course_url('assets_handler', self.course.id)

    def upload_asset(self, name="asset-1", asset_type='text'):
        """
        POST a generated sample asset to the asset upload URL and return
        the raw response.
        """
        asset = self.get_sample_asset(name, asset_type)
        response = self.client.post(self.url, {"name": name, "file": asset})
        return response

    def get_sample_asset(self, name, asset_type='text'):
        """
        Return an in-memory file of the specified type ('text', 'image' or
        'opendoc') with the given name, rewound for reading.
        """
        sample_asset = BytesIO()
        sample_file_contents = "This file is generated by python unit test"
        if asset_type == 'text':
            sample_asset.name = '{name}.txt'.format(name=name)
            sample_asset.write(sample_file_contents)
        elif asset_type == 'image':
            # 255 is the maximum valid 8-bit channel value; the previous
            # (256, 0, 0) overflowed the red channel and did not produce
            # the intended solid-red image.
            image = Image.new("RGB", size=(50, 50), color=(255, 0, 0))
            image.save(sample_asset, 'jpeg')
            sample_asset.name = '{name}.jpg'.format(name=name)
        elif asset_type == 'opendoc':
            sample_asset.name = '{name}.odt'.format(name=name)
            sample_asset.write(sample_file_contents)
        sample_asset.seek(0)
        return sample_asset
class BasicAssetsTestCase(AssetsTestCase):
    """
    Test getting assets via html w/o additional args.
    """

    def test_basic(self):
        # A plain HTML GET of the assets page should succeed.
        resp = self.client.get(self.url, HTTP_ACCEPT='text/html')
        self.assertEquals(resp.status_code, 200)

    def test_static_url_generation(self):
        # An asset key should map to a /static/<name> portable path.
        course_key = SlashSeparatedCourseKey('org', 'class', 'run')
        location = course_key.make_asset_key('asset', 'my_file_name.jpg')
        path = StaticContent.get_static_path_from_location(location)
        self.assertEquals(path, '/static/my_file_name.jpg')

    def test_pdf_asset(self):
        module_store = modulestore()
        # Import the 'toy' fixture course along with its static assets.
        course_items = import_course_from_xml(
            module_store,
            self.user.id,
            TEST_DATA_DIR,
            ['toy'],
            static_content_store=contentstore(),
            verbose=True
        )
        course = course_items[0]
        url = reverse_course_url('assets_handler', course.id)
        # Test valid contentType for pdf asset (textbook.pdf)
        resp = self.client.get(url, HTTP_ACCEPT='application/json')
        self.assertContains(resp, "/c4x/edX/toy/asset/textbook.pdf")
        asset_location = AssetLocation.from_deprecated_string('/c4x/edX/toy/asset/textbook.pdf')
        content = contentstore().find(asset_location)
        # Check after import textbook.pdf has valid contentType ('application/pdf')
        # Note: Actual contentType for textbook.pdf in asset.json is 'text/pdf'
        self.assertEqual(content.content_type, 'application/pdf')

    def test_relative_url_for_split_course(self):
        """
        Test relative path for split courses assets
        """
        with modulestore().default_store(ModuleStoreEnum.Type.split):
            module_store = modulestore()
            course_id = module_store.make_course_key('edX', 'toy', '2012_Fall')
            import_course_from_xml(
                module_store,
                self.user.id,
                TEST_DATA_DIR,
                ['toy'],
                static_content_store=contentstore(),
                target_id=course_id,
                create_if_not_present=True
            )
            course = module_store.get_course(course_id)
            filename = 'sample_static.txt'
            html_src_attribute = '"/static/{}"'.format(filename)
            # Rewrite the /static/ reference into a course-run-specific URL.
            asset_url = replace_static_urls(html_src_attribute, course_id=course.id)
            url = asset_url.replace('"', '')
            base_url = url.replace(filename, '')
            self.assertTrue("/{}".format(filename) in url)
            resp = self.client.get(url)
            self.assertEquals(resp.status_code, 200)
            # simulation of html page where base_url is up-to asset's main directory
            # and relative_path is dom element with its src
            relative_path = 'just_a_test.jpg'
            # browser append relative_path with base_url
            absolute_path = base_url + relative_path
            self.assertTrue("/{}".format(relative_path) in absolute_path)
            resp = self.client.get(absolute_path)
            self.assertEquals(resp.status_code, 200)
class PaginationTestCase(AssetsTestCase):
    """
    Tests the pagination, sorting and filtering of assets returned from the
    REST API.
    """

    def test_json_responses(self):
        """
        Upload a few assets, then exercise paging, sorting and filtering.
        """
        self.upload_asset("asset-1")
        self.upload_asset("asset-2")
        self.upload_asset("asset-3")
        self.upload_asset("asset-4", "opendoc")
        # Verify valid page requests
        self.assert_correct_asset_response(self.url, 0, 4, 4)
        self.assert_correct_asset_response(self.url + "?page_size=2", 0, 2, 4)
        self.assert_correct_asset_response(
            self.url + "?page_size=2&page=1", 2, 2, 4)
        self.assert_correct_sort_response(self.url, 'date_added', 'asc')
        self.assert_correct_sort_response(self.url, 'date_added', 'desc')
        self.assert_correct_sort_response(self.url, 'display_name', 'asc')
        self.assert_correct_sort_response(self.url, 'display_name', 'desc')
        self.assert_correct_filter_response(self.url, 'asset_type', '')
        self.assert_correct_filter_response(self.url, 'asset_type', 'OTHER')
        self.assert_correct_filter_response(
            self.url, 'asset_type', 'Documents')
        # Verify querying outside the range of valid pages
        self.assert_correct_asset_response(
            self.url + "?page_size=2&page=-1", 0, 2, 4)
        self.assert_correct_asset_response(
            self.url + "?page_size=2&page=2", 2, 2, 4)
        self.assert_correct_asset_response(
            self.url + "?page_size=3&page=1", 3, 1, 4)

    @mock.patch('xmodule.contentstore.mongo.MongoContentStore.get_all_content_for_course')
    def test_mocked_filtered_response(self, mock_get_all_content_for_course):
        """
        Exercise the filter path with the contentstore response mocked out.
        """
        asset_key = self.course.id.make_asset_key(
            AssetMetadata.GENERAL_ASSET_TYPE, 'test.jpg')
        upload_date = datetime(2015, 1, 12, 10, 30, tzinfo=UTC)
        thumbnail_location = [
            'c4x', 'edX', 'toy', 'thumbnail', 'test_thumb.jpg', None]
        mock_get_all_content_for_course.return_value = [
            [
                {
                    "asset_key": asset_key,
                    "displayname": "test.jpg",
                    "contentType": "image/jpg",
                    "url": "/c4x/A/CS102/asset/test.jpg",
                    "uploadDate": upload_date,
                    "id": "/c4x/A/CS102/asset/test.jpg",
                    "portable_url": "/static/test.jpg",
                    "thumbnail": None,
                    "thumbnail_location": thumbnail_location,
                    "locked": None
                }
            ],
            1
        ]
        # Verify valid page requests
        self.assert_correct_filter_response(self.url, 'asset_type', 'OTHER')

    def assert_correct_asset_response(self, url, expected_start, expected_length, expected_total):
        """
        GET `url` and check the start index, page length and total count of
        the returned asset page.
        """
        resp = self.client.get(url, HTTP_ACCEPT='application/json')
        json_response = json.loads(resp.content)
        assets_response = json_response['assets']
        self.assertEquals(json_response['start'], expected_start)
        self.assertEquals(len(assets_response), expected_length)
        self.assertEquals(json_response['totalCount'], expected_total)

    def assert_correct_sort_response(self, url, sort, direction):
        """
        GET `url` with a sort option and ensure the items honor that sort.
        """
        resp = self.client.get(
            url + '?sort=' + sort + '&direction=' + direction, HTTP_ACCEPT='application/json')
        json_response = json.loads(resp.content)
        assets_response = json_response['assets']
        name1 = assets_response[0][sort]
        name2 = assets_response[1][sort]
        name3 = assets_response[2][sort]
        if direction == 'asc':
            self.assertLessEqual(name1, name2)
            self.assertLessEqual(name2, name3)
        else:
            self.assertGreaterEqual(name1, name2)
            self.assertGreaterEqual(name2, name3)

    def assert_correct_filter_response(self, url, filter_type, filter_value):
        """
        GET `url` with a filter option and ensure the items honor that filter.
        """
        requested_file_types = settings.FILES_AND_UPLOAD_TYPE_FILTERS.get(
            filter_value, None)
        resp = self.client.get(
            url + '?' + filter_type + '=' + filter_value, HTTP_ACCEPT='application/json')
        json_response = json.loads(resp.content)
        assets_response = json_response['assets']
        # Use equality, not identity: `is`/`is not` on string literals only
        # ever worked by accident of CPython interning.
        if filter_value != '':
            content_types = [asset['content_type'].lower()
                             for asset in assets_response]
            if filter_value == 'OTHER':
                # 'OTHER' means "matches none of the named filters": gather
                # every content type covered by a named filter and assert
                # exclusion. (The previous code extended with the filter
                # *names* character by character, making the check vacuous.)
                all_file_type_extensions = []
                for extensions in settings.FILES_AND_UPLOAD_TYPE_FILTERS.values():
                    all_file_type_extensions.extend(extensions)
                for content_type in content_types:
                    self.assertNotIn(content_type, all_file_type_extensions)
            else:
                for content_type in content_types:
                    self.assertIn(content_type, requested_file_types)
@ddt
class UploadTestCase(AssetsTestCase):
    """
    Unit tests for uploading a file.
    """

    def setUp(self):
        super(UploadTestCase, self).setUp()
        self.url = reverse_course_url('assets_handler', self.course.id)

    def test_happy_path(self):
        resp = self.upload_asset()
        self.assertEquals(resp.status_code, 200)

    def test_upload_image(self):
        resp = self.upload_asset("test_image", asset_type="image")
        self.assertEquals(resp.status_code, 200)

    def test_no_file(self):
        # Posting without a "file" field is a client error (400).
        resp = self.client.post(self.url, {"name": "file.txt"}, "application/json")
        self.assertEquals(resp.status_code, 400)

    @data(
        # (reported file size, asset name, expected HTTP status)
        (int(MAX_FILE_SIZE / 2.0), "small.file.test", 200),
        (MAX_FILE_SIZE, "justequals.file.test", 200),
        (MAX_FILE_SIZE + 90, "large.file.test", 413),
    )
    @mock.patch('contentstore.views.assets.get_file_size')
    def test_file_size(self, case, get_file_size):
        # get_file_size is mocked so any size can be simulated without
        # actually building a huge upload body.
        max_file_size, name, status_code = case
        get_file_size.return_value = max_file_size
        f = self.get_sample_asset(name=name)
        resp = self.client.post(self.url, {
            "name": name,
            "file": f
        })
        self.assertEquals(resp.status_code, status_code)
class DownloadTestCase(AssetsTestCase):
    """
    Unit tests for downloading a file.
    """

    def setUp(self):
        super(DownloadTestCase, self).setUp()
        self.url = reverse_course_url('assets_handler', self.course.id)
        # First, upload something.
        self.asset_name = 'download_test'
        resp = self.upload_asset(self.asset_name)
        self.assertEquals(resp.status_code, 200)
        self.uploaded_url = json.loads(resp.content)['asset']['url']

    def test_download(self):
        # Now, download it.
        resp = self.client.get(self.uploaded_url, HTTP_ACCEPT='text/html')
        self.assertEquals(resp.status_code, 200)
        self.assertContains(resp, 'This file is generated by python unit test')

    def test_download_not_found_throw(self):
        # Requesting a name that was never uploaded must 404.
        url = self.uploaded_url.replace(self.asset_name, 'not_the_asset_name')
        resp = self.client.get(url, HTTP_ACCEPT='text/html')
        self.assertEquals(resp.status_code, 404)

    def test_metadata_found_in_modulestore(self):
        # Insert asset metadata into the modulestore (with no accompanying asset).
        asset_key = self.course.id.make_asset_key(AssetMetadata.GENERAL_ASSET_TYPE, 'pic1.jpg')
        asset_md = AssetMetadata(asset_key, {
            'internal_name': 'EKMND332DDBK',
            'basename': 'pix/archive',
            'locked': False,
            'curr_version': '14',
            'prev_version': '13'
        })
        modulestore().save_asset_metadata(asset_md, 15)
        # Get the asset metadata and have it be found in the modulestore.
        # Currently, no asset metadata should be found in the modulestore. The code is not yet storing it there.
        # If asset metadata *is* found there, an exception is raised. This test ensures the exception is indeed raised.
        # THIS IS TEMPORARY. Soon, asset metadata *will* be stored in the modulestore.
        with self.assertRaises((AssetMetadataFoundTemporary, NameError)):
            self.client.get(unicode(asset_key), HTTP_ACCEPT='text/html')
class AssetToJsonTestCase(AssetsTestCase):
    """
    Unit test for transforming asset information into something
    we can send out to the client via JSON.
    """

    @override_settings(LMS_BASE="lms_base_url")
    def test_basic(self):
        upload_date = datetime(2013, 6, 1, 10, 30, tzinfo=UTC)
        content_type = 'image/jpg'
        course_key = SlashSeparatedCourseKey('org', 'class', 'run')
        location = course_key.make_asset_key('asset', 'my_file_name.jpg')
        thumbnail_location = course_key.make_asset_key('thumbnail', 'my_file_name_thumb.jpg')
        # pylint: disable=protected-access
        output = assets._get_asset_json("my_file", content_type, upload_date, location, thumbnail_location, True)
        self.assertEquals(output["display_name"], "my_file")
        self.assertEquals(output["date_added"], "Jun 01, 2013 at 10:30 UTC")
        self.assertEquals(output["url"], "/c4x/org/class/asset/my_file_name.jpg")
        self.assertEquals(output["external_url"], "lms_base_url/c4x/org/class/asset/my_file_name.jpg")
        self.assertEquals(output["portable_url"], "/static/my_file_name.jpg")
        self.assertEquals(output["thumbnail"], "/c4x/org/class/thumbnail/my_file_name_thumb.jpg")
        self.assertEquals(output["id"], unicode(location))
        self.assertEquals(output['locked'], True)
        # Without a thumbnail location the JSON should carry None.
        output = assets._get_asset_json("name", content_type, upload_date, location, None, False)
        self.assertIsNone(output["thumbnail"])
class LockAssetTestCase(AssetsTestCase):
    """
    Unit test for locking and unlocking an asset.
    """

    def test_locking(self):
        """
        Tests a simple locking and unlocking of an asset in the toy course.
        """
        def verify_asset_locked_state(locked):
            """ Helper method to verify lock state in the contentstore """
            asset_location = StaticContent.get_location_from_path('/c4x/edX/toy/asset/sample_static.txt')
            content = contentstore().find(asset_location)
            self.assertEqual(content.locked, locked)

        def post_asset_update(lock, course):
            """ Helper method for posting asset update. """
            content_type = 'application/txt'
            upload_date = datetime(2013, 6, 1, 10, 30, tzinfo=UTC)
            asset_location = course.id.make_asset_key('asset', 'sample_static.txt')
            url = reverse_course_url('assets_handler', course.id, kwargs={'asset_key_string': unicode(asset_location)})
            resp = self.client.post(
                url,
                # pylint: disable=protected-access
                json.dumps(assets._get_asset_json(
                    "sample_static.txt", content_type, upload_date, asset_location, None, lock)),
                "application/json"
            )
            self.assertEqual(resp.status_code, 201)
            return json.loads(resp.content)

        # Load the toy course.
        module_store = modulestore()
        course_items = import_course_from_xml(
            module_store,
            self.user.id,
            TEST_DATA_DIR,
            ['toy'],
            static_content_store=contentstore(),
            verbose=True
        )
        course = course_items[0]
        verify_asset_locked_state(False)

        # Lock the asset
        resp_asset = post_asset_update(True, course)
        self.assertTrue(resp_asset['locked'])
        verify_asset_locked_state(True)

        # Unlock the asset
        resp_asset = post_asset_update(False, course)
        self.assertFalse(resp_asset['locked'])
        verify_asset_locked_state(False)
class DeleteAssetTestCase(AssetsTestCase):
    """
    Unit test for removing an asset.
    """

    def setUp(self):
        """ Scaffolding: upload one asset every test can try to delete. """
        super(DeleteAssetTestCase, self).setUp()
        self.url = reverse_course_url('assets_handler', self.course.id)
        # First, upload something.
        self.asset_name = 'delete_test'
        self.asset = self.get_sample_asset(self.asset_name)
        response = self.client.post(self.url, {"name": self.asset_name, "file": self.asset})
        self.assertEquals(response.status_code, 200)
        self.uploaded_url = json.loads(response.content)['asset']['url']
        self.asset_location = AssetLocation.from_deprecated_string(self.uploaded_url)
        self.content = contentstore().find(self.asset_location)

    def test_delete_asset(self):
        """ Tests the happy path :) """
        test_url = reverse_course_url(
            'assets_handler', self.course.id, kwargs={'asset_key_string': unicode(self.uploaded_url)})
        resp = self.client.delete(test_url, HTTP_ACCEPT="application/json")
        self.assertEquals(resp.status_code, 204)

    def test_delete_image_type_asset(self):
        """ Tests deletion of image type asset """
        image_asset = self.get_sample_asset(self.asset_name, asset_type="image")
        thumbnail_image_asset = self.get_sample_asset('delete_test_thumbnail', asset_type="image")

        # upload image
        response = self.client.post(self.url, {"name": "delete_image_test", "file": image_asset})
        self.assertEquals(response.status_code, 200)
        uploaded_image_url = json.loads(response.content)['asset']['url']

        # upload image thumbnail
        response = self.client.post(self.url, {"name": "delete_image_thumb_test", "file": thumbnail_image_asset})
        self.assertEquals(response.status_code, 200)
        thumbnail_url = json.loads(response.content)['asset']['url']
        thumbnail_location = StaticContent.get_location_from_path(thumbnail_url)

        image_asset_location = AssetLocation.from_deprecated_string(uploaded_image_url)
        content = contentstore().find(image_asset_location)
        # Point the image at its thumbnail so the delete cascades to it.
        content.thumbnail_location = thumbnail_location
        contentstore().save(content)

        with mock.patch('opaque_keys.edx.locator.CourseLocator.make_asset_key') as mock_asset_key:
            mock_asset_key.return_value = thumbnail_location
            test_url = reverse_course_url(
                'assets_handler', self.course.id, kwargs={'asset_key_string': unicode(uploaded_image_url)})
            resp = self.client.delete(test_url, HTTP_ACCEPT="application/json")
            self.assertEquals(resp.status_code, 204)

    def test_delete_asset_with_invalid_asset(self):
        """ Tests the sad path :( """
        test_url = reverse_course_url(
            'assets_handler', self.course.id, kwargs={'asset_key_string': unicode("/c4x/edX/toy/asset/invalid.pdf")})
        resp = self.client.delete(test_url, HTTP_ACCEPT="application/json")
        self.assertEquals(resp.status_code, 404)

    def test_delete_asset_with_invalid_thumbnail(self):
        """ Tests the sad path :( """
        test_url = reverse_course_url(
            'assets_handler', self.course.id, kwargs={'asset_key_string': unicode(self.uploaded_url)})
        # A dangling thumbnail reference must not block deleting the asset.
        self.content.thumbnail_location = StaticContent.get_location_from_path('/c4x/edX/toy/asset/invalid')
        contentstore().save(self.content)
        resp = self.client.delete(test_url, HTTP_ACCEPT="application/json")
        self.assertEquals(resp.status_code, 204)
| agpl-3.0 |
akumar21NCSU/servo | tests/wpt/web-platform-tests/tools/html5lib/html5lib/tests/test_sanitizer.py | 430 | 4645 | from __future__ import absolute_import, division, unicode_literals
try:
import json
except ImportError:
import simplejson as json
from html5lib import html5parser, sanitizer, constants, treebuilders
def toxmlFactory():
    """Build a serializer that renders an etree element to a unicode string."""
    builder = treebuilders.getTreeBuilder("etree")

    def toxml(element):
        # The bytes round-trip keeps Python 2.6 happy.
        serialized = builder.implementation.tostring(element, encoding="utf-8")
        return serialized.decode("utf-8")

    return toxml
def runSanitizerTest(name, expected, input, toxml=None):
    """Assert that sanitizing `input` yields the serialized `expected` fragment."""
    if toxml is None:
        toxml = toxmlFactory()
    parsed = html5parser.HTMLParser().parseFragment(expected)
    rendered = ''.join(toxml(token) for token in parsed)
    # The JSON round-trip normalizes the string type across Python versions.
    normalized = json.loads(json.dumps(rendered))
    assert normalized == sanitize_html(input)
def sanitize_html(stream, toxml=None):
    """Parse `stream` through the HTMLSanitizer tokenizer and serialize it."""
    if toxml is None:
        toxml = toxmlFactory()
    parser = html5parser.HTMLParser(tokenizer=sanitizer.HTMLSanitizer)
    return ''.join(toxml(token) for token in parser.parseFragment(stream))
def test_should_handle_astral_plane_characters():
    # Characters outside the Basic Multilingual Plane must survive
    # sanitization intact.
    assert '<html:p xmlns:html="http://www.w3.org/1999/xhtml">\U0001d4b5 \U0001d538</html:p>' == sanitize_html("<p>𝒵 𝔸</p>")
def test_sanitizer():
    """Yield per-tag / per-attribute / per-protocol whitelist checks."""
    toxml = toxmlFactory()
    # Allowed elements must pass through; disallowed children get escaped.
    for tag_name in sanitizer.HTMLSanitizer.allowed_elements:
        if tag_name in ['caption', 'col', 'colgroup', 'optgroup', 'option', 'table', 'tbody', 'td', 'tfoot', 'th', 'thead', 'tr']:
            continue  # TODO
        if tag_name != tag_name.lower():
            continue  # TODO
        if tag_name == 'image':
            # 'image' is normalized to an <img/> element by the parser.
            yield (runSanitizerTest, "test_should_allow_%s_tag" % tag_name,
                   "<img title=\"1\"/>foo <bad>bar</bad> baz",
                   "<%s title='1'>foo <bad>bar</bad> baz</%s>" % (tag_name, tag_name),
                   toxml)
        elif tag_name == 'br':
            yield (runSanitizerTest, "test_should_allow_%s_tag" % tag_name,
                   "<br title=\"1\"/>foo <bad>bar</bad> baz<br/>",
                   "<%s title='1'>foo <bad>bar</bad> baz</%s>" % (tag_name, tag_name),
                   toxml)
        elif tag_name in constants.voidElements:
            yield (runSanitizerTest, "test_should_allow_%s_tag" % tag_name,
                   "<%s title=\"1\"/>foo <bad>bar</bad> baz" % tag_name,
                   "<%s title='1'>foo <bad>bar</bad> baz</%s>" % (tag_name, tag_name),
                   toxml)
        else:
            yield (runSanitizerTest, "test_should_allow_%s_tag" % tag_name,
                   "<%s title=\"1\">foo <bad>bar</bad> baz</%s>" % (tag_name, tag_name),
                   "<%s title='1'>foo <bad>bar</bad> baz</%s>" % (tag_name, tag_name),
                   toxml)
    # Upper-cased variants of allowed elements are not on the whitelist.
    for tag_name in sanitizer.HTMLSanitizer.allowed_elements:
        tag_name = tag_name.upper()
        yield (runSanitizerTest, "test_should_forbid_%s_tag" % tag_name,
               "<%s title=\"1\">foo <bad>bar</bad> baz</%s>" % (tag_name, tag_name),
               "<%s title='1'>foo <bad>bar</bad> baz</%s>" % (tag_name, tag_name),
               toxml)
    # Allowed attributes pass through on an allowed element.
    for attribute_name in sanitizer.HTMLSanitizer.allowed_attributes:
        if attribute_name != attribute_name.lower():
            continue  # TODO
        if attribute_name == 'style':
            continue
        yield (runSanitizerTest, "test_should_allow_%s_attribute" % attribute_name,
               "<p %s=\"foo\">foo <bad>bar</bad> baz</p>" % attribute_name,
               "<p %s='foo'>foo <bad>bar</bad> baz</p>" % attribute_name,
               toxml)
    # Upper-cased attribute names are stripped.
    for attribute_name in sanitizer.HTMLSanitizer.allowed_attributes:
        attribute_name = attribute_name.upper()
        yield (runSanitizerTest, "test_should_forbid_%s_attribute" % attribute_name,
               "<p>foo <bad>bar</bad> baz</p>",
               "<p %s='display: none;'>foo <bad>bar</bad> baz</p>" % attribute_name,
               toxml)
    # Whitelisted URI schemes survive in href, in either case.
    for protocol in sanitizer.HTMLSanitizer.allowed_protocols:
        yield (runSanitizerTest, "test_should_allow_%s_uris" % protocol,
               "<a href=\"%s\">foo</a>" % protocol,
               """<a href="%s">foo</a>""" % protocol,
               toxml)
    for protocol in sanitizer.HTMLSanitizer.allowed_protocols:
        yield (runSanitizerTest, "test_should_allow_uppercase_%s_uris" % protocol,
               "<a href=\"%s\">foo</a>" % protocol,
               """<a href="%s">foo</a>""" % protocol,
               toxml)
| mpl-2.0 |
beni55/sympy | sympy/mpmath/tests/test_elliptic.py | 19 | 23954 | """
Limited tests of the elliptic functions module. A full suite of
extensive testing can be found in elliptic_torture_tests.py
Author of the first version: M.T. Taschuk
References:
[1] Abramowitz & Stegun. 'Handbook of Mathematical Functions, 9th Ed.',
(Dover duplicate of 1972 edition)
[2] Whittaker 'A Course of Modern Analysis, 4th Ed.', 1946,
Cambridge University Press
"""
import sympy.mpmath
import random
from sympy.mpmath import *
def mpc_ae(a, b, eps=eps):
    """Approximate complex equality: both real and imaginary parts must agree within eps."""
    return a.real.ae(b.real, eps) and a.imag.ae(b.imag, eps)
# Handy constants and Jacobi elliptic function shorthands.
zero = mpf(0)
one = mpf(1)
jsn = ellipfun('sn')
jcn = ellipfun('cn')
jdn = ellipfun('dn')
# Nome q as a function of the modulus k.
calculate_nome = lambda k: qfrom(k=k)
def test_ellipfun():
    """Spot-check all twelve Jacobi elliptic functions at m = 0, 1 and 0.5."""
    mp.dps = 15
    # Identity functions at the origin.
    assert ellipfun('ss', 0, 0) == 1
    assert ellipfun('cc', 0, 0) == 1
    assert ellipfun('dd', 0, 0) == 1
    assert ellipfun('nn', 0, 0) == 1
    # m = 0: the functions degenerate to trigonometric functions.
    assert ellipfun('sn', 0.25, 0).ae(sin(0.25))
    assert ellipfun('cn', 0.25, 0).ae(cos(0.25))
    assert ellipfun('dn', 0.25, 0).ae(1)
    assert ellipfun('ns', 0.25, 0).ae(csc(0.25))
    assert ellipfun('nc', 0.25, 0).ae(sec(0.25))
    assert ellipfun('nd', 0.25, 0).ae(1)
    assert ellipfun('sc', 0.25, 0).ae(tan(0.25))
    assert ellipfun('sd', 0.25, 0).ae(sin(0.25))
    assert ellipfun('cd', 0.25, 0).ae(cos(0.25))
    assert ellipfun('cs', 0.25, 0).ae(cot(0.25))
    assert ellipfun('dc', 0.25, 0).ae(sec(0.25))
    assert ellipfun('ds', 0.25, 0).ae(csc(0.25))
    # m = 1: the functions degenerate to hyperbolic functions.
    assert ellipfun('sn', 0.25, 1).ae(tanh(0.25))
    assert ellipfun('cn', 0.25, 1).ae(sech(0.25))
    assert ellipfun('dn', 0.25, 1).ae(sech(0.25))
    assert ellipfun('ns', 0.25, 1).ae(coth(0.25))
    assert ellipfun('nc', 0.25, 1).ae(cosh(0.25))
    assert ellipfun('nd', 0.25, 1).ae(cosh(0.25))
    assert ellipfun('sc', 0.25, 1).ae(sinh(0.25))
    assert ellipfun('sd', 0.25, 1).ae(sinh(0.25))
    assert ellipfun('cd', 0.25, 1).ae(1)
    assert ellipfun('cs', 0.25, 1).ae(csch(0.25))
    assert ellipfun('dc', 0.25, 1).ae(1)
    assert ellipfun('ds', 0.25, 1).ae(csch(0.25))
    # m = 0.5: generic case, checked against fixed reference values.
    assert ellipfun('sn', 0.25, 0.5).ae(0.24615967096986145833)
    assert ellipfun('cn', 0.25, 0.5).ae(0.96922928989378439337)
    assert ellipfun('dn', 0.25, 0.5).ae(0.98473484156599474563)
    assert ellipfun('ns', 0.25, 0.5).ae(4.0624038700573130369)
    assert ellipfun('nc', 0.25, 0.5).ae(1.0317476065024692949)
    assert ellipfun('nd', 0.25, 0.5).ae(1.0155017958029488665)
    assert ellipfun('sc', 0.25, 0.5).ae(0.25397465134058993408)
    assert ellipfun('sd', 0.25, 0.5).ae(0.24997558792415733063)
    assert ellipfun('cd', 0.25, 0.5).ae(0.98425408443195497052)
    assert ellipfun('cs', 0.25, 0.5).ae(3.9374008182374110826)
    assert ellipfun('dc', 0.25, 0.5).ae(1.0159978158253033913)
    assert ellipfun('ds', 0.25, 0.5).ae(4.0003906313579720593)
def test_calculate_nome():
    """Check the nome q(m) against Mathematica's EllipticNomeQ[m]."""
    mp.dps = 100
    q = calculate_nome(zero)
    assert q == zero
    mp.dps = 25
    # Reference values from Mathematica's EllipticNomeQ[m].
    # (The previous version listed the m = 9/10 entry twice.)
    math1 = [(mpf(1)/10, mpf('0.006584651553858370274473060')),
             (mpf(2)/10, mpf('0.01394285727531826872146409')),
             (mpf(3)/10, mpf('0.02227743615715350822901627')),
             (mpf(4)/10, mpf('0.03188334731336317755064299')),
             (mpf(5)/10, mpf('0.04321391826377224977441774')),
             (mpf(6)/10, mpf('0.05702025781460967637754953')),
             (mpf(7)/10, mpf('0.07468994353717944761143751')),
             (mpf(8)/10, mpf('0.09927369733882489703607378')),
             (mpf(9)/10, mpf('0.1401731269542615524091055'))]
    # calculate_nome takes the modulus k = sqrt(m).
    for m, expected in math1:
        q = calculate_nome(sqrt(m))
        assert q.ae(expected)
    mp.dps = 15
def test_jtheta():
    """Spot-check the four Jacobi theta functions against Mathematica values."""
    mp.dps = 25
    z = q = zero
    # jtheta(1, 0, 0) = jtheta(2, 0, 0) = 0; jtheta(3, 0, 0) = jtheta(4, 0, 0) = 1.
    for n in range(1, 5):
        value = jtheta(n, z, q)
        assert value == (n - 1) // 2
    # |q| >= 1 is outside the convergence disc and must raise ValueError.
    # (Previously a bare `except:` accepted ANY exception here, which would
    # have masked genuine failures such as a TypeError.)
    for q in [one, mpf(2)]:
        for n in range(1, 5):
            try:
                jtheta(n, z, q)
            except ValueError:
                pass
            else:
                assert False, "jtheta must reject |q| >= 1"
    z = one/10
    q = one/11
    # Mathematica N[EllipticTheta[1, 1/10, 1/11], 25]
    res = mpf('0.1069552990104042681962096')
    result = jtheta(1, z, q)
    assert result.ae(res)
    # Mathematica N[EllipticTheta[2, 1/10, 1/11], 25]
    res = mpf('1.101385760258855791140606')
    result = jtheta(2, z, q)
    assert result.ae(res)
    # Mathematica N[EllipticTheta[3, 1/10, 1/11], 25]
    res = mpf('1.178319743354331061795905')
    result = jtheta(3, z, q)
    assert result.ae(res)
    # Mathematica N[EllipticTheta[4, 1/10, 1/11], 25]
    res = mpf('0.8219318954665153577314573')
    result = jtheta(4, z, q)
    assert result.ae(res)
    # jtheta(1, z, q) vanishes at z = pi; jtheta(2, z, q) vanishes at z = pi/2.
    z1 = pi
    z2 = pi/2
    for i in range(10):
        qstring = str(random.random())
        q = mpf(qstring)
        result = jtheta(1, z1, q)
        assert result.ae(0)
        result = jtheta(2, z2, q)
        assert result.ae(0)
    mp.dps = 15
def test_jtheta_issue39():
    """Regression tests for jtheta accuracy near the convergence
    boundary |q| = 1 (issue 39) and for high z-derivatives (issue 3138)."""
    # near the circle of convergence |q| = 1 the convergence slows
    # down; for |q| > Q_LIM the theta functions raise ValueError
    mp.dps = 30
    mp.dps += 30
    q = mpf(6)/10 - one/10**6 - mpf(8)/10 * j
    mp.dps -= 30
    # Mathematica run first
    # N[EllipticTheta[3, 1, 6/10 - 10^-6 - 8/10*I], 2000]
    # then it works:
    # N[EllipticTheta[3, 1, 6/10 - 10^-6 - 8/10*I], 30]
    res = mpf('32.0031009628901652627099524264') + \
          mpf('16.6153027998236087899308935624') * j
    result = jtheta(3, 1, q)
    # BUG FIX: the original computed res and result but never compared them.
    assert(result.ae(res))
    # check that for abs(q) > Q_LIM a ValueError exception is raised
    mp.dps += 30
    q = mpf(6)/10 - one/10**7 - mpf(8)/10 * j
    mp.dps -= 30
    try:
        jtheta(3, 1, q)
    except ValueError:
        pass
    else:
        assert False, "jtheta should raise ValueError for |q| > Q_LIM"
    # bug reported in issue39
    mp.dps = 100
    z = (1+j)/3
    q = mpf(368983957219251)/10**15 + mpf(636363636363636)/10**15 * j
    # Mathematica N[EllipticTheta[1, z, q], 35]
    res = mpf('2.4439389177990737589761828991467471') + \
          mpf('0.5446453005688226915290954851851490') * j
    mp.dps = 30
    result = jtheta(1, z, q)
    assert(result.ae(res))
    # the result must agree across working precisions
    mp.dps = 80
    z = 3 + 4*j
    q = 0.5 + 0.5*j
    r1 = jtheta(1, z, q)
    mp.dps = 15
    r2 = jtheta(1, z, q)
    assert r1.ae(r2)
    mp.dps = 80
    z = 3 + j
    q1 = exp(j*3)
    # longer test
    # for n in range(1, 6)
    for n in range(1, 2):
        mp.dps = 80
        q = q1*(1 - mpf(1)/10**n)
        r1 = jtheta(1, z, q)
        mp.dps = 15
        r2 = jtheta(1, z, q)
        assert r1.ae(r2)
    mp.dps = 15
    # issue 3138 about high derivatives
    assert jtheta(3, 4.5, 0.25, 9).ae(1359.04892680683)
    assert jtheta(3, 4.5, 0.25, 50).ae(-6.14832772630905e+33)
    mp.dps = 50
    r = jtheta(3, 4.5, 0.25, 9)
    assert r.ae('1359.048926806828939547859396600218966947753213803')
    r = jtheta(3, 4.5, 0.25, 50)
    assert r.ae('-6148327726309051673317975084654262.4119215720343656')
def test_jtheta_identities():
    """
    Test some of the Jacobi theta-function identities found in Abramowitz
    & Stegun, Sec. 16.28, pg. 576.  Each identity is checked to roughly
    1 part in 10^98 at randomly chosen real arguments.
    """
    mp.dps = 110
    # Allow ~30 bits of slack relative to the working precision.
    eps1 = ldexp(eps, 30)
    for i in range(10):
        qstring = str(random.random())
        q = mpf(qstring)
        zstring = str(10*random.random())
        z = mpf(zstring)
        # Abramowitz 16.28.1:
        # v_1(z, q)**2 * v_4(0, q)**2 = v_3(z, q)**2 * v_2(0, q)**2
        #                             - v_2(z, q)**2 * v_3(0, q)**2
        term1 = (jtheta(1, z, q)**2) * (jtheta(4, zero, q)**2)
        term2 = (jtheta(3, z, q)**2) * (jtheta(2, zero, q)**2)
        term3 = (jtheta(2, z, q)**2) * (jtheta(3, zero, q)**2)
        equality = term1 - term2 + term3
        assert(equality.ae(0, eps1))
        zstring = str(100*random.random())
        z = mpf(zstring)
        # Abramowitz 16.28.2:
        # v_2(z, q)**2 * v_4(0, q)**2 = v_4(z, q)**2 * v_2(0, q)**2
        #                             - v_1(z, q)**2 * v_3(0, q)**2
        term1 = (jtheta(2, z, q)**2) * (jtheta(4, zero, q)**2)
        term2 = (jtheta(4, z, q)**2) * (jtheta(2, zero, q)**2)
        term3 = (jtheta(1, z, q)**2) * (jtheta(3, zero, q)**2)
        equality = term1 - term2 + term3
        assert(equality.ae(0, eps1))
        # Abramowitz 16.28.3:
        # v_3(z, q)**2 * v_4(0, q)**2 = v_4(z, q)**2 * v_3(0, q)**2
        #                             - v_1(z, q)**2 * v_2(0, q)**2
        term1 = (jtheta(3, z, q)**2) * (jtheta(4, zero, q)**2)
        term2 = (jtheta(4, z, q)**2) * (jtheta(3, zero, q)**2)
        term3 = (jtheta(1, z, q)**2) * (jtheta(2, zero, q)**2)
        equality = term1 - term2 + term3
        assert(equality.ae(0, eps1))
        # Abramowitz 16.28.4:
        # v_4(z, q)**2 * v_4(0, q)**2 = v_3(z, q)**2 * v_3(0, q)**2
        #                             - v_2(z, q)**2 * v_2(0, q)**2
        term1 = (jtheta(4, z, q)**2) * (jtheta(4, zero, q)**2)
        term2 = (jtheta(3, z, q)**2) * (jtheta(3, zero, q)**2)
        term3 = (jtheta(2, z, q)**2) * (jtheta(2, zero, q)**2)
        equality = term1 - term2 + term3
        assert(equality.ae(0, eps1))
        # Abramowitz 16.28.5:
        # v_2(0, q)**4 + v_4(0, q)**4 == v_3(0, q)**4
        term1 = (jtheta(2, zero, q))**4
        term2 = (jtheta(4, zero, q))**4
        term3 = (jtheta(3, zero, q))**4
        equality = term1 + term2 - term3
        assert(equality.ae(0, eps1))
    mp.dps = 15
def test_jtheta_complex():
    """Check jtheta(n, z, q) for complex z and q against Mathematica,
    and verify several theta-function identities at high precision."""
    mp.dps = 30
    z = mpf(1)/4 + j/8
    q = mpf(1)/3 + j/7
    # Mathematica N[EllipticTheta[1, 1/4 + I/8, 1/3 + I/7], 35]
    res = mpf('0.31618034835986160705729105731678285') + \
          mpf('0.07542013825835103435142515194358975') * j
    r = jtheta(1, z, q)
    assert(mpc_ae(r, res))
    # Mathematica N[EllipticTheta[2, 1/4 + I/8, 1/3 + I/7], 35]
    res = mpf('1.6530986428239765928634711417951828') + \
          mpf('0.2015344864707197230526742145361455') * j
    r = jtheta(2, z, q)
    assert(mpc_ae(r, res))
    # Mathematica N[EllipticTheta[3, 1/4 + I/8, 1/3 + I/7], 35]
    res = mpf('1.6520564411784228184326012700348340') + \
          mpf('0.1998129119671271328684690067401823') * j
    r = jtheta(3, z, q)
    assert(mpc_ae(r, res))
    # Mathematica N[EllipticTheta[4, 1/4 + I/8, 1/3 + I/7], 35]
    res = mpf('0.37619082382228348252047624089973824') - \
          mpf('0.15623022130983652972686227200681074') * j
    r = jtheta(4, z, q)
    assert(mpc_ae(r, res))
    # check some theta function identities
    # BUG FIX: the original wrote "mp.dos = 100", silently creating a
    # bogus attribute instead of raising the working precision.
    mp.dps = 100
    z = mpf(1)/4 + j/8
    q = mpf(1)/3 + j/7
    mp.dps += 10
    a = [0,0, jtheta(2, 0, q), jtheta(3, 0, q), jtheta(4, 0, q)]
    t = [0, jtheta(1, z, q), jtheta(2, z, q), jtheta(3, z, q), jtheta(4, z, q)]
    r = [(t[2]*a[4])**2 - (t[4]*a[2])**2 + (t[1] *a[3])**2,
        (t[3]*a[4])**2 - (t[4]*a[3])**2 + (t[1] *a[2])**2,
        (t[1]*a[4])**2 - (t[3]*a[2])**2 + (t[2] *a[3])**2,
        (t[4]*a[4])**2 - (t[3]*a[3])**2 + (t[2] *a[2])**2,
        a[2]**4 + a[4]**4 - a[3]**4]
    mp.dps -= 10
    for x in r:
        assert(mpc_ae(x, mpc(0)))
    mp.dps = 15
def test_djtheta():
    """Check z-derivatives of jtheta(n, z, q) against Mathematica and
    against numerical differentiation, plus Whittaker & Watson identities."""
    mp.dps = 30
    z = one/7 + j/3
    q = one/8 + j/5
    # Mathematica N[EllipticThetaPrime[1, 1/7 + I/3, 1/8 + I/5], 35]
    res = mpf('1.5555195883277196036090928995803201') - \
          mpf('0.02439761276895463494054149673076275') * j
    result = jtheta(1, z, q, 1)
    assert(mpc_ae(result, res))
    # Mathematica N[EllipticThetaPrime[2, 1/7 + I/3, 1/8 + I/5], 35]
    res = mpf('0.19825296689470982332701283509685662') - \
          mpf('0.46038135182282106983251742935250009') * j
    result = jtheta(2, z, q, 1)
    assert(mpc_ae(result, res))
    # Mathematica N[EllipticThetaPrime[3, 1/7 + I/3, 1/8 + I/5], 35]
    res = mpf('0.36492498415476212680896699407390026') - \
          mpf('0.57743812698666990209897034525640369') * j
    result = jtheta(3, z, q, 1)
    assert(mpc_ae(result, res))
    # Mathematica N[EllipticThetaPrime[4, 1/7 + I/3, 1/8 + I/5], 35]
    res = mpf('-0.38936892528126996010818803742007352') + \
          mpf('0.66549886179739128256269617407313625') * j
    result = jtheta(4, z, q, 1)
    assert(mpc_ae(result, res))
    for i in range(10):
        q = (one*random.random() + j*random.random())/2
        # identity in Whittaker & Watson, section 21.41:
        # theta_1'(0, q) = theta_2(0, q) * theta_3(0, q) * theta_4(0, q)
        a = jtheta(1, 0, q, 1)
        b = jtheta(2, 0, q)*jtheta(3, 0, q)*jtheta(4, 0, q)
        assert(a.ae(b))
    # second and third z-derivatives against numerical differentiation
    mp.dps = 20
    for q,z in [(one/3, one/5), (one/3 + j/8, one/5),
        (one/3, one/5 + j/8), (one/3 + j/7, one/5 + j/8)]:
        for n in [1, 2, 3, 4]:
            r = jtheta(n, z, q, 2)
            r1 = diff(lambda zz: jtheta(n, zz, q), z, n=2)
            assert r.ae(r1)
            r = jtheta(n, z, q, 3)
            r1 = diff(lambda zz: jtheta(n, zz, q), z, n=3)
            assert r.ae(r1)
    # identity in Whittaker & Watson, section 21.41
    q = one/3
    z = zero
    a = [0]*5
    a[1] = jtheta(1, z, q, 3)/jtheta(1, z, q, 1)
    for n in [2,3,4]:
        a[n] = jtheta(n, z, q, 2)/jtheta(n, z, q)
    equality = a[2] + a[3] + a[4] - a[1]
    assert(equality.ae(0))
    mp.dps = 15
def test_jsn():
    """
    Test some special cases of the sn(z, q) function.
    """
    mp.dps = 100
    # trivial case: sn(0, 0) == 0
    result = jsn(zero, zero)
    assert(result == zero)
    # Abramowitz Table 16.5
    #
    # sn(0, m) = 0 for any parameter m
    for i in range(10):
        qstring = str(random.random())
        q = mpf(qstring)
        equality = jsn(zero, q)
        assert(equality.ae(0))
    # Abramowitz Table 16.6.1
    #
    # sn(z, 0) = sin(z), m == 0
    #
    # sn(z, 1) = tanh(z), m == 1
    #
    # It would be nice to test these, but I find that they run
    # in to numerical trouble.  I'm currently treating as a boundary
    # case for the sn function.
    mp.dps = 25
    arg = one/10
    # Mathematica N[JacobiSN[1/10, 2^-100], 25]
    res = mpf('0.09983341664682815230681420')
    m = ldexp(one, -100)
    result = jsn(arg, m)
    assert(result.ae(res))
    # Mathematica N[JacobiSN[1/10, 1/10], 25]
    res = mpf('0.09981686718599080096451168')
    result = jsn(arg, arg)
    assert(result.ae(res))
    mp.dps = 15
def test_jcn():
    """
    Test some special cases of the cn(z, q) function.
    """
    mp.dps = 100
    # Abramowitz Table 16.5
    # cn(0, q) = 1 for any parameter
    qstring = str(random.random())
    q = mpf(qstring)
    cn = jcn(zero, q)
    assert(cn.ae(one))
    # Abramowitz Table 16.6.2
    #
    # cn(u, 0) = cos(u), m == 0
    #
    # cn(u, 1) = sech(z), m == 1
    #
    # It would be nice to test these, but I find that they run
    # in to numerical trouble.  I'm currently treating as a boundary
    # case for the cn function.
    mp.dps = 25
    arg = one/10
    m = ldexp(one, -100)
    # Mathematica N[JacobiCN[1/10, 2^-100], 25]
    res = mpf('0.9950041652780257660955620')
    result = jcn(arg, m)
    assert(result.ae(res))
    # Mathematica N[JacobiCN[1/10, 1/10], 25]
    res = mpf('0.9950058256237368748520459')
    result = jcn(arg, arg)
    assert(result.ae(res))
    mp.dps = 15
def test_jdn():
    """
    Test some special cases of the dn(z, q) function.
    """
    mp.dps = 100
    # Abramowitz Table 16.5: dn(0, m) = 1 for any parameter m.
    param = mpf(str(random.random()))
    value_at_zero = jdn(zero, param)
    assert(value_at_zero.ae(one))
    mp.dps = 25
    # Mathematica N[JacobiDN[1/10, 1/10], 25]
    expected = mpf('0.9995017055025556219713297')
    u = one/10
    computed = jdn(u, u)
    assert(computed.ae(expected))
    mp.dps = 15
def test_sn_cn_dn_identities():
    """
    Test some of the Jacobi elliptic function identities found
    on MathWorld (not all of them appear in Abramowitz).
    """
    mp.dps = 100
    N = 5
    for i in range(N):
        qstring = str(random.random())
        q = mpf(qstring)
        zstring = str(100*random.random())
        z = mpf(zstring)
        # MathWorld
        # sn(z, q)**2 + cn(z, q)**2 == 1
        term1 = jsn(z, q)**2
        term2 = jcn(z, q)**2
        equality = one - term1 - term2
        assert(equality.ae(0))
    # MathWorld
    # k**2 * sn(z, m)**2 + dn(z, m)**2 == 1
    for i in range(N):
        mstring = str(random.random())
        # BUG FIX: the original read the stale qstring left over from the
        # previous loop instead of the freshly generated mstring.
        m = mpf(mstring)
        k = m.sqrt()
        zstring = str(10*random.random())
        z = mpf(zstring)
        term1 = k**2 * jsn(z, m)**2
        term2 = jdn(z, m)**2
        equality = one - term1 - term2
        assert(equality.ae(0))
    for i in range(N):
        mstring = str(random.random())
        m = mpf(mstring)
        k = m.sqrt()
        zstring = str(random.random())
        z = mpf(zstring)
        # MathWorld
        # k**2 * cn(z, m)**2 + (1 - k**2) = dn(z, m)**2
        term1 = k**2 * jcn(z, m)**2
        term2 = 1 - k**2
        term3 = jdn(z, m)**2
        equality = term3 - term1 - term2
        assert(equality.ae(0))
        K = ellipk(k**2)
        # Abramowitz Table 16.5
        # sn(K, m) = 1; K is K(k), the first complete elliptic integral
        r = jsn(K, m)
        assert(r.ae(one))
        # Abramowitz Table 16.5
        # cn(K, m) = 0; K is K(k), the first complete elliptic integral
        equality = jcn(K, m)
        assert(equality.ae(0))
    # Abramowitz Table 16.6.3
    # dn(z, 0) = 1, m == 0
    z = m
    value = jdn(z, zero)
    assert(value.ae(one))
    mp.dps = 15
def test_sn_cn_dn_complex():
    """Check sn, cn and dn at a complex argument and complex parameter
    against Mathematica reference values."""
    mp.dps = 30
    # N[JacobiSN[1/4 + I/8, 1/3 + I/7], 35] in Mathematica
    res = mpf('0.2495674401066275492326652143537') + \
          mpf('0.12017344422863833381301051702823') * j
    u = mpf(1)/4 + j/8
    m = mpf(1)/3 + j/7
    r = jsn(u, m)
    assert(mpc_ae(r, res))
    # N[JacobiCN[1/4 + I/8, 1/3 + I/7], 35]
    res = mpf('0.9762691700944007312693721148331') - \
          mpf('0.0307203994181623243583169154824')*j
    r = jcn(u, m)
    assert(mpc_ae(r, res))
    # N[JacobiDN[1/4 + I/8, 1/3 + I/7], 35]
    res = mpf('0.99639490163039577560547478589753039') - \
          mpf('0.01346296520008176393432491077244994')*j
    r = jdn(u, m)
    assert(mpc_ae(r, res))
    mp.dps = 15
def test_elliptic_integrals():
    """Check Carlson's symmetric elliptic integrals R_D, R_F, R_C, R_J
    and R_G against the test cases in Carlson's paper, at several
    working precisions."""
    # Test cases from Carlson's paper
    mp.dps = 15
    assert elliprd(0,2,1).ae(1.7972103521033883112)
    assert elliprd(2,3,4).ae(0.16510527294261053349)
    assert elliprd(j,-j,2).ae(0.65933854154219768919)
    assert elliprd(0,j,-j).ae(1.2708196271909686299 + 2.7811120159520578777j)
    assert elliprd(0,j-1,j).ae(-1.8577235439239060056 - 0.96193450888838559989j)
    assert elliprd(-2-j,-j,-1+j).ae(1.8249027393703805305 - 1.2218475784827035855j)
    # High-precision reference values; repeat at increasing dps to check
    # precision independence.
    for n in [5, 15, 30, 60, 100]:
        mp.dps = n
        assert elliprf(1,2,0).ae('1.3110287771460599052324197949455597068413774757158115814084108519003952935352071251151477664807145467230678763')
        assert elliprf(0.5,1,0).ae('1.854074677301371918433850347195260046217598823521766905585928045056021776838119978357271861650371897277771871')
        assert elliprf(j,-j,0).ae('1.854074677301371918433850347195260046217598823521766905585928045056021776838119978357271861650371897277771871')
        assert elliprf(j-1,j,0).ae(mpc('0.79612586584233913293056938229563057846592264089185680214929401744498956943287031832657642790719940442165621412',
            '-1.2138566698364959864300942567386038975419875860741507618279563735753073152507112254567291141460317931258599889'))
        assert elliprf(2,3,4).ae('0.58408284167715170669284916892566789240351359699303216166309375305508295130412919665541330837704050454472379308')
        assert elliprf(j,-j,2).ae('1.0441445654064360931078658361850779139591660747973017593275012615517220315993723776182276555339288363064476126')
        assert elliprf(j-1,j,1-j).ae(mpc('0.93912050218619371196624617169781141161485651998254431830645241993282941057500174238125105410055253623847335313',
            '-0.53296252018635269264859303449447908970360344322834582313172115220559316331271520508208025270300138589669326136'))
        assert elliprc(0,0.25).ae(+pi)
        assert elliprc(2.25,2).ae(+ln2)
        assert elliprc(0,j).ae(mpc('1.1107207345395915617539702475151734246536554223439225557713489017391086982748684776438317336911913093408525532',
            '-1.1107207345395915617539702475151734246536554223439225557713489017391086982748684776438317336911913093408525532'))
        assert elliprc(-j,j).ae(mpc('1.2260849569072198222319655083097718755633725139745941606203839524036426936825652935738621522906572884239069297',
            '-0.34471136988767679699935618332997956653521218571295874986708834375026550946053920574015526038040124556716711353'))
        assert elliprc(0.25,-2).ae(ln2/3)
        assert elliprc(j,-1).ae(mpc('0.77778596920447389875196055840799837589537035343923012237628610795937014001905822029050288316217145443865649819',
            '0.1983248499342877364755170948292130095921681309577950696116251029742793455964385947473103628983664877025779304'))
        assert elliprj(0,1,2,3).ae('0.77688623778582332014190282640545501102298064276022952731669118325952563819813258230708177398475643634103990878')
        assert elliprj(2,3,4,5).ae('0.14297579667156753833233879421985774801466647854232626336218889885463800128817976132826443904216546421431528308')
        assert elliprj(2,3,4,-1+j).ae(mpc('0.13613945827770535203521374457913768360237593025944342652613569368333226052158214183059386307242563164036672709',
            '-0.38207561624427164249600936454845112611060375760094156571007648297226090050927156176977091273224510621553615189'))
        assert elliprj(j,-j,0,2).ae('1.6490011662710884518243257224860232300246792717163891216346170272567376981346412066066050103935109581019055806')
        assert elliprj(-1+j,-1-j,1,2).ae('0.94148358841220238083044612133767270187474673547917988681610772381758628963408843935027667916713866133196845063')
        assert elliprj(j,-j,0,1-j).ae(mpc('1.8260115229009316249372594065790946657011067182850435297162034335356430755397401849070610280860044610878657501',
            '1.2290661908643471500163617732957042849283739403009556715926326841959667290840290081010472716420690899886276961'))
        assert elliprj(-1+j,-1-j,1,-3+j).ae(mpc('-0.61127970812028172123588152373622636829986597243716610650831553882054127570542477508023027578037045504958619422',
            '-1.0684038390006807880182112972232562745485871763154040245065581157751693730095703406209466903752930797510491155'))
        assert elliprj(-1+j,-2-j,-j,-1+j).ae(mpc('1.8249027393703805304622013339009022294368078659619988943515764258335975852685224202567854526307030593012768954',
            '-1.2218475784827035854568450371590419833166777535029296025352291308244564398645467465067845461070602841312456831'))
        assert elliprg(0,16,16).ae(+pi)
        assert elliprg(2,3,4).ae('1.7255030280692277601061148835701141842692457170470456590515892070736643637303053506944907685301315299153040991')
        assert elliprg(0,j,-j).ae('0.42360654239698954330324956174109581824072295516347109253028968632986700241706737986160014699730561497106114281')
        assert elliprg(j-1,j,0).ae(mpc('0.44660591677018372656731970402124510811555212083508861036067729944477855594654762496407405328607219895053798354',
            '0.70768352357515390073102719507612395221369717586839400605901402910893345301718731499237159587077682267374159282'))
        assert elliprg(-j,j-1,j).ae(mpc('0.36023392184473309033675652092928695596803358846377334894215349632203382573844427952830064383286995172598964266',
            '0.40348623401722113740956336997761033878615232917480045914551915169013722542827052849476969199578321834819903921'))
        assert elliprg(0, mpf('0.0796'), 4).ae('1.0284758090288040009838871385180217366569777284430590125081211090574701293154645750017813190805144572673802094')
    mp.dps = 15
def test_issue_3297():
    """qfrom must propagate nan rather than raising (issue 3297)."""
    result = qfrom(m=nan)
    assert isnan(result)
| bsd-3-clause |
Alberto-Beralix/Beralix | i386-squashfs-root/usr/lib/python2.7/encodings/cp857.py | 593 | 34164 | """ Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP857.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Stateless cp857 codec based on the module-level charmap tables."""
    def encode(self,input,errors='strict'):
        """Encode a unicode string to cp857 bytes via ``encoding_map``."""
        return codecs.charmap_encode(input,errors,encoding_map)
    def decode(self,input,errors='strict'):
        """Decode cp857 bytes to a unicode string via ``decoding_table``."""
        return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    def encode(self, input, final=False):
        # Charmap encoding is stateless, so *final* needs no special handling.
        return codecs.charmap_encode(input,self.errors,encoding_map)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    def decode(self, input, final=False):
        # Charmap decoding is stateless, so *final* needs no special handling.
        return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
    pass  # all behavior inherited from Codec and codecs.StreamWriter
class StreamReader(Codec,codecs.StreamReader):
    pass  # all behavior inherited from Codec and codecs.StreamReader
### encodings module API
def getregentry():
    """Return the CodecInfo entry used to register the cp857 codec."""
    # Codec is stateless, so a single instance can supply both callables.
    codec = Codec()
    return codecs.CodecInfo(
        name='cp857',
        encode=codec.encode,
        decode=codec.decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Map
# Start from the identity mapping for all 256 byte values, then override
# the non-ASCII range (0x80-0xff) with the cp857-specific code points.
# Note: 0x00ec is absent below because it maps to itself (U+00EC).
decoding_map = codecs.make_identity_dict(range(256))
decoding_map.update({
    0x0080: 0x00c7,     #  LATIN CAPITAL LETTER C WITH CEDILLA
    0x0081: 0x00fc,     #  LATIN SMALL LETTER U WITH DIAERESIS
    0x0082: 0x00e9,     #  LATIN SMALL LETTER E WITH ACUTE
    0x0083: 0x00e2,     #  LATIN SMALL LETTER A WITH CIRCUMFLEX
    0x0084: 0x00e4,     #  LATIN SMALL LETTER A WITH DIAERESIS
    0x0085: 0x00e0,     #  LATIN SMALL LETTER A WITH GRAVE
    0x0086: 0x00e5,     #  LATIN SMALL LETTER A WITH RING ABOVE
    0x0087: 0x00e7,     #  LATIN SMALL LETTER C WITH CEDILLA
    0x0088: 0x00ea,     #  LATIN SMALL LETTER E WITH CIRCUMFLEX
    0x0089: 0x00eb,     #  LATIN SMALL LETTER E WITH DIAERESIS
    0x008a: 0x00e8,     #  LATIN SMALL LETTER E WITH GRAVE
    0x008b: 0x00ef,     #  LATIN SMALL LETTER I WITH DIAERESIS
    0x008c: 0x00ee,     #  LATIN SMALL LETTER I WITH CIRCUMFLEX
    0x008d: 0x0131,     #  LATIN SMALL LETTER DOTLESS I
    0x008e: 0x00c4,     #  LATIN CAPITAL LETTER A WITH DIAERESIS
    0x008f: 0x00c5,     #  LATIN CAPITAL LETTER A WITH RING ABOVE
    0x0090: 0x00c9,     #  LATIN CAPITAL LETTER E WITH ACUTE
    0x0091: 0x00e6,     #  LATIN SMALL LIGATURE AE
    0x0092: 0x00c6,     #  LATIN CAPITAL LIGATURE AE
    0x0093: 0x00f4,     #  LATIN SMALL LETTER O WITH CIRCUMFLEX
    0x0094: 0x00f6,     #  LATIN SMALL LETTER O WITH DIAERESIS
    0x0095: 0x00f2,     #  LATIN SMALL LETTER O WITH GRAVE
    0x0096: 0x00fb,     #  LATIN SMALL LETTER U WITH CIRCUMFLEX
    0x0097: 0x00f9,     #  LATIN SMALL LETTER U WITH GRAVE
    0x0098: 0x0130,     #  LATIN CAPITAL LETTER I WITH DOT ABOVE
    0x0099: 0x00d6,     #  LATIN CAPITAL LETTER O WITH DIAERESIS
    0x009a: 0x00dc,     #  LATIN CAPITAL LETTER U WITH DIAERESIS
    0x009b: 0x00f8,     #  LATIN SMALL LETTER O WITH STROKE
    0x009c: 0x00a3,     #  POUND SIGN
    0x009d: 0x00d8,     #  LATIN CAPITAL LETTER O WITH STROKE
    0x009e: 0x015e,     #  LATIN CAPITAL LETTER S WITH CEDILLA
    0x009f: 0x015f,     #  LATIN SMALL LETTER S WITH CEDILLA
    0x00a0: 0x00e1,     #  LATIN SMALL LETTER A WITH ACUTE
    0x00a1: 0x00ed,     #  LATIN SMALL LETTER I WITH ACUTE
    0x00a2: 0x00f3,     #  LATIN SMALL LETTER O WITH ACUTE
    0x00a3: 0x00fa,     #  LATIN SMALL LETTER U WITH ACUTE
    0x00a4: 0x00f1,     #  LATIN SMALL LETTER N WITH TILDE
    0x00a5: 0x00d1,     #  LATIN CAPITAL LETTER N WITH TILDE
    0x00a6: 0x011e,     #  LATIN CAPITAL LETTER G WITH BREVE
    0x00a7: 0x011f,     #  LATIN SMALL LETTER G WITH BREVE
    0x00a8: 0x00bf,     #  INVERTED QUESTION MARK
    0x00a9: 0x00ae,     #  REGISTERED SIGN
    0x00aa: 0x00ac,     #  NOT SIGN
    0x00ab: 0x00bd,     #  VULGAR FRACTION ONE HALF
    0x00ac: 0x00bc,     #  VULGAR FRACTION ONE QUARTER
    0x00ad: 0x00a1,     #  INVERTED EXCLAMATION MARK
    0x00ae: 0x00ab,     #  LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
    0x00af: 0x00bb,     #  RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
    0x00b0: 0x2591,     #  LIGHT SHADE
    0x00b1: 0x2592,     #  MEDIUM SHADE
    0x00b2: 0x2593,     #  DARK SHADE
    0x00b3: 0x2502,     #  BOX DRAWINGS LIGHT VERTICAL
    0x00b4: 0x2524,     #  BOX DRAWINGS LIGHT VERTICAL AND LEFT
    0x00b5: 0x00c1,     #  LATIN CAPITAL LETTER A WITH ACUTE
    0x00b6: 0x00c2,     #  LATIN CAPITAL LETTER A WITH CIRCUMFLEX
    0x00b7: 0x00c0,     #  LATIN CAPITAL LETTER A WITH GRAVE
    0x00b8: 0x00a9,     #  COPYRIGHT SIGN
    0x00b9: 0x2563,     #  BOX DRAWINGS DOUBLE VERTICAL AND LEFT
    0x00ba: 0x2551,     #  BOX DRAWINGS DOUBLE VERTICAL
    0x00bb: 0x2557,     #  BOX DRAWINGS DOUBLE DOWN AND LEFT
    0x00bc: 0x255d,     #  BOX DRAWINGS DOUBLE UP AND LEFT
    0x00bd: 0x00a2,     #  CENT SIGN
    0x00be: 0x00a5,     #  YEN SIGN
    0x00bf: 0x2510,     #  BOX DRAWINGS LIGHT DOWN AND LEFT
    0x00c0: 0x2514,     #  BOX DRAWINGS LIGHT UP AND RIGHT
    0x00c1: 0x2534,     #  BOX DRAWINGS LIGHT UP AND HORIZONTAL
    0x00c2: 0x252c,     #  BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
    0x00c3: 0x251c,     #  BOX DRAWINGS LIGHT VERTICAL AND RIGHT
    0x00c4: 0x2500,     #  BOX DRAWINGS LIGHT HORIZONTAL
    0x00c5: 0x253c,     #  BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
    0x00c6: 0x00e3,     #  LATIN SMALL LETTER A WITH TILDE
    0x00c7: 0x00c3,     #  LATIN CAPITAL LETTER A WITH TILDE
    0x00c8: 0x255a,     #  BOX DRAWINGS DOUBLE UP AND RIGHT
    0x00c9: 0x2554,     #  BOX DRAWINGS DOUBLE DOWN AND RIGHT
    0x00ca: 0x2569,     #  BOX DRAWINGS DOUBLE UP AND HORIZONTAL
    0x00cb: 0x2566,     #  BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
    0x00cc: 0x2560,     #  BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
    0x00cd: 0x2550,     #  BOX DRAWINGS DOUBLE HORIZONTAL
    0x00ce: 0x256c,     #  BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
    0x00cf: 0x00a4,     #  CURRENCY SIGN
    0x00d0: 0x00ba,     #  MASCULINE ORDINAL INDICATOR
    0x00d1: 0x00aa,     #  FEMININE ORDINAL INDICATOR
    0x00d2: 0x00ca,     #  LATIN CAPITAL LETTER E WITH CIRCUMFLEX
    0x00d3: 0x00cb,     #  LATIN CAPITAL LETTER E WITH DIAERESIS
    0x00d4: 0x00c8,     #  LATIN CAPITAL LETTER E WITH GRAVE
    0x00d5: None,       #  UNDEFINED
    0x00d6: 0x00cd,     #  LATIN CAPITAL LETTER I WITH ACUTE
    0x00d7: 0x00ce,     #  LATIN CAPITAL LETTER I WITH CIRCUMFLEX
    0x00d8: 0x00cf,     #  LATIN CAPITAL LETTER I WITH DIAERESIS
    0x00d9: 0x2518,     #  BOX DRAWINGS LIGHT UP AND LEFT
    0x00da: 0x250c,     #  BOX DRAWINGS LIGHT DOWN AND RIGHT
    0x00db: 0x2588,     #  FULL BLOCK
    0x00dc: 0x2584,     #  LOWER HALF BLOCK
    0x00dd: 0x00a6,     #  BROKEN BAR
    0x00de: 0x00cc,     #  LATIN CAPITAL LETTER I WITH GRAVE
    0x00df: 0x2580,     #  UPPER HALF BLOCK
    0x00e0: 0x00d3,     #  LATIN CAPITAL LETTER O WITH ACUTE
    0x00e1: 0x00df,     #  LATIN SMALL LETTER SHARP S
    0x00e2: 0x00d4,     #  LATIN CAPITAL LETTER O WITH CIRCUMFLEX
    0x00e3: 0x00d2,     #  LATIN CAPITAL LETTER O WITH GRAVE
    0x00e4: 0x00f5,     #  LATIN SMALL LETTER O WITH TILDE
    0x00e5: 0x00d5,     #  LATIN CAPITAL LETTER O WITH TILDE
    0x00e6: 0x00b5,     #  MICRO SIGN
    0x00e7: None,       #  UNDEFINED
    0x00e8: 0x00d7,     #  MULTIPLICATION SIGN
    0x00e9: 0x00da,     #  LATIN CAPITAL LETTER U WITH ACUTE
    0x00ea: 0x00db,     #  LATIN CAPITAL LETTER U WITH CIRCUMFLEX
    0x00eb: 0x00d9,     #  LATIN CAPITAL LETTER U WITH GRAVE
    0x00ed: 0x00ff,     #  LATIN SMALL LETTER Y WITH DIAERESIS
    0x00ee: 0x00af,     #  MACRON
    0x00ef: 0x00b4,     #  ACUTE ACCENT
    0x00f0: 0x00ad,     #  SOFT HYPHEN
    0x00f1: 0x00b1,     #  PLUS-MINUS SIGN
    0x00f2: None,       #  UNDEFINED
    0x00f3: 0x00be,     #  VULGAR FRACTION THREE QUARTERS
    0x00f4: 0x00b6,     #  PILCROW SIGN
    0x00f5: 0x00a7,     #  SECTION SIGN
    0x00f6: 0x00f7,     #  DIVISION SIGN
    0x00f7: 0x00b8,     #  CEDILLA
    0x00f8: 0x00b0,     #  DEGREE SIGN
    0x00f9: 0x00a8,     #  DIAERESIS
    0x00fa: 0x00b7,     #  MIDDLE DOT
    0x00fb: 0x00b9,     #  SUPERSCRIPT ONE
    0x00fc: 0x00b3,     #  SUPERSCRIPT THREE
    0x00fd: 0x00b2,     #  SUPERSCRIPT TWO
    0x00fe: 0x25a0,     #  BLACK SQUARE
    0x00ff: 0x00a0,     #  NO-BREAK SPACE
})
### Decoding Table
decoding_table = (
u'\x00' # 0x0000 -> NULL
u'\x01' # 0x0001 -> START OF HEADING
u'\x02' # 0x0002 -> START OF TEXT
u'\x03' # 0x0003 -> END OF TEXT
u'\x04' # 0x0004 -> END OF TRANSMISSION
u'\x05' # 0x0005 -> ENQUIRY
u'\x06' # 0x0006 -> ACKNOWLEDGE
u'\x07' # 0x0007 -> BELL
u'\x08' # 0x0008 -> BACKSPACE
u'\t' # 0x0009 -> HORIZONTAL TABULATION
u'\n' # 0x000a -> LINE FEED
u'\x0b' # 0x000b -> VERTICAL TABULATION
u'\x0c' # 0x000c -> FORM FEED
u'\r' # 0x000d -> CARRIAGE RETURN
u'\x0e' # 0x000e -> SHIFT OUT
u'\x0f' # 0x000f -> SHIFT IN
u'\x10' # 0x0010 -> DATA LINK ESCAPE
u'\x11' # 0x0011 -> DEVICE CONTROL ONE
u'\x12' # 0x0012 -> DEVICE CONTROL TWO
u'\x13' # 0x0013 -> DEVICE CONTROL THREE
u'\x14' # 0x0014 -> DEVICE CONTROL FOUR
u'\x15' # 0x0015 -> NEGATIVE ACKNOWLEDGE
u'\x16' # 0x0016 -> SYNCHRONOUS IDLE
u'\x17' # 0x0017 -> END OF TRANSMISSION BLOCK
u'\x18' # 0x0018 -> CANCEL
u'\x19' # 0x0019 -> END OF MEDIUM
u'\x1a' # 0x001a -> SUBSTITUTE
u'\x1b' # 0x001b -> ESCAPE
u'\x1c' # 0x001c -> FILE SEPARATOR
u'\x1d' # 0x001d -> GROUP SEPARATOR
u'\x1e' # 0x001e -> RECORD SEPARATOR
u'\x1f' # 0x001f -> UNIT SEPARATOR
u' ' # 0x0020 -> SPACE
u'!' # 0x0021 -> EXCLAMATION MARK
u'"' # 0x0022 -> QUOTATION MARK
u'#' # 0x0023 -> NUMBER SIGN
u'$' # 0x0024 -> DOLLAR SIGN
u'%' # 0x0025 -> PERCENT SIGN
u'&' # 0x0026 -> AMPERSAND
u"'" # 0x0027 -> APOSTROPHE
u'(' # 0x0028 -> LEFT PARENTHESIS
u')' # 0x0029 -> RIGHT PARENTHESIS
u'*' # 0x002a -> ASTERISK
u'+' # 0x002b -> PLUS SIGN
u',' # 0x002c -> COMMA
u'-' # 0x002d -> HYPHEN-MINUS
u'.' # 0x002e -> FULL STOP
u'/' # 0x002f -> SOLIDUS
u'0' # 0x0030 -> DIGIT ZERO
u'1' # 0x0031 -> DIGIT ONE
u'2' # 0x0032 -> DIGIT TWO
u'3' # 0x0033 -> DIGIT THREE
u'4' # 0x0034 -> DIGIT FOUR
u'5' # 0x0035 -> DIGIT FIVE
u'6' # 0x0036 -> DIGIT SIX
u'7' # 0x0037 -> DIGIT SEVEN
u'8' # 0x0038 -> DIGIT EIGHT
u'9' # 0x0039 -> DIGIT NINE
u':' # 0x003a -> COLON
u';' # 0x003b -> SEMICOLON
u'<' # 0x003c -> LESS-THAN SIGN
u'=' # 0x003d -> EQUALS SIGN
u'>' # 0x003e -> GREATER-THAN SIGN
u'?' # 0x003f -> QUESTION MARK
u'@' # 0x0040 -> COMMERCIAL AT
u'A' # 0x0041 -> LATIN CAPITAL LETTER A
u'B' # 0x0042 -> LATIN CAPITAL LETTER B
u'C' # 0x0043 -> LATIN CAPITAL LETTER C
u'D' # 0x0044 -> LATIN CAPITAL LETTER D
u'E' # 0x0045 -> LATIN CAPITAL LETTER E
u'F' # 0x0046 -> LATIN CAPITAL LETTER F
u'G' # 0x0047 -> LATIN CAPITAL LETTER G
u'H' # 0x0048 -> LATIN CAPITAL LETTER H
u'I' # 0x0049 -> LATIN CAPITAL LETTER I
u'J' # 0x004a -> LATIN CAPITAL LETTER J
u'K' # 0x004b -> LATIN CAPITAL LETTER K
u'L' # 0x004c -> LATIN CAPITAL LETTER L
u'M' # 0x004d -> LATIN CAPITAL LETTER M
u'N' # 0x004e -> LATIN CAPITAL LETTER N
u'O' # 0x004f -> LATIN CAPITAL LETTER O
u'P' # 0x0050 -> LATIN CAPITAL LETTER P
u'Q' # 0x0051 -> LATIN CAPITAL LETTER Q
u'R' # 0x0052 -> LATIN CAPITAL LETTER R
u'S' # 0x0053 -> LATIN CAPITAL LETTER S
u'T' # 0x0054 -> LATIN CAPITAL LETTER T
u'U' # 0x0055 -> LATIN CAPITAL LETTER U
u'V' # 0x0056 -> LATIN CAPITAL LETTER V
u'W' # 0x0057 -> LATIN CAPITAL LETTER W
u'X' # 0x0058 -> LATIN CAPITAL LETTER X
u'Y' # 0x0059 -> LATIN CAPITAL LETTER Y
u'Z' # 0x005a -> LATIN CAPITAL LETTER Z
u'[' # 0x005b -> LEFT SQUARE BRACKET
u'\\' # 0x005c -> REVERSE SOLIDUS
u']' # 0x005d -> RIGHT SQUARE BRACKET
u'^' # 0x005e -> CIRCUMFLEX ACCENT
u'_' # 0x005f -> LOW LINE
u'`' # 0x0060 -> GRAVE ACCENT
u'a' # 0x0061 -> LATIN SMALL LETTER A
u'b' # 0x0062 -> LATIN SMALL LETTER B
u'c' # 0x0063 -> LATIN SMALL LETTER C
u'd' # 0x0064 -> LATIN SMALL LETTER D
u'e' # 0x0065 -> LATIN SMALL LETTER E
u'f' # 0x0066 -> LATIN SMALL LETTER F
u'g' # 0x0067 -> LATIN SMALL LETTER G
u'h' # 0x0068 -> LATIN SMALL LETTER H
u'i' # 0x0069 -> LATIN SMALL LETTER I
u'j' # 0x006a -> LATIN SMALL LETTER J
u'k' # 0x006b -> LATIN SMALL LETTER K
u'l' # 0x006c -> LATIN SMALL LETTER L
u'm' # 0x006d -> LATIN SMALL LETTER M
u'n' # 0x006e -> LATIN SMALL LETTER N
u'o' # 0x006f -> LATIN SMALL LETTER O
u'p' # 0x0070 -> LATIN SMALL LETTER P
u'q' # 0x0071 -> LATIN SMALL LETTER Q
u'r' # 0x0072 -> LATIN SMALL LETTER R
u's' # 0x0073 -> LATIN SMALL LETTER S
u't' # 0x0074 -> LATIN SMALL LETTER T
u'u' # 0x0075 -> LATIN SMALL LETTER U
u'v' # 0x0076 -> LATIN SMALL LETTER V
u'w' # 0x0077 -> LATIN SMALL LETTER W
u'x' # 0x0078 -> LATIN SMALL LETTER X
u'y' # 0x0079 -> LATIN SMALL LETTER Y
u'z' # 0x007a -> LATIN SMALL LETTER Z
u'{' # 0x007b -> LEFT CURLY BRACKET
u'|' # 0x007c -> VERTICAL LINE
u'}' # 0x007d -> RIGHT CURLY BRACKET
u'~' # 0x007e -> TILDE
u'\x7f' # 0x007f -> DELETE
u'\xc7' # 0x0080 -> LATIN CAPITAL LETTER C WITH CEDILLA
u'\xfc' # 0x0081 -> LATIN SMALL LETTER U WITH DIAERESIS
u'\xe9' # 0x0082 -> LATIN SMALL LETTER E WITH ACUTE
u'\xe2' # 0x0083 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
u'\xe4' # 0x0084 -> LATIN SMALL LETTER A WITH DIAERESIS
u'\xe0' # 0x0085 -> LATIN SMALL LETTER A WITH GRAVE
u'\xe5' # 0x0086 -> LATIN SMALL LETTER A WITH RING ABOVE
u'\xe7' # 0x0087 -> LATIN SMALL LETTER C WITH CEDILLA
u'\xea' # 0x0088 -> LATIN SMALL LETTER E WITH CIRCUMFLEX
u'\xeb' # 0x0089 -> LATIN SMALL LETTER E WITH DIAERESIS
u'\xe8' # 0x008a -> LATIN SMALL LETTER E WITH GRAVE
u'\xef' # 0x008b -> LATIN SMALL LETTER I WITH DIAERESIS
u'\xee' # 0x008c -> LATIN SMALL LETTER I WITH CIRCUMFLEX
u'\u0131' # 0x008d -> LATIN SMALL LETTER DOTLESS I
u'\xc4' # 0x008e -> LATIN CAPITAL LETTER A WITH DIAERESIS
u'\xc5' # 0x008f -> LATIN CAPITAL LETTER A WITH RING ABOVE
u'\xc9' # 0x0090 -> LATIN CAPITAL LETTER E WITH ACUTE
u'\xe6' # 0x0091 -> LATIN SMALL LIGATURE AE
u'\xc6' # 0x0092 -> LATIN CAPITAL LIGATURE AE
u'\xf4' # 0x0093 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
u'\xf6' # 0x0094 -> LATIN SMALL LETTER O WITH DIAERESIS
u'\xf2' # 0x0095 -> LATIN SMALL LETTER O WITH GRAVE
u'\xfb' # 0x0096 -> LATIN SMALL LETTER U WITH CIRCUMFLEX
u'\xf9' # 0x0097 -> LATIN SMALL LETTER U WITH GRAVE
u'\u0130' # 0x0098 -> LATIN CAPITAL LETTER I WITH DOT ABOVE
u'\xd6' # 0x0099 -> LATIN CAPITAL LETTER O WITH DIAERESIS
u'\xdc' # 0x009a -> LATIN CAPITAL LETTER U WITH DIAERESIS
u'\xf8' # 0x009b -> LATIN SMALL LETTER O WITH STROKE
u'\xa3' # 0x009c -> POUND SIGN
u'\xd8' # 0x009d -> LATIN CAPITAL LETTER O WITH STROKE
u'\u015e' # 0x009e -> LATIN CAPITAL LETTER S WITH CEDILLA
u'\u015f' # 0x009f -> LATIN SMALL LETTER S WITH CEDILLA
u'\xe1' # 0x00a0 -> LATIN SMALL LETTER A WITH ACUTE
u'\xed' # 0x00a1 -> LATIN SMALL LETTER I WITH ACUTE
u'\xf3' # 0x00a2 -> LATIN SMALL LETTER O WITH ACUTE
u'\xfa' # 0x00a3 -> LATIN SMALL LETTER U WITH ACUTE
u'\xf1' # 0x00a4 -> LATIN SMALL LETTER N WITH TILDE
u'\xd1' # 0x00a5 -> LATIN CAPITAL LETTER N WITH TILDE
u'\u011e' # 0x00a6 -> LATIN CAPITAL LETTER G WITH BREVE
u'\u011f' # 0x00a7 -> LATIN SMALL LETTER G WITH BREVE
u'\xbf' # 0x00a8 -> INVERTED QUESTION MARK
u'\xae' # 0x00a9 -> REGISTERED SIGN
u'\xac' # 0x00aa -> NOT SIGN
u'\xbd' # 0x00ab -> VULGAR FRACTION ONE HALF
u'\xbc' # 0x00ac -> VULGAR FRACTION ONE QUARTER
u'\xa1' # 0x00ad -> INVERTED EXCLAMATION MARK
u'\xab' # 0x00ae -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xbb' # 0x00af -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\u2591' # 0x00b0 -> LIGHT SHADE
u'\u2592' # 0x00b1 -> MEDIUM SHADE
u'\u2593' # 0x00b2 -> DARK SHADE
u'\u2502' # 0x00b3 -> BOX DRAWINGS LIGHT VERTICAL
u'\u2524' # 0x00b4 -> BOX DRAWINGS LIGHT VERTICAL AND LEFT
u'\xc1' # 0x00b5 -> LATIN CAPITAL LETTER A WITH ACUTE
u'\xc2' # 0x00b6 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
u'\xc0' # 0x00b7 -> LATIN CAPITAL LETTER A WITH GRAVE
u'\xa9' # 0x00b8 -> COPYRIGHT SIGN
u'\u2563' # 0x00b9 -> BOX DRAWINGS DOUBLE VERTICAL AND LEFT
u'\u2551' # 0x00ba -> BOX DRAWINGS DOUBLE VERTICAL
u'\u2557' # 0x00bb -> BOX DRAWINGS DOUBLE DOWN AND LEFT
u'\u255d' # 0x00bc -> BOX DRAWINGS DOUBLE UP AND LEFT
u'\xa2' # 0x00bd -> CENT SIGN
u'\xa5' # 0x00be -> YEN SIGN
u'\u2510' # 0x00bf -> BOX DRAWINGS LIGHT DOWN AND LEFT
u'\u2514' # 0x00c0 -> BOX DRAWINGS LIGHT UP AND RIGHT
u'\u2534' # 0x00c1 -> BOX DRAWINGS LIGHT UP AND HORIZONTAL
u'\u252c' # 0x00c2 -> BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
u'\u251c' # 0x00c3 -> BOX DRAWINGS LIGHT VERTICAL AND RIGHT
u'\u2500' # 0x00c4 -> BOX DRAWINGS LIGHT HORIZONTAL
u'\u253c' # 0x00c5 -> BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
u'\xe3' # 0x00c6 -> LATIN SMALL LETTER A WITH TILDE
u'\xc3' # 0x00c7 -> LATIN CAPITAL LETTER A WITH TILDE
u'\u255a' # 0x00c8 -> BOX DRAWINGS DOUBLE UP AND RIGHT
u'\u2554' # 0x00c9 -> BOX DRAWINGS DOUBLE DOWN AND RIGHT
u'\u2569' # 0x00ca -> BOX DRAWINGS DOUBLE UP AND HORIZONTAL
u'\u2566' # 0x00cb -> BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
u'\u2560' # 0x00cc -> BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
u'\u2550' # 0x00cd -> BOX DRAWINGS DOUBLE HORIZONTAL
u'\u256c' # 0x00ce -> BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
u'\xa4' # 0x00cf -> CURRENCY SIGN
u'\xba' # 0x00d0 -> MASCULINE ORDINAL INDICATOR
u'\xaa' # 0x00d1 -> FEMININE ORDINAL INDICATOR
u'\xca' # 0x00d2 -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX
u'\xcb' # 0x00d3 -> LATIN CAPITAL LETTER E WITH DIAERESIS
u'\xc8' # 0x00d4 -> LATIN CAPITAL LETTER E WITH GRAVE
u'\ufffe' # 0x00d5 -> UNDEFINED
u'\xcd' # 0x00d6 -> LATIN CAPITAL LETTER I WITH ACUTE
u'\xce' # 0x00d7 -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
u'\xcf' # 0x00d8 -> LATIN CAPITAL LETTER I WITH DIAERESIS
u'\u2518' # 0x00d9 -> BOX DRAWINGS LIGHT UP AND LEFT
u'\u250c' # 0x00da -> BOX DRAWINGS LIGHT DOWN AND RIGHT
u'\u2588' # 0x00db -> FULL BLOCK
u'\u2584' # 0x00dc -> LOWER HALF BLOCK
u'\xa6' # 0x00dd -> BROKEN BAR
u'\xcc' # 0x00de -> LATIN CAPITAL LETTER I WITH GRAVE
u'\u2580' # 0x00df -> UPPER HALF BLOCK
u'\xd3' # 0x00e0 -> LATIN CAPITAL LETTER O WITH ACUTE
u'\xdf' # 0x00e1 -> LATIN SMALL LETTER SHARP S
u'\xd4' # 0x00e2 -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
u'\xd2' # 0x00e3 -> LATIN CAPITAL LETTER O WITH GRAVE
u'\xf5' # 0x00e4 -> LATIN SMALL LETTER O WITH TILDE
u'\xd5' # 0x00e5 -> LATIN CAPITAL LETTER O WITH TILDE
u'\xb5' # 0x00e6 -> MICRO SIGN
u'\ufffe' # 0x00e7 -> UNDEFINED
u'\xd7' # 0x00e8 -> MULTIPLICATION SIGN
u'\xda' # 0x00e9 -> LATIN CAPITAL LETTER U WITH ACUTE
u'\xdb' # 0x00ea -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
u'\xd9' # 0x00eb -> LATIN CAPITAL LETTER U WITH GRAVE
u'\xec' # 0x00ec -> LATIN SMALL LETTER I WITH GRAVE
u'\xff' # 0x00ed -> LATIN SMALL LETTER Y WITH DIAERESIS
u'\xaf' # 0x00ee -> MACRON
u'\xb4' # 0x00ef -> ACUTE ACCENT
u'\xad' # 0x00f0 -> SOFT HYPHEN
u'\xb1' # 0x00f1 -> PLUS-MINUS SIGN
u'\ufffe' # 0x00f2 -> UNDEFINED
u'\xbe' # 0x00f3 -> VULGAR FRACTION THREE QUARTERS
u'\xb6' # 0x00f4 -> PILCROW SIGN
u'\xa7' # 0x00f5 -> SECTION SIGN
u'\xf7' # 0x00f6 -> DIVISION SIGN
u'\xb8' # 0x00f7 -> CEDILLA
u'\xb0' # 0x00f8 -> DEGREE SIGN
u'\xa8' # 0x00f9 -> DIAERESIS
u'\xb7' # 0x00fa -> MIDDLE DOT
u'\xb9' # 0x00fb -> SUPERSCRIPT ONE
u'\xb3' # 0x00fc -> SUPERSCRIPT THREE
u'\xb2' # 0x00fd -> SUPERSCRIPT TWO
u'\u25a0' # 0x00fe -> BLACK SQUARE
u'\xa0' # 0x00ff -> NO-BREAK SPACE
)
### Encoding Map
# Inverse of the decoding table above: maps Unicode code points (keys) to the
# single byte values (values) of this 8-bit code page.  The G-with-breve,
# dotless-i and S-with-cedilla entries suggest this is IBM code page 857
# ("DOS Turkish") -- TODO confirm against the codec name, which lies outside
# this chunk.  Code points absent from this dict are not encodable; note the
# deliberate gaps (e.g. 0x00d0, 0x00dd/0x00de, 0x00f0) matching the
# UNDEFINED slots in the decoding table.  Machine-generated: do not edit by
# hand.
encoding_map = {
    0x0000: 0x0000, # NULL
    0x0001: 0x0001, # START OF HEADING
    0x0002: 0x0002, # START OF TEXT
    0x0003: 0x0003, # END OF TEXT
    0x0004: 0x0004, # END OF TRANSMISSION
    0x0005: 0x0005, # ENQUIRY
    0x0006: 0x0006, # ACKNOWLEDGE
    0x0007: 0x0007, # BELL
    0x0008: 0x0008, # BACKSPACE
    0x0009: 0x0009, # HORIZONTAL TABULATION
    0x000a: 0x000a, # LINE FEED
    0x000b: 0x000b, # VERTICAL TABULATION
    0x000c: 0x000c, # FORM FEED
    0x000d: 0x000d, # CARRIAGE RETURN
    0x000e: 0x000e, # SHIFT OUT
    0x000f: 0x000f, # SHIFT IN
    0x0010: 0x0010, # DATA LINK ESCAPE
    0x0011: 0x0011, # DEVICE CONTROL ONE
    0x0012: 0x0012, # DEVICE CONTROL TWO
    0x0013: 0x0013, # DEVICE CONTROL THREE
    0x0014: 0x0014, # DEVICE CONTROL FOUR
    0x0015: 0x0015, # NEGATIVE ACKNOWLEDGE
    0x0016: 0x0016, # SYNCHRONOUS IDLE
    0x0017: 0x0017, # END OF TRANSMISSION BLOCK
    0x0018: 0x0018, # CANCEL
    0x0019: 0x0019, # END OF MEDIUM
    0x001a: 0x001a, # SUBSTITUTE
    0x001b: 0x001b, # ESCAPE
    0x001c: 0x001c, # FILE SEPARATOR
    0x001d: 0x001d, # GROUP SEPARATOR
    0x001e: 0x001e, # RECORD SEPARATOR
    0x001f: 0x001f, # UNIT SEPARATOR
    0x0020: 0x0020, # SPACE
    0x0021: 0x0021, # EXCLAMATION MARK
    0x0022: 0x0022, # QUOTATION MARK
    0x0023: 0x0023, # NUMBER SIGN
    0x0024: 0x0024, # DOLLAR SIGN
    0x0025: 0x0025, # PERCENT SIGN
    0x0026: 0x0026, # AMPERSAND
    0x0027: 0x0027, # APOSTROPHE
    0x0028: 0x0028, # LEFT PARENTHESIS
    0x0029: 0x0029, # RIGHT PARENTHESIS
    0x002a: 0x002a, # ASTERISK
    0x002b: 0x002b, # PLUS SIGN
    0x002c: 0x002c, # COMMA
    0x002d: 0x002d, # HYPHEN-MINUS
    0x002e: 0x002e, # FULL STOP
    0x002f: 0x002f, # SOLIDUS
    0x0030: 0x0030, # DIGIT ZERO
    0x0031: 0x0031, # DIGIT ONE
    0x0032: 0x0032, # DIGIT TWO
    0x0033: 0x0033, # DIGIT THREE
    0x0034: 0x0034, # DIGIT FOUR
    0x0035: 0x0035, # DIGIT FIVE
    0x0036: 0x0036, # DIGIT SIX
    0x0037: 0x0037, # DIGIT SEVEN
    0x0038: 0x0038, # DIGIT EIGHT
    0x0039: 0x0039, # DIGIT NINE
    0x003a: 0x003a, # COLON
    0x003b: 0x003b, # SEMICOLON
    0x003c: 0x003c, # LESS-THAN SIGN
    0x003d: 0x003d, # EQUALS SIGN
    0x003e: 0x003e, # GREATER-THAN SIGN
    0x003f: 0x003f, # QUESTION MARK
    0x0040: 0x0040, # COMMERCIAL AT
    0x0041: 0x0041, # LATIN CAPITAL LETTER A
    0x0042: 0x0042, # LATIN CAPITAL LETTER B
    0x0043: 0x0043, # LATIN CAPITAL LETTER C
    0x0044: 0x0044, # LATIN CAPITAL LETTER D
    0x0045: 0x0045, # LATIN CAPITAL LETTER E
    0x0046: 0x0046, # LATIN CAPITAL LETTER F
    0x0047: 0x0047, # LATIN CAPITAL LETTER G
    0x0048: 0x0048, # LATIN CAPITAL LETTER H
    0x0049: 0x0049, # LATIN CAPITAL LETTER I
    0x004a: 0x004a, # LATIN CAPITAL LETTER J
    0x004b: 0x004b, # LATIN CAPITAL LETTER K
    0x004c: 0x004c, # LATIN CAPITAL LETTER L
    0x004d: 0x004d, # LATIN CAPITAL LETTER M
    0x004e: 0x004e, # LATIN CAPITAL LETTER N
    0x004f: 0x004f, # LATIN CAPITAL LETTER O
    0x0050: 0x0050, # LATIN CAPITAL LETTER P
    0x0051: 0x0051, # LATIN CAPITAL LETTER Q
    0x0052: 0x0052, # LATIN CAPITAL LETTER R
    0x0053: 0x0053, # LATIN CAPITAL LETTER S
    0x0054: 0x0054, # LATIN CAPITAL LETTER T
    0x0055: 0x0055, # LATIN CAPITAL LETTER U
    0x0056: 0x0056, # LATIN CAPITAL LETTER V
    0x0057: 0x0057, # LATIN CAPITAL LETTER W
    0x0058: 0x0058, # LATIN CAPITAL LETTER X
    0x0059: 0x0059, # LATIN CAPITAL LETTER Y
    0x005a: 0x005a, # LATIN CAPITAL LETTER Z
    0x005b: 0x005b, # LEFT SQUARE BRACKET
    0x005c: 0x005c, # REVERSE SOLIDUS
    0x005d: 0x005d, # RIGHT SQUARE BRACKET
    0x005e: 0x005e, # CIRCUMFLEX ACCENT
    0x005f: 0x005f, # LOW LINE
    0x0060: 0x0060, # GRAVE ACCENT
    0x0061: 0x0061, # LATIN SMALL LETTER A
    0x0062: 0x0062, # LATIN SMALL LETTER B
    0x0063: 0x0063, # LATIN SMALL LETTER C
    0x0064: 0x0064, # LATIN SMALL LETTER D
    0x0065: 0x0065, # LATIN SMALL LETTER E
    0x0066: 0x0066, # LATIN SMALL LETTER F
    0x0067: 0x0067, # LATIN SMALL LETTER G
    0x0068: 0x0068, # LATIN SMALL LETTER H
    0x0069: 0x0069, # LATIN SMALL LETTER I
    0x006a: 0x006a, # LATIN SMALL LETTER J
    0x006b: 0x006b, # LATIN SMALL LETTER K
    0x006c: 0x006c, # LATIN SMALL LETTER L
    0x006d: 0x006d, # LATIN SMALL LETTER M
    0x006e: 0x006e, # LATIN SMALL LETTER N
    0x006f: 0x006f, # LATIN SMALL LETTER O
    0x0070: 0x0070, # LATIN SMALL LETTER P
    0x0071: 0x0071, # LATIN SMALL LETTER Q
    0x0072: 0x0072, # LATIN SMALL LETTER R
    0x0073: 0x0073, # LATIN SMALL LETTER S
    0x0074: 0x0074, # LATIN SMALL LETTER T
    0x0075: 0x0075, # LATIN SMALL LETTER U
    0x0076: 0x0076, # LATIN SMALL LETTER V
    0x0077: 0x0077, # LATIN SMALL LETTER W
    0x0078: 0x0078, # LATIN SMALL LETTER X
    0x0079: 0x0079, # LATIN SMALL LETTER Y
    0x007a: 0x007a, # LATIN SMALL LETTER Z
    0x007b: 0x007b, # LEFT CURLY BRACKET
    0x007c: 0x007c, # VERTICAL LINE
    0x007d: 0x007d, # RIGHT CURLY BRACKET
    0x007e: 0x007e, # TILDE
    0x007f: 0x007f, # DELETE
    0x00a0: 0x00ff, # NO-BREAK SPACE
    0x00a1: 0x00ad, # INVERTED EXCLAMATION MARK
    0x00a2: 0x00bd, # CENT SIGN
    0x00a3: 0x009c, # POUND SIGN
    0x00a4: 0x00cf, # CURRENCY SIGN
    0x00a5: 0x00be, # YEN SIGN
    0x00a6: 0x00dd, # BROKEN BAR
    0x00a7: 0x00f5, # SECTION SIGN
    0x00a8: 0x00f9, # DIAERESIS
    0x00a9: 0x00b8, # COPYRIGHT SIGN
    0x00aa: 0x00d1, # FEMININE ORDINAL INDICATOR
    0x00ab: 0x00ae, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
    0x00ac: 0x00aa, # NOT SIGN
    0x00ad: 0x00f0, # SOFT HYPHEN
    0x00ae: 0x00a9, # REGISTERED SIGN
    0x00af: 0x00ee, # MACRON
    0x00b0: 0x00f8, # DEGREE SIGN
    0x00b1: 0x00f1, # PLUS-MINUS SIGN
    0x00b2: 0x00fd, # SUPERSCRIPT TWO
    0x00b3: 0x00fc, # SUPERSCRIPT THREE
    0x00b4: 0x00ef, # ACUTE ACCENT
    0x00b5: 0x00e6, # MICRO SIGN
    0x00b6: 0x00f4, # PILCROW SIGN
    0x00b7: 0x00fa, # MIDDLE DOT
    0x00b8: 0x00f7, # CEDILLA
    0x00b9: 0x00fb, # SUPERSCRIPT ONE
    0x00ba: 0x00d0, # MASCULINE ORDINAL INDICATOR
    0x00bb: 0x00af, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
    0x00bc: 0x00ac, # VULGAR FRACTION ONE QUARTER
    0x00bd: 0x00ab, # VULGAR FRACTION ONE HALF
    0x00be: 0x00f3, # VULGAR FRACTION THREE QUARTERS
    0x00bf: 0x00a8, # INVERTED QUESTION MARK
    0x00c0: 0x00b7, # LATIN CAPITAL LETTER A WITH GRAVE
    0x00c1: 0x00b5, # LATIN CAPITAL LETTER A WITH ACUTE
    0x00c2: 0x00b6, # LATIN CAPITAL LETTER A WITH CIRCUMFLEX
    0x00c3: 0x00c7, # LATIN CAPITAL LETTER A WITH TILDE
    0x00c4: 0x008e, # LATIN CAPITAL LETTER A WITH DIAERESIS
    0x00c5: 0x008f, # LATIN CAPITAL LETTER A WITH RING ABOVE
    0x00c6: 0x0092, # LATIN CAPITAL LIGATURE AE
    0x00c7: 0x0080, # LATIN CAPITAL LETTER C WITH CEDILLA
    0x00c8: 0x00d4, # LATIN CAPITAL LETTER E WITH GRAVE
    0x00c9: 0x0090, # LATIN CAPITAL LETTER E WITH ACUTE
    0x00ca: 0x00d2, # LATIN CAPITAL LETTER E WITH CIRCUMFLEX
    0x00cb: 0x00d3, # LATIN CAPITAL LETTER E WITH DIAERESIS
    0x00cc: 0x00de, # LATIN CAPITAL LETTER I WITH GRAVE
    0x00cd: 0x00d6, # LATIN CAPITAL LETTER I WITH ACUTE
    0x00ce: 0x00d7, # LATIN CAPITAL LETTER I WITH CIRCUMFLEX
    0x00cf: 0x00d8, # LATIN CAPITAL LETTER I WITH DIAERESIS
    0x00d1: 0x00a5, # LATIN CAPITAL LETTER N WITH TILDE
    0x00d2: 0x00e3, # LATIN CAPITAL LETTER O WITH GRAVE
    0x00d3: 0x00e0, # LATIN CAPITAL LETTER O WITH ACUTE
    0x00d4: 0x00e2, # LATIN CAPITAL LETTER O WITH CIRCUMFLEX
    0x00d5: 0x00e5, # LATIN CAPITAL LETTER O WITH TILDE
    0x00d6: 0x0099, # LATIN CAPITAL LETTER O WITH DIAERESIS
    0x00d7: 0x00e8, # MULTIPLICATION SIGN
    0x00d8: 0x009d, # LATIN CAPITAL LETTER O WITH STROKE
    0x00d9: 0x00eb, # LATIN CAPITAL LETTER U WITH GRAVE
    0x00da: 0x00e9, # LATIN CAPITAL LETTER U WITH ACUTE
    0x00db: 0x00ea, # LATIN CAPITAL LETTER U WITH CIRCUMFLEX
    0x00dc: 0x009a, # LATIN CAPITAL LETTER U WITH DIAERESIS
    0x00df: 0x00e1, # LATIN SMALL LETTER SHARP S
    0x00e0: 0x0085, # LATIN SMALL LETTER A WITH GRAVE
    0x00e1: 0x00a0, # LATIN SMALL LETTER A WITH ACUTE
    0x00e2: 0x0083, # LATIN SMALL LETTER A WITH CIRCUMFLEX
    0x00e3: 0x00c6, # LATIN SMALL LETTER A WITH TILDE
    0x00e4: 0x0084, # LATIN SMALL LETTER A WITH DIAERESIS
    0x00e5: 0x0086, # LATIN SMALL LETTER A WITH RING ABOVE
    0x00e6: 0x0091, # LATIN SMALL LIGATURE AE
    0x00e7: 0x0087, # LATIN SMALL LETTER C WITH CEDILLA
    0x00e8: 0x008a, # LATIN SMALL LETTER E WITH GRAVE
    0x00e9: 0x0082, # LATIN SMALL LETTER E WITH ACUTE
    0x00ea: 0x0088, # LATIN SMALL LETTER E WITH CIRCUMFLEX
    0x00eb: 0x0089, # LATIN SMALL LETTER E WITH DIAERESIS
    0x00ec: 0x00ec, # LATIN SMALL LETTER I WITH GRAVE
    0x00ed: 0x00a1, # LATIN SMALL LETTER I WITH ACUTE
    0x00ee: 0x008c, # LATIN SMALL LETTER I WITH CIRCUMFLEX
    0x00ef: 0x008b, # LATIN SMALL LETTER I WITH DIAERESIS
    0x00f1: 0x00a4, # LATIN SMALL LETTER N WITH TILDE
    0x00f2: 0x0095, # LATIN SMALL LETTER O WITH GRAVE
    0x00f3: 0x00a2, # LATIN SMALL LETTER O WITH ACUTE
    0x00f4: 0x0093, # LATIN SMALL LETTER O WITH CIRCUMFLEX
    0x00f5: 0x00e4, # LATIN SMALL LETTER O WITH TILDE
    0x00f6: 0x0094, # LATIN SMALL LETTER O WITH DIAERESIS
    0x00f7: 0x00f6, # DIVISION SIGN
    0x00f8: 0x009b, # LATIN SMALL LETTER O WITH STROKE
    0x00f9: 0x0097, # LATIN SMALL LETTER U WITH GRAVE
    0x00fa: 0x00a3, # LATIN SMALL LETTER U WITH ACUTE
    0x00fb: 0x0096, # LATIN SMALL LETTER U WITH CIRCUMFLEX
    0x00fc: 0x0081, # LATIN SMALL LETTER U WITH DIAERESIS
    0x00ff: 0x00ed, # LATIN SMALL LETTER Y WITH DIAERESIS
    0x011e: 0x00a6, # LATIN CAPITAL LETTER G WITH BREVE
    0x011f: 0x00a7, # LATIN SMALL LETTER G WITH BREVE
    0x0130: 0x0098, # LATIN CAPITAL LETTER I WITH DOT ABOVE
    0x0131: 0x008d, # LATIN SMALL LETTER DOTLESS I
    0x015e: 0x009e, # LATIN CAPITAL LETTER S WITH CEDILLA
    0x015f: 0x009f, # LATIN SMALL LETTER S WITH CEDILLA
    0x2500: 0x00c4, # BOX DRAWINGS LIGHT HORIZONTAL
    0x2502: 0x00b3, # BOX DRAWINGS LIGHT VERTICAL
    0x250c: 0x00da, # BOX DRAWINGS LIGHT DOWN AND RIGHT
    0x2510: 0x00bf, # BOX DRAWINGS LIGHT DOWN AND LEFT
    0x2514: 0x00c0, # BOX DRAWINGS LIGHT UP AND RIGHT
    0x2518: 0x00d9, # BOX DRAWINGS LIGHT UP AND LEFT
    0x251c: 0x00c3, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
    0x2524: 0x00b4, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
    0x252c: 0x00c2, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
    0x2534: 0x00c1, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
    0x253c: 0x00c5, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
    0x2550: 0x00cd, # BOX DRAWINGS DOUBLE HORIZONTAL
    0x2551: 0x00ba, # BOX DRAWINGS DOUBLE VERTICAL
    0x2554: 0x00c9, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
    0x2557: 0x00bb, # BOX DRAWINGS DOUBLE DOWN AND LEFT
    0x255a: 0x00c8, # BOX DRAWINGS DOUBLE UP AND RIGHT
    0x255d: 0x00bc, # BOX DRAWINGS DOUBLE UP AND LEFT
    0x2560: 0x00cc, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
    0x2563: 0x00b9, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
    0x2566: 0x00cb, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
    0x2569: 0x00ca, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
    0x256c: 0x00ce, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
    0x2580: 0x00df, # UPPER HALF BLOCK
    0x2584: 0x00dc, # LOWER HALF BLOCK
    0x2588: 0x00db, # FULL BLOCK
    0x2591: 0x00b0, # LIGHT SHADE
    0x2592: 0x00b1, # MEDIUM SHADE
    0x2593: 0x00b2, # DARK SHADE
    0x25a0: 0x00fe, # BLACK SQUARE
}
| gpl-3.0 |
sebmolinari/los-kpos | app/mod_auth/controllers.py | 1 | 4029 | #Flask imports
from flask import Blueprint, render_template, flash, redirect, url_for, abort
from flask.ext.login import LoginManager, login_required, logout_user, login_user
#App imports
from app import app
from app.mod_auth.forms import LoginForm, UserForm, EmailForm, PasswordForm
from app.mod_auth.models import User
from utils import ts, send_email
# Flask-Login wiring: the LoginManager drives session-based authentication
# for the whole app; unauthenticated requests to @login_required views are
# redirected to the 'auth.user_login' endpoint defined below.
lm = LoginManager()
lm.init_app(app)
lm.login_view = "auth.user_login"

# All authentication routes in this module are mounted under /user and
# render templates from this blueprint's local 'templates' folder.
mod_auth = Blueprint('auth', __name__, url_prefix='/user',
                     template_folder='templates')
@lm.user_loader
def user_load(user_id):
    """Flask-Login user-loader callback: map a session-stored id to a User.

    Flask-Login keeps the id in the session as a string, so it is cast
    back to int before the lookup.  Presumably ``User.get_by_id`` returns
    None for an unknown id, which Flask-Login treats as an anonymous
    session -- TODO confirm against the User model.
    """
    # Renamed the parameter from 'id' to avoid shadowing the builtin;
    # Flask-Login invokes this callback positionally, so callers are
    # unaffected.
    return User.get_by_id(int(user_id))
@mod_auth.route('/login/', methods=['GET', 'POST'])
def user_login():
    """Show the login form and authenticate submitted credentials.

    Special case: when the user table is empty there is nobody who could
    possibly log in, so the visitor is sent to the account-creation form
    instead (first-run bootstrap).
    """
    if not User.get_all():
        return redirect(url_for('auth.user_create'))
    form = LoginForm()
    if form.validate_on_submit():
        account = User.get_by_email(email=form.email.data)
        # A single generic message for both "no such user" and "bad
        # password" -- the form is re-rendered below either way.
        if account and User.check_password(account.password,
                                           form.password.data):
            login_user(account)
            return redirect(url_for('index'))
        flash('Wrong email or password')
    return render_template("login.html", form=form)
@mod_auth.route('/logout/')
@login_required
def user_logout():
    """End the current session and send the user back to the front page."""
    logout_user()
    home = url_for('index')
    return redirect(home)
@mod_auth.route('/create', methods=["GET", "POST"])
def user_create():
    """Render and process the account-creation form.

    On a valid submission the account is persisted and a confirmation
    link -- a salted, timed token wrapping the email address -- is mailed
    to the new user (handled by ``user_confirm_email``).
    """
    form = UserForm()
    if not form.validate_on_submit():
        # GET request, or POST with validation errors: (re)render the form.
        return render_template("create.html", form=form)

    # Populate and persist the new account from the submitted fields.
    new_user = User()
    new_user.name = form.name.data
    new_user.email = form.email.data
    new_user.password = form.password.data
    # NOTE(review): is_admin is taken straight from an unauthenticated
    # form, so any visitor can grant themselves admin rights.  Presumably
    # this is only meant for the first-user bootstrap (see user_login);
    # confirm and lock this down.
    new_user.is_admin = form.is_admin.data
    # TODO Fix possible duplicated keys!
    User.save(new_user)

    # Build and send the email-confirmation link.
    token = ts.dumps(new_user.email, salt='email-confirm-key')
    confirm_url = url_for(
        'auth.user_confirm_email',
        token=token,
        _external=True)
    html = render_template(
        'activate.html',
        confirm_url=confirm_url)
    # We'll assume that send_email has been defined in myapp/util.py
    app.logger.info('Url use to confirm: {0}'.format(confirm_url))
    subject = "Confirm your email"
    send_email(new_user.email, subject, html)
    return redirect(url_for("index"))
@mod_auth.route('/confirm/<token>')
def user_confirm_email(token):
    """Confirm an email address from the token mailed at sign-up.

    The token is the salted, timed serialization of the address produced
    in ``user_create``; it expires after 24 hours (max_age=86400 seconds).
    Invalid, forged or expired tokens yield a 404.
    """
    try:
        email = ts.loads(token, salt="email-confirm-key", max_age=86400)
    except Exception:
        # Was a bare 'except:', which also swallowed SystemExit and
        # KeyboardInterrupt; any deserialization failure still 404s.
        abort(404)
    user = User.get_by_email(email=email)
    if user is None:
        # Valid token but the account no longer exists (e.g. deleted in
        # the meantime); previously this crashed with AttributeError.
        abort(404)
    user.email_confirmed = True
    User.save(user)
    return redirect(url_for('auth.user_login'))
@mod_auth.route('/reset', methods=["GET", "POST"])
def user_password_reset():
    """Request a password reset: email a salted, timed recovery link.

    The response is the same whether or not the address is registered, so
    the endpoint cannot be used to probe which emails have accounts.
    """
    form = EmailForm()
    if form.validate_on_submit():
        user = User.get_by_email(email=form.email.data)
        # Guard: an unknown address used to leave user == None and raise
        # AttributeError (a 500).  Only mail the link when the account
        # exists, but redirect unconditionally to avoid account
        # enumeration.
        if user is not None:
            subject = "Password reset requested"
            # Here we use the URLSafeTimedSerializer we created in `util` at the
            # beginning of the chapter
            token = ts.dumps(user.email, salt='recover-key')
            recover_url = url_for(
                'auth.user_reset_password_with_token',
                token=token,
                _external=True)
            html = render_template(
                'recover.html',
                recover_url=recover_url)
            # Let's assume that send_email was defined in myapp/util.py
            send_email(user.email, subject, html)
        return redirect(url_for('index'))
    return render_template('reset.html', form=form)
@mod_auth.route('/reset/<token>', methods=["GET", "POST"])
def user_reset_password_with_token(token):
    """Set a new password using a recovery token from the reset email.

    The token (produced in ``user_password_reset``) expires after 24
    hours (max_age=86400 seconds); invalid, forged or expired tokens --
    and tokens for accounts that no longer exist -- yield a 404.
    """
    try:
        email = ts.loads(token, salt="recover-key", max_age=86400)
    except Exception:
        # Was a bare 'except:', which also swallowed SystemExit and
        # KeyboardInterrupt; any deserialization failure still 404s.
        abort(404)
    form = PasswordForm()
    if form.validate_on_submit():
        user = User.get_by_email(email=email)
        if user is None:
            # Valid token but the account vanished; previously this
            # crashed with AttributeError when assigning the password.
            abort(404)
        user.password = form.password.data
        User.save(user)
        return redirect(url_for('auth.user_login'))
    return render_template('reset_with_token.html', form=form, token=token)
| gpl-3.0 |
SRI-CSL/ETB | demos/allsatlive/yices_parse.py | 1 | 2665 | #Defines grammar for reading yices files; used in the include <file> api for yices.
from pyparsing import *
#Grammar for s-expressions which is used to parse Yices expressions
token = Word(alphanums + "-./_:*+=!<>")
LPAR = "("
RPAR = ")"
#Yices comments are ignored; parentheses are retained since Yices expressions are printed back
#as strings for the Yices api
lispStyleComment = Group(";" + restOfLine)
sexp = Forward()
sexpList = ZeroOrMore(sexp)
sexpList.ignore(lispStyleComment)
sexpGroup = Group(LPAR + sexpList + RPAR)
sexp << (token | sexpGroup)
#Grammar for Yices commands
#_LPAR = Suppress(LPAR)
#_RPAR = Suppress(RPAR)
#The command names are enumerated
yDefine = Literal("define")
yAssert = Literal("assert")
yAssertPlus = Literal("assert+")
yRetract = Literal("retract")
yCheck = Literal("check")
yMaxSat = Literal("maxsat")
ySetEvidence = Literal("set-evidence!")
ySetVerbosity = Literal("set-verbosity")
ySetArithOnly = Literal("set-arith-only")
yPush = Literal("push")
yPop = Literal("pop")
yEcho = Literal("echo")
yReset = Literal("reset")
yCommandName = yDefine + yAssert + yAssertPlus + yRetract + yCheck + yMaxSat + ySetEvidence + ySetVerbosity + ySetArithOnly + yPush + yPop + yEcho + yReset
#name is word without colons
name = Word(alphanums + "-./_*+=!<>")
colons = Suppress("::")
#Define commands are treated differently since we have to parse out the '::'
yDefineCommand = Group(yDefine + name + colons + sexp + sexpList)
yOtherCommandName = yAssert | yAssertPlus | yRetract | yCheck | yMaxSat | ySetEvidence | ySetVerbosity | ySetArithOnly | yPush | yPop | yEcho | yReset
yOtherCommand = Group(yOtherCommandName + sexpList)
yCommandBody = yDefineCommand | yOtherCommand
yCommand = Group(LPAR + yCommandBody + RPAR)
yCommandList = ZeroOrMore(yCommand)
yCommandList.ignore(lispStyleComment)
# no longer used: defineName = Group(name + colons + sexp + sexpList)
lparPrint = " ("
rparPrint = ") "
def printSexp(parsedSexp):
    """Render a parsed s-expression back into a flat string.

    Atoms are returned as-is; '(' and ')' are rendered with the padded
    forms lparPrint/rparPrint; nested results are rendered recursively,
    joining head and tail with a space unless one of them is a padded
    parenthesis.  Debug prints of intermediate values are emitted for
    composite nodes.
    """
    # Leaf cases: padded parens, plain atoms, and the empty list.
    if parsedSexp == LPAR:
        return lparPrint
    if parsedSexp == RPAR:
        return rparPrint
    if type(parsedSexp) == str:
        return parsedSexp
    if parsedSexp == []:
        return ''
    # Composite node: render the first element and the remainder, then
    # decide whether a separating space is needed.
    print(parsedSexp)
    head = printSexp(parsedSexp[0])
    tail = printSexp(parsedSexp[1:])
    print('first = %s' % head)
    print('rest = %s' % tail)
    no_space = head in (lparPrint, rparPrint) or tail == rparPrint
    return ('%s%s' if no_space else '%s %s') % (head, tail)
# Sample 'define' commands (with varying whitespace around '::') for
# interactively exercising the grammars above.
test1 = """(define a::bool)"""
test2 = """(define b ::bool)"""
test3 = """(define c :: bool)"""
| gpl-3.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.