repo_name stringlengths 5 100 | path stringlengths 4 375 | copies stringclasses 991 values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15 values |
|---|---|---|---|---|---|
bud4/samba | python/samba/tests/registry.py | 1 | 2444 | # Unix SMB/CIFS implementation.
# Copyright (C) Jelmer Vernooij <jelmer@samba.org> 2007
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""Tests for samba.registry."""
import os
from samba import registry
import samba.tests
from samba import WERRORError
from subprocess import Popen, PIPE
class HelperTests(samba.tests.TestCase):
    """Tests for module-level helper functions in samba.registry."""

    def test_predef_to_name(self):
        # 0x80000002 is the well-known predefined handle for HKLM.
        # Use assertEqual: assertEquals is a deprecated alias.
        self.assertEqual("HKEY_LOCAL_MACHINE",
                         registry.get_predef_name(0x80000002))

    def test_str_regtype(self):
        # Registry value-type code 4 is REG_DWORD.
        self.assertEqual("REG_DWORD", registry.str_regtype(4))
class HiveTests(samba.tests.TestCaseInTempDir):
    """Tests for LDB-backed registry hives: open, set/delete values, flush."""

    def setUp(self):
        super(HiveTests, self).setUp()
        self.hive_path = os.path.join(self.tempdir, "ldb_new.ldb")
        self.hive = registry.open_ldb(self.hive_path)

    def tearDown(self):
        # Drop our reference first so the hive closes its backing file
        # before we unlink it.
        del self.hive
        os.unlink(self.hive_path)
        super(HiveTests, self).tearDown()

    def test_ldb_new(self):
        # assertIsNotNone states the intent directly (was
        # assertTrue(... is not None)).
        self.assertIsNotNone(self.hive)

    def test_set_value(self):
        self.assertIsNone(self.hive.set_value('foo1', 1, 'bar1'))

    def test_flush(self):
        self.assertIsNone(self.hive.set_value('foo2', 1, 'bar2'))
        self.assertIsNone(self.hive.flush())
        # Inspect the raw tdb to confirm the value reached disk.
        # NOTE(review): 'bin/tdbdump' is a cwd-relative path -- assumes the
        # test runs from the build tree root.
        proc = Popen(['bin/tdbdump', self.hive_path], stdout=PIPE, stderr=PIPE)
        tdb_dump, err = proc.communicate()
        self.assertTrue(b'DN=VALUE=FOO2,HIVE=NONE' in tdb_dump)

    def test_del_value(self):
        self.assertIsNone(self.hive.set_value('foo3', 1, 'bar3'))
        self.assertIsNone(self.hive.del_value('foo3'))

    def test_del_nonexisting_value(self):
        # Deleting a value that was never set must raise WERRORError.
        self.assertRaises(WERRORError, self.hive.del_value, 'foo4')
class RegistryTests(samba.tests.TestCase):
    """Tests for the top-level registry.Registry object."""

    def test_new(self):
        # Constructing a Registry should always succeed and yield an object.
        self.registry = registry.Registry()
        self.assertIsNotNone(self.registry)
| gpl-3.0 |
elijah513/fabric | fabric/network.py | 19 | 25040 | """
Classes and subroutines dealing with network connections and related topics.
"""
from __future__ import with_statement
from functools import wraps
import getpass
import os
import re
import time
import socket
import sys
from StringIO import StringIO
from fabric.auth import get_password, set_password
from fabric.utils import abort, handle_prompt_abort, warn
from fabric.exceptions import NetworkError
try:
import warnings
warnings.simplefilter('ignore', DeprecationWarning)
import paramiko as ssh
except ImportError, e:
import traceback
traceback.print_exc()
msg = """
There was a problem importing our SSH library (see traceback above).
Please make sure all dependencies are installed and importable.
""".rstrip()
sys.stderr.write(msg + '\n')
sys.exit(1)
ipv6_regex = re.compile(
'^\[?(?P<host>[0-9A-Fa-f:]+(?:%[a-z]+\d+)?)\]?(:(?P<port>\d+))?$')
def direct_tcpip(client, host, port):
    """Open a ``direct-tcpip`` channel through ``client`` to ``host:port``.

    Used to tunnel the final connection through a gateway SSH client.
    """
    transport = client.get_transport()
    destination = (host, int(port))
    origin = ('', 0)
    return transport.open_channel('direct-tcpip', destination, origin)
def is_key_load_error(e):
    """Return True when ``e`` is paramiko's "can't parse key file" error.

    That condition is a plain ``SSHException`` (exact class, not a subclass)
    whose message mentions the parse failure.
    """
    if e.__class__ is not ssh.SSHException:
        return False
    return 'Unable to parse key file' in str(e)
def _tried_enough(tries):
    """Return True once ``tries`` has reached env.connection_attempts."""
    from fabric.state import env
    limit = env.connection_attempts
    return tries >= limit
def get_gateway(host, port, cache, replace=False):
    """
    Create and return a gateway socket, if one is needed.
    This function checks ``env`` for gateway or proxy-command settings and
    returns the necessary socket-like object for use by a final host
    connection.
    :param host:
        Hostname of target server.
    :param port:
        Port to connect to on target server.
    :param cache:
        A ``HostConnectionCache`` object, in which gateway ``SSHClient``
        objects are to be retrieved/cached.
    :param replace:
        Whether to forcibly replace a cached gateway client object.
    :returns:
        A ``socket.socket``-like object, or ``None`` if none was created.
    """
    from fabric.state import env, output
    sock = None
    # ProxyCommand from the per-host SSH config, if any (may be None).
    proxy_command = ssh_config().get('proxycommand', None)
    # env.gateway wins over an ssh_config ProxyCommand when both are set.
    if env.gateway:
        gateway = normalize_to_string(env.gateway)
        # ensure initial gateway connection
        if replace or gateway not in cache:
            if output.debug:
                print "Creating new gateway connection to %r" % gateway
            cache[gateway] = connect(*normalize(gateway) + (cache, False))
        # now we should have an open gw connection and can ask it for a
        # direct-tcpip channel to the real target. (bypass cache's own
        # __getitem__ override to avoid hilarity - this is usually called
        # within that method.)
        sock = direct_tcpip(dict.__getitem__(cache, gateway), host, port)
    elif proxy_command:
        sock = ssh.ProxyCommand(proxy_command)
    return sock
class HostConnectionCache(dict):
    """
    Dict subclass allowing for caching of host connections/clients.
    This subclass will intelligently create new client connections when keys
    are requested, or return previously created connections instead.
    It also handles creating new socket-like objects when required to implement
    gateway connections and `ProxyCommand`, and handing them to the inner
    connection methods.
    Key values are the same as host specifiers throughout Fabric: optional
    username + ``@``, mandatory hostname, optional ``:`` + port number.
    Examples:
    * ``example.com`` - typical Internet host address.
    * ``firewall`` - atypical, but still legal, local host address.
    * ``user@example.com`` - with specific username attached.
    * ``bob@smith.org:222`` - with specific nonstandard port attached.
    When the username is not given, ``env.user`` is used. ``env.user``
    defaults to the currently running user at startup but may be overwritten by
    user code or by specifying a command-line flag.
    Note that differing explicit usernames for the same hostname will result in
    multiple client connections being made. For example, specifying
    ``user1@example.com`` will create a connection to ``example.com``, logged
    in as ``user1``; later specifying ``user2@example.com`` will create a new,
    2nd connection as ``user2``.
    The same applies to ports: specifying two different ports will result in
    two different connections to the same host being made. If no port is given,
    22 is assumed, so ``example.com`` is equivalent to ``example.com:22``.
    """

    def connect(self, key):
        """
        Force a new connection to ``key`` host string.
        """
        from fabric.state import env
        user, host, port = normalize(key)
        key = normalize_to_string(key)
        seek_gateway = True
        # break the loop when the host is gateway itself
        # (otherwise connecting to the gateway would recurse forever).
        if env.gateway:
            seek_gateway = normalize_to_string(env.gateway) != key
        self[key] = connect(
            user, host, port, cache=self, seek_gateway=seek_gateway)

    def __getitem__(self, key):
        """
        Autoconnect + return connection object
        """
        key = normalize_to_string(key)
        # Lazily establish the connection on first access.
        if key not in self:
            self.connect(key)
        return dict.__getitem__(self, key)

    #
    # Dict overrides that normalize input keys
    #

    def __setitem__(self, key, value):
        return dict.__setitem__(self, normalize_to_string(key), value)

    def __delitem__(self, key):
        return dict.__delitem__(self, normalize_to_string(key))

    def __contains__(self, key):
        return dict.__contains__(self, normalize_to_string(key))
def ssh_config(host_string=None):
    """
    Return ssh configuration dict for current env.host_string host value.
    Memoizes the loaded SSH config file, but not the specific per-host results.
    This function performs the necessary "is SSH config enabled?" checks and
    will simply return an empty dict if not. If SSH config *is* enabled and the
    value of env.ssh_config_path is not a valid file, it will abort.
    May give an explicit host string as ``host_string``.
    """
    from fabric.state import env
    dummy = {}
    if not env.use_ssh_config:
        return dummy
    # Parse and memoize the config file itself on first use.
    if '_ssh_config' not in env:
        try:
            conf = ssh.SSHConfig()
            path = os.path.expanduser(env.ssh_config_path)
            with open(path) as fd:
                conf.parse(fd)
            env._ssh_config = conf
        except IOError:
            # NOTE(review): the docstring above says "it will abort", but an
            # unreadable config only warns and falls back to the empty dict --
            # confirm which behavior is intended.
            warn("Unable to load SSH config file '%s'" % path)
            return dummy
    # Only the parse is cached; the per-host lookup runs fresh each call.
    host = parse_host_string(host_string or env.host_string)['host']
    return env._ssh_config.lookup(host)
def key_filenames():
    """
    Returns list of SSH key filenames for the current env.host_string.
    Takes into account ssh_config and env.key_filename, including normalization
    to a list. Also performs ``os.path.expanduser`` expansion on any key
    filenames.
    """
    from fabric.state import env
    keys = env.key_filename
    # For ease of use, coerce stringish key filename into list
    if isinstance(env.key_filename, basestring) or env.key_filename is None:
        keys = [keys]
    # Strip out any empty strings (such as the default value...meh)
    keys = filter(bool, keys)
    # Honor SSH config
    conf = ssh_config()
    if 'identityfile' in conf:
        # Assume a list here as we require Paramiko 1.10+
        keys.extend(conf['identityfile'])
    # Python 2 map() returns a real list, so callers get list semantics.
    return map(os.path.expanduser, keys)
def key_from_env(passphrase=None):
"""
Returns a paramiko-ready key from a text string of a private key
"""
from fabric.state import env, output
if 'key' in env:
if output.debug:
# NOTE: this may not be the most secure thing; OTOH anybody running
# the process must by definition have access to the key value,
# so only serious problem is if they're logging the output.
sys.stderr.write("Trying to honor in-memory key %r\n" % env.key)
for pkey_class in (ssh.rsakey.RSAKey, ssh.dsskey.DSSKey):
if output.debug:
sys.stderr.write("Trying to load it as %s\n" % pkey_class)
try:
return pkey_class.from_private_key(StringIO(env.key), passphrase)
except Exception, e:
# File is valid key, but is encrypted: raise it, this will
# cause cxn loop to prompt for passphrase & retry
if 'Private key file is encrypted' in e:
raise
# Otherwise, it probably means it wasn't a valid key of this
# type, so try the next one.
else:
pass
def parse_host_string(host_string):
    """Split a host string into its parts.

    Returns a dict with ``user``, ``host`` and ``port`` keys; any part that
    is absent (or empty) comes back as None.
    """
    # Peel off an optional leading "user@" (rightmost '@' wins).
    pieces = host_string.rsplit('@', 1)
    hostport = pieces[-1]
    user = None
    if len(pieces) == 2 and pieces[0]:
        user = pieces[0]
    if hostport.count(':') > 1:
        # More than one colon: an IPv6 literal, where (square) brackets are
        # what separates host from port.
        groups = ipv6_regex.match(hostport).groupdict()
        host = groups['host'] if groups['host'] else None
        port = groups['port'] if groups['port'] else None
    else:
        # Hostname or IPv4 address with an optional ":port" suffix.
        host_and_port = hostport.rsplit(':', 1)
        host = host_and_port[0] if host_and_port[0] else None
        port = None
        if len(host_and_port) == 2 and host_and_port[1]:
            port = host_and_port[1]
    return {'user': user, 'host': host, 'port': port}
def normalize(host_string, omit_port=False):
    """
    Normalizes a given host string, returning explicit host, user, port.
    If ``omit_port`` is given and is True, only the host and user are returned.
    This function will process SSH config files if Fabric is configured to do
    so, and will use them to fill in some default values or swap in hostname
    aliases.
    Precedence for user/port: explicit value in ``host_string`` > user-tweaked
    env value > ssh_config value > env default.
    """
    from fabric.state import env
    # Gracefully handle "empty" input by returning empty output
    if not host_string:
        return ('', '') if omit_port else ('', '', '')
    # Parse host string (need this early on to look up host-specific ssh_config
    # values)
    r = parse_host_string(host_string)
    host = r['host']
    # Env values (using defaults if somehow earlier defaults were replaced with
    # empty values)
    user = env.user or env.local_user
    port = env.port or env.default_port
    # SSH config data
    conf = ssh_config(host_string)
    # Only use ssh_config values if the env value appears unmodified from
    # the true defaults. If the user has tweaked them, that new value
    # takes precedence.
    if user == env.local_user and 'user' in conf:
        user = conf['user']
    if port == env.default_port and 'port' in conf:
        port = conf['port']
    # Also override host if needed (ssh_config HostName aliasing)
    if 'hostname' in conf:
        host = conf['hostname']
    # Merge explicit user/port values with the env/ssh_config derived ones
    # (Host is already done at this point.)
    user = r['user'] or user
    port = r['port'] or port
    if omit_port:
        return user, host
    return user, host, port
def to_dict(host_string):
    """Normalize ``host_string`` and return its parts as a dict.

    The original (un-normalized) string is preserved under ``host_string``.
    """
    user, host, port = normalize(host_string)
    result = {'host_string': host_string}
    result['user'] = user
    result['host'] = host
    result['port'] = port
    return result
def from_dict(arg):
    """Inverse of ``to_dict``: rebuild a host string from its parts."""
    user = arg['user']
    host = arg['host']
    port = arg['port']
    return join_host_strings(user, host, port)
def denormalize(host_string):
    """
    Strips out default values for the given host string.
    If the user part is the default user, it is removed;
    if the port is port 22, it also is removed.
    """
    from fabric.state import env
    r = parse_host_string(host_string)
    user = ''
    # Keep the user prefix only when it differs from the configured default.
    if r['user'] is not None and r['user'] != env.user:
        user = r['user'] + '@'
    port = ''
    # Port 22 is SSH's default and therefore implied.
    if r['port'] is not None and r['port'] != '22':
        port = ':' + r['port']
    host = r['host']
    # Re-bracket IPv6 literals when a port suffix is kept, so the port
    # separator stays unambiguous.
    host = '[%s]' % host if port and host.count(':') > 1 else host
    return user + host + port
def join_host_strings(user, host, port=None):
    """
    Combine user/host/port into a single ``user@host:port`` string.

    This function is not responsible for handling missing user/port strings;
    for that, see the ``normalize`` function.
    IPv6-looking hosts (more than one colon) are wrapped in square brackets
    when a port is present. With no (or falsy) ``port`` the result is simply
    ``user@host``.
    """
    if not port:
        return "%s@%s" % (user, host)
    if host.count(':') > 1:
        # Square brackets are necessary for IPv6 host/port separation.
        return "%s@[%s]:%s" % (user, host, port)
    return "%s@%s:%s" % (user, host, port)
def normalize_to_string(host_string):
    """Like ``normalize()``, but re-joined into a canonical host string."""
    user, host, port = normalize(host_string)
    return join_host_strings(user, host, port)
def connect(user, host, port, cache, seek_gateway=True):
    """
    Create and return a new SSHClient instance connected to given host.
    :param user: Username to connect as.
    :param host: Network hostname.
    :param port: SSH daemon port.
    :param cache:
        A ``HostConnectionCache`` instance used to cache/store gateway hosts
        when gatewaying is enabled.
    :param seek_gateway:
        Whether to try setting up a gateway socket for this connection. Used so
        the actual gateway connection can prevent recursion.
    """
    # NOTE(review): every other function in this module imports from
    # ``fabric.state``; this bare ``state`` import only resolves when fabric's
    # own package directory is on sys.path -- confirm it is intentional.
    from state import env, output
    #
    # Initialization
    #
    # Init client
    client = ssh.SSHClient()
    # Load system hosts file (e.g. /etc/ssh/ssh_known_hosts)
    known_hosts = env.get('system_known_hosts')
    if known_hosts:
        client.load_system_host_keys(known_hosts)
    # Load known host keys (e.g. ~/.ssh/known_hosts) unless user says not to.
    if not env.disable_known_hosts:
        client.load_system_host_keys()
    # Unless user specified not to, accept/add new, unknown host keys
    if not env.reject_unknown_hosts:
        client.set_missing_host_key_policy(ssh.AutoAddPolicy())
    #
    # Connection attempt loop
    #
    # Initialize loop variables
    connected = False
    password = get_password(user, host, port)
    tries = 0
    sock = None
    # Loop until successful connect (keep prompting for new password)
    while not connected:
        # Attempt connection
        try:
            tries += 1
            # (Re)connect gateway socket, if needed.
            # Nuke cached client object if not on initial try.
            if seek_gateway:
                sock = get_gateway(host, port, cache, replace=tries > 0)
            # Ready to connect
            client.connect(
                hostname=host,
                port=int(port),
                username=user,
                password=password,
                pkey=key_from_env(password),
                key_filename=key_filenames(),
                timeout=env.timeout,
                allow_agent=not env.no_agent,
                look_for_keys=not env.no_keys,
                sock=sock
            )
            connected = True
            # set a keepalive if desired
            if env.keepalive:
                client.get_transport().set_keepalive(env.keepalive)
            return client
        # BadHostKeyException corresponds to key mismatch, i.e. what on the
        # command line results in the big banner error about man-in-the-middle
        # attacks.
        except ssh.BadHostKeyException, e:
            raise NetworkError("Host key for %s did not match pre-existing key! Server's key was changed recently, or possible man-in-the-middle attack." % host, e)
        # Prompt for new password to try on auth failure
        except (
            ssh.AuthenticationException,
            ssh.PasswordRequiredException,
            ssh.SSHException
        ), e:
            msg = str(e)
            # If we get SSHExceptionError and the exception message indicates
            # SSH protocol banner read failures, assume it's caused by the
            # server load and try again.
            if e.__class__ is ssh.SSHException \
                    and msg == 'Error reading SSH protocol banner':
                if _tried_enough(tries):
                    raise NetworkError(msg, e)
                continue
            # For whatever reason, empty password + no ssh key or agent
            # results in an SSHException instead of an
            # AuthenticationException. Since it's difficult to do
            # otherwise, we must assume empty password + SSHException ==
            # auth exception.
            #
            # Conversely: if we get SSHException and there
            # *was* a password -- it is probably something non auth
            # related, and should be sent upwards. (This is not true if the
            # exception message does indicate key parse problems.)
            #
            # This also holds true for rejected/unknown host keys: we have to
            # guess based on other heuristics.
            if e.__class__ is ssh.SSHException \
                    and (password or msg.startswith('Unknown server')) \
                    and not is_key_load_error(e):
                raise NetworkError(msg, e)
            # Otherwise, assume an auth exception, and prompt for new/better
            # password.
            # Paramiko doesn't handle prompting for locked private
            # keys (i.e. keys with a passphrase and not loaded into an agent)
            # so we have to detect this and tweak our prompt slightly.
            # (Otherwise, however, the logic flow is the same, because
            # ssh's connect() method overrides the password argument to be
            # either the login password OR the private key passphrase. Meh.)
            #
            # NOTE: This will come up if you normally use a
            # passphrase-protected private key with ssh-agent, and enter an
            # incorrect remote username, because ssh.connect:
            # * Tries the agent first, which will fail as you gave the wrong
            # username, so obviously any loaded keys aren't gonna work for a
            # nonexistent remote account;
            # * Then tries the on-disk key file, which is passphrased;
            # * Realizes there's no password to try unlocking that key with,
            # because you didn't enter a password, because you're using
            # ssh-agent;
            # * In this condition (trying a key file, password is None)
            # ssh raises PasswordRequiredException.
            text = None
            if e.__class__ is ssh.PasswordRequiredException \
                    or is_key_load_error(e):
                # NOTE: we can't easily say WHICH key's passphrase is needed,
                # because ssh doesn't provide us with that info, and
                # env.key_filename may be a list of keys, so we can't know
                # which one raised the exception. Best not to try.
                prompt = "[%s] Passphrase for private key"
                text = prompt % env.host_string
            password = prompt_for_password(text)
            # Update env.password, env.passwords if empty
            set_password(user, host, port, password)
        # Ctrl-D / Ctrl-C for exit
        # TODO: this may no longer actually serve its original purpose and may
        # also hide TypeErrors from paramiko. Double check in v2.
        except (EOFError, TypeError):
            # Print a newline (in case user was sitting at prompt)
            print('')
            sys.exit(0)
        # Handle DNS error / name lookup failure
        except socket.gaierror, e:
            raise NetworkError('Name lookup failed for %s' % host, e)
        # Handle timeouts and retries, including generic errors
        # NOTE: In 2.6, socket.error subclasses IOError
        except socket.error, e:
            not_timeout = type(e) is not socket.timeout
            giving_up = _tried_enough(tries)
            # Baseline error msg for when debug is off
            msg = "Timed out trying to connect to %s" % host
            # Expanded for debug on
            # NOTE(review): the format string below already closes its paren,
            # and another ")" is appended two lines further down -- the debug
            # text ends with a doubled ")". Confirm the intended message.
            err = msg + " (attempt %s of %s)" % (tries, env.connection_attempts)
            if giving_up:
                err += ", giving up"
            err += ")"
            # Debuggin'
            if output.debug:
                sys.stderr.write(err + '\n')
            # Having said our piece, try again
            if not giving_up:
                # Sleep if it wasn't a timeout, so we still get timeout-like
                # behavior
                if not_timeout:
                    time.sleep(env.timeout)
                continue
            # Override eror msg if we were retrying other errors
            if not_timeout:
                # e[1] is the py2 errno-message element of the socket.error.
                msg = "Low level socket error connecting to host %s on port %s: %s" % (
                    host, port, e[1]
                )
            # Here, all attempts failed. Tweak error msg to show # tries.
            # TODO: find good humanization module, jeez
            s = "s" if env.connection_attempts > 1 else ""
            msg += " (tried %s time%s)" % (env.connection_attempts, s)
            raise NetworkError(msg, e)
        # Ensure that if we terminated without connecting and we were given an
        # explicit socket, close it out.
        finally:
            if not connected and sock is not None:
                sock.close()
def _password_prompt(prompt, stream):
    """Wrap getpass with an ASCII-coerced prompt.

    Encoding to ASCII (dropping anything unencodable) prevents getpass -- on
    Windows, at least -- from choking when handed Unicode prompt text.
    """
    safe_prompt = prompt.encode('ascii', 'ignore')
    return getpass.getpass(safe_prompt, stream)
def prompt_for_password(prompt=None, no_colon=False, stream=None):
    """
    Prompts for and returns a new password if required; otherwise, returns
    None.
    A trailing colon is appended unless ``no_colon`` is True.
    If the user supplies an empty password, the user will be re-prompted until
    they enter a non-empty password.
    ``prompt_for_password`` autogenerates the user prompt based on the current
    host being connected to. To override this, specify a string value for
    ``prompt``.
    ``stream`` is the stream the prompt will be printed to; if not given,
    defaults to ``sys.stderr``.
    """
    from fabric.state import env
    # Honor env.abort_on_prompts: abort instead of blocking for input.
    handle_prompt_abort("a connection or sudo password")
    stream = stream or sys.stderr
    # Construct prompt
    default = "[%s] Login password for '%s'" % (env.host_string, env.user)
    password_prompt = prompt if (prompt is not None) else default
    if not no_colon:
        password_prompt += ": "
    # Get new password value
    new_password = _password_prompt(password_prompt, stream)
    # Otherwise, loop until user gives us a non-empty password (to prevent
    # returning the empty string, and to avoid unnecessary network overhead.)
    while not new_password:
        print("Sorry, you can't enter an empty password. Please try again.")
        new_password = _password_prompt(password_prompt, stream)
    return new_password
def needs_host(func):
    """
    Prompt user for value of ``env.host_string`` when ``env.host_string`` is
    empty.
    This decorator is basically a safety net for silly users who forgot to
    specify the host/host list in one way or another. It should be used to wrap
    operations which require a network connection.
    Due to how we execute commands per-host in ``main()``, it's not possible to
    specify multiple hosts at this point in time, so only a single host will be
    prompted for.
    Because this decorator sets ``env.host_string``, it will prompt once (and
    only once) per command. As ``main()`` clears ``env.host_string`` between
    commands, this decorator will also end up prompting the user once per
    command (in the case where multiple commands have no hosts set, of course.)
    """
    from fabric.state import env

    @wraps(func)
    def host_prompting_wrapper(*args, **kwargs):
        # Loop (rather than prompt once) so an empty answer re-prompts.
        while not env.get('host_string', False):
            handle_prompt_abort("the target host connection string")
            host_string = raw_input("No hosts found. Please specify (single)"
                                    " host string for connection: ")
            env.update(to_dict(host_string))
        return func(*args, **kwargs)
    # Expose the undecorated callable for introspection/testing.
    host_prompting_wrapper.undecorated = func
    return host_prompting_wrapper
def disconnect_all():
    """
    Disconnect from all currently connected servers.
    Used at the end of ``fab``'s main loop, and also intended for use by
    library users.
    """
    from fabric.state import connections, output
    # Explicitly disconnect from all servers
    # (py2 .keys() returns a list copy, so deleting entries while iterating
    # is safe here).
    for key in connections.keys():
        if output.status:
            # Here we can't use the py3k print(x, end=" ")
            # because 2.5 backwards compatibility
            sys.stdout.write("Disconnecting from %s... " % denormalize(key))
        connections[key].close()
        del connections[key]
        if output.status:
            sys.stdout.write("done.\n")
| bsd-2-clause |
mdxy2010/forlinux-ok6410 | u-boot15/tools/patman/get_maintainer.py | 57 | 1238 | # Copyright (c) 2012 The Chromium OS Authors.
#
# SPDX-License-Identifier: GPL-2.0+
#
import command
import gitutil
import os
def FindGetMaintainer():
    """Look for the get_maintainer.pl script.

    Candidate directories are currently just <git toplevel>/scripts.

    Returns:
        If the script is found we'll return a path to it; else None.
    """
    candidate_dirs = [
        os.path.join(gitutil.GetTopLevel(), 'scripts'),
    ]
    for directory in candidate_dirs:
        candidate = os.path.join(directory, 'get_maintainer.pl')
        if os.path.isfile(candidate):
            return candidate
    return None
def GetMaintainer(fname, verbose=False):
    """Run get_maintainer.pl on a file if we find it.
    We look for get_maintainer.pl in the 'scripts' directory at the top of
    git. If we find it we'll run it. If we don't find get_maintainer.pl
    then we fail silently.
    Args:
        fname: Path to the patch file to run get_maintainer.pl on.
        verbose: True to print a warning when the script cannot be found.
    Returns:
        A list of email addresses to CC to.
    """
    get_maintainer = FindGetMaintainer()
    if not get_maintainer:
        if verbose:
            print "WARNING: Couldn't find get_maintainer.pl"
        # Fail silently (empty CC list) when the script is unavailable.
        return []
    # --norolestats keeps the output to plain addresses, one per line.
    stdout = command.Output(get_maintainer, '--norolestats', fname)
    return stdout.splitlines()
| gpl-2.0 |
cjgrady/compression | poc/src/scripts/addFileSizes.py | 1 | 1109 | """
@summary: Adds file sizes to csv file
"""
import csv
import os
# Input/output CSVs and the directories holding each compressed artifact.
# NOTE(review): paths are hard-coded to one machine's layout.
inCsvFn = '/data/geo716/final project/data/projections2.csv'
outCsvFn = '/data/geo716/final project/data/projections.csv'
binDir = '/data/geo716/final project/data/bins/'
rleDir = '/data/geo716/final project/data/rles/'
hilbertDir = '/data/geo716/final project/data/hilberts/'

with open(inCsvFn) as csvIn:
    with open(outCsvFn, 'w') as csvOut:
        reader = csv.reader(csvIn)
        writer = csv.writer(csvOut)
        # First row is the header; extend it with the three new size columns.
        # next(reader) works on both py2.6+ and py3 (was reader.next()).
        headers = next(reader)
        headers.append('Binary size')
        headers.append('RLE size')
        headers.append('Hilbert size')
        # Accumulate all output rows (header first) and write in one go.
        # (Renamed from 'all', which shadowed the builtin of the same name.)
        rows = [headers]
        for i, row in enumerate(reader):
            # Data row i corresponds to files named '<i>.<ext>' in each dir.
            binFn = os.path.join(binDir, '%s.bin' % i)
            rleFn = os.path.join(rleDir, '%s.rle' % i)
            hilbertFn = os.path.join(hilbertDir, '%s.hilb' % i)
            row.append(os.path.getsize(binFn))
            row.append(os.path.getsize(rleFn))
            row.append(os.path.getsize(hilbertFn))
            rows.append(row)
        writer.writerows(rows)
| gpl-2.0 |
amwelch/a10sdk-python | a10sdk/core/gslb/gslb_policy.py | 2 | 8859 | from a10sdk.common.A10BaseClass import A10BaseClass
class Policy(A10BaseClass):
"""Class Description::
Policy for GSLB zone, service or geo-location.
Class policy supports CRUD Operations and inherits from `common/A10BaseClass`.
This class is the `"PARENT"` class for this module.`
:param weighted_ip_enable: {"default": 0, "optional": true, "type": "number", "description": "Enable Select Service-IP by weighted preference", "format": "flag"}
:param alias_admin_preference: {"default": 0, "optional": true, "type": "number", "description": "Select alias name having maximum admin preference", "format": "flag"}
:param admin_ip_top_only: {"description": "Return highest priority server only", "format": "flag", "default": 0, "optional": true, "not": "ordered-ip-top-only", "type": "number"}
:param least_response: {"default": 0, "optional": true, "type": "number", "description": "Least response selection", "format": "flag"}
:param bw_cost_fail_break: {"default": 0, "optional": true, "type": "number", "description": "Break when exceed limit", "format": "flag"}
:param metric_fail_break: {"default": 0, "optional": true, "type": "number", "description": "Break if no valid Service-IP", "format": "flag"}
:param weighted_ip: {"default": 0, "optional": true, "type": "number", "description": "Select Service-IP by weighted preference", "format": "flag"}
:param round_robin: {"default": 1, "optional": true, "type": "number", "description": "Round robin selection, enabled by default", "format": "flag"}
:param uuid: {"description": "uuid of the object", "format": "string", "minLength": 1, "modify-not-allowed": 1, "optional": true, "maxLength": 64, "type": "string"}
:param metric_type: {"optional": true, "enum": ["health-check", "weighted-ip", "weighted-site", "capacity", "active-servers", "active-rdt", "geographic", "connection-load", "num-session", "admin-preference", "bw-cost", "least-response", "admin-ip"], "type": "string", "format": "enum-list"}
:param num_session_tolerance: {"description": "The difference between the available sessions, default is 10 (Tolerance)", "format": "number", "default": 10, "optional": true, "maximum": 100, "minimum": 0, "type": "number"}
:param metric_order: {"default": 0, "optional": true, "type": "number", "description": "Specify order of metric", "format": "flag"}
:param weighted_ip_total_hits: {"default": 0, "optional": true, "type": "number", "description": "Weighted by total hits", "format": "flag"}
:param weighted_site_total_hits: {"default": 0, "optional": true, "type": "number", "description": "Weighted by total hits", "format": "flag"}
:param ordered_ip_top_only: {"description": "Return highest priority server only", "format": "flag", "default": 0, "optional": true, "not": "admin-ip-top-only", "type": "number"}
:param weighted_site_enable: {"default": 0, "optional": true, "type": "number", "description": "Enable Select Service-IP by weighted site preference", "format": "flag"}
:param bw_cost: {"default": 0, "optional": true, "type": "number", "description": "Select site with minimum bandwidth cost", "format": "flag"}
:param metric_force_check: {"default": 0, "optional": true, "type": "number", "description": "Always check Service-IP for all enabled metrics", "format": "flag"}
:param admin_ip_enable: {"default": 0, "optional": true, "type": "number", "description": "Enable admin ip", "format": "flag"}
:param geo_location_list: {"minItems": 1, "items": {"type": "geo-location"}, "uniqueItems": true, "array": [{"required": ["name"], "properties": {"ip-multiple-fields": {"minItems": 1, "items": {"type": "object"}, "uniqueItems": true, "type": "array", "array": [{"properties": {"ip-addr2-sub": {"type": "string", "description": "Specify IP address range", "format": "ipv4-address"}, "optional": true, "ip-sub": {"type": "string", "description": "Specify IP information", "format": "ipv4-address"}, "ip-mask-sub": {"type": "string", "description": "Specify IP/mask format (Specify IP address mask)", "format": "ipv4-netmask-brief"}}}]}, "name": {"description": "Specify geo-location name, section range is (1-15)", "format": "string", "minLength": 1, "optional": false, "maxLength": 127, "type": "string"}, "ipv6-multiple-fields": {"minItems": 1, "items": {"type": "object"}, "uniqueItems": true, "type": "array", "array": [{"properties": {"ipv6-mask-sub": {"description": "Specify IPv6/mask format (Specify IP address mask)", "minimum": 0, "type": "number", "maximum": 128, "format": "number"}, "ipv6-sub": {"type": "string", "description": "Specify IPv6 information", "format": "ipv6-address"}, "optional": true, "ipv6-addr2-sub": {"type": "string", "description": "Specify IPv6 address range", "format": "ipv6-address"}}}]}, "uuid": {"description": "uuid of the object", "format": "string", "minLength": 1, "modify-not-allowed": 1, "optional": true, "maxLength": 64, "type": "string"}}}], "type": "array", "$ref": "/axapi/v3/gslb/policy/{name}/geo-location/{name}"}
:param weighted_alias: {"default": 0, "optional": true, "type": "number", "description": "Select alias name by weighted preference", "format": "flag"}
:param bw_cost_enable: {"default": 0, "optional": true, "type": "number", "description": "Enable bw cost", "format": "flag"}
:param num_session_enable: {"default": 0, "optional": true, "type": "number", "description": "Enable Select Service-IP for device having maximum number of available sessions", "format": "flag"}
:param name: {"description": "Specify policy name", "format": "string", "default": "default", "minLength": 1, "optional": false, "maxLength": 63, "type": "string"}
:param active_servers_enable: {"default": 0, "optional": true, "type": "number", "description": "Enable Select Service-IP with the highest number of active servers", "format": "flag"}
:param active_servers_fail_break: {"default": 0, "optional": true, "type": "number", "description": "Break when no active server", "format": "flag"}
:param ip_list: {"description": "Specify IP List (IP List Name)", "format": "string", "minLength": 1, "optional": true, "maxLength": 63, "type": "string", "$ref": "/axapi/v3/gslb/ip-list"}
:param admin_preference: {"default": 0, "optional": true, "type": "number", "description": "Select Service-IP for the device having maximum admin preference", "format": "flag"}
:param weighted_site: {"default": 0, "optional": true, "type": "number", "description": "Select Service-IP by weighted site preference", "format": "flag"}
:param geographic: {"default": 1, "optional": true, "type": "number", "description": "Select Service-IP by geographic", "format": "flag"}
:param health_check: {"default": 1, "optional": true, "type": "number", "description": "Select Service-IP by health status", "format": "flag"}
:param active_servers: {"default": 0, "optional": true, "type": "number", "description": "Select Service-IP with the highest number of active servers", "format": "flag"}
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
URL for this object::
`https://<Hostname|Ip address>//axapi/v3/gslb/policy/{name}`.
"""
    def __init__(self, **kwargs):
        """Initialise a gslb/policy axapi object.

        Sets every documented field to its empty/default placeholder
        ("" for scalars, {} for sub-objects, [] for lists), records the
        axapi URL template and required-field list, then overwrites any
        attribute supplied via keyword arguments.

        :param kwargs: attribute overrides; each key/value pair is applied
            verbatim with setattr, so unknown keys become new attributes.
        """
        self.ERROR_MSG = ""
        # 'name' is the only mandatory field (it appears in the URL template below).
        self.required = [ "name"]
        self.b_key = "policy"
        self.a10_url="/axapi/v3/gslb/policy/{name}"
        # NOTE(review): DeviceProxy defaults to "" here, not None — callers are
        # expected to inject a real proxy object before any REST call.
        self.DeviceProxy = ""
        self.weighted_ip_enable = ""
        self.alias_admin_preference = ""
        self.admin_ip_top_only = ""
        self.least_response = ""
        self.auto_map = {}
        self.bw_cost_fail_break = ""
        self.metric_fail_break = ""
        self.edns = {}
        self.weighted_ip = ""
        self.active_rdt = {}
        self.round_robin = ""
        self.capacity = {}
        self.uuid = ""
        self.metric_type = ""
        self.num_session_tolerance = ""
        self.geo_location_match = {}
        self.metric_order = ""
        self.dns = {}
        self.weighted_ip_total_hits = ""
        self.weighted_site_total_hits = ""
        self.ordered_ip_top_only = ""
        self.weighted_site_enable = ""
        self.bw_cost = ""
        self.metric_force_check = ""
        self.admin_ip_enable = ""
        self.geo_location_list = []
        self.weighted_alias = ""
        self.bw_cost_enable = ""
        self.num_session_enable = ""
        self.name = ""
        self.active_servers_enable = ""
        self.active_servers_fail_break = ""
        self.connection_load = {}
        self.ip_list = ""
        self.admin_preference = ""
        self.weighted_site = ""
        self.geographic = ""
        self.health_check = ""
        self.active_servers = ""
        # Apply caller-supplied values last so they win over the defaults above.
        for keys, value in kwargs.items():
            setattr(self,keys, value)
| apache-2.0 |
acshi/osf.io | scripts/migration/migrate_registration_extra_again.py | 6 | 4147 | """
Changes existing question.extra on all registrations and draft registrations
to a list. Required for multiple files attached to a question.
"""
import sys
import logging
from modularodm import Q
from website.app import init_app
from scripts import utils as scripts_utils
from website.models import Node, DraftRegistration
from framework.transactions.context import TokuTransaction
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
def migrate_extras(queryset, dry=True):
    """Convert empty dict-valued 'extra' entries to lists on each object's metadata.

    Walks every object in *queryset* (Nodes with registered_meta, or
    DraftRegistrations with registration_metadata), and for each question
    answer — and each nested value one level down — replaces an empty
    ``{'extra': {}}`` with ``{'extra': []}`` so multiple attached files can
    be stored. Non-empty dict 'extra' values are unexpected and are only
    logged and collected, never modified.

    :param queryset: iterable of Node or DraftRegistration objects.
    :param dry: when True, log what would change but do not save.
    :return: tuple (migrated, errored) — list of migrated object ids and
        set of objects carrying unexpected non-empty 'extra' dicts.
    """
    migrated = []
    errored = set()
    model_name = 'Node'
    for obj in queryset:
        # 1 transaction per obj, to prevent locking errors
        with TokuTransaction():
            changed = False
            if isinstance(obj, DraftRegistration):
                # Wrap in a list so drafts and nodes share one iteration path below.
                meta = [obj.registration_metadata]
                model_name = 'DraftRegistration'
                if obj.registered_node:  # Skip over drafts that have been completed
                    continue
            else:
                meta = obj.registered_meta.values()
                model_name = 'Node'
            if not meta:
                continue
            for data in meta:
                for question, answer in data.items():
                    # Top-level 'extra' on the answer itself.
                    if isinstance(answer.get('extra'), dict):
                        if not answer.get('extra'):
                            logger.info('Migrating extra for question {!r} on {} {}'.format(question, model_name, obj._id))
                            answer['extra'] = []
                            changed = True
                        else:  # We don't expect to get here
                            logger.error('Found non-empty "extra" on {} {} for question {!r}'.format(model_name, obj._id, question))
                            errored.add(obj)
                    # One level deeper: nested answer values may carry their own 'extra'.
                    for value in answer.values():
                        if isinstance(value, dict):
                            for k, v in value.items():
                                if isinstance(v, dict) and isinstance(v.get('extra'), dict):
                                    if not v.get('extra'):
                                        logger.info('Migrating {}/extra for question {} on {} {}'.format(k, question, model_name, obj._id))
                                        v['extra'] = []
                                        changed = True
                                    else:  # We don't expect to get here
                                        logger.error('Found non-empty "{}/extra" on {} {} for question {}'.format(k, model_name, obj._id, question))
                                        errored.add(obj)
            if changed:
                migrated.append(obj._id)
                if model_name == 'DraftRegistration':
                    # Prevent datetime_updated from being updated on save
                    # NOTE(review): reaches into the private _fields API of the
                    # ODM — fragile across modularodm versions.
                    obj._fields['datetime_updated']._auto_now = False
                if not dry:
                    # save() presumably returns the set of changed field names
                    # for DraftRegistration — relied upon by the assert below.
                    changed = obj.save()
                    if model_name == 'DraftRegistration':
                        assert changed == {'registration_metadata'}, 'Expected only registration_metadata to change. Got: {}'.format(changed)
    return migrated, errored
def migrate(dry=True):
    """Run the 'extra'-field migration over registrations and then drafts.

    Queries all registrations carrying registered_meta and all draft
    registrations with non-empty registration_metadata, delegates the
    actual rewriting to migrate_extras, and logs a summary (plus any
    objects found with unexpected non-empty 'extra' dicts).

    :param dry: forwarded to migrate_extras; when True nothing is saved.
    """
    registration_qs = Node.find(
        Q('is_registration', 'eq', True) &
        Q('registered_meta', 'ne', None)
    )
    migrated_regs, errored_regs = migrate_extras(registration_qs, dry=dry)
    draft_qs = DraftRegistration.find(Q('registration_metadata', 'ne', {}))
    migrated_drafts, errored_drafts = migrate_extras(draft_qs, dry=dry)
    logger.info('Migrated registered_meta for {} registrations'.format(len(migrated_regs)))
    if errored_regs:
        logger.error('{} errored: {}'.format(len(errored_regs), errored_regs))
    logger.info('Migrated registered_meta for {} draft registrations'.format(len(migrated_drafts)))
    if errored_drafts:
        logger.error('{} errored: {}'.format(len(errored_drafts), errored_drafts))
if __name__ == '__main__':
    # Pass --dry on the command line to preview changes without saving.
    dry_run = '--dry' in sys.argv
    if not dry_run:
        # Real runs also get a persistent log file alongside console output.
        scripts_utils.add_file_logger(logger, __file__)
    init_app(set_backends=True, routes=False)
    migrate(dry=dry_run)
| apache-2.0 |
JohnKendrick/PDielec | PDielec/GUI/ScenarioTab.py | 1 | 32714 | # -*- coding: utf8 -*-
from PyQt5.QtWidgets import QPushButton, QWidget
from PyQt5.QtWidgets import QComboBox, QLabel, QLineEdit, QDoubleSpinBox
from PyQt5.QtWidgets import QVBoxLayout, QHBoxLayout, QFormLayout
from PyQt5.QtWidgets import QSpinBox
from PyQt5.QtCore import Qt
from PDielec.Constants import support_matrix_db
from PDielec.Constants import avogadro_si
from PDielec.Utilities import Debug
class ScenarioTab(QWidget):
    """Notebook tab holding the settings for one effective-medium scenario.

    Builds the Qt form (support matrix, fractions, method, particle shape,
    ATR options, legend) and keeps ``self.settings`` in sync with the
    widgets.  Mass and volume fractions are linked: editing one recomputes
    the other via the crystal and matrix densities.  Changing any setting
    marks the tab dirty and tells the parent notebook that plotting/fitting
    must be recalculated.
    """
    def __init__(self, parent, debug=False):
        """Create the scenario widgets and populate ``self.settings`` defaults.

        :param parent: the owning notebook; must expose mainTab.reader and
            the plotting/fitting recalculation flags.
        :param debug: enable Debug printing for this tab.
        """
        super(QWidget, self).__init__(parent)
        global debugger
        debugger = Debug(debug,'ScenarioTab:')
        self.dirty = True
        self.settings = {}
        self.notebook = parent
        self.notebook.plottingCalculationRequired = True
        self.notebook.fittingCalculationRequired = True
        # Default support matrix; density/permittivity come from the db entry.
        matrix = 'ptfe'
        self.settings['Matrix'] = matrix
        self.settings['Matrix density'] = support_matrix_db[matrix][0]
        self.settings['Matrix permittivity'] = support_matrix_db[matrix][1]
        self.settings['Bubble radius'] = 30.0
        self.settings['Bubble volume fraction'] = 0.0
        self.settings['Mass fraction'] = 0.1
        self.settings['Volume fraction'] = 0.1
        self.settings['Particle size(mu)'] = 0.0001
        self.settings['Particle size distribution sigma(mu)'] = 0.0
        self.settings['Ellipsoid a/b'] = 1.0
        self.settings['Unique direction - h'] = 0
        self.settings['Unique direction - k'] = 0
        self.settings['Unique direction - l'] = 1
        # Records which of the two linked fractions the user set last;
        # that one takes precedence when densities change.
        self.settings['Mass or volume fraction'] = 'volume'
        self.settings['ATR material refractive index'] = 4.0
        self.settings['ATR theta'] = 45.0
        self.settings['ATR S polarisation fraction'] = 0.5
        # get the reader from the main tab
        self.notebook = parent
        self.reader = self.notebook.mainTab.reader
        self.settings['Effective medium method'] = 'Maxwell-Garnett'
        # self.methods = ['Maxwell-Garnett', 'Bruggeman', 'Averaged Permittivity', 'Mie', 'Anisotropic-Mie']
        self.methods = ['Maxwell-Garnett', 'Bruggeman', 'Averaged Permittivity', 'Mie']
        self.settings['Particle shape'] = 'Sphere'
        self.shapes = ['Sphere', 'Needle', 'Plate', 'Ellipsoid']
        # Index of this scenario within the notebook; set via setScenarioIndex.
        self.scenarioIndex = None
        # Create a scenario tab
        vbox = QVBoxLayout()
        form = QFormLayout()
        #
        # Support matrix
        #
        self.matrix_cb = QComboBox(self)
        self.matrix_cb.setToolTip('Define the permittivity and density of the support matrix')
        self.matrix_cb.addItems(support_matrix_db)
        index = self.matrix_cb.findText(self.settings['Matrix'], Qt.MatchFixedString)
        if index >=0:
            self.matrix_cb.setCurrentIndex(index)
        else:
            print('support matrix index was not 0',matrix)
        self.matrix_cb.activated.connect(self.on_matrix_cb_activated)
        label = QLabel('Support matrix',self)
        label.setToolTip('Define the permittivity and density of the support matrix')
        form.addRow(label, self.matrix_cb)
        #
        # Support matrix permittivity
        #
        self.density_sb = QDoubleSpinBox(self)
        self.density_sb.setRange(0.001, 100.0)
        self.density_sb.setSingleStep(0.01)
        self.density_sb.setDecimals(3)
        self.density_sb.setToolTip('Define the support matrix density. \nThis makes changes to the support density and permittivity')
        self.density_sb.setValue(self.settings['Matrix density'])
        self.density_sb.valueChanged.connect(self.on_density_sb_changed)
        label = QLabel('Support density', self)
        label.setToolTip('Define the support matrix density. \nThis makes changes to the support density and permittivity')
        form.addRow(label, self.density_sb)
        #
        # Support matrix permittivity
        #
        self.permittivity_sb = QDoubleSpinBox(self)
        self.permittivity_sb.setRange(0.001, 100.0)
        self.permittivity_sb.setSingleStep(0.01)
        self.permittivity_sb.setDecimals(3)
        self.permittivity_sb.setToolTip('Define the support matrix permittivity')
        self.permittivity_sb.setValue(self.settings['Matrix permittivity'])
        self.permittivity_sb.valueChanged.connect(self.on_permittivity_sb_changed)
        label = QLabel('Support permittivity', self)
        label.setToolTip('Define the support matrix permittivity')
        form.addRow(label, self.permittivity_sb)
        #
        # Bubble volume fraction
        #
        # Range upper bound is coupled to the dielectric volume fraction so
        # the two cannot exceed 100% together.
        self.bubble_vf_sb = QDoubleSpinBox(self)
        self.bubble_vf_sb.setRange(0.0, 100.0*(1.0-self.settings['Volume fraction']))
        self.bubble_vf_sb.setSingleStep(1.0)
        self.bubble_vf_sb.setDecimals(1)
        self.bubble_vf_sb.setToolTip('Define the % volume fraction of air bubble inclusions in the matrix')
        self.bubble_vf_sb.setValue(100*self.settings['Bubble volume fraction'])
        self.bubble_vf_sb.valueChanged.connect(self.on_bubble_vf_sb_changed)
        label = QLabel('% Air void volume fraction', self)
        label.setToolTip('Define the % volume fraction of air bubble inclusions in the matrix')
        form.addRow(label, self.bubble_vf_sb)
        #
        # Bubble radius in microns
        #
        self.bubble_radius_sb = QDoubleSpinBox(self)
        self.bubble_radius_sb.setRange(0.001, 1000.0)
        self.bubble_radius_sb.setSingleStep(1.0)
        self.bubble_radius_sb.setDecimals(3)
        self.bubble_radius_sb.setToolTip('Define the air bubble radius')
        self.bubble_radius_sb.setValue(self.settings['Bubble radius'])
        self.bubble_radius_sb.valueChanged.connect(self.on_bubble_radius_sb_changed)
        label = QLabel('Air void radius (μm)', self)
        label.setToolTip('Define the air void radius')
        form.addRow(label, self.bubble_radius_sb)
        #
        # Mass fraction of dielectric medium
        #
        self.mf_sb = QDoubleSpinBox(self)
        self.mf_sb.setRange(0.000001, 100.0)
        self.mf_sb.setSingleStep(0.1)
        self.mf_sb.setDecimals(6)
        self.mf_sb.setToolTip('The percentage mass fraction of the dielectric medium. \nNote that volume and mass fraction are linked')
        self.mf_sb.setValue(100.0*self.settings['Mass fraction'])
        self.mf_sb.valueChanged.connect(self.on_mf_sb_changed)
        label = QLabel('% Mass fraction of dielectric', self)
        label.setToolTip('The percentage mass fraction of the dielectric medium. \nNote that volume and mass fraction are linked')
        form.addRow(label, self.mf_sb)
        #
        # Volume fraction of dielectric medium
        #
        self.vf_sb = QDoubleSpinBox(self)
        self.vf_sb.setRange(0.000001, 100.0*(1.0-self.settings['Bubble volume fraction']))
        self.vf_sb.setSingleStep(0.1)
        self.vf_sb.setDecimals(6)
        self.vf_sb.setToolTip('The percentage volume fraction of the dielectric medium. \nNote that volume and mass fraction are linked')
        self.vf_sb.valueChanged.connect(self.on_vf_sb_changed)
        self.vf_sb.setValue(100.0*self.settings['Volume fraction'])
        label = QLabel('% Volume fraction of dielectric', self)
        label.setToolTip('The percentage volume fraction of the dielectric medium. \nNote that volume and mass fraction are linked')
        form.addRow(label, self.vf_sb)
        #
        # Calculation method
        #
        self.methods_cb = QComboBox(self)
        self.methods_cb.setToolTip('Choose the calculation method for the effective medium theory')
        self.methods_cb.addItems(self.methods)
        index = self.methods_cb.findText(self.settings['Effective medium method'], Qt.MatchFixedString)
        if index >=0:
            self.methods_cb.setCurrentIndex(index)
        else:
            print('Method index was not 0',self.settings['Effective medium method'])
        self.methods_cb.activated.connect(self.on_methods_cb_activated)
        label = QLabel('Method',self)
        label.setToolTip('Choose the calculation method for the effective medium theory')
        form.addRow(label, self.methods_cb)
        #
        # Particle size option
        #
        self.size_sb = QDoubleSpinBox(self)
        self.size_sb.setRange(0.000001, 1000.0)
        self.size_sb.setSingleStep(0.1)
        self.size_sb.setDecimals(6)
        self.size_sb.setToolTip('Define the particle radius of the sphere in μm.')
        self.size_sb.setValue(self.settings['Particle size(mu)'])
        self.size_sb.valueChanged.connect(self.on_size_sb_changed)
        label = QLabel('Particle radius (μm)',self)
        label.setToolTip('Define the particle radius of the sphere in μm.')
        form.addRow(label, self.size_sb)
        #
        # Particle sigma option
        #
        self.sigma_sb = QDoubleSpinBox(self)
        self.sigma_sb.setRange(0.0, 1000.0)
        self.sigma_sb.setSingleStep(0.1)
        self.sigma_sb.setDecimals(6)
        self.sigma_sb.setToolTip('Define the particle size distribution as a lognormal distribution with the given sigma. \nOnly applicable for the Mie method')
        self.sigma_sb.setValue(self.settings['Particle size distribution sigma(mu)'])
        self.sigma_sb.valueChanged.connect(self.on_sigma_sb_changed)
        label = QLabel('Particle sigma (μm)',self)
        label.setToolTip('Define the particle size distribition as a lognormal with the given sigma. \nOnly applicable for the Mie method')
        form.addRow(label, self.sigma_sb)
        #
        # Crystallite shape
        #
        self.shape_cb = QComboBox(self)
        self.shape_cb.setToolTip('Choose a particle shape. \nFor the Mie methods only sphere is allowed. \nFor shapes other than sphere there is a unique direction. \nFor ellipsoidal and needle like this is a direction [abc]. \nFor a plate the perpendicular to a crystal face (hkl) is used to define the unique direction')
        self.shape_cb.addItems(self.shapes)
        index = self.shape_cb.findText(self.settings['Particle shape'], Qt.MatchFixedString)
        if index >=0:
            self.shape_cb.setCurrentIndex(index)
        else:
            print('Method index was not 0',self.settings['Particle shape'])
        self.shape_cb.activated.connect(self.on_shape_cb_activated)
        label = QLabel('Particle shape',self)
        label.setToolTip('Choose a particle shape. \nFor the Mie methods only sphere is allowed. \nFor shapes other than sphere there is a unique direction. \nFor ellipsoidal and needle like this is a direction [abc]. \nFor a plate the perpendicular to a crystal face (hkl) is used to define the unique direction')
        form.addRow(label, self.shape_cb)
        #
        # Particle shape information
        # unique direction (hkl) or [abc]
        self.h_sb = QSpinBox(self)
        self.h_sb.setToolTip('Define the h dimension of the unique direction')
        self.h_sb.setRange(-20,20)
        self.h_sb.setValue(self.settings['Unique direction - h'])
        self.h_sb.valueChanged.connect(self.on_h_sb_changed)
        self.k_sb = QSpinBox(self)
        self.k_sb.setToolTip('Define the k dimension of the unique direction')
        self.k_sb.setRange(-20,20)
        self.k_sb.setValue(self.settings['Unique direction - k'])
        self.k_sb.valueChanged.connect(self.on_k_sb_changed)
        self.l_sb = QSpinBox(self)
        self.l_sb.setToolTip('Define the l dimension of the unique direction')
        self.l_sb.setRange(-20,20)
        self.l_sb.setValue(self.settings['Unique direction - l'])
        self.l_sb.valueChanged.connect(self.on_l_sb_changed)
        hbox = QHBoxLayout()
        hbox.addWidget(self.h_sb)
        hbox.addWidget(self.k_sb)
        hbox.addWidget(self.l_sb)
        # Label text is switched between [abc] and (hkl) in change_greyed_out.
        self.hkl_label = QLabel('Unique direction [abc]',self)
        self.hkl_label.setToolTip('Define the unique direction by [abc] or (hkl). \n[abc] is used by needles and ellipsoids. It defines the unique direction in crystallographic units. \n(hkl) is used by plates it defines a surface and the unique direction is perpendicular to it.')
        form.addRow(self.hkl_label, hbox)
        #
        # a over b ratio for ellipse
        #
        self.aoverb_sb = QDoubleSpinBox(self)
        self.aoverb_sb.setRange(0.0, 1000.0)
        self.aoverb_sb.setSingleStep(0.1)
        self.aoverb_sb.setDecimals(6)
        self.aoverb_sb.setToolTip('Define the ellipsoid a/b ratio or eccentricity. \nOnly applicable for the ellipsoid shapes \na/b < 1: oblate ellipsoid \na/b > 1: prolate ellipsoid')
        self.aoverb_sb.setValue(self.settings['Ellipsoid a/b'])
        self.aoverb_sb.valueChanged.connect(self.on_aoverb_sb_changed)
        label = QLabel('Ellipsoid a/b eccentricty',self)
        label.setToolTip('Define the ellipsoid a/b ratio or eccentricity. \nOnly applicable for the ellipsoid shapes \na/b < 1: oblate ellipsoid \na/b > 1: prolate ellipsoid')
        form.addRow(label, self.aoverb_sb)
        #
        # Add ATR options
        # Refractive Index
        self.atr_index_sb = QDoubleSpinBox(self)
        self.atr_index_sb.setRange(0.001, 100.0)
        self.atr_index_sb.setSingleStep(0.01)
        self.atr_index_sb.setDecimals(3)
        self.atr_index_sb.setToolTip('Define the ATR material refractive index')
        self.atr_index_sb.setValue(self.settings['ATR material refractive index'])
        self.atr_index_sb.valueChanged.connect(self.on_atr_index_sb_changed)
        label = QLabel('ATR material refractive index', self)
        label.setToolTip('Define the ATR material refractive index')
        form.addRow(label, self.atr_index_sb)
        # Incident angle in degreees
        self.atr_incident_ang_sb = QDoubleSpinBox(self)
        self.atr_incident_ang_sb.setRange(0.0, 180.0)
        self.atr_incident_ang_sb.setSingleStep(0.1)
        self.atr_incident_ang_sb.setDecimals(1)
        self.atr_incident_ang_sb.setToolTip('Define the ATR incident angle')
        self.atr_incident_ang_sb.setValue(self.settings['ATR theta'])
        self.atr_incident_ang_sb.valueChanged.connect(self.on_atr_incident_ang_sb_changed)
        label = QLabel('ATR incident angle', self)
        label.setToolTip('Define the ATR incident angle')
        form.addRow(label, self.atr_incident_ang_sb)
        # S polarisation fraction
        self.atr_spolfrac_sb = QDoubleSpinBox(self)
        self.atr_spolfrac_sb.setRange(0.0, 1.0)
        self.atr_spolfrac_sb.setSingleStep(0.01)
        self.atr_spolfrac_sb.setDecimals(3)
        self.atr_spolfrac_sb.setToolTip('Define the ATR S polarisation fraction, the rest is P polarisation')
        self.atr_spolfrac_sb.setValue(self.settings['ATR S polarisation fraction'])
        self.atr_spolfrac_sb.valueChanged.connect(self.on_atr_spolfrac_sb_changed)
        label = QLabel('ATR S polarisation fraction', self)
        label.setToolTip('Define the S polarisation fraction, the rest is P polarisation')
        form.addRow(label, self.atr_spolfrac_sb)
        #
        # Add a legend option
        #
        self.legend_le = QLineEdit(self)
        self.legend_le.setToolTip('The legend will be used to describe the results in the plot')
        self.legend_le.setText('Scenario legend')
        self.legend_le.textChanged.connect(self.on_legend_le_changed)
        label = QLabel('Scenario legend',self)
        label.setToolTip('The legend will be used to describe the results in the plot')
        form.addRow(label, self.legend_le)
        #
        # Final buttons
        #
        hbox = QHBoxLayout()
        self.pushButton1 = QPushButton('Add another scenario')
        self.pushButton1.setToolTip('Use another scenario to calculate the effect of changing the material on the absorption and permittivity')
        self.pushButton1.clicked.connect(self.pushButton1Clicked)
        hbox.addWidget(self.pushButton1)
        self.pushButton3 = QPushButton('Delete this scenario')
        self.pushButton3.setToolTip('Delete the current scenario')
        self.pushButton3.clicked.connect(self.pushButton3Clicked)
        hbox.addWidget(self.pushButton3)
        form.addRow(hbox)
        vbox.addLayout(form)
        # finalise the layout
        self.setLayout(vbox)
        # sort out greying of boxes
        self.change_greyed_out()
    def pushButton1Clicked(self):
        """Handle 'Add another scenario': clone this scenario in the notebook."""
        # Add another scenario
        debugger.print('Button 1 pressed')
        self.notebook.addScenario(copyFromIndex=self.scenarioIndex)
    def pushButton3Clicked(self):
        """Handle 'Delete this scenario': remove this tab from the notebook."""
        # Delete a scenario
        debugger.print('Button 3 pressed')
        self.notebook.deleteScenario(self.scenarioIndex)
    def crystal_density(self):
        """Return the crystal density computed from the reader's cell.

        Uses total atomic mass over cell volume; falls back to 1.0 when no
        reader is available.  Units appear to be g/cm3 given the 1.0e-24
        volume conversion — TODO confirm against the reader's units.
        """
        if not self.reader:
            return 1.0
        volume = self.reader.volume
        mass = 0.0
        for m in self.reader.masses:
            mass += m
        density = mass / (avogadro_si * volume * 1.0e-24)
        return density
    def on_h_sb_changed(self,value):
        """Store the new h component of the unique direction."""
        debugger.print('on_h_sb_changed', value)
        self.dirty = True
        self.notebook.plottingCalculationRequired = True
        self.notebook.fittingCalculationRequired = True
        self.settings['Unique direction - h'] = value
    def on_k_sb_changed(self,value):
        """Store the new k component of the unique direction."""
        debugger.print('on_k_sb_changed', value)
        self.dirty = True
        self.notebook.plottingCalculationRequired = True
        self.notebook.fittingCalculationRequired = True
        self.settings['Unique direction - k'] = value
    def on_l_sb_changed(self,value):
        """Store the new l component of the unique direction."""
        debugger.print('on_l_sb_changed', value)
        self.dirty = True
        self.notebook.plottingCalculationRequired = True
        self.notebook.fittingCalculationRequired = True
        self.settings['Unique direction - l'] = value
    def on_shape_cb_activated(self,index):
        """Store the chosen particle shape and update widget enablement."""
        debugger.print('on shape cb activated', index)
        self.dirty = True
        self.notebook.plottingCalculationRequired = True
        self.notebook.fittingCalculationRequired = True
        self.settings['Particle shape'] = self.shapes[index]
        if self.settings['Particle shape'] == 'Sphere':
            # A sphere has no unique direction.
            self.settings['Unique direction - h'] = 0
            self.settings['Unique direction - k'] = 0
            self.settings['Unique direction - l'] = 0
        self.change_greyed_out()
    def on_methods_cb_activated(self,index):
        """Store the effective-medium method and force compatible settings.

        Mie variants only support spheres; non-Mie methods reset the size
        distribution sigma; Averaged Permittivity also resets the size.
        """
        debugger.print('on methods cb activated', index)
        self.dirty = True
        self.notebook.plottingCalculationRequired = True
        self.notebook.fittingCalculationRequired = True
        self.settings['Effective medium method'] = self.methods[index]
        if self.settings['Effective medium method'] == 'Mie':
            self.settings['Particle shape'] = 'Sphere'
        elif self.settings['Effective medium method'] == 'Anisotropic-Mie':
            self.settings['Particle shape'] = 'Sphere'
        elif self.settings['Effective medium method'] == 'Maxwell-Garnett':
            self.settings['Particle size distribution sigma(mu)'] = 0.0
        elif self.settings['Effective medium method'] == 'Bruggeman':
            self.settings['Particle size distribution sigma(mu)'] = 0.0
        elif self.settings['Effective medium method'] == 'Averaged Permittivity':
            self.settings['Particle size(mu)'] = 0.0001
            self.settings['Particle size distribution sigma(mu)'] = 0.0
        self.change_greyed_out()
    def on_mf_sb_changed(self,value):
        """Store the mass fraction (percent input) and recompute the volume fraction."""
        debugger.print('on mass fraction line edit changed', value)
        self.dirty = True
        self.notebook.plottingCalculationRequired = True
        self.notebook.fittingCalculationRequired = True
        self.settings['Mass or volume fraction'] = 'mass'
        self.settings['Mass fraction'] = value/100.0
        self.update_vf_sb()
    def update_vf_sb(self):
        """Recompute the volume fraction from the mass fraction and densities.

        Accounts for the air-void volume fraction, updates the spin box with
        signals blocked, and re-couples the two fraction ranges so their sum
        cannot exceed 100%.
        """
        mf1 = self.settings['Mass fraction']
        mf2 = 1.0 - mf1
        rho1 = self.crystal_density()
        rho2 = self.settings['Matrix density']
        vf1 = ( 1.0 - self.settings['Bubble volume fraction'] ) * (mf1/mf2)*(rho2/rho1) / ( 1 + (mf1/mf2)*(rho2/rho1))
        # vf1 = 1.0 / ( 1.0 + mf2/mf1 * (rho1/rho2) )
        self.settings['Volume fraction'] = vf1
        # Block signals so setValue does not re-trigger on_vf_sb_changed.
        self.vf_sb.blockSignals(True)
        self.vf_sb.setValue(100.0*vf1)
        self.vf_sb.blockSignals(False)
        self.bubble_vf_sb.setRange(0.0, 100.0*(1.0-self.settings['Volume fraction']))
        self.vf_sb.setRange(0.0, 100.0*(1.0-self.settings['Bubble volume fraction']))
        debugger.print('Update_vf_sb')
        debugger.print('rho 1', rho1)
        debugger.print('rho 2', rho2)
        debugger.print('vf 1 ', vf1)
    def on_aoverb_sb_changed(self,value):
        """Store the ellipsoid a/b ratio."""
        debugger.print('on_aoverb_le_changed',value)
        self.dirty = True
        self.notebook.plottingCalculationRequired = True
        self.notebook.fittingCalculationRequired = True
        self.settings['Ellipsoid a/b'] = value
    def on_legend_le_changed(self,text):
        """Store the user-edited plot legend text."""
        debugger.print('on legend change', text)
        self.dirty = True
        self.settings['Legend'] = text
    def on_sigma_sb_changed(self,value):
        """Store the lognormal size-distribution sigma (Mie only)."""
        debugger.print('on sigma line edit changed', value)
        self.dirty = True
        self.notebook.plottingCalculationRequired = True
        self.notebook.fittingCalculationRequired = True
        self.settings['Particle size distribution sigma(mu)'] = value
    def on_size_sb_changed(self,value):
        """Store the particle radius in microns."""
        debugger.print('on size line edit changed', value)
        self.dirty = True
        self.notebook.plottingCalculationRequired = True
        self.notebook.fittingCalculationRequired = True
        self.settings['Particle size(mu)'] = value
    def on_vf_sb_changed(self,value):
        """Store the volume fraction (percent input) and recompute the mass fraction."""
        debugger.print('on volume fraction line edit changed', value)
        self.dirty = True
        self.notebook.plottingCalculationRequired = True
        self.notebook.fittingCalculationRequired = True
        self.settings['Mass or volume fraction'] = 'volume'
        self.settings['Volume fraction'] = value/100.0
        self.update_mf_sb()
    def update_mf_sb(self):
        """Recompute the mass fraction from the volume fraction and densities.

        The remaining volume excludes the air-void fraction; the spin box is
        updated with signals blocked to avoid re-entrant updates.
        """
        vf1 = self.settings['Volume fraction']
        vf2 = 1.0 - vf1 - self.settings['Bubble volume fraction']
        rho1 = self.crystal_density()
        rho2 = self.settings['Matrix density']
        # mf1 = 1.0 / ( 1.0 + (vf2/vf1) * (rho2/rho1) )
        mf1 = rho1*vf1 / ( rho1*vf1 + rho2*vf2 )
        self.settings['Mass fraction'] = mf1
        self.mf_sb.blockSignals(True)
        self.mf_sb.setValue(100.0*mf1)
        self.mf_sb.blockSignals(False)
        debugger.print('Update_mf_sb')
        debugger.print('rho 1', rho1)
        debugger.print('rho 2', rho2)
        debugger.print('mf 1 ', mf1)
    def on_matrix_cb_activated(self,index):
        """Switch support matrix: load its density/permittivity and relink fractions."""
        debugger.print('on matrix combobox activated', index)
        debugger.print('on matrix combobox activated', self.matrix_cb.currentText())
        self.dirty = True
        self.notebook.plottingCalculationRequired = True
        self.notebook.fittingCalculationRequired = True
        matrix = self.matrix_cb.currentText()
        # Block all three widgets while their values are rewritten.
        self.matrix_cb.blockSignals(True)
        self.density_sb.blockSignals(True)
        self.permittivity_sb.blockSignals(True)
        self.settings['Matrix'] = matrix
        self.settings['Matrix density'] = support_matrix_db[matrix][0]
        self.settings['Matrix permittivity'] = support_matrix_db[matrix][1]
        self.density_sb.setValue(self.settings['Matrix density'])
        self.permittivity_sb.setValue(self.settings['Matrix permittivity'])
        # volume fraction takes precedence
        if self.settings['Mass or volume fraction'] == 'volume':
            self.update_mf_sb()
            self.update_vf_sb()
        else:
            self.update_vf_sb()
            self.update_mf_sb()
        self.matrix_cb.blockSignals(False)
        self.density_sb.blockSignals(False)
        self.permittivity_sb.blockSignals(False)
    def on_density_sb_changed(self,value):
        """Store the new matrix density and refresh the linked fractions."""
        self.settings['Matrix density'] = value
        # volume fraction taked precedence
        if self.settings['Mass or volume fraction'] == 'volume':
            self.update_mf_sb()
            self.update_vf_sb()
        else:
            self.update_vf_sb()
            self.update_mf_sb()
        debugger.print('on density line edit changed', value)
        self.dirty = True
        self.notebook.plottingCalculationRequired = True
        self.notebook.fittingCalculationRequired = True
    def on_bubble_vf_sb_changed(self,value):
        """Store the air-void volume fraction (percent input) and refresh the other fraction."""
        self.settings['Bubble volume fraction'] = value/100.0
        if self.settings['Mass or volume fraction'] == 'volume':
            self.update_mf_sb()
        else:
            self.update_vf_sb()
        debugger.print('on bubble volume fraction changed', value)
        self.dirty = True
        self.notebook.plottingCalculationRequired = True
        self.notebook.fittingCalculationRequired = True
    def on_bubble_radius_sb_changed(self,value):
        """Store the air-void radius."""
        self.settings['Bubble radius'] = value
        # NOTE(review): debug message below says 'permittivity' — stale copy/paste.
        debugger.print('on permittivity line edit changed', value)
        self.dirty = True
        self.notebook.plottingCalculationRequired = True
        self.notebook.fittingCalculationRequired = True
    def on_permittivity_sb_changed(self,value):
        """Store the matrix permittivity."""
        self.settings['Matrix permittivity'] = value
        debugger.print('on permittivity line edit changed', value)
        self.dirty = True
        self.notebook.plottingCalculationRequired = True
        self.notebook.fittingCalculationRequired = True
    def on_atr_index_sb_changed(self,value):
        """Store the ATR material refractive index."""
        self.settings['ATR material refractive index'] = value
        debugger.print('on atr index line edit changed', value)
        self.dirty = True
        self.notebook.plottingCalculationRequired = True
        self.notebook.fittingCalculationRequired = True
    def on_atr_incident_ang_sb_changed(self,value):
        """Store the ATR incident angle (degrees)."""
        self.settings['ATR theta'] = value
        debugger.print('on atr incident angle line edit changed', value)
        self.dirty = True
        self.notebook.plottingCalculationRequired = True
        self.notebook.fittingCalculationRequired = True
    def on_atr_spolfrac_sb_changed(self,value):
        """Store the ATR S-polarisation fraction (P is the remainder)."""
        self.settings['ATR S polarisation fraction'] = value
        debugger.print('on atr spolfraction line edit changed', value)
        self.dirty = True
        self.notebook.plottingCalculationRequired = True
        self.notebook.fittingCalculationRequired = True
    def set_reader(self,reader):
        """Attach a new output-file reader and mark everything for recalculation."""
        self.dirty = True
        self.notebook.plottingCalculationRequired = True
        self.notebook.fittingCalculationRequired = True
        self.reader = reader
    def change_greyed_out(self):
        """Enable/disable widgets so only options valid for the current
        method and particle shape are editable, and update the hkl/abc label."""
        # Have a look through the settings and see if we need to grey anything out
        method = self.settings['Effective medium method']
        if method == 'Mie' or method == 'Anisotropic-Mie':
            self.size_sb.setEnabled(True)
            self.sigma_sb.setEnabled(True)
            # Mie only supports spheres: grey every shape, then re-enable Sphere.
            for i,shape in enumerate(self.shapes):
                self.shape_cb.model().item(i).setEnabled(False)
            self.settings['Particle shape'] = 'Sphere'
            self.shape_cb.setEnabled(True)
            index = self.shape_cb.findText(self.settings['Particle shape'], Qt.MatchFixedString)
            if index >=0:
                self.shape_cb.model().item(index).setEnabled(True)
                self.shape_cb.setCurrentIndex(index)
            else:
                print('Method index was not 0',self.settings['Particle shape'])
        elif method == 'Averaged Permittivity':
            # Size information is irrelevant for averaged permittivity.
            self.size_sb.setEnabled(False)
            self.sigma_sb.setEnabled(False)
            self.settings['Particle shape'] = 'Sphere'
            index = self.shape_cb.findText(self.settings['Particle shape'], Qt.MatchFixedString)
            if index >=0:
                self.shape_cb.model().item(index).setEnabled(True)
                self.shape_cb.setCurrentIndex(index)
            self.shape_cb.setEnabled(False)
            for i,shape in enumerate(self.shapes):
                self.shape_cb.model().item(i).setEnabled(False)
        elif method == 'Maxwell-Garnett' or method == 'Bruggeman':
            self.size_sb.setEnabled(True)
            self.sigma_sb.setEnabled(False)
            self.shape_cb.setEnabled(True)
            for i,shape in enumerate(self.shapes):
                self.shape_cb.model().item(i).setEnabled(True)
        else:
            self.size_sb.setEnabled(False)
            self.sigma_sb.setEnabled(False)
            self.shape_cb.setEnabled(True)
            for i,shape in enumerate(self.shapes):
                self.shape_cb.model().item(i).setEnabled(True)
        # deal with shapes
        if self.settings['Particle shape'] == 'Ellipsoid':
            self.h_sb.setEnabled(True)
            self.k_sb.setEnabled(True)
            self.l_sb.setEnabled(True)
            self.hkl_label.setText('Unique direction [abc]')
            self.aoverb_sb.setEnabled(True)
        elif self.settings['Particle shape'] == 'Plate':
            self.h_sb.setEnabled(True)
            self.k_sb.setEnabled(True)
            self.l_sb.setEnabled(True)
            self.hkl_label.setText('Unique direction (hkl)')
            self.aoverb_sb.setEnabled(False)
        elif self.settings['Particle shape'] == 'Needle':
            self.h_sb.setEnabled(True)
            self.k_sb.setEnabled(True)
            self.l_sb.setEnabled(True)
            self.hkl_label.setText('Unique direction [abc]')
            self.aoverb_sb.setEnabled(False)
        elif self.settings['Particle shape'] == 'Sphere':
            self.h_sb.setEnabled(False)
            self.k_sb.setEnabled(False)
            self.l_sb.setEnabled(False)
            self.aoverb_sb.setEnabled(False)
        else:
            print('ScenarioTab: Shape not recognised', self.settings['Particle shape'])
    def setScenarioIndex(self,index):
        """Record this tab's index and, if the legend is still the default,
        replace it with 'Scenario <n>' (1-based)."""
        self.scenarioIndex = index
        text = self.legend_le.text()
        if text == 'Scenario legend':
            self.legend_le.setText('Scenario '+str(index + 1))
        return
    def print_settings(self):
        """Print the scenario settings to stdout (script-generation aid)."""
        print('#')
        print('# Scenario tab')
        print('#')
        print('tab = self.notebook.scenarios')
        for key in self.settings:
            print(key, self.settings[key])
    def refresh(self,force=False):
        """Push self.settings back into the widgets.

        Does nothing unless the tab is dirty or *force* is True.  All child
        widgets have their signals blocked during the update to prevent the
        change-handlers from firing.
        """
        if not self.dirty and not force:
            debugger.print('refresh aborted', self.dirty,force)
            return
        debugger.print('refresh', force)
        # Tell the main notebook that we need to recalculate any plot
        self.notebook.plottingCalculationRequired = True
        self.notebook.fittingCalculationRequired = True
        # First see if we can get the reader from the mainTab
        self.reader = self.notebook.mainTab.reader
        #
        # Block signals during refresh
        #
        for w in self.findChildren(QWidget):
            w.blockSignals(True)
        # use the settings values to initialise the widgets
        index = self.matrix_cb.findText(self.settings['Matrix'], Qt.MatchFixedString)
        self.matrix_cb.setCurrentIndex(index)
        self.density_sb.setValue(self.settings['Matrix density'])
        self.permittivity_sb.setValue(self.settings['Matrix permittivity'])
        self.bubble_vf_sb.setValue(100*self.settings['Bubble volume fraction'])
        self.bubble_radius_sb.setValue(self.settings['Bubble radius'])
        if self.settings['Mass or volume fraction'] == 'volume':
            # volume fraction takes precedence
            self.update_mf_sb()
            self.update_vf_sb()
        else:
            # mass fraction takes precedence
            self.update_vf_sb()
            self.update_mf_sb()
        #
        index = self.methods_cb.findText(self.settings['Effective medium method'], Qt.MatchFixedString)
        self.methods_cb.setCurrentIndex(index)
        self.size_sb.setValue(self.settings['Particle size(mu)'])
        self.sigma_sb.setValue(self.settings['Particle size distribution sigma(mu)'])
        index = self.shape_cb.findText(self.settings['Particle shape'], Qt.MatchFixedString)
        self.shape_cb.setCurrentIndex(index)
        self.h_sb.setValue(self.settings['Unique direction - h'])
        self.k_sb.setValue(self.settings['Unique direction - k'])
        self.l_sb.setValue(self.settings['Unique direction - l'])
        self.aoverb_sb.setValue(self.settings['Ellipsoid a/b'])
        # NOTE(review): 'Legend' is only set in on_legend_le_changed or when
        # restoring saved settings — if neither has happened this raises
        # KeyError.  Confirm against the notebook's restore path.
        self.legend_le.setText(self.settings['Legend'])
        self.change_greyed_out()
        #
        # Unblock signals after refresh
        #
        for w in self.findChildren(QWidget):
            w.blockSignals(False)
        self.dirty = False
        return
| mit |
sudkannan/xen-hv | dist/install/usr/lib64/python2.6/site-packages/xen/xend/server/tests/test_controllers.py | 49 | 2066 | import os
import re
import unittest
import xen.xend.XendOptions
# Point XendOptions at /dev/null so that no real Xend configuration file
# is read when the options singleton is created below.
xen.xend.XendOptions.XendOptions.config_default = '/dev/null'
from xen.xend.server import netif
# Fixed domain/device ids used by the fake objects in these tests.
FAKE_DOMID = 42
FAKE_DEVID = 63
# Process-wide options singleton (honours the config_default patched above).
xoptions = xen.xend.XendOptions.instance()
class test_controllers(unittest.TestCase):
    """Unit tests for the netif device controller."""

    # Matches a colon-separated MAC address of six lowercase hex octets.
    MAC_REGEXP = re.compile('^' + ':'.join(['[0-9a-f][0-9a-f]'] * 6) + '$')

    def testNetif(self):
        """Device details are correct with and without an explicit MAC."""
        controller = self.controllerInstance(netif.NetifController)
        details = controller.getDeviceDetails({})
        self.assertNetif(details, None)
        details = controller.getDeviceDetails({'mac': 'aa:bb:cc:dd:ee:ff'})
        self.assertNetif(details, 'aa:bb:cc:dd:ee:ff')

    def assertNetif(self, results, expectedMac):
        """Check the (devid, backend, frontend) triple of a netif device."""
        devid, backend, frontend = results
        self.assertEqual(devid, FAKE_DEVID)
        self.assertEqual(backend['handle'], str(FAKE_DEVID))
        expected_script = os.path.join(xoptions.network_script_dir,
                                       xoptions.get_vif_script())
        self.assertEqual(backend['script'], expected_script)
        self.assertValidMac(backend['mac'], expectedMac)
        self.assertEqual(frontend['handle'], str(FAKE_DEVID))
        self.assertValidMac(frontend['mac'], expectedMac)

    def assertValidMac(self, mac, expected):
        """The MAC must equal *expected*, or look like a MAC if none given."""
        if expected:
            self.assertEqual(mac, expected)
        else:
            self.assert_(self.MAC_REGEXP.match(mac))

    def controllerInstance(self, cls):
        """Allocate an instance of the given controller class, and override
        methods as appropriate so that we can run tests without needing
        Xenstored."""
        instance = cls(FakeXendDomainInfo())
        instance.allocateDeviceID = fakeID
        return instance
class FakeXendDomainInfo:
    """Minimal stand-in for XendDomainInfo: only exposes the domain path."""

    def getDomainPath(self):
        """Return the xenstore path of the fake test domain."""
        path_template = "/test/fake/domain/%d/"
        return path_template % FAKE_DOMID
def fakeID():
    # Deterministic replacement for DevController.allocateDeviceID(),
    # so tests do not need a live Xenstored to hand out device ids.
    return FAKE_DEVID
def test_suite():
    """Return the TestSuite containing all tests of this module.

    Uses TestLoader.loadTestsFromTestCase() directly instead of the
    long-deprecated unittest.makeSuite() alias (removed in Python 3.13);
    behaviour is identical.
    """
    return unittest.TestLoader().loadTestsFromTestCase(test_controllers)
| gpl-2.0 |
adsabs/adsabs-pyingest | pyingest/parsers/iop.py | 1 | 4766 | #!/usr/bin/env python
from __future__ import print_function
from __future__ import absolute_import
from past.builtins import basestring
import string
from pyingest.config.utils import u2asc
from .jats import JATSParser
from .author_init import AuthorInitial
from pyingest.config.config import *
from pyingest.parsers.entity_convert import EntityConverter
class NoSchemaException(Exception):
    """Signals a document that declares no schema."""
    pass
class WrongSchemaException(Exception):
    """Signals a document whose schema is not the one this parser handles."""
    pass
class UnparseableException(Exception):
    """Signals a document that could not be parsed at all."""
    pass
class IOPJATSParser(JATSParser):
    """JATS parser specialised for IOP journals.

    Adds bibcode generation and database (AST) classification on top of
    the metadata extracted by the generic JATSParser.
    """

    def iop_journals(self, pid):
        """Map a journal-meta/journal-id/publisher-id to an ADS bibstem.

        Returns the placeholder 'XSTEM' for publisher ids missing from
        IOP_PUBLISHER_IDS so downstream code can flag unknown journals.
        """
        try:
            return IOP_PUBLISHER_IDS[pid]
        except KeyError:
            return 'XSTEM'

    def dbfromkw(self, d, **kwargs):
        """Derive the database collection list from a keyword string.

        Returns ['AST'] when any comma-separated keyword is an astronomy
        word or carries a UAT (Unified Astronomy Thesaurus) prefix;
        otherwise returns an empty list.
        """
        db = []
        if isinstance(d, basestring):
            for k in d.split(','):
                # Bug fix: the keyword is lower-cased before the substring
                # test, so the marker must be lower case too -- the original
                # "'UAT:' in k.lower()" could never match.
                if k in AST_WORDS or 'uat:' in k.lower():
                    db.append('AST')
                    break
        # Always return a list (robustness: non-string input previously
        # could fall through and return None).
        return db

    def parse(self, input_data, **kwargs):
        """Parse an IOP JATS document and post-process the publication
        string, bibcode and database fields for ADS ingest."""
        # Use the explicit class in super() -- super(self.__class__, ...)
        # recurses infinitely if this class is ever subclassed.
        output_metadata = super(IOPJATSParser, self).parse(
            input_data, parser='lxml', **kwargs)

        # Publication: append volume, issue, page/id and page count to the
        # base publication string produced by JATSParser.
        try:
            pubstring = output_metadata['publication']
        except Exception:
            pass
        else:
            try:
                output_metadata['volume']
            except Exception:
                pass
            else:
                pubstring = pubstring + ', Volume ' + output_metadata['volume']
                try:
                    pubstring = pubstring + ', Issue ' + output_metadata['issue']
                except TypeError:
                    # 'issue' may be None; skip it silently.
                    pass
            try:
                page_id = output_metadata['page']
            except Exception:
                pass
            else:
                # A page range means real pages; otherwise it is an article id.
                if "-" in page_id:
                    pubstring = pubstring + ', pp.' + page_id
                else:
                    pubstring = pubstring + ', id.' + page_id
            if 'numpages' in output_metadata:
                pubstring = pubstring + ', ' + output_metadata['numpages'] + ' pp.'
                del output_metadata['numpages']
            output_metadata['publication'] = pubstring

        # Bibcode: 19-character ADS identifier assembled from year,
        # bibstem, volume and a 6-character id/author tail.
        try:
            j_bibstem = self.iop_journals(output_metadata['pub-id'])
        except KeyError:
            pass
        else:
            year = output_metadata['pubdate'][-4:]
            bibstem = j_bibstem.ljust(5, '.')
            volume = output_metadata['volume'].rjust(4, '.')
            # RNAAS used to have a month-letter in column 14, but it was
            # deprecated September 2019 by CSG
            issue_letter = '.'
            idno = output_metadata['page']
            if "-" in idno:
                idno = idno.split("-")[0]
            else:
                if len(idno) == 6:
                    try:
                        # Map the leading two digits to a letter.  Bug fix:
                        # Python 2's locale-dependent string.letters does not
                        # exist on Python 3 (where the lookup always fell
                        # through to the except branch); ascii_letters is the
                        # portable equivalent.
                        idtwo = string.ascii_letters[int(idno[0:2]) - 1]
                    except Exception:
                        idtwo = idno[0:2]
                    idfour = idno[2:]
                    issue_letter = ''
                else:
                    idtwo = ''
                    idfour = idno.rjust(4, '.')
                idno = idtwo + idfour
            try:
                a = AuthorInitial()
                author_init = a.get_author_init(output_metadata['authors'])
            except Exception as err:
                print(err)
                author_init = '.'
            # ApJ Letters share the ApJ bibstem with an 'L' qualifier.
            if bibstem == u'ApJL.':
                bibstem = u'ApJ..'
                issue_letter = u'L'
                idno = idno.replace('L', '.')
            if bibstem in IOP_SPECIAL_ID_HANDLING:
                bib_tail = idno + author_init
            else:
                bib_tail = issue_letter + idno + author_init
            # Trim from the left until the tail fits in six characters
            # (the original if/else here had two identical branches).
            while len(bib_tail) > 6:
                bib_tail = bib_tail[1:]
            bib_tail = bib_tail.rjust(6, '.')
            output_metadata['bibcode'] = year + bibstem + volume + bib_tail
            del output_metadata['pub-id']
            del output_metadata['page']

        # Database (AST) classification from the article keywords.
        try:
            output_metadata['database'] = self.dbfromkw(output_metadata['keywords'])
        except Exception:
            pass

        return output_metadata
| mit |
Ensighten/ansible | lib/ansible/utils/cmd_functions.py | 160 | 1955 | # (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
import shlex
import subprocess
import select
def run_cmd(cmd, live=False, readsize=10):
    """Run *cmd*, capturing stdout and stderr concurrently via select().

    :param cmd: the command line as a single string; split with shlex.
    :param live: if True, also echo chunks to this process's stdout as
        they arrive.
    :param readsize: number of bytes to read per pipe per wakeup.
    :returns: tuple ``(returncode, stdout, stderr)``.  The captured
        streams are byte strings -- identical to the historical behaviour
        on Python 2 (where ``str`` is bytes) and a fix on Python 3, where
        accumulating ``os.read()`` output into a text string raised
        TypeError.
    """
    cmdargs = shlex.split(cmd)
    p = subprocess.Popen(cmdargs, stdout=subprocess.PIPE, stderr=subprocess.PIPE)

    stdout = b''
    stderr = b''
    rpipes = [p.stdout, p.stderr]
    while True:
        # Wait up to one second for either pipe to become readable.
        rfd, wfd, efd = select.select(rpipes, [], rpipes, 1)

        if p.stdout in rfd:
            dat = os.read(p.stdout.fileno(), readsize)
            if live:
                # Python 3 must write bytes to the underlying binary
                # buffer; Python 2's stdout accepts bytes directly.
                getattr(sys.stdout, 'buffer', sys.stdout).write(dat)
            stdout += dat
            if dat == b'':
                # EOF on stdout: stop selecting on it.
                rpipes.remove(p.stdout)
        if p.stderr in rfd:
            dat = os.read(p.stderr.fileno(), readsize)
            stderr += dat
            if live:
                getattr(sys.stdout, 'buffer', sys.stdout).write(dat)
            if dat == b'':
                rpipes.remove(p.stderr)
        # only break out if we've emptied the pipes, or there is nothing to
        # read from and the process has finished.
        if (not rpipes or not rfd) and p.poll() is not None:
            break
        # Calling wait while there are still pipes to read can cause a lock
        elif not rpipes and p.poll() is None:
            p.wait()

    return p.returncode, stdout, stderr
| gpl-3.0 |
Chilledheart/gyp | test/copies/gyptest-default.py | 264 | 1268 | #!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies file copies using the build tool default.
"""
import TestGyp
# Generate build files for the 'copies' target with the default build
# tool, build it, and verify that every copy rule placed its file at the
# expected destination.
test = TestGyp.TestGyp()
test.run_gyp('copies.gyp', chdir='src')
# Move the tree to prove the generated build files are relocatable.
test.relocate('src', 'relocate/src')
test.build('copies.gyp', chdir='relocate/src')
# Copy written relative to the source directory.
test.must_match(['relocate', 'src', 'copies-out', 'file1'], 'file1 contents\n')
# Copies written into the build output directory.
test.built_file_must_match('copies-out/file2',
                           'file2 contents\n',
                           chdir='relocate/src')
test.built_file_must_match('copies-out/directory/file3',
                           'file3 contents\n',
                           chdir='relocate/src')
test.built_file_must_match('copies-out/directory/file4',
                           'file4 contents\n',
                           chdir='relocate/src')
test.built_file_must_match('copies-out/directory/subdir/file5',
                           'file5 contents\n',
                           chdir='relocate/src')
test.built_file_must_match('copies-out/subdir/file6',
                           'file6 contents\n',
                           chdir='relocate/src')
test.pass_test()
| bsd-3-clause |
fldc/CouchPotatoServer | couchpotato/core/media/movie/providers/automation/hummingbird.py | 44 | 2345 | from couchpotato.core.logger import CPLog
from couchpotato.core.media.movie.providers.automation.base import Automation
# Module logger and the class name CouchPotato's plugin loader imports.
log = CPLog(__name__)
autoload = 'Hummingbird'
class Hummingbird(Automation):
    """Watchlist provider that imports anime movies from Hummingbird.me."""

    def getIMDBids(self):
        """Return the IMDB ids of every watchlist movie we can resolve."""
        movies = []
        for title, year in self.getWatchlist():
            imdb = self.search(title, year)
            if imdb:
                movies.append(imdb['imdb'])
        return movies

    def getWatchlist(self):
        """Return [title, year] pairs from the user's selected library lists."""
        if not self.conf('automation_username'):
            log.error('You need to fill in a username')
            return []

        url = "http://hummingbird.me/api/v1/users/%s/library" % self.conf('automation_username')
        data = self.getJsonData(url)

        # Map each plugin option to the Hummingbird library status it selects.
        chosen_filter = {
            'automation_list_current': 'currently-watching',
            'automation_list_plan': 'plan-to-watch',
            'automation_list_completed': 'completed',
            'automation_list_hold': 'on-hold',
            'automation_list_dropped': 'dropped',
        }
        chosen_lists = [status for option, status in chosen_filter.items()
                        if self.conf(option)]

        entries = []
        for item in data:
            anime = item['anime']
            # Keep only movies that sit in one of the selected lists.
            if anime['show_type'] != 'Movie' or item['status'] not in chosen_lists:
                continue
            year = anime['started_airing']
            if year:
                year = year[:4]
            entries.append([anime['title'], year])
        return entries
# Settings descriptor consumed by CouchPotato's configuration system:
# username, master enabler, and one boolean per Hummingbird library list.
config = [{
    'name': 'hummingbird',
    'groups': [
        {
            'tab': 'automation',
            'list': 'watchlist_providers',
            'name': 'hummingbird_automation',
            'label': 'Hummingbird',
            'description': 'Import movies from your Hummingbird.me lists',
            'options': [
                {
                    'name': 'automation_enabled',
                    'default': False,
                    'type': 'enabler',
                },
                {
                    'name': 'automation_username',
                    'label': 'Username',
                },
                {
                    'name': 'automation_list_current',
                    'type': 'bool',
                    'label': 'Currently Watching',
                    'default': False,
                },
                {
                    'name': 'automation_list_plan',
                    'type': 'bool',
                    'label': 'Plan to Watch',
                    'default': True,
                },
                {
                    'name': 'automation_list_completed',
                    'type': 'bool',
                    'label': 'Completed',
                    'default': False,
                },
                {
                    'name': 'automation_list_hold',
                    'type': 'bool',
                    'label': 'On Hold',
                    'default': False,
                },
                {
                    'name': 'automation_list_dropped',
                    'type': 'bool',
                    'label': 'Dropped',
                    'default': False,
                },
            ],
        },
    ],
}]
| gpl-3.0 |
joachimmetz/dfvfs | tests/vfs/gzip_file_system.py | 2 | 2511 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for the file system implementation using gzip."""
import unittest
from dfvfs.lib import definitions
from dfvfs.path import factory as path_spec_factory
from dfvfs.resolver import context
from dfvfs.vfs import gzip_file_system
from tests import test_lib as shared_test_lib
class GZIPFileSystemTest(shared_test_lib.BaseTestCase):
  """Tests for the gzip file system."""

  def setUp(self):
    """Sets up the needed objects used throughout the test."""
    self._resolver_context = context.Context()
    test_path = self._GetTestFilePath(['syslog.gz'])
    self._SkipIfPathNotExists(test_path)
    os_path_spec = path_spec_factory.Factory.NewPathSpec(
        definitions.TYPE_INDICATOR_OS, location=test_path)
    self._gzip_path_spec = path_spec_factory.Factory.NewPathSpec(
        definitions.TYPE_INDICATOR_GZIP, parent=os_path_spec)

  def tearDown(self):
    """Cleans up the needed objects used throughout the test."""
    self._resolver_context.Empty()

  def _OpenFileSystem(self):
    """Opens and returns the gzip file system under test."""
    file_system = gzip_file_system.GzipFileSystem(
        self._resolver_context, self._gzip_path_spec)
    self.assertIsNotNone(file_system)
    file_system.Open()
    return file_system

  def testOpenAndClose(self):
    """Test the open and close functionality."""
    self._OpenFileSystem()

  def testFileEntryExistsByPathSpec(self):
    """Test the file entry exists by path specification functionality."""
    file_system = self._OpenFileSystem()
    self.assertTrue(
        file_system.FileEntryExistsByPathSpec(self._gzip_path_spec))

  def testGetFileEntryByPathSpec(self):
    """Tests the GetFileEntryByPathSpec function."""
    file_system = self._OpenFileSystem()
    file_entry = file_system.GetFileEntryByPathSpec(self._gzip_path_spec)
    self.assertIsNotNone(file_entry)
    self.assertEqual(file_entry.name, '')

  def testGetRootFileEntry(self):
    """Test the get root file entry functionality."""
    file_system = self._OpenFileSystem()
    file_entry = file_system.GetRootFileEntry()
    self.assertIsNotNone(file_entry)
    self.assertEqual(file_entry.name, '')
| apache-2.0 |
GNOME/gedit-plugins | plugins/translate/translate/__init__.py | 1 | 7109 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2017 Jordi Mas <jmas@softcatala.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor,
# Boston, MA 02110-1301, USA.
import gi
# Pin the GObject-introspection API versions this plugin was written
# against, before any "from gi.repository import ..." below.
gi.require_version('Gtk', '3.0')
gi.require_version('GtkSource', '4')
gi.require_version('PeasGtk', '1.0')
from gi.repository import GObject, Gio, Gtk, Gedit, PeasGtk
from .services.services import Services
from .services.apertium import Apertium
from .translateview import TranslateView
from .preferences import Preferences
from .settings import Settings
# Set up i18n for this plugin; fall back to the identity function when
# gettext is unavailable so the module can still be imported.
try:
    import gettext
    gettext.bindtextdomain('gedit-plugins')
    gettext.textdomain('gedit-plugins')
    _ = gettext.gettext
except:
    _ = lambda s: s
def _get_translation_service_shared(service_id):
    """Build and initialise the translation service for *service_id*,
    applying the stored API key and, for Apertium, the server URL."""
    settings = Settings()
    service = Services.get(service_id)

    if service.has_api_key() is True:
        service.set_api_key(settings.get_apikey())

    if service_id == Services.APERTIUM_ID:
        service.set_server(settings.get_apertium_server())

    service.init()
    return service
class TranslateAppActivatable(GObject.Object, Gedit.AppActivatable):
    """Application-level activatable: manages the global Ctrl+K shortcut
    that triggers the per-window "translate" action."""

    app = GObject.Property(type=Gedit.App)

    def __init__(self):
        GObject.Object.__init__(self)

    def do_activate(self):
        # Bind Ctrl+K ("<Primary>K") to the window-scoped translate action.
        self.app.add_accelerator("<Primary>K", "win.translate", None)

    def do_deactivate(self):
        self.app.remove_accelerator("win.translate", None)
class TranslateWindowActivatable(GObject.Object, Gedit.WindowActivatable, PeasGtk.Configurable):
    """Window-level activatable: registers the "translate" action, owns
    the bottom-panel console, and provides the preferences widget."""

    __gtype_name__ = "TranslateWindowActivatable"

    window = GObject.Property(type=Gedit.Window)

    def __init__(self):
        GObject.Object.__init__(self)

    def do_activate(self):
        # Window-scoped action dispatched by the Ctrl+K accelerator.
        action = Gio.SimpleAction(name="translate")
        action.connect('activate', lambda a, p: self.do_translate())
        self.window.add_action(action)

        # The console is a module-level global shared with
        # TranslateViewActivatable.translate_text(), which writes results
        # to it when output-to-document is disabled.
        global g_console
        g_console = TranslateView(namespace = {'__builtins__' : __builtins__,
                                   'gedit' : Gedit,
                                   'window' : self.window})
        name = self._get_translation_service_name()
        g_console.write(_('Translations powered by {0}').format(name))

        # Install the console as a page of the window's bottom panel.
        bottom = self.window.get_bottom_panel()
        g_console.show_all()
        bottom.add_titled(g_console, "GeditTranslateConsolePanel", _('Translate Console'))

    def do_deactivate(self):
        # Undo do_activate(): remove the console page and the action.
        bottom = self.window.get_bottom_panel()
        bottom.remove(g_console)
        self.window.remove_action("translate")

    def do_update_state(self):
        # Enable the action only when the active view has been activated
        # by TranslateViewActivatable (which sets this attribute).
        sensitive = False
        view = self.window.get_active_view()
        if view and hasattr(view, "translate_view_activatable"):
            sensitive = True
        self.window.lookup_action('translate').set_enabled(sensitive)

    def _get_translation_service_name(self):
        # Display name of the currently configured translation backend.
        settings = Settings()
        service_id = settings.get_service()
        return Services.get_name(service_id)

    def _get_translation_service(self, service_id):
        return _get_translation_service_shared(service_id)

    def get_languages_names_codes(self, service_id):
        # Used by the preferences dialog to populate its language combos.
        service = self._get_translation_service(service_id)
        return service.get_language_names(), service.get_language_codes()

    def do_create_configure_widget(self):
        # PeasGtk.Configurable entry point: build the preferences widget.
        config_widget = Preferences(self.plugin_info.get_data_dir(),
                                    self.get_languages_names_codes)
        widget = config_widget.configure_widget()
        return widget

    '''Entry point when user uses keyboard shortcut'''
    def do_translate(self, unindent=False):
        # Delegate to the per-view activatable of the active view.
        view = self.window.get_active_view()
        if view and view.translate_view_activatable:
            view.translate_view_activatable.do_translate(view.get_buffer(), unindent)
class TranslateViewActivatable(GObject.Object, Gedit.ViewActivatable):
    """Per-view activatable: adds the context-menu entry and performs
    the actual translation of the selected text."""

    view = GObject.Property(type=Gedit.View)

    def __init__(self):
        self.popup_handler_id = 0
        GObject.Object.__init__(self)
        self._settings = Settings()

    def do_activate(self):
        # Expose ourselves on the view so TranslateWindowActivatable can
        # dispatch the keyboard-shortcut action to this instance.
        self.view.translate_view_activatable = self
        self.popup_handler_id = self.view.connect('populate-popup',
                                                  self.populate_popup)

    def do_deactivate(self):
        if self.popup_handler_id != 0:
            self.view.disconnect(self.popup_handler_id)
            self.popup_handler_id = 0
        delattr(self.view, "translate_view_activatable")

    def _get_language_pair_name(self):
        """Return the display name of the configured language pair."""
        language_pair = self._settings.get_language_pair()
        languages = language_pair.split('|')
        service = self._get_translation_service()
        return service.get_language_pair_name(languages[0], languages[1])

    def populate_popup(self, view, popup):
        """Append a separator and the 'Translate selected text' entry to
        the view's context menu."""
        if not isinstance(popup, Gtk.MenuShell):
            return

        item = Gtk.SeparatorMenuItem()
        item.show()
        popup.append(item)

        language_pair_name = self._get_language_pair_name()
        text = _("Translate selected text [{0}]").format(language_pair_name)
        item = Gtk.MenuItem.new_with_mnemonic(text)
        # Only sensitive when the buffer actually has a selection.
        item.set_sensitive(self.is_enabled())
        item.show()
        item.connect('activate', lambda i: self.do_translate(view.get_buffer()))
        popup.append(item)

    def is_enabled(self):
        """Return True when the view has a buffer with a text selection."""
        document = self.view.get_buffer()
        if document is None:
            return False
        try:
            start, end = document.get_selection_bounds()
        except ValueError:
            # Bug fix: this was a bare 'except: pass', which also hid
            # unrelated errors.  Only the unpacking failure (older GTK
            # returns an empty tuple when nothing is selected) is expected.
            return False
        return start is not None and end is not None

    def _get_translation_service(self):
        service_id = self._settings.get_service()
        return _get_translation_service_shared(service_id)

    def translate_text(self, document, start, end):
        """Translate the [start, end) range and write the result either
        back into the document or to the bottom-panel console."""
        doc = self.view.get_buffer()
        text = doc.get_text(start, end, False)
        language_pair = self._settings.get_language_pair()
        service = self._get_translation_service()
        translated = service.translate_text(text, language_pair)

        if self._settings.get_output_document():
            doc.insert(start, translated)
        else:
            g_console.write(translated)

    def do_translate(self, document, unindent=False):
        """Translate the current selection of *document*."""
        start, end = document.get_selection_bounds()
        self.translate_text(document, start, end)
kjw0106/boto | tests/integration/logs/test_layer1.py | 114 | 1814 | # Copyright (c) 2014 Amazon.com, Inc. or its affiliates.
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import boto
from tests.compat import unittest
class TestCloudWatchLogs(unittest.TestCase):
    """Integration tests for the CloudWatch Logs connection."""

    def setUp(self):
        self.logs = boto.connect_logs()

    def test_logs(self):
        """Describe log groups and exercise metric-filter matching."""
        connection = self.logs

        group_listing = connection.describe_log_groups(
            log_group_name_prefix='test')
        self.assertIsInstance(group_listing['logGroups'], list)

        # Only the second sample line has status_code 500, so exactly one
        # line should match the filter pattern.
        mfilter = '[ip, id, user, ..., status_code=500, size]'
        sample = [
            '127.0.0.1 - frank "GET /apache_pb.gif HTTP/1.0" 200 1534',
            '127.0.0.1 - frank "GET /apache_pb.gif HTTP/1.0" 500 5324',
        ]
        filter_result = connection.test_metric_filter(mfilter, sample)
        self.assertEqual(len(filter_result['matches']), 1)
| mit |
tpodowd/boto | tests/unit/ec2/test_spotinstance.py | 114 | 5101 | from tests.unit import AWSMockServiceTestCase
from boto.ec2.connection import EC2Connection
class TestCancelSpotInstanceRequests(AWSMockServiceTestCase):
    # Verifies parameter marshalling and response parsing for the EC2
    # CancelSpotInstanceRequests API call against a mocked service.
    connection_class = EC2Connection

    def default_body(self):
        # Canned XML response returned by the mock EC2 endpoint.
        return b"""
            <CancelSpotInstanceRequestsResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-01/">
                <requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
                <spotInstanceRequestSet>
                    <item>
                        <spotInstanceRequestId>sir-1a2b3c4d</spotInstanceRequestId>
                        <state>cancelled</state>
                    </item>
                    <item>
                        <spotInstanceRequestId>sir-9f8e7d6c</spotInstanceRequestId>
                        <state>cancelled</state>
                    </item>
                </spotInstanceRequestSet>
            </CancelSpotInstanceRequestsResponse>
        """

    def test_cancel_spot_instance_requests(self):
        # Both request ids must be sent as numbered parameters, and both
        # parsed results must come back in 'cancelled' state.
        self.set_http_response(status_code=200)
        response = self.service_connection.cancel_spot_instance_requests(['sir-1a2b3c4d',
                                                                          'sir-9f8e7d6c'])
        self.assert_request_parameters({
            'Action': 'CancelSpotInstanceRequests',
            'SpotInstanceRequestId.1': 'sir-1a2b3c4d',
            'SpotInstanceRequestId.2': 'sir-9f8e7d6c'},
            ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
                                  'SignatureVersion', 'Timestamp',
                                  'Version'])
        self.assertEqual(len(response), 2)
        self.assertEqual(response[0].id, 'sir-1a2b3c4d')
        self.assertEqual(response[0].state, 'cancelled')
        self.assertEqual(response[1].id, 'sir-9f8e7d6c')
        self.assertEqual(response[1].state, 'cancelled')
class TestGetSpotPriceHistory(AWSMockServiceTestCase):
    # Verifies DescribeSpotPriceHistory request building (plain argument,
    # filters, pagination token) and response parsing.
    connection_class = EC2Connection

    def default_body(self):
        # Canned XML response with two price points and a next-page token.
        return b"""
            <DescribeSpotPriceHistoryResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-15/">
                <requestId>b6c6978c-bd13-4ad7-9bc8-6f0ac9d32bcc</requestId>
                <spotPriceHistorySet>
                    <item>
                        <instanceType>c3.large</instanceType>
                        <productDescription>Linux/UNIX</productDescription>
                        <spotPrice>0.032000</spotPrice>
                        <timestamp>2013-12-28T12:17:43.000Z</timestamp>
                        <availabilityZone>us-west-2c</availabilityZone>
                    </item>
                    <item>
                        <instanceType>c3.large</instanceType>
                        <productDescription>Windows (Amazon VPC)</productDescription>
                        <spotPrice>0.104000</spotPrice>
                        <timestamp>2013-12-28T07:49:40.000Z</timestamp>
                        <availabilityZone>us-west-2b</availabilityZone>
                    </item>
                </spotPriceHistorySet>
                <nextToken>q5GwEl5bMGjKq6YmhpDLJ7hEwyWU54jJC2GQ93n61vZV4s1+fzZ674xzvUlTihrl</nextToken>
            </DescribeSpotPriceHistoryResponse>
        """

    def test_get_spot_price_history(self):
        self.set_http_response(status_code=200)
        # Plain instance_type argument.
        response = self.service_connection.get_spot_price_history(
            instance_type='c3.large')
        self.assert_request_parameters({
            'Action': 'DescribeSpotPriceHistory',
            'InstanceType': 'c3.large'},
            ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
                                  'SignatureVersion', 'Timestamp',
                                  'Version'])
        self.assertEqual(len(response), 2)
        # The pagination token is exposed under both attribute spellings.
        self.assertEqual(response.next_token,
                         'q5GwEl5bMGjKq6YmhpDLJ7hEwyWU54jJC2GQ93n61vZV4s1+fzZ674xzvUlTihrl')
        self.assertEqual(response.nextToken,
                         'q5GwEl5bMGjKq6YmhpDLJ7hEwyWU54jJC2GQ93n61vZV4s1+fzZ674xzvUlTihrl')
        self.assertEqual(response[0].instance_type, 'c3.large')
        self.assertEqual(response[0].availability_zone, 'us-west-2c')
        self.assertEqual(response[1].instance_type, 'c3.large')
        self.assertEqual(response[1].availability_zone, 'us-west-2b')

        # Same call expressed through the generic filters mechanism.
        response = self.service_connection.get_spot_price_history(
            filters={'instance-type': 'c3.large'})
        self.assert_request_parameters({
            'Action': 'DescribeSpotPriceHistory',
            'Filter.1.Name': 'instance-type',
            'Filter.1.Value.1': 'c3.large'},
            ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
                                  'SignatureVersion', 'Timestamp',
                                  'Version'])

        # Pagination: the next_token must be passed through as NextToken.
        response = self.service_connection.get_spot_price_history(
            next_token='foobar')
        self.assert_request_parameters({
            'Action': 'DescribeSpotPriceHistory',
            'NextToken': 'foobar'},
            ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
                                  'SignatureVersion', 'Timestamp',
                                  'Version'])
| mit |
alhashash/odoomrp-wip | account_treasury_forecast/wizard/wiz_create_invoice.py | 31 | 2577 | # -*- encoding: utf-8 -*-
##############################################################################
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#
##############################################################################
import openerp.addons.decimal_precision as dp
from openerp import models, fields, api
class WizCreateInvoice(models.TransientModel):
    """Wizard that creates a draft supplier invoice from a treasury
    forecast line and links the line to the new invoice."""
    _name = 'wiz.create.invoice'
    _description = 'Wizard to create invoices'

    # Supplier the invoice will be created for.
    partner_id = fields.Many2one("res.partner", string="Partner")
    journal_id = fields.Many2one("account.journal", string="Journal",
                                 domain=[("type", "=", "purchase")])
    description = fields.Char(string="Description")
    amount = fields.Float(string="Amount",
                          digits_compute=dp.get_precision('Account'))
    # Forecast line this wizard was launched from.
    line_id = fields.Many2one("account.treasury.forecast.line.template",
                              string="Payment")

    @api.one
    def button_create_inv(self):
        """Create the supplier invoice and mark the forecast line paid.

        Uses the (old-API) onchange_partner_id helper to pre-fill the
        invoice values from the selected partner, then overrides the
        descriptive and relational fields before creating the record.
        """
        invoice_obj = self.env['account.invoice']
        res_inv = invoice_obj.onchange_partner_id('in_invoice',
                                                  self.partner_id.id)
        values = res_inv['value']
        # Name and reference both encode the description and the amount.
        values['name'] = ('Treasury: ' + self.description + '/ Amount: ' +
                          str(self.amount))
        values['reference'] = ('Treasury: ' + self.description + '/ Amount: ' +
                               str(self.amount))
        values['partner_id'] = self.partner_id.id
        values['journal_id'] = self.journal_id.id
        values['type'] = 'in_invoice'
        invoice_id = invoice_obj.create(values)
        # Link the new invoice back to the forecast line and flag it paid.
        self.line_id.write({'invoice_id': invoice_id.id, 'paid': 1,
                            'journal_id': self.journal_id.id,
                            'partner_id': self.partner_id.id,
                            'amount': self.amount})
        return {'type': 'ir.actions.act_window_close'}
| agpl-3.0 |
zifeo/nest-simulator | pynest/examples/testiaf.py | 9 | 4011 | # -*- coding: utf-8 -*-
#
# testiaf.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
'''
IAF Neuron example
------------------
A DC current is injected into the neuron using a current generator
device. The membrane potential as well as the spiking activity are
recorded by corresponding devices.
It can be observed how the current charges the membrane, a spike
is emitted, the neuron becomes absolute refractory, and finally
starts to recover.
'''
'''
First, we import all necessary modules for simulation and plotting
'''
import nest
import pylab
'''
Second the Function build_network is defined to build the network and
return the handles of the spike detector and the voltmeter
'''
def build_network(dt):
    """Reset the NEST kernel, build a one-neuron network at resolution
    *dt* (ms), and return the voltmeter and spike detector handles."""
    nest.ResetKernel()
    nest.SetKernelStatus({"local_num_threads": 1, "resolution": dt})

    # Integrate-and-fire neuron driven by a constant input current.
    neuron = nest.Create('iaf_neuron')
    nest.SetStatus(neuron, "I_e", 376.0)

    voltmeter = nest.Create('voltmeter')
    nest.SetStatus(voltmeter, "withtime", True)

    spike_detector = nest.Create('spike_detector')

    # The voltmeter observes the neuron; the neuron feeds the detector.
    nest.Connect(voltmeter, neuron)
    nest.Connect(neuron, spike_detector)

    return voltmeter, spike_detector
'''
The function build_network takes the resolution as argument.
First the Kernel is reset and the number of local threads is set to one as
well as the resolution to the specified value dt. The iaf_neuron is
created and the handle is stored in the variable neuron The status of
the neuron is changed so it receives an external current. Next the
voltmeter is created and the handle stored in vm and the option
'withtime' is set, therefore times are given in the times vector in
events. Now the spike_detector is created and its handle is stored in
sd.
Voltmeter and spikedetector are then connected to the neuron. The
connect function takes the handles as input. The Voltmeter is
connected to the neuron and the neuron to the spikedetector because
the neuron sends spikes to the detector and the voltmeter 'observes'
the neuron.
'''
'''
The neuron is simulated for three different resolutions and then
the voltage trace is plotted
'''

# Run the same one-neuron network at three kernel resolutions and
# overlay the resulting membrane-potential traces in one figure.
for dt in [0.1, 0.5, 1.0] :
    print("Running simulation with dt=%.2f" % dt)
    vm, sd = build_network(dt)
    '''
    First using build_network the network is build and the handles of
    the spike detector and the voltmeter are stored in vm and sd
    '''
    nest.Simulate(1000.0)
    '''
    The network is simulated using `Simulate`, which takes the desired
    simulation time in milliseconds and advances the network state by
    this amount of time. During simulation, the `spike_detector`
    counts the spikes of the target neuron and the total number is
    read out at the end of the simulation period.
    '''
    potentials = nest.GetStatus(vm, "events")[0]["V_m"]
    times = nest.GetStatus(vm, "events")[0]["times"]
    '''
    The values of the voltage recorded by the voltmeter are read out
    and the values for the membrane potential are stored in potential
    and the corresponding times in the times array
    '''
    pylab.plot(times, potentials, label="dt=%.2f" % dt)
    print("  Number of spikes: {0}".format(nest.GetStatus(sd, "n_events")[0]))
    '''
    Using the pylab library the voltage trace is plotted over time
    '''
# Label the axes and show which resolution each trace belongs to.
pylab.legend(loc=3)
pylab.xlabel("time (ms)")
pylab.ylabel("V_m (mV)")
'''
Finally the axis are labelled and a legend is generated
'''
| gpl-2.0 |
serensoner/CouchPotatoServer | libs/guessit/transfo/split_path_components.py | 150 | 1292 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# GuessIt - A library for guessing information from filenames
# Copyright (c) 2012 Nicolas Wack <wackou@gmail.com>
#
# GuessIt is free software; you can redistribute it and/or modify it under
# the terms of the Lesser GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# GuessIt is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Lesser GNU General Public License for more details.
#
# You should have received a copy of the Lesser GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import unicode_literals
from guessit import fileutils
import os.path
import logging
# Module-level logger for this transformation step.
log = logging.getLogger(__name__)
def process(mtree):
    """Returns the filename split into [ dir*, basename, ext ]."""
    parts = fileutils.split_path(mtree.value)
    filename = parts.pop(-1)
    root, ext = os.path.splitext(filename)
    parts.append(root)
    parts.append(ext[1:])  # drop the leading '.' from the extension
    mtree.split_on_components(parts)
| gpl-3.0 |
appliedx/edx-platform | common/djangoapps/request_cache/middleware.py | 98 | 1225 | import threading
class _RequestCache(threading.local):
    """
    A thread-local for storing the per-request cache.
    """
    def __init__(self):
        super(_RequestCache, self).__init__()
        # Mapping of named sub-caches; each value is itself a dict.
        self.data = {}
        # The request currently being processed on this thread, if any.
        self.request = None


# Process-wide singleton; being thread-local, each thread sees its own
# data and request attributes.
REQUEST_CACHE = _RequestCache()
class RequestCache(object):
    """Middleware exposing a per-request, per-thread cache."""

    @classmethod
    def get_request_cache(cls, name=None):
        """
        This method is deprecated. Please use :func:`request_cache.get_cache`.
        """
        if name is None:
            return REQUEST_CACHE
        return REQUEST_CACHE.data.setdefault(name, {})

    @classmethod
    def get_current_request(cls):
        """
        This method is deprecated. Please use :func:`request_cache.get_request`.
        """
        return REQUEST_CACHE.request

    @classmethod
    def clear_request_cache(cls):
        """
        Empty the request cache.
        """
        REQUEST_CACHE.data = {}
        REQUEST_CACHE.request = None

    def process_request(self, request):
        # Start every request with a clean cache and remember the request.
        self.clear_request_cache()
        REQUEST_CACHE.request = request
        return None

    def process_response(self, request, response):
        # Drop cached data so nothing leaks into the next request handled
        # by this thread.
        self.clear_request_cache()
        return response
| agpl-3.0 |
brocade/pysdn | samples/sampleopenflow/demos/demo32.py | 1 | 7216 | #!/usr/bin/python
# Copyright (c) 2015, BROCADE COMMUNICATIONS SYSTEMS, INC
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from this
# software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
# THE POSSIBILITY OF SUCH DAMAGE.
"""
@authors: Sergei Garbuzov
@status: Development
@version: 1.1.0
"""
import time
from pysdn.controller.controller import Controller
from pysdn.controller.inventory import (Inventory,
OpenFlowCapableNode,
GroupFeatures)
from pysdn.common.utils import load_dict_from_file
from pysdn.common.status import STATUS
def of_demo_32():
    """Demo 32 (Python 2): print group-features info for every OpenFlow
    switch known to the controller.

    Reads controller connection settings from 'cfg.yml', builds the
    inventory, then for each switch prints max groups, group types,
    capabilities, supported actions and the configured group IDs.
    """
    f = "cfg.yml"
    d = {}
    if(load_dict_from_file(f, d) is False):
        print("Config file '%s' read error: " % f)
        exit(0)
    try:
        ctrlIpAddr = d['ctrlIpAddr']
        ctrlPortNum = d['ctrlPortNum']
        ctrlUname = d['ctrlUname']
        ctrlPswd = d['ctrlPswd']
        rundelay = d['rundelay']
    except:
        # NOTE(review): a KeyError is what is expected when a setting is
        # missing; this bare 'except' also hides any unrelated error.
        print ("Failed to get Controller device attributes")
        exit(0)
    openflow_nodes = []
    print ("<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<")
    print ("<<< Demo 32 Start")
    print ("<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<")
    print "\n"
    ctrl = Controller(ctrlIpAddr, ctrlPortNum, ctrlUname, ctrlPswd)
    print ("<<< Controller '%s:%s'" % (ctrlIpAddr, ctrlPortNum))
    time.sleep(rundelay)
    print "\n".strip()
    print ("<<< Get OpenFlow switches information")
    time.sleep(rundelay)
    inv_obj = None
    result = ctrl.build_inventory_object()
    status = result.get_status()
    if(status.eq(STATUS.OK)):
        inv_obj = result.get_data()
        assert(isinstance(inv_obj, Inventory))
    else:
        print ("\n")
        print ("!!!Error, failed to obtain inventory info, reason: %s" %
               status.brief().lower())
        exit(0)
    assert(inv_obj)
    # Collect every OpenFlow-capable node from the inventory store.
    openflow_node_ids = inv_obj.get_openflow_node_ids()
    for node_id in openflow_node_ids:
        node = inv_obj.get_openflow_node(node_id)
        assert(isinstance(node, OpenFlowCapableNode))
        openflow_nodes.append(node)
    print "\n".strip()
    print ("<<< OpenFlow switches in the inventory store")
    s1 = 'IP Address'
    s2 = 'OpenFlow Id'
    sym = '-'
    print "\n".strip()
    print " {0:<15} {1:<30}".format(s1, s2)
    print " {0:<15} {1:<30}".format(sym * 15, sym * 30)
    for node in openflow_nodes:
        addr = node.get_ip_address()
        node_id = node.get_id()
        print " {0:<15} {1:<30}".format(addr, node_id)
    print "\n".strip()
    print ("<<< Get Group Features Information")
    time.sleep(rundelay)
    for node in openflow_nodes:
        assert(isinstance(node, OpenFlowCapableNode))
        print "\n".strip()
        switch_id = node.get_id()
        print (" Switch '%s'") % switch_id
        print "\n".strip()
        group_features = node.get_group_features()
        assert(isinstance(group_features, GroupFeatures))
        q = 2  # number of list items to be in a single chunk (output string)
        # --- Max groups supported per group type -------------------------
        s = 'Max groups'
        alist = group_features.get_max_groups()
        if alist:
            chunks = [alist[x:x + q] for x in xrange(0, len(alist), q)]
            print " %s :" % s,
            for i in range(0, len(chunks)):
                # Continuation lines are padded to align under the values.
                n = 0 if i == 0 else len(s) + 19
                print "%s%s" % (" " * n, ", ".join(map(str, chunks[i])))
        else:
            print " %s : %s" % (s, "n/a")
        # --- Supported group types ---------------------------------------
        s = 'Group types'
        alist = group_features.get_types()
        if alist:
            chunks = [alist[x:x + q] for x in xrange(0, len(alist), q)]
            print " %s :" % s,
            for i in range(0, len(chunks)):
                n = 0 if i == 0 else len(s) + 18
                print "%s%s" % (" " * n, ", ".join(chunks[i]))
        else:
            print " %s : %s" % (s, "n/a")
        # --- Group capabilities ------------------------------------------
        s = 'Capabilities'
        alist = group_features.get_capabilities()
        if alist:
            chunks = [alist[x:x + q] for x in xrange(0, len(alist), q)]
            print " %s :" % s,
            for i in range(0, len(chunks)):
                n = 0 if i == 0 else len(s) + 17
                print "%s%s" % (" " * n, ", ".join(chunks[i]))
        else:
            print " %s : %s" % (s, "n/a")
        # --- Actions supported per group type (list of lists) ------------
        s = 'Actions'
        actions = group_features.get_actions()
        if actions:
            print " %s :" % s,
            for i, alist in enumerate(actions):
                n = 0 if i == 0 else len(s) + 12
                chunks = [alist[x:x + q] for x in xrange(0, len(alist), q)]
                for j in range(0, len(chunks)):
                    n = 0 if i == 0 and j == 0 else len(s) + 22
                    print "%s%s" % (" " * n, ", ".join(chunks[j]))
            print "\n".strip()
        else:
            print " %s : %s" % (s, "n/a")
        print "\n".strip()
        # --- Currently configured groups ---------------------------------
        total_num = node.get_groups_total_num()
        s = 'Num of groups'
        print " %s : %s" % (s, total_num)
        s = 'Group IDs'
        alist = node.get_group_ids()
        if alist:
            chunks = [alist[x:x + q] for x in xrange(0, len(alist), q)]
            print " %s :" % s,
            for i in range(0, len(chunks)):
                n = 0 if i == 0 else len(s) + 13
                print "%s%s" % (" " * n, ", ".join(map(str, chunks[i])))
        else:
            print " %s : %s" % (s, "")
    print ("\n")
    print (">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>")
    print (">>> Demo End")
    print (">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>")
if __name__ == "__main__":
of_demo_32()
| bsd-3-clause |
iancmcc/txrestapi | txrestapi/tests.py | 1 | 6276 | import txrestapi
__package__="txrestapi"
import re
import os.path
import doctest
from six import PY2, b, u
from twisted.internet import reactor
from twisted.internet.defer import inlineCallbacks
from twisted.web.resource import Resource, NoResource
from twisted.web.server import Request, Site
from twisted.web.client import getPage
from twisted.trial import unittest
from .resource import APIResource
from .methods import GET, PUT
class FakeChannel(object):
    """Minimal stand-in for a twisted channel, just enough to build a Request."""
    # Request only needs the attribute to exist; no real transport is used.
    transport = None
def getRequest(method, url):
    """Build a bare twisted Request carrying the given HTTP method and path."""
    request = Request(FakeChannel(), None)
    request.method = method
    request.path = url
    return request
class APIResourceTest(unittest.TestCase):
    """Unit tests for APIResource: registration, regex matching and dispatch.

    Uses ``self.assertTrue`` throughout — the ``assert_`` alias is
    deprecated and was removed in Python 3.12.
    """

    def test_returns_normal_resources(self):
        # Children added with putChild must still resolve like on a plain
        # twisted Resource.
        r = APIResource()
        a = Resource()
        r.putChild(b('a'), a)
        req = Request(FakeChannel(), None)
        a_ = r.getChild(b('a'), req)
        self.assertEqual(a, a_)

    def test_registry(self):
        # register() compiles the pattern and stores (method, regex, callback).
        compiled = re.compile(b('regex'))
        r = APIResource()
        r.register(b('GET'), b('regex'), None)
        self.assertEqual([x[0] for x in r._registry], [b('GET')])
        self.assertEqual(r._registry[0], (b('GET'), compiled, None))

    def test_method_matching(self):
        # The callback chosen must match both the HTTP method and the path.
        r = APIResource()
        r.register(b('GET'), b('regex'), 1)
        r.register(b('PUT'), b('regex'), 2)
        r.register(b('GET'), b('another'), 3)
        req = getRequest(b('GET'), b('regex'))
        result = r._get_callback(req)
        self.assertTrue(result)
        self.assertEqual(result[0], 1)
        req = getRequest(b('PUT'), b('regex'))
        result = r._get_callback(req)
        self.assertTrue(result)
        self.assertEqual(result[0], 2)
        req = getRequest(b('GET'), b('another'))
        result = r._get_callback(req)
        self.assertTrue(result)
        self.assertEqual(result[0], 3)
        # No registration for PUT /another: (None, None) is returned.
        req = getRequest(b('PUT'), b('another'))
        result = r._get_callback(req)
        self.assertEqual(result, (None, None))

    def test_callback(self):
        marker = object()
        def cb(request):
            return marker
        r = APIResource()
        r.register(b('GET'), b('regex'), cb)
        req = getRequest(b('GET'), b('regex'))
        result = r.getChild(b('regex'), req)
        self.assertEqual(result.render(req), marker)

    def test_longerpath(self):
        # Patterns may span multiple path segments.
        marker = object()
        r = APIResource()
        def cb(request):
            return marker
        r.register(b('GET'), b('/regex/a/b/c'), cb)
        req = getRequest(b('GET'), b('/regex/a/b/c'))
        result = r.getChild(b('regex'), req)
        self.assertEqual(result.render(req), marker)

    def test_args(self):
        # Named regex groups are forwarded to the callback as keyword args.
        r = APIResource()
        def cb(request, **kwargs):
            return kwargs
        r.register(b('GET'), b('/(?P<a>[^/]*)/a/(?P<b>[^/]*)/c'), cb)
        req = getRequest(b('GET'), b('/regex/a/b/c'))
        result = r.getChild(b('regex'), req)
        self.assertEqual(sorted(result.render(req).keys()), ['a', 'b'])

    def test_order(self):
        r = APIResource()
        def cb1(request, **kwargs):
            kwargs.update({'cb1': True})
            return kwargs
        def cb(request, **kwargs):
            return kwargs
        # Register two regexes that will match
        r.register(b('GET'), b('/(?P<a>[^/]*)/a/(?P<b>[^/]*)/c'), cb1)
        r.register(b('GET'), b('/(?P<a>[^/]*)/a/(?P<b>[^/]*)'), cb)
        req = getRequest(b('GET'), b('/regex/a/b/c'))
        result = r.getChild(b('regex'), req)
        # Make sure the first one got it
        self.assertTrue('cb1' in result.render(req))

    def test_no_resource(self):
        # An unmatched path must yield a NoResource (404) placeholder.
        r = APIResource()
        r.register(b('GET'), b('^/(?P<a>[^/]*)/a/(?P<b>[^/]*)$'), None)
        req = getRequest(b('GET'), b('/definitely/not/a/match'))
        result = r.getChild(b('regex'), req)
        self.assertTrue(isinstance(result, NoResource))

    def test_all(self):
        r = APIResource()
        def get_cb(r): return b('GET')
        def put_cb(r): return b('PUT')
        def all_cb(r): return b('ALL')
        r.register(b('GET'), b('^path'), get_cb)
        r.register(b('ALL'), b('^path'), all_cb)
        r.register(b('PUT'), b('^path'), put_cb)
        # Test that the ALL registration picks it up before the PUT one
        for method in (b('GET'), b('PUT'), b('ALL')):
            req = getRequest(method, b('path'))
            result = r.getChild(b('path'), req)
            self.assertEqual(result.render(req), b('ALL') if method == b('PUT') else method)
class TestResource(Resource):
    """Leaf resource returning a fixed body; used to test resource wrapping."""
    # Leaf: twisted stops path traversal here and calls render() directly.
    isLeaf = True

    def render(self, request):
        return b('aresource')
class TestAPI(APIResource):
    """API served by DecoratorsTest: exercises the GET/PUT route decorators."""

    @GET(b('^/(?P<a>test[^/]*)/?'))
    def _on_test_get(self, request, a):
        return b('GET %s') % a

    @PUT(b('^/(?P<a>test[^/]*)/?'))
    def _on_test_put(self, request, a):
        return b('PUT %s') % a

    @GET(b('^/gettest'))
    def _on_gettest(self, request):
        # Returning a Resource (not bytes) exercises the wrapping path.
        return TestResource()
class DecoratorsTest(unittest.TestCase):
    """End-to-end tests: serve a TestAPI on a real local TCP port and fetch it."""

    def _listen(self, site):
        # Port 0 lets the OS pick a free port; bind to loopback only.
        return reactor.listenTCP(0, site, interface="127.0.0.1")

    def setUp(self):
        r = TestAPI()
        site = Site(r, timeout=None)
        self.port = self._listen(site)
        self.portno = self.port.getHost().port

    def tearDown(self):
        return self.port.stopListening()

    def getURL(self, path):
        return b("http://127.0.0.1:%d/%s" % (self.portno, path))

    @inlineCallbacks
    def test_get(self):
        url = self.getURL('test_thing/')
        result = yield getPage(url, method=b('GET'))
        self.assertEqual(result, b('GET test_thing'))

    @inlineCallbacks
    def test_put(self):
        url = self.getURL('test_thing/')
        result = yield getPage(url, method=b('PUT'))
        self.assertEqual(result, b('PUT test_thing'))

    @inlineCallbacks
    def test_resource_wrapper(self):
        # A handler returning a Resource should be rendered transparently.
        url = self.getURL('gettest')
        result = yield getPage(url, method=b('GET'))
        self.assertEqual(result, b('aresource'))
def test_suite():
    """Assemble the package test suite (plus README doctests on Python 2)."""
    import unittest as ut
    suite = unittest.TestSuite()
    # Same registration order as before: decorators first, then resource tests.
    for case in (DecoratorsTest, APIResourceTest):
        suite.addTest(ut.makeSuite(case))
    if PY2:
        suite.addTest(doctest.DocFileSuite(os.path.join('..', 'README.rst')))
    return suite
| mit |
fernandezcuesta/ansible | lib/ansible/modules/notification/hipchat.py | 8 | 6387 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['stableinterface'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: hipchat
version_added: "1.2"
short_description: Send a message to Hipchat.
description:
- Send a message to a Hipchat room, with options to control the formatting.
options:
token:
description:
- API token.
required: true
room:
description:
- ID or name of the room.
required: true
from:
description:
- Name the message will appear to be sent from. Max length is 15
characters - above this it will be truncated.
required: false
default: Ansible
msg:
description:
- The message body.
required: true
default: null
color:
description:
- Background color for the message.
required: false
default: yellow
choices: [ "yellow", "red", "green", "purple", "gray", "random" ]
msg_format:
description:
- Message format.
required: false
default: text
choices: [ "text", "html" ]
notify:
description:
- If true, a notification will be triggered for users in the room.
required: false
default: 'yes'
choices: [ "yes", "no" ]
validate_certs:
description:
- If C(no), SSL certificates will not be validated. This should only be used
on personally controlled sites using self-signed certificates.
required: false
default: 'yes'
choices: ['yes', 'no']
version_added: 1.5.1
api:
description:
- API url if using a self-hosted hipchat server. For Hipchat API version
2 use the default URI with C(/v2) instead of C(/v1).
required: false
default: 'https://api.hipchat.com/v1'
version_added: 1.6.0
requirements: [ ]
author: "WAKAYAMA Shirou (@shirou), BOURDEL Paul (@pb8226)"
'''
EXAMPLES = '''
- hipchat:
room: notif
msg: Ansible task finished
# Use Hipchat API version 2
- hipchat:
api: https://api.hipchat.com/v2/
token: OAUTH2_TOKEN
room: notify
msg: Ansible task finished
'''
# ===========================================
# HipChat module specific support methods.
#
import json
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves.urllib.parse import urlencode
from ansible.module_utils.six.moves.urllib.request import pathname2url
from ansible.module_utils._text import to_native
from ansible.module_utils.urls import fetch_url
DEFAULT_URI = "https://api.hipchat.com/v1"
MSG_URI_V1 = "/rooms/message"
NOTIFY_URI_V2 = "/room/{id_or_name}/notification"
def send_msg_v1(module, token, room, msg_from, msg, msg_format='text',
                color='yellow', notify=False, api=DEFAULT_URI):
    '''Send a message through the HipChat API v1 "rooms/message" endpoint.

    ``api`` is the server base URI (overridable for self-hosted servers);
    the v1 message path is appended below.  The previous default of
    MSG_URI_V1 doubled the path ("/rooms/message/rooms/message") whenever
    the default was relied upon, so it is now DEFAULT_URI.

    In check mode the module exits before any request is made; a non-200
    response fails the module.
    '''
    params = {}
    params['room_id'] = room
    params['from'] = msg_from[:15]  # max length is 15
    params['message'] = msg
    params['message_format'] = msg_format
    params['color'] = color
    # The v1 API expects 0/1 for notify.  (The old code also sent the base
    # URI as a bogus 'api' form field; that is not a v1 parameter and has
    # been dropped.)
    params['notify'] = int(notify)
    url = api + MSG_URI_V1 + "?auth_token=%s" % (token)
    data = urlencode(params)
    if module.check_mode:
        # In check mode, exit before actually sending the message
        module.exit_json(changed=False)
    response, info = fetch_url(module, url, data=data)
    if info['status'] == 200:
        return response.read()
    else:
        module.fail_json(msg="failed to send message, return status=%s" % str(info['status']))
def send_msg_v2(module, token, room, msg_from, msg, msg_format='text',
                color='yellow', notify=False, api=NOTIFY_URI_V2):
    '''Send a room notification through the HipChat API v2.

    ``msg_from`` is accepted for signature parity with v1, but the v2
    notification endpoint derives the sender from the token.

    NOTE(review): the default ``api=NOTIFY_URI_V2`` looks wrong — the code
    below appends NOTIFY_URI_V2 to ``api``, so relying on the default would
    double the path.  In practice main() always passes the configured API
    base URI; confirm before changing.
    '''
    headers = {'Authorization': 'Bearer %s' % token, 'Content-Type': 'application/json'}
    body = dict()
    body['message'] = msg
    body['color'] = color
    body['message_format'] = msg_format
    body['notify'] = notify
    # Substitute the (URL-quoted) room id/name into the endpoint template.
    POST_URL = api + NOTIFY_URI_V2
    url = POST_URL.replace('{id_or_name}', pathname2url(room))
    data = json.dumps(body)
    if module.check_mode:
        # In check mode, exit before actually sending the message
        module.exit_json(changed=False)
    response, info = fetch_url(module, url, data=data, headers=headers, method='POST')
    # https://www.hipchat.com/docs/apiv2/method/send_room_notification shows
    # 204 to be the expected result code.
    if info['status'] in [200, 204]:
        return response.read()
    else:
        module.fail_json(msg="failed to send message, return status=%s" % str(info['status']))
# ===========================================
# Module execution.
#
def main():
    """Ansible module entry point: parse parameters and dispatch to the
    v1 or v2 sender based on the configured API URL."""
    module = AnsibleModule(
        argument_spec=dict(
            token=dict(required=True, no_log=True),
            room=dict(required=True),
            msg=dict(required=True),
            msg_from=dict(default="Ansible", aliases=['from']),
            color=dict(default="yellow", choices=["yellow", "red", "green",
                                                 "purple", "gray", "random"]),
            msg_format=dict(default="text", choices=["text", "html"]),
            notify=dict(default=True, type='bool'),
            validate_certs=dict(default='yes', type='bool'),
            api=dict(default=DEFAULT_URI),
        ),
        supports_check_mode=True
    )
    token = module.params["token"]
    room = str(module.params["room"])
    msg = module.params["msg"]
    msg_from = module.params["msg_from"]
    color = module.params["color"]
    msg_format = module.params["msg_format"]
    notify = module.params["notify"]
    api = module.params["api"]
    try:
        # A '/v2' segment anywhere in the API URL selects the v2 endpoint.
        if api.find('/v2') != -1:
            send_msg_v2(module, token, room, msg_from, msg, msg_format, color, notify, api)
        else:
            send_msg_v1(module, token, room, msg_from, msg, msg_format, color, notify, api)
    except Exception as e:
        module.fail_json(msg="unable to send msg: %s" % to_native(e), exception=traceback.format_exc())
    changed = True
    module.exit_json(changed=changed, room=room, msg_from=msg_from, msg=msg)
if __name__ == '__main__':
main()
| gpl-3.0 |
apache/airflow | tests/api_connexion/endpoints/test_pool_endpoint.py | 3 | 21313 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import pytest
from parameterized import parameterized
from airflow.api_connexion.exceptions import EXCEPTIONS_LINK_MAP
from airflow.models.pool import Pool
from airflow.security import permissions
from airflow.utils.session import provide_session
from tests.test_utils.api_connexion_utils import assert_401, create_user, delete_user
from tests.test_utils.config import conf_vars
from tests.test_utils.db import clear_db_pools
@pytest.fixture(scope="module")
def configured_app(minimal_app_for_api):
    """Yield the API test app with a fully pool-privileged user ("test")
    and one with no pool permissions ("test_no_permissions").

    Both users are removed again at module teardown.
    """
    app = minimal_app_for_api
    create_user(
        app,  # type: ignore
        username="test",
        role_name="Test",
        permissions=[
            (permissions.ACTION_CAN_CREATE, permissions.RESOURCE_POOL),
            (permissions.ACTION_CAN_READ, permissions.RESOURCE_POOL),
            (permissions.ACTION_CAN_EDIT, permissions.RESOURCE_POOL),
            (permissions.ACTION_CAN_DELETE, permissions.RESOURCE_POOL),
        ],
    )
    create_user(app, username="test_no_permissions", role_name="TestNoPermissions")  # type: ignore
    yield app
    delete_user(app, username="test")  # type: ignore
    delete_user(app, username="test_no_permissions")  # type: ignore
class TestBasePoolEndpoints:
    """Shared setup for pool endpoint tests: app, test client, clean pool table."""

    @pytest.fixture(autouse=True)
    def setup_attrs(self, configured_app) -> None:
        self.app = configured_app
        self.client = self.app.test_client()  # type:ignore
        # Start every test from an empty pools table (default pool excepted).
        clear_db_pools()

    def teardown_method(self) -> None:
        clear_db_pools()
class TestGetPools(TestBasePoolEndpoints):
    """GET /api/v1/pools — listing, ordering and access control."""

    def test_response_200(self, session):
        pool_model = Pool(pool="test_pool_a", slots=3)
        session.add(pool_model)
        session.commit()
        result = session.query(Pool).all()
        assert len(result) == 2  # accounts for the default pool as well
        response = self.client.get("/api/v1/pools", environ_overrides={'REMOTE_USER': "test"})
        assert response.status_code == 200
        assert {
            "pools": [
                {
                    "name": "default_pool",
                    "slots": 128,
                    "occupied_slots": 0,
                    "running_slots": 0,
                    "queued_slots": 0,
                    "open_slots": 128,
                },
                {
                    "name": "test_pool_a",
                    "slots": 3,
                    "occupied_slots": 0,
                    "running_slots": 0,
                    "queued_slots": 0,
                    "open_slots": 3,
                },
            ],
            "total_entries": 2,
        } == response.json

    def test_response_200_with_order_by(self, session):
        # Ordering by slots puts the 3-slot test pool before default_pool (128).
        pool_model = Pool(pool="test_pool_a", slots=3)
        session.add(pool_model)
        session.commit()
        result = session.query(Pool).all()
        assert len(result) == 2  # accounts for the default pool as well
        response = self.client.get("/api/v1/pools?order_by=slots", environ_overrides={'REMOTE_USER': "test"})
        assert response.status_code == 200
        assert {
            "pools": [
                {
                    "name": "test_pool_a",
                    "slots": 3,
                    "occupied_slots": 0,
                    "running_slots": 0,
                    "queued_slots": 0,
                    "open_slots": 3,
                },
                {
                    "name": "default_pool",
                    "slots": 128,
                    "occupied_slots": 0,
                    "running_slots": 0,
                    "queued_slots": 0,
                    "open_slots": 128,
                },
            ],
            "total_entries": 2,
        } == response.json

    def test_should_raises_401_unauthenticated(self):
        response = self.client.get("/api/v1/pools")
        assert_401(response)

    def test_should_raise_403_forbidden(self):
        response = self.client.get("/api/v1/pools", environ_overrides={'REMOTE_USER': "test_no_permissions"})
        assert response.status_code == 403
class TestGetPoolsPagination(TestBasePoolEndpoints):
    """GET /api/v1/pools — limit/offset paging and page-size caps."""

    @parameterized.expand(
        [
            # Offset test data
            ("/api/v1/pools?offset=1", [f"test_pool{i}" for i in range(1, 101)]),
            ("/api/v1/pools?offset=3", [f"test_pool{i}" for i in range(3, 103)]),
            # Limit test data
            ("/api/v1/pools?limit=2", ["default_pool", "test_pool1"]),
            ("/api/v1/pools?limit=1", ["default_pool"]),
            # Limit and offset test data
            (
                "/api/v1/pools?limit=100&offset=1",
                [f"test_pool{i}" for i in range(1, 101)],
            ),
            ("/api/v1/pools?limit=2&offset=1", ["test_pool1", "test_pool2"]),
            (
                "/api/v1/pools?limit=3&offset=2",
                ["test_pool2", "test_pool3", "test_pool4"],
            ),
        ]
    )
    @provide_session
    def test_limit_and_offset(self, url, expected_pool_ids, session):
        pools = [Pool(pool=f"test_pool{i}", slots=1) for i in range(1, 121)]
        session.add_all(pools)
        session.commit()
        result = session.query(Pool).count()
        assert result == 121  # accounts for default pool as well
        response = self.client.get(url, environ_overrides={'REMOTE_USER': "test"})
        assert response.status_code == 200
        pool_ids = [pool["name"] for pool in response.json["pools"]]
        assert pool_ids == expected_pool_ids

    def test_should_respect_page_size_limit_default(self, session):
        pools = [Pool(pool=f"test_pool{i}", slots=1) for i in range(1, 121)]
        session.add_all(pools)
        session.commit()
        result = session.query(Pool).count()
        assert result == 121
        response = self.client.get("/api/v1/pools", environ_overrides={'REMOTE_USER': "test"})
        assert response.status_code == 200
        # The default page size caps the listing at 100 entries.
        assert len(response.json['pools']) == 100

    def test_should_raise_400_for_invalid_orderby(self, session):
        pools = [Pool(pool=f"test_pool{i}", slots=1) for i in range(1, 121)]
        session.add_all(pools)
        session.commit()
        result = session.query(Pool).count()
        assert result == 121
        # open_slots is a derived value, not a sortable model attribute.
        response = self.client.get(
            "/api/v1/pools?order_by=open_slots", environ_overrides={'REMOTE_USER': "test"}
        )
        assert response.status_code == 400
        msg = "Ordering with 'open_slots' is disallowed or the attribute does not exist on the model"
        assert response.json['detail'] == msg

    @conf_vars({("api", "maximum_page_limit"): "150"})
    def test_should_return_conf_max_if_req_max_above_conf(self, session):
        # A requested limit above the configured maximum is clamped to it.
        pools = [Pool(pool=f"test_pool{i}", slots=1) for i in range(1, 200)]
        session.add_all(pools)
        session.commit()
        result = session.query(Pool).count()
        assert result == 200
        response = self.client.get("/api/v1/pools?limit=180", environ_overrides={'REMOTE_USER': "test"})
        assert response.status_code == 200
        assert len(response.json['pools']) == 150
class TestGetPool(TestBasePoolEndpoints):
    """GET /api/v1/pools/{name} — single pool lookup."""

    def test_response_200(self, session):
        pool_model = Pool(pool="test_pool_a", slots=3)
        session.add(pool_model)
        session.commit()
        response = self.client.get("/api/v1/pools/test_pool_a", environ_overrides={'REMOTE_USER': "test"})
        assert response.status_code == 200
        assert {
            "name": "test_pool_a",
            "slots": 3,
            "occupied_slots": 0,
            "running_slots": 0,
            "queued_slots": 0,
            "open_slots": 3,
        } == response.json

    def test_response_404(self):
        response = self.client.get("/api/v1/pools/invalid_pool", environ_overrides={'REMOTE_USER': "test"})
        assert response.status_code == 404
        assert {
            "detail": "Pool with name:'invalid_pool' not found",
            "status": 404,
            "title": "Not Found",
            "type": EXCEPTIONS_LINK_MAP[404],
        } == response.json

    def test_should_raises_401_unauthenticated(self):
        response = self.client.get("/api/v1/pools/default_pool")
        assert_401(response)
class TestDeletePool(TestBasePoolEndpoints):
    """DELETE /api/v1/pools/{name} — deletion and auth behaviour."""

    def test_response_204(self, session):
        pool_name = "test_pool"
        pool_instance = Pool(pool=pool_name, slots=3)
        session.add(pool_instance)
        session.commit()
        response = self.client.delete(f"api/v1/pools/{pool_name}", environ_overrides={'REMOTE_USER': "test"})
        assert response.status_code == 204
        # Check if the pool is deleted from the db
        response = self.client.get(f"api/v1/pools/{pool_name}", environ_overrides={'REMOTE_USER': "test"})
        assert response.status_code == 404

    def test_response_404(self):
        response = self.client.delete("api/v1/pools/invalid_pool", environ_overrides={'REMOTE_USER': "test"})
        assert response.status_code == 404
        assert {
            "detail": "Pool with name:'invalid_pool' not found",
            "status": 404,
            "title": "Not Found",
            "type": EXCEPTIONS_LINK_MAP[404],
        } == response.json

    def test_should_raises_401_unauthenticated(self, session):
        pool_name = "test_pool"
        pool_instance = Pool(pool=pool_name, slots=3)
        session.add(pool_instance)
        session.commit()
        response = self.client.delete(f"api/v1/pools/{pool_name}")
        assert_401(response)
        # Should still exists
        response = self.client.get(f"/api/v1/pools/{pool_name}", environ_overrides={'REMOTE_USER': "test"})
        assert response.status_code == 200
class TestPostPool(TestBasePoolEndpoints):
    """POST /api/v1/pools — creation, conflicts and payload validation."""

    def test_response_200(self):
        response = self.client.post(
            "api/v1/pools",
            json={"name": "test_pool_a", "slots": 3},
            environ_overrides={'REMOTE_USER': "test"},
        )
        assert response.status_code == 200
        assert {
            "name": "test_pool_a",
            "slots": 3,
            "occupied_slots": 0,
            "running_slots": 0,
            "queued_slots": 0,
            "open_slots": 3,
        } == response.json

    def test_response_409(self, session):
        # Creating a pool whose name already exists must yield a 409 Conflict.
        pool_name = "test_pool_a"
        pool_instance = Pool(pool=pool_name, slots=3)
        session.add(pool_instance)
        session.commit()
        response = self.client.post(
            "api/v1/pools",
            json={"name": "test_pool_a", "slots": 3},
            environ_overrides={'REMOTE_USER': "test"},
        )
        assert response.status_code == 409
        assert {
            "detail": f"Pool: {pool_name} already exists",
            "status": 409,
            "title": "Conflict",
            "type": EXCEPTIONS_LINK_MAP[409],
        } == response.json

    @parameterized.expand(
        [
            (
                "for missing pool name",
                {"slots": 3},
                "Missing required property(ies): ['name']",
            ),
            (
                "for missing slots",
                {"name": "invalid_pool"},
                "Missing required property(ies): ['slots']",
            ),
            (
                "for missing pool name AND slots",
                {},
                "Missing required property(ies): ['name', 'slots']",
            ),
            (
                "for extra fields",
                {"name": "invalid_pool", "slots": 3, "extra_field_1": "extra"},
                "{'extra_field_1': ['Unknown field.']}",
            ),
        ]
    )
    def test_response_400(self, name, request_json, error_detail):
        del name  # only used as the parameterized case label
        response = self.client.post(
            "api/v1/pools", json=request_json, environ_overrides={'REMOTE_USER': "test"}
        )
        assert response.status_code == 400
        assert {
            "detail": error_detail,
            "status": 400,
            "title": "Bad Request",
            "type": EXCEPTIONS_LINK_MAP[400],
        } == response.json

    def test_should_raises_401_unauthenticated(self):
        response = self.client.post("api/v1/pools", json={"name": "test_pool_a", "slots": 3})
        assert_401(response)
class TestPatchPool(TestBasePoolEndpoints):
    """PATCH /api/v1/pools/{name} without an update mask."""

    def test_response_200(self, session):
        pool = Pool(pool="test_pool", slots=2)
        session.add(pool)
        session.commit()
        response = self.client.patch(
            "api/v1/pools/test_pool",
            json={"name": "test_pool_a", "slots": 3},
            environ_overrides={'REMOTE_USER': "test"},
        )
        assert response.status_code == 200
        assert {
            "occupied_slots": 0,
            "queued_slots": 0,
            "name": "test_pool_a",
            "open_slots": 3,
            "running_slots": 0,
            "slots": 3,
        } == response.json

    @parameterized.expand(
        [
            # Missing properties
            ("Missing required property(ies): ['name']", {"slots": 3}),
            ("Missing required property(ies): ['slots']", {"name": "test_pool_a"}),
            ("Missing required property(ies): ['name', 'slots']", {}),
            # Extra properties
            (
                "{'extra_field': ['Unknown field.']}",
                {"name": "test_pool_a", "slots": 3, "extra_field": "extra"},
            ),
        ]
    )
    @provide_session
    def test_response_400(self, error_detail, request_json, session):
        pool = Pool(pool="test_pool", slots=2)
        session.add(pool)
        session.commit()
        response = self.client.patch(
            "api/v1/pools/test_pool", json=request_json, environ_overrides={'REMOTE_USER': "test"}
        )
        assert response.status_code == 400
        assert {
            "detail": error_detail,
            "status": 400,
            "title": "Bad Request",
            "type": EXCEPTIONS_LINK_MAP[400],
        } == response.json

    def test_should_raises_401_unauthenticated(self, session):
        pool = Pool(pool="test_pool", slots=2)
        session.add(pool)
        session.commit()
        response = self.client.patch(
            "api/v1/pools/test_pool",
            json={"name": "test_pool_a", "slots": 3},
        )
        assert_401(response)
class TestModifyDefaultPool(TestBasePoolEndpoints):
    """The default pool may be resized but never deleted or renamed."""

    def test_delete_400(self):
        response = self.client.delete("api/v1/pools/default_pool", environ_overrides={'REMOTE_USER': "test"})
        assert response.status_code == 400
        assert {
            "detail": "Default Pool can't be deleted",
            "status": 400,
            "title": "Bad Request",
            "type": EXCEPTIONS_LINK_MAP[400],
        } == response.json

    # Cases: (label, expected status, url, request body, expected response).
    @parameterized.expand(
        [
            (
                "400 No update mask",
                400,
                "api/v1/pools/default_pool",
                {"name": "test_pool_a", "slots": 3},
                {
                    "detail": "Default Pool's name can't be modified",
                    "status": 400,
                    "title": "Bad Request",
                    "type": EXCEPTIONS_LINK_MAP[400],
                },
            ),
            (
                "400 Update mask with both fields",
                400,
                "api/v1/pools/default_pool?update_mask=name, slots",
                {"name": "test_pool_a", "slots": 3},
                {
                    "detail": "Default Pool's name can't be modified",
                    "status": 400,
                    "title": "Bad Request",
                    "type": EXCEPTIONS_LINK_MAP[400],
                },
            ),
            (
                "200 Update mask with slots",
                200,
                "api/v1/pools/default_pool?update_mask=slots",
                {"name": "test_pool_a", "slots": 3},
                {
                    "occupied_slots": 0,
                    "queued_slots": 0,
                    "name": "default_pool",
                    "open_slots": 3,
                    "running_slots": 0,
                    "slots": 3,
                },
            ),
            (
                "200 Update mask with slots and name",
                200,
                "api/v1/pools/default_pool?update_mask=name,slots",
                {"name": "default_pool", "slots": 3},
                {
                    "occupied_slots": 0,
                    "queued_slots": 0,
                    "name": "default_pool",
                    "open_slots": 3,
                    "running_slots": 0,
                    "slots": 3,
                },
            ),
            (
                "200 no update mask",
                200,
                "api/v1/pools/default_pool",
                {"name": "default_pool", "slots": 3},
                {
                    "occupied_slots": 0,
                    "queued_slots": 0,
                    "name": "default_pool",
                    "open_slots": 3,
                    "running_slots": 0,
                    "slots": 3,
                },
            ),
        ]
    )
    def test_patch(self, name, status_code, url, json, expected_response):
        del name  # only used as the parameterized case label
        response = self.client.patch(url, json=json, environ_overrides={'REMOTE_USER': "test"})
        assert response.status_code == status_code
        assert response.json == expected_response
class TestPatchPoolWithUpdateMask(TestBasePoolEndpoints):
    """PATCH with ?update_mask=... — field selection and validation errors."""

    # Cases: (url, patch body, expected final name, expected final slots).
    @parameterized.expand(
        [
            (
                "api/v1/pools/test_pool?update_mask=name, slots",
                {"name": "test_pool_a", "slots": 2},
                "test_pool_a",
                2,
            ),
            (
                "api/v1/pools/test_pool?update_mask=name",
                {"name": "test_pool_a", "slots": 2},
                "test_pool_a",
                3,
            ),
            (
                "api/v1/pools/test_pool?update_mask=slots",
                {"name": "test_pool_a", "slots": 2},
                "test_pool",
                2,
            ),
            (
                "api/v1/pools/test_pool?update_mask=slots",
                {"slots": 2},
                "test_pool",
                2,
            ),
        ]
    )
    @provide_session
    def test_response_200(self, url, patch_json, expected_name, expected_slots, session):
        pool = Pool(pool="test_pool", slots=3)
        session.add(pool)
        session.commit()
        response = self.client.patch(url, json=patch_json, environ_overrides={'REMOTE_USER': "test"})
        assert response.status_code == 200
        assert {
            "name": expected_name,
            "slots": expected_slots,
            "occupied_slots": 0,
            "running_slots": 0,
            "queued_slots": 0,
            "open_slots": expected_slots,
        } == response.json

    # Cases: (label, expected error detail, url, patch body).
    @parameterized.expand(
        [
            (
                "Patching read only field",
                "Property is read-only - 'occupied_slots'",
                "api/v1/pools/test_pool?update_mask=slots, name, occupied_slots",
                {"name": "test_pool_a", "slots": 2, "occupied_slots": 1},
            ),
            (
                "Patching read only field",
                "Property is read-only - 'queued_slots'",
                "api/v1/pools/test_pool?update_mask=slots, name, queued_slots",
                {"name": "test_pool_a", "slots": 2, "queued_slots": 1},
            ),
            (
                "Invalid update mask",
                "Invalid field: names in update mask",
                "api/v1/pools/test_pool?update_mask=slots, names,",
                {"name": "test_pool_a", "slots": 2},
            ),
            (
                "Invalid update mask",
                "Invalid field: slot in update mask",
                "api/v1/pools/test_pool?update_mask=slot, name,",
                {"name": "test_pool_a", "slots": 2},
            ),
        ]
    )
    @provide_session
    def test_response_400(self, name, error_detail, url, patch_json, session):
        del name  # only used as the parameterized case label
        pool = Pool(pool="test_pool", slots=3)
        session.add(pool)
        session.commit()
        response = self.client.patch(url, json=patch_json, environ_overrides={'REMOTE_USER': "test"})
        assert response.status_code == 400
        assert {
            "detail": error_detail,
            "status": 400,
            "title": "Bad Request",
            "type": EXCEPTIONS_LINK_MAP[400],
        } == response.json
| apache-2.0 |
zstackorg/zstack-woodpecker | integrationtest/vm/virtualrouter/pf/test_pf_qos.py | 2 | 3086 | '''
New Integration Test for pf vip qos.
@author: chenyuanxu
'''
import zstackwoodpecker.test_util as test_util
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.test_state as test_state
import zstackwoodpecker.zstack_test.zstack_test_port_forwarding as zstack_pf_header
import apibinding.inventory as inventory
import zstackwoodpecker.operations.net_operations as net_ops
import zstacklib.utils.ssh as ssh
import time
import subprocess
import os
# Shorthand aliases for the shared port-forwarding rule/port definitions.
PfRule = test_state.PfRule
Port = test_state.Port
# Woodpecker test helpers and the global registry of resources created by
# test(); error_cleanup() tears the registry down if test() raises.
test_stub = test_lib.lib_get_test_stub()
test_obj_dict = test_state.TestStateDict()
def exec_cmd_in_vm(vm, cmd, fail_msg):
    """Run *cmd* in *vm* over ssh (as root) and return its stdout.

    Fails the whole woodpecker test with *fail_msg* when the command
    exits non-zero.
    """
    vm_ip = vm.get_vm().vmNics[0].ip
    ret, output, stderr = ssh.execute(cmd, vm_ip, "root", "password", False, 22)
    if ret != 0:
        test_util.test_fail(fail_msg)
    return output
def test():
    """End-to-end check that a QoS limit set on a VIP caps port-forwarding
    traffic.

    Flow: create two VMs and a virtual router, create a VIP with a PF rule
    attached to VM1's nic, set inbound/outbound QoS on the VIP, then measure
    throughput between the VMs with iperf and assert it stays within the cap.
    All created resources are registered in the global test_obj_dict so
    error_cleanup() can tear them down on failure.
    """
    # Base bandwidth figure; scaled by 8*1024 for set_vip_qos and passed
    # unscaled to test_iperf_bandwidth — units per zstack API (verify).
    vip_bandwidth = 1*1024
    # Two VMs: pf_vm1 receives the port-forwarding rule, pf_vm2 generates load.
    pf_vm1 = test_stub.create_dnat_vm()
    test_obj_dict.add_vm(pf_vm1)
    pf_vm2 = test_stub.create_vlan_sg_vm()
    test_obj_dict.add_vm(pf_vm2)
    l3_name = os.environ.get('l3VlanNetworkName1')
    vr1 = test_stub.create_vr_vm(test_obj_dict, l3_name)
    vr1_pub_ip = test_lib.lib_find_vr_pub_ip(vr1)
    pf_vm1.check()
    pf_vm2.check()
    vm1_inv=pf_vm1.get_vm()
    vm2_inv=pf_vm2.get_vm()
    # Locate the virtual router serving VM1's private L3 network; the VIP is
    # created on that router's public L3 network.
    vm_nic1 = pf_vm1.vm.vmNics[0]
    vm_nic_uuid1 = vm_nic1.uuid
    pri_l3_uuid = vm_nic1.l3NetworkUuid
    vr = test_lib.lib_find_vr_by_l3_uuid(pri_l3_uuid)[0]
    vr_pub_nic = test_lib.lib_find_vr_pub_nic(vr)
    l3_uuid = vr_pub_nic.l3NetworkUuid
    vip = test_stub.create_vip('pf_attach_test', l3_uuid)
    test_obj_dict.add_vip(vip)
    vip_uuid = vip.get_vip().uuid
    vip_ip = vip.get_vip().ip
    #pf_creation_opt = PfRule.generate_pf_rule_option(vr1_pub_ip, protocol=inventory.TCP, vip_target_rule=Port.rule1_ports, private_target_rule=Port.rule1_ports)
    pf_creation_opt1 = PfRule.generate_pf_rule_option(vr1_pub_ip, protocol=inventory.TCP, vip_target_rule=Port.rule4_ports, private_target_rule=Port.rule4_ports, vip_uuid=vip_uuid)
    test_pf1 = zstack_pf_header.ZstackTestPortForwarding()
    test_pf1.set_creation_option(pf_creation_opt1)
    test_pf1.create()
    vip.attach_pf(test_pf1)
    pf_vm1.check()
    test_pf1.attach(vm_nic_uuid1, pf_vm1)
    # Apply the QoS limit under test to the VIP (both directions).
    vip_qos = net_ops.set_vip_qos(vip_uuid=vip_uuid, inboundBandwidth=vip_bandwidth*8*1024, outboundBandwidth=vip_bandwidth*8*1024)
    # Prepare both VMs for the iperf measurement.
    test_stub.make_ssh_no_password(vm1_inv)
    test_stub.make_ssh_no_password(vm2_inv)
    test_stub.install_iperf(vm1_inv)
    test_stub.install_iperf(vm2_inv)
    # Flush VM1's firewall so iperf traffic through the PF rule is not blocked.
    iptables_cmd = "iptables -F"
    exec_cmd_in_vm(pf_vm1, iptables_cmd, "Failed to clean iptables.")
    # Measure throughput through the VIP on the forwarded port (20502) and
    # assert it respects vip_bandwidth.
    test_stub.test_iperf_bandwidth(vm1_inv,vm2_inv,vip_ip,20502,20502,vip_bandwidth)
    # Success path teardown; failure-path teardown is error_cleanup().
    vip.delete()
    test_obj_dict.rm_vip(vip)
    pf_vm1.destroy()
    test_obj_dict.rm_vm(pf_vm1)
    pf_vm2.destroy()
    test_obj_dict.rm_vm(pf_vm2)
    test_util.test_pass("Test Port Forwarding Vip Qos Successfully.")
# Invoked by the woodpecker framework only when test() raises.
def error_cleanup():
    """Tear down every resource that test() registered in test_obj_dict."""
    global test_obj_dict
    test_lib.lib_error_cleanup(test_obj_dict)
| apache-2.0 |
i5o/sugar | src/jarabe/model/neighborhood.py | 9 | 45287 | # Copyright (C) 2010 Collabora Ltd. <http://www.collabora.co.uk/>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import logging
from functools import partial
from hashlib import sha1
from gi.repository import GObject
from gi.repository import Gio
import dbus
from dbus import PROPERTIES_IFACE
from telepathy.interfaces import ACCOUNT, \
ACCOUNT_MANAGER, \
CHANNEL, \
CHANNEL_INTERFACE_GROUP, \
CHANNEL_TYPE_CONTACT_LIST, \
CHANNEL_TYPE_FILE_TRANSFER, \
CLIENT, \
CONNECTION, \
CONNECTION_INTERFACE_ALIASING, \
CONNECTION_INTERFACE_CONTACTS, \
CONNECTION_INTERFACE_CONTACT_CAPABILITIES, \
CONNECTION_INTERFACE_REQUESTS, \
CONNECTION_INTERFACE_SIMPLE_PRESENCE
from telepathy.constants import HANDLE_TYPE_CONTACT, \
HANDLE_TYPE_LIST, \
CONNECTION_PRESENCE_TYPE_OFFLINE, \
CONNECTION_STATUS_CONNECTED, \
CONNECTION_STATUS_DISCONNECTED
from telepathy.client import Connection, Channel
from sugar3.graphics.xocolor import XoColor
from sugar3.profile import get_profile
from jarabe.model.buddy import BuddyModel, get_owner_instance
from jarabe.model import bundleregistry
from jarabe.model import shell
# Well-known Telepathy bus names and object paths (Account Manager, Channel
# Dispatcher, and Sugar's own Telepathy client).
ACCOUNT_MANAGER_SERVICE = 'org.freedesktop.Telepathy.AccountManager'
ACCOUNT_MANAGER_PATH = '/org/freedesktop/Telepathy/AccountManager'
CHANNEL_DISPATCHER_SERVICE = 'org.freedesktop.Telepathy.ChannelDispatcher'
CHANNEL_DISPATCHER_PATH = '/org/freedesktop/Telepathy/ChannelDispatcher'
SUGAR_CLIENT_SERVICE = 'org.freedesktop.Telepathy.Client.Sugar'
SUGAR_CLIENT_PATH = '/org/freedesktop/Telepathy/Client/Sugar'
# OLPC-specific connection interfaces (buddy metadata and shared-activity
# properties) implemented by the Salut/Gabble connection managers.
CONNECTION_INTERFACE_BUDDY_INFO = 'org.laptop.Telepathy.BuddyInfo'
CONNECTION_INTERFACE_ACTIVITY_PROPERTIES = \
    'org.laptop.Telepathy.ActivityProperties'
_QUERY_DBUS_TIMEOUT = 200
"""
Time in seconds to wait when querying contact properties. Some jabber servers
will be very slow in returning these queries, so just be patient.
"""
# Module-level singleton; presumably populated by a get_model()-style factory
# later in this file — not visible in this chunk, verify before relying on it.
_model = None
class ActivityModel(GObject.GObject):
    """Model of one shared activity advertised in the neighborhood.

    Holds the activity's metadata (bundle, color, name, privacy flag) and two
    buddy lists: every buddy advertising the activity, and the subset whose
    *current* activity this is.  List changes are reported both via GObject
    property notification ('buddies' / 'current-buddies') and the explicit
    add/remove signals below.
    """
    __gsignals__ = {
        'current-buddy-added': (GObject.SignalFlags.RUN_FIRST, None,
                                ([object])),
        'current-buddy-removed': (GObject.SignalFlags.RUN_FIRST, None,
                                  ([object])),
        'buddy-added': (GObject.SignalFlags.RUN_FIRST, None,
                        ([object])),
        'buddy-removed': (GObject.SignalFlags.RUN_FIRST, None,
                          ([object])),
    }
    def __init__(self, activity_id, room_handle):
        """
        Args:
            activity_id: unique identifier of the shared activity.
            room_handle: Telepathy handle of the activity's room/channel.
        """
        GObject.GObject.__init__(self)
        self.activity_id = activity_id
        self.room_handle = room_handle
        self._bundle = None
        # Activities are treated as private until metadata says otherwise.
        self._bundle = self._bundle
        self._color = None
        self._private = True
        self._name = None
        self._current_buddies = []
        self._buddies = []
    def get_color(self):
        """Return the activity's XoColor (or None if not yet known)."""
        return self._color
    def set_color(self, color):
        self._color = color
    color = GObject.property(type=object, getter=get_color, setter=set_color)
    def get_bundle(self):
        """Return the activity bundle from the registry (or None)."""
        return self._bundle
    def set_bundle(self, bundle):
        self._bundle = bundle
    bundle = GObject.property(type=object, getter=get_bundle,
                              setter=set_bundle)
    def get_name(self):
        return self._name
    def set_name(self, name):
        self._name = name
    name = GObject.property(type=object, getter=get_name, setter=set_name)
    def is_private(self):
        return self._private
    def set_private(self, private):
        self._private = private
    private = GObject.property(type=object, getter=is_private,
                               setter=set_private)
    def get_buddies(self):
        """All buddies currently advertising this activity."""
        return self._buddies
    def add_buddy(self, buddy):
        # Announce via both property notification and the explicit signal.
        self._buddies.append(buddy)
        self.notify('buddies')
        self.emit('buddy-added', buddy)
    def remove_buddy(self, buddy):
        self._buddies.remove(buddy)
        self.notify('buddies')
        self.emit('buddy-removed', buddy)
    buddies = GObject.property(type=object, getter=get_buddies)
    def get_current_buddies(self):
        """Buddies whose *current* activity is this one."""
        return self._current_buddies
    def add_current_buddy(self, buddy):
        self._current_buddies.append(buddy)
        self.notify('current-buddies')
        self.emit('current-buddy-added', buddy)
    def remove_current_buddy(self, buddy):
        self._current_buddies.remove(buddy)
        self.notify('current-buddies')
        self.emit('current-buddy-removed', buddy)
    current_buddies = GObject.property(type=object, getter=get_current_buddies)
class _Account(GObject.GObject):
    """Wrapper around one Telepathy account (Salut link-local or Gabble
    jabber).

    Watches the account's Connection over D-Bus and translates its
    contact/presence/activity signals into the GObject signals below, which
    Neighborhood consumes.  All D-Bus calls are asynchronous
    (reply_handler/error_handler); errors funnel through __error_handler_cb.
    """
    __gsignals__ = {
        'activity-added': (GObject.SignalFlags.RUN_FIRST, None,
                           ([object, object])),
        'activity-updated': (GObject.SignalFlags.RUN_FIRST, None,
                             ([object, object])),
        'activity-removed': (GObject.SignalFlags.RUN_FIRST, None,
                             ([object])),
        'buddy-added': (GObject.SignalFlags.RUN_FIRST, None,
                        ([object, object, object])),
        'buddy-updated': (GObject.SignalFlags.RUN_FIRST, None,
                          ([object, object])),
        'buddy-removed': (GObject.SignalFlags.RUN_FIRST, None,
                          ([object])),
        'buddy-joined-activity': (GObject.SignalFlags.RUN_FIRST, None,
                                  ([object, object])),
        'buddy-left-activity': (GObject.SignalFlags.RUN_FIRST, None,
                                ([object, object])),
        'current-activity-updated': (GObject.SignalFlags.RUN_FIRST,
                                     None, ([object, object])),
        'connected': (GObject.SignalFlags.RUN_FIRST, None, ([])),
        'disconnected': (GObject.SignalFlags.RUN_FIRST, None, ([])),
    }
    def __init__(self, account_path):
        """account_path: D-Bus object path of the Telepathy account."""
        GObject.GObject.__init__(self)
        self.object_path = account_path
        self._connection = None
        # contact handle -> contact id (may hold None before attributes
        # arrive — see __get_contact_attributes_cb)
        self._buddy_handles = {}
        # room handle -> activity id
        self._activity_handles = {}
        self._self_handle = None
        # activity id -> set of buddy handles, and the inverse mapping
        self._buddies_per_activity = {}
        self._activities_per_buddy = {}
        # handler id of the shell 'active-activity-changed' connection
        self._home_changed_hid = None
        self._start_listening()
    def _close_connection(self):
        """Drop the connection and detach from the shell model signal."""
        self._connection = None
        if self._home_changed_hid is not None:
            model = shell.get_model()
            model.disconnect(self._home_changed_hid)
            self._home_changed_hid = None
    def _start_listening(self):
        """Fetch the account's Connection and watch for property changes."""
        bus = dbus.Bus()
        obj = bus.get_object(ACCOUNT_MANAGER_SERVICE, self.object_path)
        obj.Get(ACCOUNT, 'Connection',
                reply_handler=self.__got_connection_cb,
                error_handler=partial(self.__error_handler_cb,
                                      'Account.GetConnection'))
        obj.connect_to_signal(
            'AccountPropertyChanged', self.__account_property_changed_cb)
    def __error_handler_cb(self, function_name, error):
        # Centralized async-error path: surfaces the failing call by name.
        raise RuntimeError('Error when calling %s: %s' % (function_name,
                                                          error))
    def __got_connection_cb(self, connection_path):
        logging.debug('_Account.__got_connection_cb %r', connection_path)
        # '/' means the account currently has no connection.
        if connection_path == '/':
            self._check_registration_error()
            return
        self._prepare_connection(connection_path)
    def _check_registration_error(self):
        """
        See if a previous connection attempt failed and we need to unset
        the register flag.
        """
        bus = dbus.Bus()
        obj = bus.get_object(ACCOUNT_MANAGER_SERVICE, self.object_path)
        obj.Get(ACCOUNT, 'ConnectionError',
                reply_handler=self.__got_connection_error_cb,
                error_handler=partial(self.__error_handler_cb,
                                      'Account.GetConnectionError'))
    def __got_connection_error_cb(self, error):
        logging.debug('_Account.__got_connection_error_cb %r', error)
        # The account already exists on the server, so stop trying to
        # register it on the next connection attempt.
        if error == 'org.freedesktop.Telepathy.Error.RegistrationExists':
            bus = dbus.Bus()
            obj = bus.get_object(ACCOUNT_MANAGER_SERVICE, self.object_path)
            obj.UpdateParameters({'register': False}, [],
                                 dbus_interface=ACCOUNT)
    def __account_property_changed_cb(self, properties):
        logging.debug('_Account.__account_property_changed_cb %r %r %r',
                      self.object_path, properties.get('Connection', None),
                      self._connection)
        if 'Connection' not in properties:
            return
        if properties['Connection'] == '/':
            # Connection went away: check why and tear down our state.
            self._check_registration_error()
            self._close_connection()
        elif self._connection is None:
            self._prepare_connection(properties['Connection'])
    def _prepare_connection(self, connection_path):
        # Derive the bus name from the object path per Telepathy convention.
        connection_name = connection_path.replace('/', '.')[1:]
        self._connection = Connection(connection_name, connection_path,
                                      ready_handler=self.__connection_ready_cb)
    def __connection_ready_cb(self, connection):
        """Once the Connection proxy is ready, track its status."""
        logging.debug('_Account.__connection_ready_cb %r',
                      connection.object_path)
        connection.connect_to_signal('StatusChanged',
                                     self.__status_changed_cb)
        connection[PROPERTIES_IFACE].Get(CONNECTION,
                                         'Status',
                                         reply_handler=self.__get_status_cb,
                                         error_handler=partial(
                                             self.__error_handler_cb,
                                             'Connection.GetStatus'))
    def __get_status_cb(self, status):
        logging.debug('_Account.__get_status_cb %r %r',
                      self._connection.object_path, status)
        self._update_status(status)
    def __status_changed_cb(self, status, reason):
        logging.debug('_Account.__status_changed_cb %r %r', status, reason)
        self._update_status(status)
    def _update_status(self, status):
        """On connect, start the contact machinery; otherwise flush state
        and emit removal signals for everything we were tracking."""
        if status == CONNECTION_STATUS_CONNECTED:
            self._connection[PROPERTIES_IFACE].Get(
                CONNECTION,
                'SelfHandle',
                reply_handler=self.__get_self_handle_cb,
                error_handler=partial(
                    self.__error_handler_cb,
                    'Connection.GetSelfHandle'))
            self.emit('connected')
        else:
            for contact_handle, contact_id in self._buddy_handles.items():
                if contact_id is not None:
                    self.emit('buddy-removed', contact_id)
            for room_handle, activity_id in self._activity_handles.items():
                self.emit('activity-removed', activity_id)
            self._buddy_handles = {}
            self._activity_handles = {}
            self._buddies_per_activity = {}
            self._activities_per_buddy = {}
            self.emit('disconnected')
        if status == CONNECTION_STATUS_DISCONNECTED:
            self._close_connection()
    def __get_self_handle_cb(self, self_handle):
        """Wire up every interface we care about once we know who we are."""
        self._self_handle = self_handle
        # Advertise file-transfer capability for Sugar's Telepathy client.
        if CONNECTION_INTERFACE_CONTACT_CAPABILITIES in self._connection:
            interface = CONNECTION_INTERFACE_CONTACT_CAPABILITIES
            connection = self._connection[interface]
            client_name = CLIENT + '.Sugar.FileTransfer'
            file_transfer_channel_class = {
                CHANNEL + '.ChannelType': CHANNEL_TYPE_FILE_TRANSFER,
                CHANNEL + '.TargetHandleType': HANDLE_TYPE_CONTACT}
            capabilities = []
            connection.UpdateCapabilities(
                [(client_name, [file_transfer_channel_class], capabilities)],
                reply_handler=self.__update_capabilities_cb,
                error_handler=partial(self.__error_handler_cb,
                                      'Connection.UpdateCapabilities'))
        connection = self._connection[CONNECTION_INTERFACE_ALIASING]
        connection.connect_to_signal('AliasesChanged',
                                     self.__aliases_changed_cb)
        connection = self._connection[CONNECTION_INTERFACE_SIMPLE_PRESENCE]
        connection.connect_to_signal('PresencesChanged',
                                     self.__presences_changed_cb)
        # OLPC buddy metadata (properties, activities, current activity).
        if CONNECTION_INTERFACE_BUDDY_INFO in self._connection:
            connection = self._connection[CONNECTION_INTERFACE_BUDDY_INFO]
            connection.connect_to_signal('PropertiesChanged',
                                         self.__buddy_info_updated_cb,
                                         byte_arrays=True)
            connection.connect_to_signal('ActivitiesChanged',
                                         self.__buddy_activities_changed_cb)
            connection.connect_to_signal('CurrentActivityChanged',
                                         self.__current_activity_changed_cb)
            # Mirror the locally-active activity to the server.
            if self._home_changed_hid is None:
                home_model = shell.get_model()
                self._home_changed_hid = home_model.connect(
                    'active-activity-changed',
                    self.__active_activity_changed_cb)
        else:
            logging.warning('Connection %s does not support OLPC buddy '
                            'properties', self._connection.object_path)
        if CONNECTION_INTERFACE_ACTIVITY_PROPERTIES in self._connection:
            connection = self._connection[
                CONNECTION_INTERFACE_ACTIVITY_PROPERTIES]
            connection.connect_to_signal(
                'ActivityPropertiesChanged',
                self.__activity_properties_changed_cb)
        else:
            logging.warning('Connection %s does not support OLPC activity '
                            'properties', self._connection.object_path)
        # Ensure the 'subscribe' contact list channel and watch membership.
        properties = {
            CHANNEL + '.ChannelType': CHANNEL_TYPE_CONTACT_LIST,
            CHANNEL + '.TargetHandleType': HANDLE_TYPE_LIST,
            CHANNEL + '.TargetID': 'subscribe',
        }
        properties = dbus.Dictionary(properties, signature='sv')
        connection = self._connection[CONNECTION_INTERFACE_REQUESTS]
        is_ours, channel_path, properties = \
            connection.EnsureChannel(properties)
        channel = Channel(self._connection.service_name, channel_path)
        channel[CHANNEL_INTERFACE_GROUP].connect_to_signal(
            'MembersChanged', self.__members_changed_cb)
        channel[PROPERTIES_IFACE].Get(
            CHANNEL_INTERFACE_GROUP,
            'Members',
            reply_handler=self.__get_members_ready_cb,
            error_handler=partial(
                self.__error_handler_cb,
                'Connection.GetMembers'))
    def __active_activity_changed_cb(self, model, home_activity):
        """Tell the server which shared activity the user is now in.

        Falls back to an empty id / handle 0 when the local activity is
        not shared (no known room handle)."""
        if home_activity is None:
            return
        room_handle = 0
        home_activity_id = home_activity.get_activity_id()
        for handle, activity_id in self._activity_handles.items():
            if home_activity_id == activity_id:
                room_handle = handle
                break
        if room_handle == 0:
            home_activity_id = ''
        connection = self._connection[CONNECTION_INTERFACE_BUDDY_INFO]
        connection.SetCurrentActivity(
            home_activity_id,
            room_handle,
            reply_handler=self.__set_current_activity_cb,
            error_handler=self.__set_current_activity_error_cb)
    def __set_current_activity_cb(self):
        logging.debug('_Account.__set_current_activity_cb')
    def __set_current_activity_error_cb(self, error):
        # Deliberately non-fatal: just log, unlike __error_handler_cb.
        logging.debug('_Account.__set_current_activity__error_cb %r', error)
    def __update_capabilities_cb(self):
        # Nothing to do on success; errors go through __error_handler_cb.
        pass
    def __aliases_changed_cb(self, aliases):
        """Re-emit nick changes for contacts we already track."""
        logging.debug('_Account.__aliases_changed_cb')
        for handle, alias in aliases:
            if handle in self._buddy_handles:
                logging.debug('Got handle %r with nick %r, going to update',
                              handle, alias)
                properties = {CONNECTION_INTERFACE_ALIASING + '/alias': alias}
                self.emit('buddy-updated', self._buddy_handles[handle],
                          properties)
    def __presences_changed_cb(self, presences):
        """Treat a contact going offline as removal from the neighborhood."""
        logging.debug('_Account.__presences_changed_cb %r', presences)
        for handle, presence in presences.iteritems():
            if handle in self._buddy_handles:
                presence_type, status_, message_ = presence
                if presence_type == CONNECTION_PRESENCE_TYPE_OFFLINE:
                    contact_id = self._buddy_handles[handle]
                    del self._buddy_handles[handle]
                    self.emit('buddy-removed', contact_id)
    def __buddy_info_updated_cb(self, handle, properties):
        logging.debug('_Account.__buddy_info_updated_cb %r', handle)
        if handle in self._buddy_handles:
            self.emit('buddy-updated', self._buddy_handles[handle], properties)
    def __current_activity_changed_cb(self, contact_handle, activity_id,
                                      room_handle):
        logging.debug('_Account.__current_activity_changed_cb %r %r %r',
                      contact_handle, activity_id, room_handle)
        if contact_handle in self._buddy_handles:
            contact_id = self._buddy_handles[contact_handle]
            # An empty id with a known room means: look the id up ourselves.
            if not activity_id and room_handle:
                activity_id = self._activity_handles.get(room_handle, '')
            self.emit('current-activity-updated', contact_id, activity_id)
    def __get_current_activity_cb(self, contact_handle, activity_id,
                                  room_handle):
        # Same handling as the signal callback above, but for the explicit
        # GetCurrentActivity reply.
        logging.debug('_Account.__get_current_activity_cb %r %r %r',
                      contact_handle, activity_id, room_handle)
        if contact_handle in self._buddy_handles:
            contact_id = self._buddy_handles[contact_handle]
            if not activity_id and room_handle:
                activity_id = self._activity_handles.get(room_handle, '')
            self.emit('current-activity-updated', contact_id, activity_id)
    def __buddy_activities_changed_cb(self, buddy_handle, activities):
        self._update_buddy_activities(buddy_handle, activities)
    def _update_buddy_activities(self, buddy_handle, activities):
        """Reconcile the (activity_id, room_handle) list for one buddy.

        Registers newly-seen activities (emitting 'activity-added' and
        fetching their properties), records the buddy's membership, and
        removes the buddy from activities no longer in the list."""
        logging.debug('_Account._update_buddy_activities')
        if buddy_handle not in self._activities_per_buddy:
            self._activities_per_buddy[buddy_handle] = set()
        for activity_id, room_handle in activities:
            if room_handle not in self._activity_handles:
                self._activity_handles[room_handle] = activity_id
                if buddy_handle == self._self_handle:
                    # If this is the locally-active activity, advertise it as
                    # our current activity now that we know its room handle.
                    home_model = shell.get_model()
                    activity = home_model.get_active_activity()
                    if activity.get_activity_id() == activity_id:
                        connection = self._connection[
                            CONNECTION_INTERFACE_BUDDY_INFO]
                        connection.SetCurrentActivity(
                            activity_id,
                            room_handle,
                            reply_handler=self.__set_current_activity_cb,
                            error_handler=self.__set_current_activity_error_cb)
                self.emit('activity-added', room_handle, activity_id)
                connection = self._connection[
                    CONNECTION_INTERFACE_ACTIVITY_PROPERTIES]
                connection.GetProperties(
                    room_handle,
                    reply_handler=partial(self.__get_properties_cb,
                                          room_handle),
                    error_handler=partial(self.__error_handler_cb,
                                          'ActivityProperties.GetProperties'))
                if buddy_handle != self._self_handle:
                    # Sometimes we'll get CurrentActivityChanged before we get
                    # to know about the activity so we miss the event. In that
                    # case, request again the current activity for this buddy.
                    connection = self._connection[
                        CONNECTION_INTERFACE_BUDDY_INFO]
                    connection.GetCurrentActivity(
                        buddy_handle,
                        reply_handler=partial(self.__get_current_activity_cb,
                                              buddy_handle),
                        error_handler=partial(self.__error_handler_cb,
                                              'BuddyInfo.GetCurrentActivity'))
            if activity_id not in self._buddies_per_activity:
                self._buddies_per_activity[activity_id] = set()
            self._buddies_per_activity[activity_id].add(buddy_handle)
            if activity_id not in self._activities_per_buddy[buddy_handle]:
                self._activities_per_buddy[buddy_handle].add(activity_id)
                if buddy_handle != self._self_handle:
                    self.emit('buddy-joined-activity',
                              self._buddy_handles[buddy_handle],
                              activity_id)
        # Drop the buddy from activities that vanished from the new list.
        current_activity_ids = \
            [activity_id for activity_id, room_handle in activities]
        for activity_id in self._activities_per_buddy[buddy_handle].copy():
            if activity_id not in current_activity_ids:
                self._remove_buddy_from_activity(buddy_handle, activity_id)
    def __get_properties_cb(self, room_handle, properties):
        logging.debug('_Account.__get_properties_cb %r %r', room_handle,
                      properties)
        if properties:
            self._update_activity(room_handle, properties)
    def _remove_buddy_from_activity(self, buddy_handle, activity_id):
        """Detach a buddy from an activity; remove the activity entirely
        when its last member leaves."""
        if buddy_handle in self._buddies_per_activity[activity_id]:
            self._buddies_per_activity[activity_id].remove(buddy_handle)
        if activity_id in self._activities_per_buddy[buddy_handle]:
            self._activities_per_buddy[buddy_handle].remove(activity_id)
            if buddy_handle != self._self_handle:
                self.emit('buddy-left-activity',
                          self._buddy_handles[buddy_handle],
                          activity_id)
        if not self._buddies_per_activity[activity_id]:
            del self._buddies_per_activity[activity_id]
            for room_handle in self._activity_handles.copy():
                if self._activity_handles[room_handle] == activity_id:
                    del self._activity_handles[room_handle]
                    break
            self.emit('activity-removed', activity_id)
    def __activity_properties_changed_cb(self, room_handle, properties):
        logging.debug('_Account.__activity_properties_changed_cb %r %r',
                      room_handle, properties)
        self._update_activity(room_handle, properties)
    def _update_activity(self, room_handle, properties):
        if room_handle in self._activity_handles:
            self.emit('activity-updated', self._activity_handles[room_handle],
                      properties)
        else:
            logging.debug('_Account.__activity_properties_changed_cb unknown '
                          'activity')
            # We don't get ActivitiesChanged for the owner of the connection,
            # so we query for its activities in order to find out.
            if CONNECTION_INTERFACE_BUDDY_INFO in self._connection:
                handle = self._self_handle
                connection = self._connection[CONNECTION_INTERFACE_BUDDY_INFO]
                # NOTE(review): error label 'Getactivities' is inconsistently
                # cased vs 'BuddyInfo.GetActivities' used elsewhere in this
                # class — cosmetic only, it is just a message prefix.
                connection.GetActivities(
                    handle,
                    reply_handler=partial(self.__got_activities_cb, handle),
                    error_handler=partial(self.__error_handler_cb,
                                          'BuddyInfo.Getactivities'))
    def __members_changed_cb(self, message, added, removed, local_pending,
                             remote_pending, actor, reason):
        # Only additions are handled here; removal flows through presence.
        self._add_buddy_handles(added)
    def __get_members_ready_cb(self, handles):
        logging.debug('_Account.__get_members_ready_cb %r', handles)
        if not handles:
            return
        self._add_buddy_handles(handles)
    def _add_buddy_handles(self, handles):
        """Fetch contact-id and alias for new handles asynchronously."""
        logging.debug('_Account._add_buddy_handles %r', handles)
        interfaces = [CONNECTION, CONNECTION_INTERFACE_ALIASING]
        self._connection[CONNECTION_INTERFACE_CONTACTS].GetContactAttributes(
            handles, interfaces, False,
            reply_handler=self.__get_contact_attributes_cb,
            error_handler=partial(self.__error_handler_cb,
                                  'Contacts.GetContactAttributes'))
    def __got_buddy_info_cb(self, handle, nick, properties):
        logging.debug('_Account.__got_buddy_info_cb %r', handle)
        self.emit('buddy-updated', self._buddy_handles[handle], properties)
    def __get_contact_attributes_cb(self, attributes):
        """Add or update buddies from a GetContactAttributes reply; new
        buddies additionally get their OLPC info/activities queried."""
        logging.debug('_Account.__get_contact_attributes_cb %r',
                      attributes.keys())
        for handle in attributes.keys():
            nick = attributes[handle][CONNECTION_INTERFACE_ALIASING + '/alias']
            if handle == self._self_handle:
                logging.debug('_Account.__get_contact_attributes_cb,'
                              ' do not add ourself %r', handle)
                continue
            if handle in self._buddy_handles and \
                    not self._buddy_handles[handle] is None:
                logging.debug('Got handle %r with nick %r, going to update',
                              handle, nick)
                self.emit('buddy-updated', self._buddy_handles[handle],
                          attributes[handle])
            else:
                logging.debug('Got handle %r with nick %r, going to add',
                              handle, nick)
                contact_id = attributes[handle][CONNECTION + '/contact-id']
                self._buddy_handles[handle] = contact_id
                if CONNECTION_INTERFACE_BUDDY_INFO in self._connection:
                    connection = \
                        self._connection[CONNECTION_INTERFACE_BUDDY_INFO]
                    # Long timeout: some jabber servers answer these very
                    # slowly (see _QUERY_DBUS_TIMEOUT).
                    connection.GetProperties(
                        handle,
                        reply_handler=partial(self.__got_buddy_info_cb, handle,
                                              nick),
                        error_handler=partial(self.__error_handler_cb,
                                              'BuddyInfo.GetProperties'),
                        byte_arrays=True,
                        timeout=_QUERY_DBUS_TIMEOUT)
                    connection.GetActivities(
                        handle,
                        reply_handler=partial(self.__got_activities_cb,
                                              handle),
                        error_handler=partial(self.__error_handler_cb,
                                              'BuddyInfo.GetActivities'),
                        timeout=_QUERY_DBUS_TIMEOUT)
                    connection.GetCurrentActivity(
                        handle,
                        reply_handler=partial(self.__get_current_activity_cb,
                                              handle),
                        error_handler=partial(self.__error_handler_cb,
                                              'BuddyInfo.GetCurrentActivity'),
                        timeout=_QUERY_DBUS_TIMEOUT)
                self.emit('buddy-added', contact_id, nick, handle)
    def __got_activities_cb(self, buddy_handle, activities):
        logging.debug('_Account.__got_activities_cb %r %r', buddy_handle,
                      activities)
        self._update_buddy_activities(buddy_handle, activities)
    def enable(self):
        """Enable the account via the Account Manager."""
        logging.debug('_Account.enable %s', self.object_path)
        self._set_enabled(True)
    def disable(self):
        """Disable the account and drop any live connection state."""
        logging.debug('_Account.disable %s', self.object_path)
        self._set_enabled(False)
        self._close_connection()
    def _set_enabled(self, value):
        bus = dbus.Bus()
        obj = bus.get_object(ACCOUNT_MANAGER_SERVICE, self.object_path)
        obj.Set(ACCOUNT, 'Enabled', value,
                reply_handler=self.__set_enabled_cb,
                error_handler=partial(self.__error_handler_cb,
                                      'Account.SetEnabled'),
                dbus_interface=dbus.PROPERTIES_IFACE)
    def __set_enabled_cb(self):
        logging.debug('_Account.__set_enabled_cb success')
class Neighborhood(GObject.GObject):
__gsignals__ = {
'activity-added': (GObject.SignalFlags.RUN_FIRST, None,
([object])),
'activity-removed': (GObject.SignalFlags.RUN_FIRST, None,
([object])),
'buddy-added': (GObject.SignalFlags.RUN_FIRST, None,
([object])),
'buddy-removed': (GObject.SignalFlags.RUN_FIRST, None,
([object])),
}
def __init__(self):
GObject.GObject.__init__(self)
self._buddies = {None: get_owner_instance()}
self._activities = {}
self._link_local_account = None
self._server_account = None
self._shell_model = shell.get_model()
self._settings_collaboration = \
Gio.Settings('org.sugarlabs.collaboration')
self._settings_collaboration.connect(
'changed::jabber-server', self.__jabber_server_changed_cb)
self._settings_user = Gio.Settings('org.sugarlabs.user')
self._settings_user.connect(
'changed::nick', self.__nick_changed_cb)
bus = dbus.Bus()
obj = bus.get_object(ACCOUNT_MANAGER_SERVICE, ACCOUNT_MANAGER_PATH)
account_manager = dbus.Interface(obj, ACCOUNT_MANAGER)
account_manager.Get(ACCOUNT_MANAGER, 'ValidAccounts',
dbus_interface=PROPERTIES_IFACE,
reply_handler=self.__got_accounts_cb,
error_handler=self.__error_handler_cb)
def __got_accounts_cb(self, account_paths):
self._link_local_account = \
self._ensure_link_local_account(account_paths)
self._connect_to_account(self._link_local_account)
self._server_account = self._ensure_server_account(account_paths)
self._connect_to_account(self._server_account)
def __error_handler_cb(self, error):
raise RuntimeError(error)
def _connect_to_account(self, account):
account.connect('buddy-added', self.__buddy_added_cb)
account.connect('buddy-updated', self.__buddy_updated_cb)
account.connect('buddy-removed', self.__buddy_removed_cb)
account.connect('buddy-joined-activity',
self.__buddy_joined_activity_cb)
account.connect('buddy-left-activity', self.__buddy_left_activity_cb)
account.connect('activity-added', self.__activity_added_cb)
account.connect('activity-updated', self.__activity_updated_cb)
account.connect('activity-removed', self.__activity_removed_cb)
account.connect('current-activity-updated',
self.__current_activity_updated_cb)
account.connect('connected', self.__account_connected_cb)
account.connect('disconnected', self.__account_disconnected_cb)
def __account_connected_cb(self, account):
logging.debug('__account_connected_cb %s', account.object_path)
if account == self._server_account:
self._link_local_account.disable()
def __account_disconnected_cb(self, account):
logging.debug('__account_disconnected_cb %s', account.object_path)
if account == self._server_account:
self._link_local_account.enable()
def _get_published_name(self):
"""Construct the published name based on the public key
Limit the name to be only 8 characters maximum. The avahi
service name has a 64 character limit. It consists of
the room name, the published name and the host name.
"""
public_key_hash = sha1(get_profile().pubkey).hexdigest()
return public_key_hash[:8]
def _ensure_link_local_account(self, account_paths):
for account_path in account_paths:
if 'salut' in account_path:
logging.debug('Already have a Salut account')
account = _Account(account_path)
account.enable()
return account
logging.debug('Still dont have a Salut account, creating one')
nick = self._settings_user.get_string('nick')
params = {
'nickname': nick,
'first-name': '',
'last-name': '',
'jid': self._get_jabber_account_id(),
'published-name': self._get_published_name(),
}
properties = {
ACCOUNT + '.Enabled': True,
ACCOUNT + '.Nickname': nick,
ACCOUNT + '.ConnectAutomatically': True,
}
bus = dbus.Bus()
obj = bus.get_object(ACCOUNT_MANAGER_SERVICE, ACCOUNT_MANAGER_PATH)
account_manager = dbus.Interface(obj, ACCOUNT_MANAGER)
account_path = account_manager.CreateAccount('salut', 'local-xmpp',
'salut', params,
properties)
return _Account(account_path)
def _ensure_server_account(self, account_paths):
for account_path in account_paths:
if 'gabble' in account_path:
logging.debug('Already have a Gabble account')
account = _Account(account_path)
account.enable()
return account
logging.debug('Still dont have a Gabble account, creating one')
nick = self._settings_user.get_string('nick')
server = self._settings_collaboration.get_string('jabber-server')
key_hash = get_profile().privkey_hash
params = {
'account': self._get_jabber_account_id(),
'password': key_hash,
'server': server,
'resource': 'sugar',
'require-encryption': True,
'ignore-ssl-errors': True,
'register': True,
'old-ssl': True,
'port': dbus.UInt32(5223),
}
properties = {
ACCOUNT + '.Enabled': True,
ACCOUNT + '.Nickname': nick,
ACCOUNT + '.ConnectAutomatically': True,
}
bus = dbus.Bus()
obj = bus.get_object(ACCOUNT_MANAGER_SERVICE, ACCOUNT_MANAGER_PATH)
account_manager = dbus.Interface(obj, ACCOUNT_MANAGER)
account_path = account_manager.CreateAccount('gabble', 'jabber',
'jabber', params,
properties)
return _Account(account_path)
def _get_jabber_account_id(self):
public_key_hash = sha1(get_profile().pubkey).hexdigest()
server = self._settings_collaboration.get_string('jabber-server')
return '%s@%s' % (public_key_hash, server)
def __jabber_server_changed_cb(self, settings, key):
logging.debug('__jabber_server_changed_cb')
bus = dbus.Bus()
account = bus.get_object(ACCOUNT_MANAGER_SERVICE,
self._server_account.object_path)
server = settings.get_string('jabber-server')
account_id = self._get_jabber_account_id()
params_needing_reconnect = account.UpdateParameters(
{'server': server,
'account': account_id,
'register': True},
dbus.Array([], 's'), dbus_interface=ACCOUNT)
if params_needing_reconnect:
account.Reconnect()
self._update_jid()
def __nick_changed_cb(self, settings, key):
logging.debug('__nick_changed_cb')
nick = settings.get_string('nick')
bus = dbus.Bus()
server_obj = bus.get_object(ACCOUNT_MANAGER_SERVICE,
self._server_account.object_path)
server_obj.Set(ACCOUNT, 'Nickname', nick,
dbus_interface=PROPERTIES_IFACE)
link_local_obj = bus.get_object(ACCOUNT_MANAGER_SERVICE,
self._link_local_account.object_path)
link_local_obj.Set(ACCOUNT, 'Nickname', nick,
dbus_interface=PROPERTIES_IFACE)
params_needing_reconnect = link_local_obj.UpdateParameters(
{'nickname': nick, 'published-name': self._get_published_name()},
dbus.Array([], 's'), dbus_interface=ACCOUNT)
if params_needing_reconnect:
link_local_obj.Reconnect()
self._update_jid()
def _update_jid(self):
bus = dbus.Bus()
account = bus.get_object(ACCOUNT_MANAGER_SERVICE,
self._link_local_account.object_path)
account_id = self._get_jabber_account_id()
params_needing_reconnect = account.UpdateParameters(
{'jid': account_id}, dbus.Array([], 's'), dbus_interface=ACCOUNT)
if params_needing_reconnect:
account.Reconnect()
def __buddy_added_cb(self, account, contact_id, nick, handle):
logging.debug('__buddy_added_cb %r', contact_id)
if contact_id in self._buddies:
logging.debug('__buddy_added_cb buddy already tracked')
return
buddy = BuddyModel(
nick=nick,
account=account.object_path,
contact_id=contact_id,
handle=handle)
self._buddies[contact_id] = buddy
def __buddy_updated_cb(self, account, contact_id, properties):
logging.debug('__buddy_updated_cb %r', contact_id)
if contact_id is None:
# Don't know the contact-id yet, will get the full state later
return
if contact_id not in self._buddies:
logging.debug('__buddy_updated_cb Unknown buddy with contact_id'
' %r', contact_id)
return
buddy = self._buddies[contact_id]
is_new = buddy.props.key is None and 'key' in properties
if 'color' in properties:
# arrives unicode but we connect with byte_arrays=True - SL #4157
buddy.props.color = XoColor(str(properties['color']))
if 'key' in properties:
buddy.props.key = properties['key']
nick_key = CONNECTION_INTERFACE_ALIASING + '/alias'
if nick_key in properties:
buddy.props.nick = properties[nick_key]
if is_new:
self.emit('buddy-added', buddy)
def __buddy_removed_cb(self, account, contact_id):
logging.debug('Neighborhood.__buddy_removed_cb %r', contact_id)
if contact_id not in self._buddies:
logging.debug('Neighborhood.__buddy_removed_cb Unknown buddy with '
'contact_id %r', contact_id)
return
buddy = self._buddies[contact_id]
del self._buddies[contact_id]
if buddy.props.key is not None:
self.emit('buddy-removed', buddy)
def __activity_added_cb(self, account, room_handle, activity_id):
logging.debug('__activity_added_cb %r %r', room_handle, activity_id)
if activity_id in self._activities:
logging.debug('__activity_added_cb activity already tracked')
return
activity = ActivityModel(activity_id, room_handle)
self._activities[activity_id] = activity
def __activity_updated_cb(self, account, activity_id, properties):
logging.debug('__activity_updated_cb %r %r', activity_id, properties)
if activity_id not in self._activities:
logging.debug('__activity_updated_cb Unknown activity with '
'activity_id %r', activity_id)
return
registry = bundleregistry.get_registry()
bundle = registry.get_bundle(properties['type'])
if not bundle:
logging.warning('Ignoring shared activity we don''t have')
return
activity = self._activities[activity_id]
is_new = activity.props.bundle is None
# arrives unicode but we connect with byte_arrays=True - SL #4157
activity.props.color = XoColor(str(properties['color']))
activity.props.bundle = bundle
activity.props.name = properties['name']
activity.props.private = properties['private']
if is_new:
self._shell_model.add_shared_activity(activity_id,
activity.props.color)
self.emit('activity-added', activity)
def __activity_removed_cb(self, account, activity_id):
logging.debug('__activity_removed_cb %r', activity_id)
if activity_id not in self._activities:
logging.debug('Unknown activity with id %s. Already removed?',
activity_id)
return
activity = self._activities[activity_id]
del self._activities[activity_id]
self._shell_model.remove_shared_activity(activity_id)
if activity.props.bundle is not None:
self.emit('activity-removed', activity)
def __current_activity_updated_cb(self, account, contact_id, activity_id):
logging.debug('__current_activity_updated_cb %r %r', contact_id,
activity_id)
if contact_id not in self._buddies:
logging.debug('__current_activity_updated_cb Unknown buddy with '
'contact_id %r', contact_id)
return
if activity_id and activity_id not in self._activities:
logging.debug('__current_activity_updated_cb Unknown activity with'
' id %s', activity_id)
activity_id = ''
buddy = self._buddies[contact_id]
if buddy.props.current_activity is not None:
if buddy.props.current_activity.activity_id == activity_id:
return
buddy.props.current_activity.remove_current_buddy(buddy)
if activity_id:
activity = self._activities[activity_id]
buddy.props.current_activity = activity
activity.add_current_buddy(buddy)
else:
buddy.props.current_activity = None
def __buddy_joined_activity_cb(self, account, contact_id, activity_id):
if contact_id not in self._buddies:
logging.debug('__buddy_joined_activity_cb Unknown buddy with '
'contact_id %r', contact_id)
return
if activity_id not in self._activities:
logging.debug('__buddy_joined_activity_cb Unknown activity with '
'activity_id %r', activity_id)
return
self._activities[activity_id].add_buddy(self._buddies[contact_id])
def __buddy_left_activity_cb(self, account, contact_id, activity_id):
if contact_id not in self._buddies:
logging.debug('__buddy_left_activity_cb Unknown buddy with '
'contact_id %r', contact_id)
return
if activity_id not in self._activities:
logging.debug('__buddy_left_activity_cb Unknown activity with '
'activity_id %r', activity_id)
return
self._activities[activity_id].remove_buddy(self._buddies[contact_id])
def get_buddies(self):
return self._buddies.values()
def get_buddy_by_key(self, key):
for buddy in self._buddies.values():
if buddy.key == key:
return buddy
return None
def get_buddy_by_handle(self, contact_handle):
for buddy in self._buddies.values():
if not buddy.is_owner() and buddy.handle == contact_handle:
return buddy
return None
def get_activity(self, activity_id):
return self._activities.get(activity_id, None)
def get_activity_by_room(self, room_handle):
for activity in self._activities.values():
if activity.room_handle == room_handle:
return activity
return None
def get_activities(self):
return self._activities.values()
def get_model():
    """Return the process-wide Neighborhood singleton, creating it lazily."""
    global _model
    if _model is None:
        # First call: build and remember the shared instance.
        _model = Neighborhood()
    return _model
| gpl-2.0 |
vjmac15/Lyilis | lib/pip/_vendor/ipaddress.py | 339 | 80176 | # Copyright 2007 Google Inc.
# Licensed to PSF under a Contributor Agreement.
"""A fast, lightweight IPv4/IPv6 manipulation library in Python.
This library is used to create/poke/manipulate IPv4 and IPv6 addresses
and networks.
"""
from __future__ import unicode_literals
import itertools
import struct
__version__ = '1.0.17'
# Compatibility functions
# The shims below let this module run unchanged on Python 2 and Python 3;
# each probe fails harmlessly on the interpreter that already provides the
# modern behaviour.
_compat_int_types = (int,)
try:
    # Python 2: integers may be either int or long.
    _compat_int_types = (int, long)
except NameError:
    pass
try:
    # Python 2: the text type is unicode.
    _compat_str = unicode
except NameError:
    _compat_str = str
    assert bytes != str
if b'\0'[0] == 0: # Python 3 semantics
    def _compat_bytes_to_byte_vals(byt):
        # Indexing bytes already yields ints on Python 3.
        return byt
else:
    def _compat_bytes_to_byte_vals(byt):
        # Python 2: indexing bytes yields 1-char strings; unpack each one.
        return [struct.unpack(b'!B', b)[0] for b in byt]
try:
    _compat_int_from_byte_vals = int.from_bytes
except AttributeError:
    def _compat_int_from_byte_vals(bytvals, endianess):
        # Pure-Python big-endian accumulation for Python 2.
        assert endianess == 'big'
        res = 0
        for bv in bytvals:
            assert isinstance(bv, _compat_int_types)
            res = (res << 8) + bv
        return res
def _compat_to_bytes(intval, length, endianess):
    """Serialize *intval* to big-endian bytes of the given length.

    Only lengths 4 (IPv4) and 16 (IPv6) are supported; struct.error is
    raised for out-of-range values, NotImplementedError otherwise.
    """
    assert isinstance(intval, _compat_int_types)
    assert endianess == 'big'
    if length == 4:
        if not 0 <= intval < 2 ** 32:
            raise struct.error("integer out of range for 'I' format code")
        return struct.pack(b'!I', intval)
    if length == 16:
        if not 0 <= intval < 2 ** 128:
            raise struct.error("integer out of range for 'QQ' format code")
        high = intval >> 64
        low = intval & 0xffffffffffffffff
        return struct.pack(b'!QQ', high, low)
    raise NotImplementedError()
if hasattr(int, 'bit_length'):
    # Not int.bit_length , since that won't work in 2.7 where long exists
    def _compat_bit_length(i):
        # Delegate to the builtin bit_length of int/long.
        return i.bit_length()
else:
    def _compat_bit_length(i):
        # Fallback for very old interpreters: count right-shifts until
        # the value is exhausted.
        for res in itertools.count():
            if i >> res == 0:
                return res
def _compat_range(start, end, step=1):
assert step > 0
i = start
while i < end:
yield i
i += step
class _TotalOrderingMixin(object):
    __slots__ = ()
    # Helper that derives the other comparison operations from
    # __lt__ and __eq__
    # We avoid functools.total_ordering because it doesn't handle
    # NotImplemented correctly yet (http://bugs.python.org/issue10042)
    def __eq__(self, other):
        # Subclasses must supply equality; everything else derives from it.
        raise NotImplementedError
    def __ne__(self, other):
        equal = self.__eq__(other)
        if equal is NotImplemented:
            # Propagate so Python can try the reflected operation.
            return NotImplemented
        return not equal
    def __lt__(self, other):
        # Subclasses must supply strict ordering.
        raise NotImplementedError
    def __le__(self, other):
        less = self.__lt__(other)
        if less is NotImplemented or not less:
            # Not strictly less: <= holds exactly when the operands are equal.
            return self.__eq__(other)
        return less
    def __gt__(self, other):
        less = self.__lt__(other)
        if less is NotImplemented:
            return NotImplemented
        equal = self.__eq__(other)
        if equal is NotImplemented:
            return NotImplemented
        return not (less or equal)
    def __ge__(self, other):
        less = self.__lt__(other)
        if less is NotImplemented:
            return NotImplemented
        # For a total order, >= is simply "not <".
        return not less
# Address lengths, in bits.
IPV4LENGTH = 32
IPV6LENGTH = 128
class AddressValueError(ValueError):
    """A Value Error related to the address."""
class NetmaskValueError(ValueError):
    """A Value Error related to the netmask."""
def ip_address(address):
    """Return an IPv4Address or IPv6Address built from *address*.

    Args:
        address: A string or integer. Integers less than 2**32 are
            considered IPv4 by default.

    Returns:
        An IPv4Address or IPv6Address object.

    Raises:
        ValueError: if *address* is neither a valid v4 nor v6 address.
    """
    for address_class in (IPv4Address, IPv6Address):
        try:
            return address_class(address)
        except (AddressValueError, NetmaskValueError):
            pass
    if isinstance(address, bytes):
        raise AddressValueError(
            '%r does not appear to be an IPv4 or IPv6 address. '
            'Did you pass in a bytes (str in Python 2) instead of'
            ' a unicode object?' % address)
    raise ValueError('%r does not appear to be an IPv4 or IPv6 address' %
                     address)
def ip_network(address, strict=True):
    """Return an IPv4Network or IPv6Network built from *address*.

    Args:
        address: A string or integer describing the network. Integers
            less than 2**32 are considered IPv4 by default.
        strict: If True, reject networks with host bits set.

    Returns:
        An IPv4Network or IPv6Network object.

    Raises:
        ValueError: if *address* is neither a valid v4 nor v6 network,
            or if host bits are set while *strict* is True.
    """
    for network_class in (IPv4Network, IPv6Network):
        try:
            return network_class(address, strict)
        except (AddressValueError, NetmaskValueError):
            pass
    if isinstance(address, bytes):
        raise AddressValueError(
            '%r does not appear to be an IPv4 or IPv6 network. '
            'Did you pass in a bytes (str in Python 2) instead of'
            ' a unicode object?' % address)
    raise ValueError('%r does not appear to be an IPv4 or IPv6 network' %
                     address)
def ip_interface(address):
    """Return an IPv4Interface or IPv6Interface built from *address*.

    An interface couples an address with its network, so the result is
    effectively a combination of the Address and Network classes.

    Args:
        address: A string or integer. Integers less than 2**32 are
            considered IPv4 by default.

    Returns:
        An IPv4Interface or IPv6Interface object.

    Raises:
        ValueError: if *address* is neither a valid v4 nor v6 address.
    """
    for interface_class in (IPv4Interface, IPv6Interface):
        try:
            return interface_class(address)
        except (AddressValueError, NetmaskValueError):
            pass
    raise ValueError('%r does not appear to be an IPv4 or IPv6 interface' %
                     address)
def v4_int_to_packed(address):
    """Pack an integer IPv4 address into 4 network-order (big-endian) bytes.

    Raises:
        ValueError: if *address* is negative or does not fit in 32 bits.
    """
    try:
        packed = _compat_to_bytes(address, 4, 'big')
    except (struct.error, OverflowError):
        raise ValueError("Address negative or too large for IPv4")
    return packed
def v6_int_to_packed(address):
    """Pack an integer IPv6 address into 16 network-order (big-endian) bytes.

    Raises:
        ValueError: if *address* is negative or does not fit in 128 bits.
    """
    try:
        packed = _compat_to_bytes(address, 16, 'big')
    except (struct.error, OverflowError):
        raise ValueError("Address negative or too large for IPv6")
    return packed
def _split_optional_netmask(address):
    """Split 'addr[/mask]' on '/'; reject more than one slash."""
    parts = _compat_str(address).split('/')
    if len(parts) > 2:
        raise AddressValueError("Only one '/' permitted in %r" % address)
    return parts
def _find_address_range(addresses):
"""Find a sequence of sorted deduplicated IPv#Address.
Args:
addresses: a list of IPv#Address objects.
Yields:
A tuple containing the first and last IP addresses in the sequence.
"""
it = iter(addresses)
first = last = next(it)
for ip in it:
if ip._ip != last._ip + 1:
yield first, last
first = ip
last = ip
yield first, last
def _count_righthand_zero_bits(number, bits):
    """Return the number of trailing zero bits of *number*, capped at *bits*.

    Zero is defined to have *bits* trailing zeros.
    """
    if number == 0:
        return bits
    # ~number & (number - 1) isolates the trailing zeros as a run of ones.
    trailing = _compat_bit_length(~number & (number - 1))
    return min(bits, trailing)
def summarize_address_range(first, last):
    """Summarize a network range given the first and last IP addresses.
    Example:
        >>> list(summarize_address_range(IPv4Address('192.0.2.0'),
        ...                              IPv4Address('192.0.2.130')))
        ...                                #doctest: +NORMALIZE_WHITESPACE
        [IPv4Network('192.0.2.0/25'), IPv4Network('192.0.2.128/31'),
         IPv4Network('192.0.2.130/32')]
    Args:
        first: the first IPv4Address or IPv6Address in the range.
        last: the last IPv4Address or IPv6Address in the range.
    Returns:
        An iterator of the summarized IPv(4|6) network objects.
    Raise:
        TypeError:
            If the first and last objects are not IP addresses.
            If the first and last objects are not the same version.
        ValueError:
            If the last object is not greater than the first.
            If the version of the first address is not 4 or 6.
    """
    if (not (isinstance(first, _BaseAddress) and
             isinstance(last, _BaseAddress))):
        raise TypeError('first and last must be IP addresses, not networks')
    if first.version != last.version:
        raise TypeError("%s and %s are not of the same version" % (
            first, last))
    if first > last:
        raise ValueError('last IP address must be greater than first')
    if first.version == 4:
        ip = IPv4Network
    elif first.version == 6:
        ip = IPv6Network
    else:
        raise ValueError('unknown IP version')
    ip_bits = first._max_prefixlen
    first_int = first._ip
    last_int = last._ip
    while first_int <= last_int:
        # Widest aligned block that starts at first_int (limited by its
        # trailing zero bits) and still fits within [first_int, last_int].
        nbits = min(_count_righthand_zero_bits(first_int, ip_bits),
                    _compat_bit_length(last_int - first_int + 1) - 1)
        net = ip((first_int, ip_bits - nbits))
        yield net
        first_int += 1 << nbits
        # Guard against wrapping past the top of the address space.
        if first_int - 1 == ip._ALL_ONES:
            break
def _collapse_addresses_internal(addresses):
    """Loops through the addresses, collapsing concurrent netblocks.
    Example:
        ip1 = IPv4Network('192.0.2.0/26')
        ip2 = IPv4Network('192.0.2.64/26')
        ip3 = IPv4Network('192.0.2.128/26')
        ip4 = IPv4Network('192.0.2.192/26')
        _collapse_addresses_internal([ip1, ip2, ip3, ip4]) ->
          [IPv4Network('192.0.2.0/24')]
    This shouldn't be called directly; it is called via
      collapse_addresses([]).
    Args:
        addresses: A list of IPv4Network's or IPv6Network's
    Returns:
        A list of IPv4Network's or IPv6Network's depending on what we were
        passed.
    """
    # First merge
    to_merge = list(addresses)
    subnets = {}
    while to_merge:
        net = to_merge.pop()
        supernet = net.supernet()
        existing = subnets.get(supernet)
        if existing is None:
            # First half of this supernet seen so far.
            subnets[supernet] = net
        elif existing != net:
            # Merge consecutive subnets
            del subnets[supernet]
            # Re-queue the supernet so it can merge further upward.
            to_merge.append(supernet)
    # Then iterate over resulting networks, skipping subsumed subnets
    last = None
    for net in sorted(subnets.values()):
        if last is not None:
            # Since they are sorted,
            # last.network_address <= net.network_address is a given.
            if last.broadcast_address >= net.broadcast_address:
                continue
        yield net
        last = net
def collapse_addresses(addresses):
    """Collapse a list of IP objects.
    Example:
        collapse_addresses([IPv4Network('192.0.2.0/25'),
                            IPv4Network('192.0.2.128/25')]) ->
                           [IPv4Network('192.0.2.0/24')]
    Args:
        addresses: An iterator of IPv4Network or IPv6Network objects.
    Returns:
        An iterator of the collapsed IPv(4|6)Network objects.
    Raises:
        TypeError: If passed a list of mixed version objects.
    """
    addrs = []
    ips = []
    nets = []
    # split IP addresses and networks
    for ip in addresses:
        if isinstance(ip, _BaseAddress):
            if ips and ips[-1]._version != ip._version:
                raise TypeError("%s and %s are not of the same version" % (
                    ip, ips[-1]))
            ips.append(ip)
        elif ip._prefixlen == ip._max_prefixlen:
            # A /32 (or /128) network is handled as a single address.
            if ips and ips[-1]._version != ip._version:
                raise TypeError("%s and %s are not of the same version" % (
                    ip, ips[-1]))
            try:
                ips.append(ip.ip)
            except AttributeError:
                ips.append(ip.network_address)
        else:
            if nets and nets[-1]._version != ip._version:
                raise TypeError("%s and %s are not of the same version" % (
                    ip, nets[-1]))
            nets.append(ip)
    # sort and dedup
    ips = sorted(set(ips))
    # find consecutive address ranges in the sorted sequence and summarize them
    if ips:
        for first, last in _find_address_range(ips):
            addrs.extend(summarize_address_range(first, last))
    return _collapse_addresses_internal(addrs + nets)
def get_mixed_type_key(obj):
    """Return a key suitable for sorting networks and addresses together.

    Address and Network objects are not comparable by default; this key
    function lets sorted() order a mixed list anyway.

    Args:
        obj: either a Network or Address object.

    Returns:
        An appropriate sort key, or NotImplemented for other types.
    """
    if isinstance(obj, _BaseNetwork):
        return obj._get_networks_key()
    if isinstance(obj, _BaseAddress):
        return obj._get_address_key()
    return NotImplemented
class _IPAddressBase(_TotalOrderingMixin):
    """The mother class."""
    __slots__ = ()
    @property
    def exploded(self):
        """Return the longhand version of the IP address as a string."""
        return self._explode_shorthand_ip_string()
    @property
    def compressed(self):
        """Return the shorthand version of the IP address as a string."""
        return _compat_str(self)
    @property
    def reverse_pointer(self):
        """The name of the reverse DNS pointer for the IP address, e.g.:
            >>> ipaddress.ip_address("127.0.0.1").reverse_pointer
            '1.0.0.127.in-addr.arpa'
            >>> ipaddress.ip_address("2001:db8::1").reverse_pointer
            '1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa'
        """
        return self._reverse_pointer()
    @property
    def version(self):
        # Subclasses (IPv4/IPv6) must override this.
        msg = '%200s has no version specified' % (type(self),)
        raise NotImplementedError(msg)
    def _check_int_address(self, address):
        # Integers must fall in [0, 2**max_prefixlen - 1].
        if address < 0:
            msg = "%d (< 0) is not permitted as an IPv%d address"
            raise AddressValueError(msg % (address, self._version))
        if address > self._ALL_ONES:
            msg = "%d (>= 2**%d) is not permitted as an IPv%d address"
            raise AddressValueError(msg % (address, self._max_prefixlen,
                                           self._version))
    def _check_packed_address(self, address, expected_len):
        # Packed (bytes) input must be exactly 4 (IPv4) or 16 (IPv6) bytes.
        address_len = len(address)
        if address_len != expected_len:
            msg = (
                '%r (len %d != %d) is not permitted as an IPv%d address. '
                'Did you pass in a bytes (str in Python 2) instead of'
                ' a unicode object?'
            )
            raise AddressValueError(msg % (address, address_len,
                                           expected_len, self._version))
    @classmethod
    def _ip_int_from_prefix(cls, prefixlen):
        """Turn the prefix length into a bitwise netmask
        Args:
            prefixlen: An integer, the prefix length.
        Returns:
            An integer.
        """
        return cls._ALL_ONES ^ (cls._ALL_ONES >> prefixlen)
    @classmethod
    def _prefix_from_ip_int(cls, ip_int):
        """Return prefix length from the bitwise netmask.
        Args:
            ip_int: An integer, the netmask in expanded bitwise format
        Returns:
            An integer, the prefix length.
        Raises:
            ValueError: If the input intermingles zeroes & ones
        """
        trailing_zeroes = _count_righthand_zero_bits(ip_int,
                                                     cls._max_prefixlen)
        prefixlen = cls._max_prefixlen - trailing_zeroes
        # A valid netmask is a run of ones followed by the trailing zeros.
        leading_ones = ip_int >> trailing_zeroes
        all_ones = (1 << prefixlen) - 1
        if leading_ones != all_ones:
            byteslen = cls._max_prefixlen // 8
            details = _compat_to_bytes(ip_int, byteslen, 'big')
            msg = 'Netmask pattern %r mixes zeroes & ones'
            raise ValueError(msg % details)
        return prefixlen
    @classmethod
    def _report_invalid_netmask(cls, netmask_str):
        # Shared error helper for the prefix parsers below.
        msg = '%r is not a valid netmask' % netmask_str
        raise NetmaskValueError(msg)
    @classmethod
    def _prefix_from_prefix_string(cls, prefixlen_str):
        """Return prefix length from a numeric string
        Args:
            prefixlen_str: The string to be converted
        Returns:
            An integer, the prefix length.
        Raises:
            NetmaskValueError: If the input is not a valid netmask
        """
        # int allows a leading +/- as well as surrounding whitespace,
        # so we ensure that isn't the case
        if not _BaseV4._DECIMAL_DIGITS.issuperset(prefixlen_str):
            cls._report_invalid_netmask(prefixlen_str)
        try:
            prefixlen = int(prefixlen_str)
        except ValueError:
            cls._report_invalid_netmask(prefixlen_str)
        if not (0 <= prefixlen <= cls._max_prefixlen):
            cls._report_invalid_netmask(prefixlen_str)
        return prefixlen
    @classmethod
    def _prefix_from_ip_string(cls, ip_str):
        """Turn a netmask/hostmask string into a prefix length
        Args:
            ip_str: The netmask/hostmask to be converted
        Returns:
            An integer, the prefix length.
        Raises:
            NetmaskValueError: If the input is not a valid netmask/hostmask
        """
        # Parse the netmask/hostmask like an IP address.
        try:
            ip_int = cls._ip_int_from_string(ip_str)
        except AddressValueError:
            cls._report_invalid_netmask(ip_str)
        # Try matching a netmask (this would be /1*0*/ as a bitwise regexp).
        # Note that the two ambiguous cases (all-ones and all-zeroes) are
        # treated as netmasks.
        try:
            return cls._prefix_from_ip_int(ip_int)
        except ValueError:
            pass
        # Invert the bits, and try matching a /0+1+/ hostmask instead.
        ip_int ^= cls._ALL_ONES
        try:
            return cls._prefix_from_ip_int(ip_int)
        except ValueError:
            cls._report_invalid_netmask(ip_str)
    def __reduce__(self):
        # Pickle via the string form; works for addresses and networks.
        return self.__class__, (_compat_str(self),)
class _BaseAddress(_IPAddressBase):
    """A generic IP object.
    This IP class contains the version independent methods which are
    used by single IP addresses.
    """
    __slots__ = ()
    def __int__(self):
        # The address as a plain integer.
        return self._ip
    def __eq__(self, other):
        try:
            return (self._ip == other._ip and
                    self._version == other._version)
        except AttributeError:
            # Not address-like: let Python try the reflected comparison.
            return NotImplemented
    def __lt__(self, other):
        if not isinstance(other, _IPAddressBase):
            return NotImplemented
        if not isinstance(other, _BaseAddress):
            # Comparing an address against a network is a caller error.
            raise TypeError('%s and %s are not of the same type' % (
                self, other))
        if self._version != other._version:
            raise TypeError('%s and %s are not of the same version' % (
                self, other))
        if self._ip != other._ip:
            return self._ip < other._ip
        return False
    # Shorthand for Integer addition and subtraction. This is not
    # meant to ever support addition/subtraction of addresses.
    def __add__(self, other):
        if not isinstance(other, _compat_int_types):
            return NotImplemented
        return self.__class__(int(self) + other)
    def __sub__(self, other):
        if not isinstance(other, _compat_int_types):
            return NotImplemented
        return self.__class__(int(self) - other)
    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, _compat_str(self))
    def __str__(self):
        return _compat_str(self._string_from_ip_int(self._ip))
    def __hash__(self):
        # Hashes the hex-string form; kept for backward compatibility.
        return hash(hex(int(self._ip)))
    def _get_address_key(self):
        # Sort key used by get_mixed_type_key().
        return (self._version, self)
    def __reduce__(self):
        # Pickle via the integer value (cheaper than the string form).
        return self.__class__, (self._ip,)
class _BaseNetwork(_IPAddressBase):
"""A generic IP network object.
This IP class contains the version independent methods which are
used by networks.
"""
def __init__(self, address):
self._cache = {}
def __repr__(self):
return '%s(%r)' % (self.__class__.__name__, _compat_str(self))
def __str__(self):
return '%s/%d' % (self.network_address, self.prefixlen)
def hosts(self):
"""Generate Iterator over usable hosts in a network.
This is like __iter__ except it doesn't return the network
or broadcast addresses.
"""
network = int(self.network_address)
broadcast = int(self.broadcast_address)
for x in _compat_range(network + 1, broadcast):
yield self._address_class(x)
def __iter__(self):
network = int(self.network_address)
broadcast = int(self.broadcast_address)
for x in _compat_range(network, broadcast + 1):
yield self._address_class(x)
def __getitem__(self, n):
network = int(self.network_address)
broadcast = int(self.broadcast_address)
if n >= 0:
if network + n > broadcast:
raise IndexError('address out of range')
return self._address_class(network + n)
else:
n += 1
if broadcast + n < network:
raise IndexError('address out of range')
return self._address_class(broadcast + n)
def __lt__(self, other):
if not isinstance(other, _IPAddressBase):
return NotImplemented
if not isinstance(other, _BaseNetwork):
raise TypeError('%s and %s are not of the same type' % (
self, other))
if self._version != other._version:
raise TypeError('%s and %s are not of the same version' % (
self, other))
if self.network_address != other.network_address:
return self.network_address < other.network_address
if self.netmask != other.netmask:
return self.netmask < other.netmask
return False
    def __eq__(self, other):
        # Networks are equal when version, network address and netmask all
        # match. The try/except converts any non-network 'other' (missing
        # attributes) into NotImplemented so Python tries the reflected op.
        try:
            return (self._version == other._version and
                    self.network_address == other.network_address and
                    int(self.netmask) == int(other.netmask))
        except AttributeError:
            return NotImplemented
def __hash__(self):
return hash(int(self.network_address) ^ int(self.netmask))
def __contains__(self, other):
# always false if one is v4 and the other is v6.
if self._version != other._version:
return False
# dealing with another network.
if isinstance(other, _BaseNetwork):
return False
# dealing with another address
else:
# address
return (int(self.network_address) <= int(other._ip) <=
int(self.broadcast_address))
def overlaps(self, other):
"""Tell if self is partly contained in other."""
return self.network_address in other or (
self.broadcast_address in other or (
other.network_address in self or (
other.broadcast_address in self)))
@property
def broadcast_address(self):
x = self._cache.get('broadcast_address')
if x is None:
x = self._address_class(int(self.network_address) |
int(self.hostmask))
self._cache['broadcast_address'] = x
return x
@property
def hostmask(self):
x = self._cache.get('hostmask')
if x is None:
x = self._address_class(int(self.netmask) ^ self._ALL_ONES)
self._cache['hostmask'] = x
return x
@property
def with_prefixlen(self):
return '%s/%d' % (self.network_address, self._prefixlen)
@property
def with_netmask(self):
return '%s/%s' % (self.network_address, self.netmask)
@property
def with_hostmask(self):
return '%s/%s' % (self.network_address, self.hostmask)
@property
def num_addresses(self):
"""Number of hosts in the current subnet."""
return int(self.broadcast_address) - int(self.network_address) + 1
@property
def _address_class(self):
# Returning bare address objects (rather than interfaces) allows for
# more consistent behaviour across the network address, broadcast
# address and individual host addresses.
msg = '%200s has no associated address class' % (type(self),)
raise NotImplementedError(msg)
@property
def prefixlen(self):
return self._prefixlen
    def address_exclude(self, other):
        """Remove an address from a larger block.
        For example:
            addr1 = ip_network('192.0.2.0/28')
            addr2 = ip_network('192.0.2.1/32')
            list(addr1.address_exclude(addr2)) =
                [IPv4Network('192.0.2.0/32'), IPv4Network('192.0.2.2/31'),
                 IPv4Network('192.0.2.4/30'), IPv4Network('192.0.2.8/29')]
        or IPv6:
            addr1 = ip_network('2001:db8::1/32')
            addr2 = ip_network('2001:db8::1/128')
            list(addr1.address_exclude(addr2)) =
                [ip_network('2001:db8::1/128'),
                 ip_network('2001:db8::2/127'),
                 ip_network('2001:db8::4/126'),
                 ip_network('2001:db8::8/125'),
                 ...
                 ip_network('2001:db8:8000::/33')]
        Args:
            other: An IPv4Network or IPv6Network object of the same type.
        Returns:
            An iterator of the IPv(4|6)Network objects which is self
            minus other.
        Raises:
            TypeError: If self and other are of differing address
              versions, or if other is not a network object.
            ValueError: If other is not completely contained by self.
        """
        if not self._version == other._version:
            raise TypeError("%s and %s are not of the same version" % (
                self, other))
        if not isinstance(other, _BaseNetwork):
            raise TypeError("%s is not a network object" % other)
        if not other.subnet_of(self):
            raise ValueError('%s not contained in %s' % (other, self))
        if other == self:
            # Excluding everything leaves nothing.
            return
        # Make sure we're comparing the network of other.
        other = other.__class__('%s/%s' % (other.network_address,
                                           other.prefixlen))
        # Repeatedly halve self; yield the half that does NOT contain
        # other and descend into the half that does, until a half equals
        # other exactly.
        s1, s2 = self.subnets()
        while s1 != other and s2 != other:
            if other.subnet_of(s1):
                yield s2
                s1, s2 = s1.subnets()
            elif other.subnet_of(s2):
                yield s1
                s1, s2 = s2.subnets()
            else:
                # If we got here, there's a bug somewhere.
                raise AssertionError('Error performing exclusion: '
                                     's1: %s s2: %s other: %s' %
                                     (s1, s2, other))
        if s1 == other:
            yield s2
        elif s2 == other:
            yield s1
        else:
            # If we got here, there's a bug somewhere.
            raise AssertionError('Error performing exclusion: '
                                 's1: %s s2: %s other: %s' %
                                 (s1, s2, other))
def compare_networks(self, other):
"""Compare two IP objects.
This is only concerned about the comparison of the integer
representation of the network addresses. This means that the
host bits aren't considered at all in this method. If you want
to compare host bits, you can easily enough do a
'HostA._ip < HostB._ip'
Args:
other: An IP object.
Returns:
If the IP versions of self and other are the same, returns:
-1 if self < other:
eg: IPv4Network('192.0.2.0/25') < IPv4Network('192.0.2.128/25')
IPv6Network('2001:db8::1000/124') <
IPv6Network('2001:db8::2000/124')
0 if self == other
eg: IPv4Network('192.0.2.0/24') == IPv4Network('192.0.2.0/24')
IPv6Network('2001:db8::1000/124') ==
IPv6Network('2001:db8::1000/124')
1 if self > other
eg: IPv4Network('192.0.2.128/25') > IPv4Network('192.0.2.0/25')
IPv6Network('2001:db8::2000/124') >
IPv6Network('2001:db8::1000/124')
Raises:
TypeError if the IP versions are different.
"""
# does this need to raise a ValueError?
if self._version != other._version:
raise TypeError('%s and %s are not of the same type' % (
self, other))
# self._version == other._version below here:
if self.network_address < other.network_address:
return -1
if self.network_address > other.network_address:
return 1
# self.network_address == other.network_address below here:
if self.netmask < other.netmask:
return -1
if self.netmask > other.netmask:
return 1
return 0
def _get_networks_key(self):
"""Network-only key function.
Returns an object that identifies this address' network and
netmask. This function is a suitable "key" argument for sorted()
and list.sort().
"""
return (self._version, self.network_address, self.netmask)
def subnets(self, prefixlen_diff=1, new_prefix=None):
    """The subnets which join to make the current subnet.

    In the case that self contains only one IP
    (self._prefixlen == 32 for IPv4 or self._prefixlen == 128
    for IPv6), yield an iterator with just ourself.

    Args:
        prefixlen_diff: An integer, the amount the prefix length
            should be increased by. This should not be set if
            new_prefix is also set.
        new_prefix: The desired new prefix length. This must be a
            larger number (smaller prefix) than the existing prefix.
            This should not be set if prefixlen_diff is also set.

    Returns:
        An iterator of IPv(4|6) objects.

    Raises:
        ValueError: The prefixlen_diff is too small or too large.
            OR
            prefixlen_diff and new_prefix are both set or new_prefix
            is a smaller number than the current prefix (smaller
            number means a larger network)
    """
    # A single-address network cannot be subdivided further.
    if self._prefixlen == self._max_prefixlen:
        yield self
        return

    if new_prefix is not None:
        if new_prefix < self._prefixlen:
            raise ValueError('new prefix must be longer')
        # prefixlen_diff still has its default of 1 iff the caller
        # did not pass it explicitly alongside new_prefix.
        if prefixlen_diff != 1:
            raise ValueError('cannot set prefixlen_diff and new_prefix')
        prefixlen_diff = new_prefix - self._prefixlen

    if prefixlen_diff < 0:
        raise ValueError('prefix length diff must be > 0')
    new_prefixlen = self._prefixlen + prefixlen_diff

    if new_prefixlen > self._max_prefixlen:
        raise ValueError(
            'prefix length diff %d is invalid for netblock %s' % (
                new_prefixlen, self))

    # Walk the full address range of this network in steps of one
    # subnet of the new (longer) prefix length.
    start = int(self.network_address)
    end = int(self.broadcast_address) + 1
    step = (int(self.hostmask) + 1) >> prefixlen_diff
    for new_addr in _compat_range(start, end, step):
        current = self.__class__((new_addr, new_prefixlen))
        yield current
def supernet(self, prefixlen_diff=1, new_prefix=None):
    """The supernet containing the current network.

    Args:
        prefixlen_diff: An integer, the amount the prefix length of
            the network should be decreased by.  For example, given
            a /24 network and a prefixlen_diff of 3, a supernet with
            a /21 netmask is returned.
        new_prefix: Alternatively, the desired new prefix length,
            which must be shorter than the current one.  Mutually
            exclusive with prefixlen_diff.

    Returns:
        An IPv4 network object.

    Raises:
        ValueError: If self.prefixlen - prefixlen_diff < 0 (i.e. a
            negative prefix length), or if both prefixlen_diff and
            new_prefix are given, or if new_prefix is longer than
            the current prefix.
    """
    # A /0 network has no supernet other than itself.
    if self._prefixlen == 0:
        return self

    if new_prefix is not None:
        if new_prefix > self._prefixlen:
            raise ValueError('new prefix must be shorter')
        if prefixlen_diff != 1:
            raise ValueError('cannot set prefixlen_diff and new_prefix')
        prefixlen_diff = self._prefixlen - new_prefix

    shorter_prefix = self.prefixlen - prefixlen_diff
    if shorter_prefix < 0:
        raise ValueError(
            'current prefixlen is %d, cannot have a prefixlen_diff of %d' %
            (self.prefixlen, prefixlen_diff))
    # Widen the netmask and clear the newly uncovered host bits.
    widened_mask = int(self.netmask) << prefixlen_diff
    return self.__class__(
        (int(self.network_address) & widened_mask, shorter_prefix))
@property
def is_multicast(self):
    """Test if the address is reserved for multicast use.

    Returns:
        A boolean, True if the address is a multicast address.
        See RFC 2373 2.7 for details.
    """
    # The whole network is multicast only if both its first and last
    # addresses are multicast.
    return (self.network_address.is_multicast and
            self.broadcast_address.is_multicast)
def subnet_of(self, other):
    """Return True if this network is a subnet of *other*.

    Mixed IP versions never contain each other, so the result is
    False when the versions differ.  *other* must expose both
    network_address and broadcast_address (i.e. be network-like).

    Raises:
        TypeError: if *other* is a bare address rather than a network.
    """
    # always false if one is v4 and the other is v6.
    if self._version != other._version:
        return False
    looks_like_network = (hasattr(other, 'network_address') and
                          hasattr(other, 'broadcast_address'))
    if not looks_like_network:
        # A bare address cannot contain a network.
        raise TypeError('Unable to test subnet containment with element '
                        'of type %s' % type(other))
    # self lies within other when other's range brackets self's range.
    return (other.network_address <= self.network_address and
            other.broadcast_address >= self.broadcast_address)
def supernet_of(self, other):
    """Return True if this network is a supernet of *other*.

    Mixed IP versions never contain each other, so the result is
    False when the versions differ.  *other* must expose both
    network_address and broadcast_address (i.e. be network-like).

    Raises:
        TypeError: if *other* is a bare address rather than a network.
    """
    # always false if one is v4 and the other is v6.
    if self._version != other._version:
        return False
    looks_like_network = (hasattr(other, 'network_address') and
                          hasattr(other, 'broadcast_address'))
    if not looks_like_network:
        # A bare address cannot be contained-tested as a network.
        raise TypeError('Unable to test subnet containment with element '
                        'of type %s' % type(other))
    # self contains other when self's range brackets other's range.
    return (other.network_address >= self.network_address and
            other.broadcast_address <= self.broadcast_address)
@property
def is_reserved(self):
    """Test if the address is otherwise IETF reserved.

    Returns:
        A boolean, True if the address is within one of the
        reserved IPv6 Network ranges.
    """
    # Each of these network-level properties delegates to the same
    # property on the first and last addresses of the network; the
    # network qualifies only if both endpoints do.
    return (self.network_address.is_reserved and
            self.broadcast_address.is_reserved)

@property
def is_link_local(self):
    """Test if the address is reserved for link-local.

    Returns:
        A boolean, True if the address is reserved per RFC 4291.
    """
    return (self.network_address.is_link_local and
            self.broadcast_address.is_link_local)

@property
def is_private(self):
    """Test if this address is allocated for private networks.

    Returns:
        A boolean, True if the address is reserved per
        iana-ipv4-special-registry or iana-ipv6-special-registry.
    """
    return (self.network_address.is_private and
            self.broadcast_address.is_private)

@property
def is_global(self):
    """Test if this address is allocated for public networks.

    Returns:
        A boolean, True if the address is not reserved per
        iana-ipv4-special-registry or iana-ipv6-special-registry.
    """
    # Defined as the complement of is_private (IPv4Network overrides
    # this with a more precise check).
    return not self.is_private

@property
def is_unspecified(self):
    """Test if the address is unspecified.

    Returns:
        A boolean, True if this is the unspecified address as defined in
        RFC 2373 2.5.2.
    """
    return (self.network_address.is_unspecified and
            self.broadcast_address.is_unspecified)

@property
def is_loopback(self):
    """Test if the address is a loopback address.

    Returns:
        A boolean, True if the address is a loopback address as defined in
        RFC 2373 2.5.3.
    """
    return (self.network_address.is_loopback and
            self.broadcast_address.is_loopback)
class _BaseV4(object):

    """Base IPv4 object.

    The following methods are used by IPv4 objects in both single IP
    addresses and networks.
    """

    __slots__ = ()
    _version = 4
    # Equivalent to 255.255.255.255 or 32 bits of 1's.
    _ALL_ONES = (2 ** IPV4LENGTH) - 1
    _DECIMAL_DIGITS = frozenset('0123456789')
    # the valid octets for host and netmasks. only useful for IPv4.
    _valid_mask_octets = frozenset([255, 254, 252, 248, 240, 224, 192, 128, 0])
    _max_prefixlen = IPV4LENGTH
    # There are only a handful of valid v4 netmasks, so we cache them all
    # when constructed (see _make_netmask()).
    _netmask_cache = {}

    def _explode_shorthand_ip_string(self):
        # IPv4 has no shorthand notation; the "exploded" form is simply
        # the normal dotted-decimal string.
        return _compat_str(self)

    @classmethod
    def _make_netmask(cls, arg):
        """Make a (netmask, prefix_len) tuple from the given argument.

        Argument can be:
        - an integer (the prefix length)
        - a string representing the prefix length (e.g. "24")
        - a string representing the prefix netmask (e.g. "255.255.255.0")
        """
        if arg not in cls._netmask_cache:
            if isinstance(arg, _compat_int_types):
                prefixlen = arg
            else:
                try:
                    # Check for a netmask in prefix length form
                    prefixlen = cls._prefix_from_prefix_string(arg)
                except NetmaskValueError:
                    # Check for a netmask or hostmask in dotted-quad form.
                    # This may raise NetmaskValueError.
                    prefixlen = cls._prefix_from_ip_string(arg)
            netmask = IPv4Address(cls._ip_int_from_prefix(prefixlen))
            cls._netmask_cache[arg] = netmask, prefixlen
        return cls._netmask_cache[arg]

    @classmethod
    def _ip_int_from_string(cls, ip_str):
        """Turn the given IP string into an integer for comparison.

        Args:
            ip_str: A string, the IP ip_str.

        Returns:
            The IP ip_str as an integer.

        Raises:
            AddressValueError: if ip_str isn't a valid IPv4 Address.
        """
        if not ip_str:
            raise AddressValueError('Address cannot be empty')

        octets = ip_str.split('.')
        if len(octets) != 4:
            raise AddressValueError("Expected 4 octets in %r" % ip_str)

        try:
            # Parse each octet and pack them big-endian into one int.
            return _compat_int_from_byte_vals(
                map(cls._parse_octet, octets), 'big')
        except ValueError as exc:
            # Re-raise octet-level errors with the full address context.
            raise AddressValueError("%s in %r" % (exc, ip_str))

    @classmethod
    def _parse_octet(cls, octet_str):
        """Convert a decimal octet into an integer.

        Args:
            octet_str: A string, the number to parse.

        Returns:
            The octet as an integer.

        Raises:
            ValueError: if the octet isn't strictly a decimal from [0..255].
        """
        if not octet_str:
            raise ValueError("Empty octet not permitted")
        # Whitelist the characters, since int() allows a lot of bizarre stuff.
        if not cls._DECIMAL_DIGITS.issuperset(octet_str):
            msg = "Only decimal digits permitted in %r"
            raise ValueError(msg % octet_str)
        # We do the length check second, since the invalid character error
        # is likely to be more informative for the user
        if len(octet_str) > 3:
            msg = "At most 3 characters permitted in %r"
            raise ValueError(msg % octet_str)
        # Convert to integer (we know digits are legal)
        octet_int = int(octet_str, 10)
        # Any octets that look like they *might* be written in octal,
        # and which don't look exactly the same in both octal and
        # decimal are rejected as ambiguous
        if octet_int > 7 and octet_str[0] == '0':
            msg = "Ambiguous (octal/decimal) value in %r not permitted"
            raise ValueError(msg % octet_str)
        if octet_int > 255:
            raise ValueError("Octet %d (> 255) not permitted" % octet_int)
        return octet_int

    @classmethod
    def _string_from_ip_int(cls, ip_int):
        """Turns a 32-bit integer into dotted decimal notation.

        Args:
            ip_int: An integer, the IP address.

        Returns:
            The IP address as a string in dotted decimal notation.
        """
        # On Python 2 iterating bytes yields 1-char str; on Python 3 it
        # yields ints -- the isinstance check handles both.
        return '.'.join(_compat_str(struct.unpack(b'!B', b)[0]
                                    if isinstance(b, bytes)
                                    else b)
                        for b in _compat_to_bytes(ip_int, 4, 'big'))

    def _is_hostmask(self, ip_str):
        """Test if the IP string is a hostmask (rather than a netmask).

        Args:
            ip_str: A string, the potential hostmask.

        Returns:
            A boolean, True if the IP string is a hostmask.
        """
        bits = ip_str.split('.')
        try:
            # Keep only octets that are valid in *some* mask.
            parts = [x for x in map(int, bits) if x in self._valid_mask_octets]
        except ValueError:
            return False
        if len(parts) != len(bits):
            return False
        # A hostmask ascends (e.g. 0.0.0.255); a netmask descends.
        if parts[0] < parts[-1]:
            return True
        return False

    def _reverse_pointer(self):
        """Return the reverse DNS pointer name for the IPv4 address.

        This implements the method described in RFC1035 3.5.
        """
        reverse_octets = _compat_str(self).split('.')[::-1]
        return '.'.join(reverse_octets) + '.in-addr.arpa'

    @property
    def max_prefixlen(self):
        return self._max_prefixlen

    @property
    def version(self):
        return self._version
class IPv4Address(_BaseV4, _BaseAddress):

    """Represent and manipulate single IPv4 Addresses."""

    __slots__ = ('_ip', '__weakref__')

    def __init__(self, address):
        """
        Args:
            address: A string or integer representing the IP

              Additionally, an integer can be passed, so
              IPv4Address('192.0.2.1') == IPv4Address(3221225985).
              or, more generally
              IPv4Address(int(IPv4Address('192.0.2.1'))) ==
                IPv4Address('192.0.2.1')

        Raises:
            AddressValueError: If ipaddress isn't a valid IPv4 address.
        """
        # Efficient constructor from integer.
        if isinstance(address, _compat_int_types):
            self._check_int_address(address)
            self._ip = address
            return

        # Constructing from a packed address (4 big-endian bytes).
        if isinstance(address, bytes):
            self._check_packed_address(address, 4)
            bvs = _compat_bytes_to_byte_vals(address)
            self._ip = _compat_int_from_byte_vals(bvs, 'big')
            return

        # Assume input argument to be string or any object representation
        # which converts into a formatted IP string.
        addr_str = _compat_str(address)
        if '/' in addr_str:
            # A prefix belongs on IPv4Interface/IPv4Network, not here.
            raise AddressValueError("Unexpected '/' in %r" % address)
        self._ip = self._ip_int_from_string(addr_str)

    @property
    def packed(self):
        """The binary representation of this address."""
        return v4_int_to_packed(self._ip)

    @property
    def is_reserved(self):
        """Test if the address is otherwise IETF reserved.

        Returns:
            A boolean, True if the address is within the
            reserved IPv4 Network range.
        """
        return self in self._constants._reserved_network

    @property
    def is_private(self):
        """Test if this address is allocated for private networks.

        Returns:
            A boolean, True if the address is reserved per
            iana-ipv4-special-registry.
        """
        return any(self in net for net in self._constants._private_networks)

    @property
    def is_global(self):
        # Shared address space (100.64.0.0/10, the "_public_network"
        # constant) is excluded from both is_private and is_global.
        return (
            self not in self._constants._public_network and
            not self.is_private)

    @property
    def is_multicast(self):
        """Test if the address is reserved for multicast use.

        Returns:
            A boolean, True if the address is multicast.
            See RFC 3171 for details.
        """
        return self in self._constants._multicast_network

    @property
    def is_unspecified(self):
        """Test if the address is unspecified.

        Returns:
            A boolean, True if this is the unspecified address as defined in
            RFC 5735 3.
        """
        return self == self._constants._unspecified_address

    @property
    def is_loopback(self):
        """Test if the address is a loopback address.

        Returns:
            A boolean, True if the address is a loopback per RFC 3330.
        """
        return self in self._constants._loopback_network

    @property
    def is_link_local(self):
        """Test if the address is reserved for link-local.

        Returns:
            A boolean, True if the address is link-local per RFC 3927.
        """
        return self in self._constants._linklocal_network
class IPv4Interface(IPv4Address):
    """An IPv4 address paired with the network it belongs to.

    Accepts the same integer/bytes/tuple/string forms as IPv4Network,
    but keeps the full host address instead of masking host bits.
    """

    def __init__(self, address):
        # Integer or packed-bytes input: a bare host address whose
        # associated network is the host itself (/32).
        if isinstance(address, (bytes, _compat_int_types)):
            IPv4Address.__init__(self, address)
            self.network = IPv4Network(self._ip)
            self._prefixlen = self._max_prefixlen
            # NOTE(review): unlike the tuple/string paths below, this
            # path does not set self.netmask/self.hostmask -- confirm
            # whether that asymmetry is intended.
            return

        # Tuple input: (address, optional prefix length).
        if isinstance(address, tuple):
            IPv4Address.__init__(self, address[0])
            if len(address) > 1:
                self._prefixlen = int(address[1])
            else:
                self._prefixlen = self._max_prefixlen

            self.network = IPv4Network(address, strict=False)
            self.netmask = self.network.netmask
            self.hostmask = self.network.hostmask
            return

        # String input: "a.b.c.d[/prefix-or-mask]".
        addr = _split_optional_netmask(address)
        IPv4Address.__init__(self, addr[0])

        self.network = IPv4Network(address, strict=False)
        self._prefixlen = self.network._prefixlen

        self.netmask = self.network.netmask
        self.hostmask = self.network.hostmask

    def __str__(self):
        return '%s/%d' % (self._string_from_ip_int(self._ip),
                          self.network.prefixlen)

    def __eq__(self, other):
        address_equal = IPv4Address.__eq__(self, other)
        if not address_equal or address_equal is NotImplemented:
            return address_equal
        try:
            return self.network == other.network
        except AttributeError:
            # An interface with an associated network is NOT the
            # same as an unassociated address. That's why the hash
            # takes the extra info into account.
            return False

    def __lt__(self, other):
        address_less = IPv4Address.__lt__(self, other)
        if address_less is NotImplemented:
            return NotImplemented
        try:
            return self.network < other.network
        except AttributeError:
            # We *do* allow addresses and interfaces to be sorted. The
            # unassociated address is considered less than all interfaces.
            return False

    def __hash__(self):
        # Fold the network association into the hash so an interface
        # never hashes equal to the bare address (see __eq__).
        return self._ip ^ self._prefixlen ^ int(self.network.network_address)

    __reduce__ = _IPAddressBase.__reduce__

    @property
    def ip(self):
        # The bare address, stripped of its network association.
        return IPv4Address(self._ip)

    @property
    def with_prefixlen(self):
        return '%s/%s' % (self._string_from_ip_int(self._ip),
                          self._prefixlen)

    @property
    def with_netmask(self):
        return '%s/%s' % (self._string_from_ip_int(self._ip),
                          self.netmask)

    @property
    def with_hostmask(self):
        return '%s/%s' % (self._string_from_ip_int(self._ip),
                          self.hostmask)
class IPv4Network(_BaseV4, _BaseNetwork):

    """This class represents and manipulates 32-bit IPv4 network + addresses..

    Attributes: [examples for IPv4Network('192.0.2.0/27')]
        .network_address: IPv4Address('192.0.2.0')
        .hostmask: IPv4Address('0.0.0.31')
        .broadcast_address: IPv4Address('192.0.2.32')
        .netmask: IPv4Address('255.255.255.224')
        .prefixlen: 27
    """

    # Class to use when creating address objects
    _address_class = IPv4Address

    def __init__(self, address, strict=True):
        """Instantiate a new IPv4 network object.

        Args:
            address: A string or integer representing the IP [& network].
              '192.0.2.0/24'
              '192.0.2.0/255.255.255.0'
              '192.0.0.2/0.0.0.255'
              are all functionally the same in IPv4. Similarly,
              '192.0.2.1'
              '192.0.2.1/255.255.255.255'
              '192.0.2.1/32'
              are also functionally equivalent. That is to say, failing to
              provide a subnetmask will create an object with a mask of /32.

              If the mask (portion after the / in the argument) is given in
              dotted quad form, it is treated as a netmask if it starts with a
              non-zero field (e.g. /255.0.0.0 == /8) and as a hostmask if it
              starts with a zero field (e.g. 0.255.255.255 == /8), with the
              single exception of an all-zero mask which is treated as a
              netmask == /0. If no mask is given, a default of /32 is used.

              Additionally, an integer can be passed, so
              IPv4Network('192.0.2.1') == IPv4Network(3221225985)
              or, more generally
              IPv4Interface(int(IPv4Interface('192.0.2.1'))) ==
                IPv4Interface('192.0.2.1')

            strict: A boolean; if True, reject addresses with host bits set.

        Raises:
            AddressValueError: If ipaddress isn't a valid IPv4 address.
            NetmaskValueError: If the netmask isn't valid for
              an IPv4 address.
            ValueError: If strict is True and a network address is not
              supplied.
        """
        _BaseNetwork.__init__(self, address)

        # Constructing from a packed address or integer: a /32 network.
        if isinstance(address, (_compat_int_types, bytes)):
            self.network_address = IPv4Address(address)
            self.netmask, self._prefixlen = self._make_netmask(
                self._max_prefixlen)
            # fixme: address/network test here.
            return

        # Tuple input: (address, optional prefix-or-mask).
        if isinstance(address, tuple):
            if len(address) > 1:
                arg = address[1]
            else:
                # We weren't given an address[1]
                arg = self._max_prefixlen
            self.network_address = IPv4Address(address[0])
            self.netmask, self._prefixlen = self._make_netmask(arg)
            packed = int(self.network_address)
            if packed & int(self.netmask) != packed:
                if strict:
                    raise ValueError('%s has host bits set' % self)
                else:
                    # Silently mask off the host bits.
                    self.network_address = IPv4Address(packed &
                                                       int(self.netmask))
            return

        # Assume input argument to be string or any object representation
        # which converts into a formatted IP prefix string.
        addr = _split_optional_netmask(address)
        self.network_address = IPv4Address(self._ip_int_from_string(addr[0]))

        if len(addr) == 2:
            arg = addr[1]
        else:
            arg = self._max_prefixlen
        self.netmask, self._prefixlen = self._make_netmask(arg)

        if strict:
            if (IPv4Address(int(self.network_address) & int(self.netmask)) !=
                    self.network_address):
                raise ValueError('%s has host bits set' % self)
        self.network_address = IPv4Address(int(self.network_address) &
                                           int(self.netmask))

        # A /31 network has no network/broadcast addresses (RFC 3021);
        # every address in it is a usable host.
        if self._prefixlen == (self._max_prefixlen - 1):
            self.hosts = self.__iter__

    @property
    def is_global(self):
        """Test if this address is allocated for public networks.

        Returns:
            A boolean, True if the address is not reserved per
            iana-ipv4-special-registry.
        """
        # Shared address space (100.64.0.0/10) is neither private nor
        # global; exclude it explicitly.
        return (not (self.network_address in IPv4Network('100.64.0.0/10') and
                     self.broadcast_address in IPv4Network('100.64.0.0/10')) and
                not self.is_private)
class _IPv4Constants(object):
    # Networks backing the IPv4Address classification properties
    # (is_private, is_multicast, etc.).

    _linklocal_network = IPv4Network('169.254.0.0/16')

    _loopback_network = IPv4Network('127.0.0.0/8')

    _multicast_network = IPv4Network('224.0.0.0/4')

    # Shared address space (carrier-grade NAT); treated as neither
    # private nor global by IPv4Address.is_global.
    _public_network = IPv4Network('100.64.0.0/10')

    _private_networks = [
        IPv4Network('0.0.0.0/8'),
        IPv4Network('10.0.0.0/8'),
        IPv4Network('127.0.0.0/8'),
        IPv4Network('169.254.0.0/16'),
        IPv4Network('172.16.0.0/12'),
        IPv4Network('192.0.0.0/29'),
        IPv4Network('192.0.0.170/31'),
        IPv4Network('192.0.2.0/24'),
        IPv4Network('192.168.0.0/16'),
        IPv4Network('198.18.0.0/15'),
        IPv4Network('198.51.100.0/24'),
        IPv4Network('203.0.113.0/24'),
        IPv4Network('240.0.0.0/4'),
        IPv4Network('255.255.255.255/32'),
    ]

    _reserved_network = IPv4Network('240.0.0.0/4')

    _unspecified_address = IPv4Address('0.0.0.0')


# Attached after the class definitions because the constants are
# IPv4Network objects, which require IPv4Network to exist first.
IPv4Address._constants = _IPv4Constants
class _BaseV6(object):

    """Base IPv6 object.

    The following methods are used by IPv6 objects in both single IP
    addresses and networks.
    """

    __slots__ = ()
    _version = 6
    _ALL_ONES = (2 ** IPV6LENGTH) - 1
    _HEXTET_COUNT = 8
    _HEX_DIGITS = frozenset('0123456789ABCDEFabcdef')
    _max_prefixlen = IPV6LENGTH

    # There are only a bunch of valid v6 netmasks, so we cache them all
    # when constructed (see _make_netmask()).
    _netmask_cache = {}

    @classmethod
    def _make_netmask(cls, arg):
        """Make a (netmask, prefix_len) tuple from the given argument.

        Argument can be:
        - an integer (the prefix length)
        - a string representing the prefix length (e.g. "24")
        - a string representing the prefix netmask (e.g. "255.255.255.0")
        """
        if arg not in cls._netmask_cache:
            if isinstance(arg, _compat_int_types):
                prefixlen = arg
            else:
                # IPv6 accepts only prefix-length strings, not
                # netmask strings (unlike IPv4).
                prefixlen = cls._prefix_from_prefix_string(arg)
            netmask = IPv6Address(cls._ip_int_from_prefix(prefixlen))
            cls._netmask_cache[arg] = netmask, prefixlen
        return cls._netmask_cache[arg]

    @classmethod
    def _ip_int_from_string(cls, ip_str):
        """Turn an IPv6 ip_str into an integer.

        Args:
            ip_str: A string, the IPv6 ip_str.

        Returns:
            An int, the IPv6 address

        Raises:
            AddressValueError: if ip_str isn't a valid IPv6 Address.
        """
        if not ip_str:
            raise AddressValueError('Address cannot be empty')

        parts = ip_str.split(':')

        # An IPv6 address needs at least 2 colons (3 parts).
        _min_parts = 3
        if len(parts) < _min_parts:
            msg = "At least %d parts expected in %r" % (_min_parts, ip_str)
            raise AddressValueError(msg)

        # If the address has an IPv4-style suffix, convert it to hexadecimal.
        if '.' in parts[-1]:
            try:
                ipv4_int = IPv4Address(parts.pop())._ip
            except AddressValueError as exc:
                raise AddressValueError("%s in %r" % (exc, ip_str))
            parts.append('%x' % ((ipv4_int >> 16) & 0xFFFF))
            parts.append('%x' % (ipv4_int & 0xFFFF))

        # An IPv6 address can't have more than 8 colons (9 parts).
        # The extra colon comes from using the "::" notation for a single
        # leading or trailing zero part.
        _max_parts = cls._HEXTET_COUNT + 1
        if len(parts) > _max_parts:
            msg = "At most %d colons permitted in %r" % (
                _max_parts - 1, ip_str)
            raise AddressValueError(msg)

        # Disregarding the endpoints, find '::' with nothing in between.
        # This indicates that a run of zeroes has been skipped.
        skip_index = None
        for i in _compat_range(1, len(parts) - 1):
            if not parts[i]:
                if skip_index is not None:
                    # Can't have more than one '::'
                    msg = "At most one '::' permitted in %r" % ip_str
                    raise AddressValueError(msg)
                skip_index = i

        # parts_hi is the number of parts to copy from above/before the '::'
        # parts_lo is the number of parts to copy from below/after the '::'
        if skip_index is not None:
            # If we found a '::', then check if it also covers the endpoints.
            parts_hi = skip_index
            parts_lo = len(parts) - skip_index - 1
            if not parts[0]:
                parts_hi -= 1
                if parts_hi:
                    msg = "Leading ':' only permitted as part of '::' in %r"
                    raise AddressValueError(msg % ip_str)  # ^: requires ^::
            if not parts[-1]:
                parts_lo -= 1
                if parts_lo:
                    msg = "Trailing ':' only permitted as part of '::' in %r"
                    raise AddressValueError(msg % ip_str)  # :$ requires ::$
            parts_skipped = cls._HEXTET_COUNT - (parts_hi + parts_lo)
            if parts_skipped < 1:
                msg = "Expected at most %d other parts with '::' in %r"
                raise AddressValueError(msg % (cls._HEXTET_COUNT - 1, ip_str))
        else:
            # Otherwise, allocate the entire address to parts_hi.  The
            # endpoints could still be empty, but _parse_hextet() will check
            # for that.
            if len(parts) != cls._HEXTET_COUNT:
                msg = "Exactly %d parts expected without '::' in %r"
                raise AddressValueError(msg % (cls._HEXTET_COUNT, ip_str))
            if not parts[0]:
                msg = "Leading ':' only permitted as part of '::' in %r"
                raise AddressValueError(msg % ip_str)  # ^: requires ^::
            if not parts[-1]:
                msg = "Trailing ':' only permitted as part of '::' in %r"
                raise AddressValueError(msg % ip_str)  # :$ requires ::$
            parts_hi = len(parts)
            parts_lo = 0
            parts_skipped = 0

        try:
            # Now, parse the hextets into a 128-bit integer.
            ip_int = 0
            for i in range(parts_hi):
                ip_int <<= 16
                ip_int |= cls._parse_hextet(parts[i])
            ip_int <<= 16 * parts_skipped
            for i in range(-parts_lo, 0):
                ip_int <<= 16
                ip_int |= cls._parse_hextet(parts[i])
            return ip_int
        except ValueError as exc:
            raise AddressValueError("%s in %r" % (exc, ip_str))

    @classmethod
    def _parse_hextet(cls, hextet_str):
        """Convert an IPv6 hextet string into an integer.

        Args:
            hextet_str: A string, the number to parse.

        Returns:
            The hextet as an integer.

        Raises:
            ValueError: if the input isn't strictly a hex number from
              [0..FFFF].
        """
        # Whitelist the characters, since int() allows a lot of bizarre stuff.
        if not cls._HEX_DIGITS.issuperset(hextet_str):
            raise ValueError("Only hex digits permitted in %r" % hextet_str)
        # We do the length check second, since the invalid character error
        # is likely to be more informative for the user
        if len(hextet_str) > 4:
            msg = "At most 4 characters permitted in %r"
            raise ValueError(msg % hextet_str)
        # Length check means we can skip checking the integer value
        return int(hextet_str, 16)

    @classmethod
    def _compress_hextets(cls, hextets):
        """Compresses a list of hextets.

        Compresses a list of strings, replacing the longest continuous
        sequence of "0" in the list with "" and adding empty strings at
        the beginning or at the end of the string such that subsequently
        calling ":".join(hextets) will produce the compressed version of
        the IPv6 address.

        Args:
            hextets: A list of strings, the hextets to compress.

        Returns:
            A list of strings.
        """
        best_doublecolon_start = -1
        best_doublecolon_len = 0
        doublecolon_start = -1
        doublecolon_len = 0
        for index, hextet in enumerate(hextets):
            if hextet == '0':
                doublecolon_len += 1
                if doublecolon_start == -1:
                    # Start of a sequence of zeros.
                    doublecolon_start = index
                if doublecolon_len > best_doublecolon_len:
                    # This is the longest sequence of zeros so far.
                    best_doublecolon_len = doublecolon_len
                    best_doublecolon_start = doublecolon_start
            else:
                doublecolon_len = 0
                doublecolon_start = -1

        # Only compress runs of two or more zero hextets.
        if best_doublecolon_len > 1:
            best_doublecolon_end = (best_doublecolon_start +
                                    best_doublecolon_len)
            # For zeros at the end of the address.
            if best_doublecolon_end == len(hextets):
                hextets += ['']
            hextets[best_doublecolon_start:best_doublecolon_end] = ['']
            # For zeros at the beginning of the address.
            if best_doublecolon_start == 0:
                hextets = [''] + hextets

        return hextets

    @classmethod
    def _string_from_ip_int(cls, ip_int=None):
        """Turns a 128-bit integer into hexadecimal notation.

        Args:
            ip_int: An integer, the IP address.

        Returns:
            A string, the hexadecimal representation of the address.

        Raises:
            ValueError: The address is bigger than 128 bits of all ones.
        """
        if ip_int is None:
            # NOTE(review): reads cls._ip on a classmethod; in practice
            # callers always pass ip_int explicitly -- confirm this
            # default path is ever exercised.
            ip_int = int(cls._ip)

        if ip_int > cls._ALL_ONES:
            raise ValueError('IPv6 address is too large')

        hex_str = '%032x' % ip_int
        hextets = ['%x' % int(hex_str[x:x + 4], 16) for x in range(0, 32, 4)]

        hextets = cls._compress_hextets(hextets)
        return ':'.join(hextets)

    def _explode_shorthand_ip_string(self):
        """Expand a shortened IPv6 address.

        Args:
            ip_str: A string, the IPv6 address.

        Returns:
            A string, the expanded IPv6 address.
        """
        if isinstance(self, IPv6Network):
            ip_str = _compat_str(self.network_address)
        elif isinstance(self, IPv6Interface):
            ip_str = _compat_str(self.ip)
        else:
            ip_str = _compat_str(self)

        ip_int = self._ip_int_from_string(ip_str)
        hex_str = '%032x' % ip_int
        parts = [hex_str[x:x + 4] for x in range(0, 32, 4)]
        if isinstance(self, (_BaseNetwork, IPv6Interface)):
            return '%s/%d' % (':'.join(parts), self._prefixlen)
        return ':'.join(parts)

    def _reverse_pointer(self):
        """Return the reverse DNS pointer name for the IPv6 address.

        This implements the method described in RFC3596 2.5.
        """
        # One label per nibble, reversed, e.g. "f.e.d.c...ip6.arpa".
        reverse_chars = self.exploded[::-1].replace(':', '')
        return '.'.join(reverse_chars) + '.ip6.arpa'

    @property
    def max_prefixlen(self):
        return self._max_prefixlen

    @property
    def version(self):
        return self._version
class IPv6Address(_BaseV6, _BaseAddress):

    """Represent and manipulate single IPv6 Addresses."""

    __slots__ = ('_ip', '__weakref__')

    def __init__(self, address):
        """Instantiate a new IPv6 address object.

        Args:
            address: A string or integer representing the IP

              Additionally, an integer can be passed, so
              IPv6Address('2001:db8::') ==
                IPv6Address(42540766411282592856903984951653826560)
              or, more generally
              IPv6Address(int(IPv6Address('2001:db8::'))) ==
                IPv6Address('2001:db8::')

        Raises:
            AddressValueError: If address isn't a valid IPv6 address.
        """
        # Efficient constructor from integer.
        if isinstance(address, _compat_int_types):
            self._check_int_address(address)
            self._ip = address
            return

        # Constructing from a packed address (16 big-endian bytes).
        if isinstance(address, bytes):
            self._check_packed_address(address, 16)
            bvs = _compat_bytes_to_byte_vals(address)
            self._ip = _compat_int_from_byte_vals(bvs, 'big')
            return

        # Assume input argument to be string or any object representation
        # which converts into a formatted IP string.
        addr_str = _compat_str(address)
        if '/' in addr_str:
            # A prefix belongs on IPv6Interface/IPv6Network, not here.
            raise AddressValueError("Unexpected '/' in %r" % address)
        self._ip = self._ip_int_from_string(addr_str)

    @property
    def packed(self):
        """The binary representation of this address."""
        return v6_int_to_packed(self._ip)

    @property
    def is_multicast(self):
        """Test if the address is reserved for multicast use.

        Returns:
            A boolean, True if the address is a multicast address.
            See RFC 2373 2.7 for details.
        """
        return self in self._constants._multicast_network

    @property
    def is_reserved(self):
        """Test if the address is otherwise IETF reserved.

        Returns:
            A boolean, True if the address is within one of the
            reserved IPv6 Network ranges.
        """
        return any(self in x for x in self._constants._reserved_networks)

    @property
    def is_link_local(self):
        """Test if the address is reserved for link-local.

        Returns:
            A boolean, True if the address is reserved per RFC 4291.
        """
        return self in self._constants._linklocal_network

    @property
    def is_site_local(self):
        """Test if the address is reserved for site-local.

        Note that the site-local address space has been deprecated by RFC 3879.
        Use is_private to test if this address is in the space of unique local
        addresses as defined by RFC 4193.

        Returns:
            A boolean, True if the address is reserved per RFC 3513 2.5.6.
        """
        return self in self._constants._sitelocal_network

    @property
    def is_private(self):
        """Test if this address is allocated for private networks.

        Returns:
            A boolean, True if the address is reserved per
            iana-ipv6-special-registry.
        """
        return any(self in net for net in self._constants._private_networks)

    @property
    def is_global(self):
        """Test if this address is allocated for public networks.

        Returns:
            A boolean, true if the address is not reserved per
            iana-ipv6-special-registry.
        """
        return not self.is_private

    @property
    def is_unspecified(self):
        """Test if the address is unspecified.

        Returns:
            A boolean, True if this is the unspecified address as defined in
            RFC 2373 2.5.2.
        """
        # The unspecified address is '::' (all zero bits).
        return self._ip == 0

    @property
    def is_loopback(self):
        """Test if the address is a loopback address.

        Returns:
            A boolean, True if the address is a loopback address as defined in
            RFC 2373 2.5.3.
        """
        # The loopback address is '::1'.
        return self._ip == 1

    @property
    def ipv4_mapped(self):
        """Return the IPv4 mapped address.

        Returns:
            If the IPv6 address is a v4 mapped address, return the
            IPv4 mapped address. Return None otherwise.
        """
        # v4-mapped addresses have the form ::ffff:a.b.c.d.
        if (self._ip >> 32) != 0xFFFF:
            return None
        return IPv4Address(self._ip & 0xFFFFFFFF)

    @property
    def teredo(self):
        """Tuple of embedded teredo IPs.

        Returns:
            Tuple of the (server, client) IPs or None if the address
            doesn't appear to be a teredo address (doesn't start with
            2001::/32)
        """
        if (self._ip >> 96) != 0x20010000:
            return None
        # The client address is stored bit-inverted in the low 32 bits.
        return (IPv4Address((self._ip >> 64) & 0xFFFFFFFF),
                IPv4Address(~self._ip & 0xFFFFFFFF))

    @property
    def sixtofour(self):
        """Return the IPv4 6to4 embedded address.

        Returns:
            The IPv4 6to4-embedded address if present or None if the
            address doesn't appear to contain a 6to4 embedded address.
        """
        if (self._ip >> 112) != 0x2002:
            return None
        return IPv4Address((self._ip >> 80) & 0xFFFFFFFF)
class IPv6Interface(IPv6Address):
    """An IPv6 address paired with the network it belongs to.

    Accepts the same integer/bytes/tuple/string forms as IPv6Network,
    but keeps the full host address instead of masking host bits.
    """

    def __init__(self, address):
        # Integer or packed-bytes input: a bare host address whose
        # associated network is the host itself (/128).
        if isinstance(address, (bytes, _compat_int_types)):
            IPv6Address.__init__(self, address)
            self.network = IPv6Network(self._ip)
            self._prefixlen = self._max_prefixlen
            # NOTE(review): unlike the tuple/string paths below, this
            # path does not set self.netmask/self.hostmask -- confirm
            # whether that asymmetry is intended.
            return

        # Tuple input: (address, optional prefix length).
        if isinstance(address, tuple):
            IPv6Address.__init__(self, address[0])
            if len(address) > 1:
                self._prefixlen = int(address[1])
            else:
                self._prefixlen = self._max_prefixlen
            self.network = IPv6Network(address, strict=False)
            self.netmask = self.network.netmask
            self.hostmask = self.network.hostmask
            return

        # String input: "addr[/prefix]".
        addr = _split_optional_netmask(address)
        IPv6Address.__init__(self, addr[0])
        self.network = IPv6Network(address, strict=False)
        self.netmask = self.network.netmask
        self._prefixlen = self.network._prefixlen
        self.hostmask = self.network.hostmask

    def __str__(self):
        return '%s/%d' % (self._string_from_ip_int(self._ip),
                          self.network.prefixlen)

    def __eq__(self, other):
        address_equal = IPv6Address.__eq__(self, other)
        if not address_equal or address_equal is NotImplemented:
            return address_equal
        try:
            return self.network == other.network
        except AttributeError:
            # An interface with an associated network is NOT the
            # same as an unassociated address. That's why the hash
            # takes the extra info into account.
            return False

    def __lt__(self, other):
        address_less = IPv6Address.__lt__(self, other)
        if address_less is NotImplemented:
            return NotImplemented
        try:
            return self.network < other.network
        except AttributeError:
            # We *do* allow addresses and interfaces to be sorted. The
            # unassociated address is considered less than all interfaces.
            return False

    def __hash__(self):
        # Fold the network association into the hash so an interface
        # never hashes equal to the bare address (see __eq__).
        return self._ip ^ self._prefixlen ^ int(self.network.network_address)

    __reduce__ = _IPAddressBase.__reduce__

    @property
    def ip(self):
        # The bare address, stripped of its network association.
        return IPv6Address(self._ip)

    @property
    def with_prefixlen(self):
        return '%s/%s' % (self._string_from_ip_int(self._ip),
                          self._prefixlen)

    @property
    def with_netmask(self):
        return '%s/%s' % (self._string_from_ip_int(self._ip),
                          self.netmask)

    @property
    def with_hostmask(self):
        return '%s/%s' % (self._string_from_ip_int(self._ip),
                          self.hostmask)

    @property
    def is_unspecified(self):
        # Both the host address and its network must be unspecified.
        return self._ip == 0 and self.network.is_unspecified

    @property
    def is_loopback(self):
        # Both the host address and its network must be loopback.
        return self._ip == 1 and self.network.is_loopback
class IPv6Network(_BaseV6, _BaseNetwork):
    """This class represents and manipulates 128-bit IPv6 networks.
    Attributes: [examples for IPv6('2001:db8::1000/124')]
        .network_address: IPv6Address('2001:db8::1000')
        .hostmask: IPv6Address('::f')
        .broadcast_address: IPv6Address('2001:db8::100f')
        .netmask: IPv6Address('ffff:ffff:ffff:ffff:ffff:ffff:ffff:fff0')
        .prefixlen: 124
    """

    # Class to use when creating address objects
    _address_class = IPv6Address

    def __init__(self, address, strict=True):
        """Instantiate a new IPv6 Network object.

        Args:
            address: A string or integer representing the IPv6 network or the
                IP and prefix/netmask.
                '2001:db8::/128'
                '2001:db8:0000:0000:0000:0000:0000:0000/128'
                '2001:db8::'
                are all functionally the same in IPv6.  That is to say,
                failing to provide a subnetmask will create an object with
                a mask of /128.
                Additionally, an integer can be passed, so
                IPv6Network('2001:db8::') ==
                IPv6Network(42540766411282592856903984951653826560)
                or, more generally
                IPv6Network(int(IPv6Network('2001:db8::'))) ==
                IPv6Network('2001:db8::')
            strict: A boolean. If true, ensure that we have been passed
                A true network address, eg, 2001:db8::1000/124 and not an
                IP address on a network, eg, 2001:db8::1/124.

        Raises:
            AddressValueError: If address isn't a valid IPv6 address.
            NetmaskValueError: If the netmask isn't valid for
                an IPv6 address.
            ValueError: If strict was True and a network address was not
                supplied.
        """
        _BaseNetwork.__init__(self, address)
        # Efficient constructor from integer or packed address
        if isinstance(address, (bytes, _compat_int_types)):
            # A bare integer/packed address is always a /128 network.
            self.network_address = IPv6Address(address)
            self.netmask, self._prefixlen = self._make_netmask(
                self._max_prefixlen)
            return
        if isinstance(address, tuple):
            # (address, prefixlen) tuple; a 1-tuple defaults to /128.
            if len(address) > 1:
                arg = address[1]
            else:
                arg = self._max_prefixlen
            self.netmask, self._prefixlen = self._make_netmask(arg)
            self.network_address = IPv6Address(address[0])
            packed = int(self.network_address)
            # Host bits set below the mask: reject under strict,
            # otherwise silently mask them away.
            if packed & int(self.netmask) != packed:
                if strict:
                    raise ValueError('%s has host bits set' % self)
                else:
                    self.network_address = IPv6Address(packed &
                                                       int(self.netmask))
            return
        # Assume input argument to be string or any object representation
        # which converts into a formatted IP prefix string.
        addr = _split_optional_netmask(address)
        self.network_address = IPv6Address(self._ip_int_from_string(addr[0]))
        if len(addr) == 2:
            arg = addr[1]
        else:
            arg = self._max_prefixlen
        self.netmask, self._prefixlen = self._make_netmask(arg)
        if strict:
            # Strict mode: the supplied address must already be the
            # true network address (no host bits set).
            if (IPv6Address(int(self.network_address) & int(self.netmask)) !=
                    self.network_address):
                raise ValueError('%s has host bits set' % self)
        # Normalize to the masked network address in all cases.
        self.network_address = IPv6Address(int(self.network_address) &
                                           int(self.netmask))
        # A /127 holds exactly two usable addresses and no Subnet-Router
        # anycast to exclude, so hosts() degrades to plain iteration.
        if self._prefixlen == (self._max_prefixlen - 1):
            self.hosts = self.__iter__

    def hosts(self):
        """Generate Iterator over usable hosts in a network.
        This is like __iter__ except it doesn't return the
        Subnet-Router anycast address.
        """
        # Start one past the network (anycast) address; unlike IPv4,
        # the highest address is included.
        network = int(self.network_address)
        broadcast = int(self.broadcast_address)
        for x in _compat_range(network + 1, broadcast + 1):
            yield self._address_class(x)

    @property
    def is_site_local(self):
        """Test if the address is reserved for site-local.
        Note that the site-local address space has been deprecated by RFC 3879.
        Use is_private to test if this address is in the space of unique local
        addresses as defined by RFC 4193.
        Returns:
            A boolean, True if the address is reserved per RFC 3513 2.5.6.
        """
        # True only if the entire network falls inside fec0::/10.
        return (self.network_address.is_site_local and
                self.broadcast_address.is_site_local)
class _IPv6Constants(object):
    """Pre-built network tables attached to IPv6Address below --
    presumably consulted by its is_* classification properties
    (verify against the IPv6Address definition earlier in the file)."""

    # Link-local unicast range.
    _linklocal_network = IPv6Network('fe80::/10')

    # Multicast range.
    _multicast_network = IPv6Network('ff00::/8')

    # Special-purpose ranges treated as non-public (loopback,
    # unspecified, IPv4-mapped, documentation, ULA, link-local, ...).
    _private_networks = [
        IPv6Network('::1/128'),
        IPv6Network('::/128'),
        IPv6Network('::ffff:0:0/96'),
        IPv6Network('100::/64'),
        IPv6Network('2001::/23'),
        IPv6Network('2001:2::/48'),
        IPv6Network('2001:db8::/32'),
        IPv6Network('2001:10::/28'),
        IPv6Network('fc00::/7'),
        IPv6Network('fe80::/10'),
    ]

    # IETF-reserved blocks.
    _reserved_networks = [
        IPv6Network('::/8'), IPv6Network('100::/8'),
        IPv6Network('200::/7'), IPv6Network('400::/6'),
        IPv6Network('800::/5'), IPv6Network('1000::/4'),
        IPv6Network('4000::/3'), IPv6Network('6000::/3'),
        IPv6Network('8000::/3'), IPv6Network('A000::/3'),
        IPv6Network('C000::/3'), IPv6Network('E000::/4'),
        IPv6Network('F000::/5'), IPv6Network('F800::/6'),
        IPv6Network('FE00::/9'),
    ]

    # Deprecated site-local range (RFC 3879).
    _sitelocal_network = IPv6Network('fec0::/10')

# Attach the constant table to the address class.
IPv6Address._constants = _IPv6Constants
| gpl-3.0 |
WholeGrainGoats/servo | tests/wpt/web-platform-tests/tools/pytest/testing/test_assertion.py | 170 | 19078 | # -*- coding: utf-8 -*-
import sys
import textwrap
import _pytest.assertion as plugin
import _pytest._code
import py
import pytest
from _pytest.assertion import reinterpret
from _pytest.assertion import util
PY3 = sys.version_info >= (3, 0)
@pytest.fixture
def mock_config():
    """Fixture: a minimal stand-in for pytest's config object.

    Only the 'verbose' option is mocked out; requesting any other
    option raises KeyError so unexpected lookups fail loudly.
    """
    class Config(object):
        verbose = False

        def getoption(self, name):
            if name != 'verbose':
                raise KeyError('Not mocked out: %s' % name)
            return self.verbose

    return Config()
def interpret(expr):
return reinterpret.reinterpret(expr, _pytest._code.Frame(sys._getframe(1)))
class TestBinReprIntegration:
def test_pytest_assertrepr_compare_called(self, testdir):
testdir.makeconftest("""
l = []
def pytest_assertrepr_compare(op, left, right):
l.append((op, left, right))
def pytest_funcarg__l(request):
return l
""")
testdir.makepyfile("""
def test_hello():
assert 0 == 1
def test_check(l):
assert l == [("==", 0, 1)]
""")
result = testdir.runpytest("-v")
result.stdout.fnmatch_lines([
"*test_hello*FAIL*",
"*test_check*PASS*",
])
def callequal(left, right, verbose=False):
    """Run the '==' assertrepr-compare hook on *left* and *right*.

    Returns the hook's list of explanation lines, or None when the
    plugin declines to explain (e.g. mismatched types).
    """
    # mock_config here is the fixture function defined above; calling it
    # directly just builds the fake config object, whose verbose flag we
    # then override per-call.
    config = mock_config()
    config.verbose = verbose
    return plugin.pytest_assertrepr_compare(config, '==', left, right)
class TestAssert_reprcompare:
def test_different_types(self):
assert callequal([0, 1], 'foo') is None
def test_summary(self):
summary = callequal([0, 1], [0, 2])[0]
assert len(summary) < 65
def test_text_diff(self):
diff = callequal('spam', 'eggs')[1:]
assert '- spam' in diff
assert '+ eggs' in diff
def test_text_skipping(self):
lines = callequal('a'*50 + 'spam', 'a'*50 + 'eggs')
assert 'Skipping' in lines[1]
for line in lines:
assert 'a'*50 not in line
def test_text_skipping_verbose(self):
lines = callequal('a'*50 + 'spam', 'a'*50 + 'eggs', verbose=True)
assert '- ' + 'a'*50 + 'spam' in lines
assert '+ ' + 'a'*50 + 'eggs' in lines
def test_multiline_text_diff(self):
left = 'foo\nspam\nbar'
right = 'foo\neggs\nbar'
diff = callequal(left, right)
assert '- spam' in diff
assert '+ eggs' in diff
def test_list(self):
expl = callequal([0, 1], [0, 2])
assert len(expl) > 1
@pytest.mark.parametrize(
['left', 'right', 'expected'], [
([0, 1], [0, 2], """
Full diff:
- [0, 1]
? ^
+ [0, 2]
? ^
"""),
({0: 1}, {0: 2}, """
Full diff:
- {0: 1}
? ^
+ {0: 2}
? ^
"""),
(set([0, 1]), set([0, 2]), """
Full diff:
- set([0, 1])
? ^
+ set([0, 2])
? ^
""" if not PY3 else """
Full diff:
- {0, 1}
? ^
+ {0, 2}
? ^
""")
]
)
def test_iterable_full_diff(self, left, right, expected):
"""Test the full diff assertion failure explanation.
When verbose is False, then just a -v notice to get the diff is rendered,
when verbose is True, then ndiff of the pprint is returned.
"""
expl = callequal(left, right, verbose=False)
assert expl[-1] == 'Use -v to get the full diff'
expl = '\n'.join(callequal(left, right, verbose=True))
assert expl.endswith(textwrap.dedent(expected).strip())
def test_list_different_lenghts(self):
expl = callequal([0, 1], [0, 1, 2])
assert len(expl) > 1
expl = callequal([0, 1, 2], [0, 1])
assert len(expl) > 1
def test_dict(self):
expl = callequal({'a': 0}, {'a': 1})
assert len(expl) > 1
def test_dict_omitting(self):
lines = callequal({'a': 0, 'b': 1}, {'a': 1, 'b': 1})
assert lines[1].startswith('Omitting 1 identical item')
assert 'Common items' not in lines
for line in lines[1:]:
assert 'b' not in line
def test_dict_omitting_verbose(self):
lines = callequal({'a': 0, 'b': 1}, {'a': 1, 'b': 1}, verbose=True)
assert lines[1].startswith('Common items:')
assert 'Omitting' not in lines[1]
assert lines[2] == "{'b': 1}"
def test_set(self):
expl = callequal(set([0, 1]), set([0, 2]))
assert len(expl) > 1
def test_frozenzet(self):
expl = callequal(frozenset([0, 1]), set([0, 2]))
assert len(expl) > 1
def test_Sequence(self):
col = py.builtin._tryimport(
"collections.abc",
"collections",
"sys")
if not hasattr(col, "MutableSequence"):
pytest.skip("cannot import MutableSequence")
MutableSequence = col.MutableSequence
class TestSequence(MutableSequence): # works with a Sequence subclass
def __init__(self, iterable):
self.elements = list(iterable)
def __getitem__(self, item):
return self.elements[item]
def __len__(self):
return len(self.elements)
def __setitem__(self, item, value):
pass
def __delitem__(self, item):
pass
def insert(self, item, index):
pass
expl = callequal(TestSequence([0, 1]), list([0, 2]))
assert len(expl) > 1
def test_list_tuples(self):
expl = callequal([], [(1,2)])
assert len(expl) > 1
expl = callequal([(1,2)], [])
assert len(expl) > 1
def test_list_bad_repr(self):
class A:
def __repr__(self):
raise ValueError(42)
expl = callequal([], [A()])
assert 'ValueError' in "".join(expl)
expl = callequal({}, {'1': A()})
assert 'faulty' in "".join(expl)
def test_one_repr_empty(self):
"""
the faulty empty string repr did trigger
a unbound local error in _diff_text
"""
class A(str):
def __repr__(self):
return ''
expl = callequal(A(), '')
assert not expl
def test_repr_no_exc(self):
expl = ' '.join(callequal('foo', 'bar'))
assert 'raised in repr()' not in expl
def test_unicode(self):
left = py.builtin._totext('£€', 'utf-8')
right = py.builtin._totext('£', 'utf-8')
expl = callequal(left, right)
assert expl[0] == py.builtin._totext("'£€' == '£'", 'utf-8')
assert expl[1] == py.builtin._totext('- £€', 'utf-8')
assert expl[2] == py.builtin._totext('+ £', 'utf-8')
def test_nonascii_text(self):
"""
:issue: 877
non ascii python2 str caused a UnicodeDecodeError
"""
class A(str):
def __repr__(self):
return '\xff'
expl = callequal(A(), '1')
assert expl
def test_format_nonascii_explanation(self):
assert util.format_explanation('λ')
def test_mojibake(self):
# issue 429
left = 'e'
right = '\xc3\xa9'
if not isinstance(left, py.builtin.bytes):
left = py.builtin.bytes(left, 'utf-8')
right = py.builtin.bytes(right, 'utf-8')
expl = callequal(left, right)
for line in expl:
assert isinstance(line, py.builtin.text)
msg = py.builtin._totext('\n').join(expl)
assert msg
class TestFormatExplanation:
def test_special_chars_full(self, testdir):
# Issue 453, for the bug this would raise IndexError
testdir.makepyfile("""
def test_foo():
assert '\\n}' == ''
""")
result = testdir.runpytest()
assert result.ret == 1
result.stdout.fnmatch_lines([
"*AssertionError*",
])
def test_fmt_simple(self):
expl = 'assert foo'
assert util.format_explanation(expl) == 'assert foo'
def test_fmt_where(self):
expl = '\n'.join(['assert 1',
'{1 = foo',
'} == 2'])
res = '\n'.join(['assert 1 == 2',
' + where 1 = foo'])
assert util.format_explanation(expl) == res
def test_fmt_and(self):
expl = '\n'.join(['assert 1',
'{1 = foo',
'} == 2',
'{2 = bar',
'}'])
res = '\n'.join(['assert 1 == 2',
' + where 1 = foo',
' + and 2 = bar'])
assert util.format_explanation(expl) == res
def test_fmt_where_nested(self):
expl = '\n'.join(['assert 1',
'{1 = foo',
'{foo = bar',
'}',
'} == 2'])
res = '\n'.join(['assert 1 == 2',
' + where 1 = foo',
' + where foo = bar'])
assert util.format_explanation(expl) == res
def test_fmt_newline(self):
expl = '\n'.join(['assert "foo" == "bar"',
'~- foo',
'~+ bar'])
res = '\n'.join(['assert "foo" == "bar"',
' - foo',
' + bar'])
assert util.format_explanation(expl) == res
def test_fmt_newline_escaped(self):
expl = '\n'.join(['assert foo == bar',
'baz'])
res = 'assert foo == bar\\nbaz'
assert util.format_explanation(expl) == res
def test_fmt_newline_before_where(self):
expl = '\n'.join(['the assertion message here',
'>assert 1',
'{1 = foo',
'} == 2',
'{2 = bar',
'}'])
res = '\n'.join(['the assertion message here',
'assert 1 == 2',
' + where 1 = foo',
' + and 2 = bar'])
assert util.format_explanation(expl) == res
def test_fmt_multi_newline_before_where(self):
expl = '\n'.join(['the assertion',
'~message here',
'>assert 1',
'{1 = foo',
'} == 2',
'{2 = bar',
'}'])
res = '\n'.join(['the assertion',
' message here',
'assert 1 == 2',
' + where 1 = foo',
' + and 2 = bar'])
assert util.format_explanation(expl) == res
def test_python25_compile_issue257(testdir):
testdir.makepyfile("""
def test_rewritten():
assert 1 == 2
# some comment
""")
result = testdir.runpytest()
assert result.ret == 1
result.stdout.fnmatch_lines("""
*E*assert 1 == 2*
*1 failed*
""")
def test_rewritten(testdir):
testdir.makepyfile("""
def test_rewritten():
assert "@py_builtins" in globals()
""")
assert testdir.runpytest().ret == 0
def test_reprcompare_notin(mock_config):
detail = plugin.pytest_assertrepr_compare(
mock_config, 'not in', 'foo', 'aaafoobbb')[1:]
assert detail == ["'foo' is contained here:", ' aaafoobbb', '? +++']
def test_pytest_assertrepr_compare_integration(testdir):
testdir.makepyfile("""
def test_hello():
x = set(range(100))
y = x.copy()
y.remove(50)
assert x == y
""")
result = testdir.runpytest()
result.stdout.fnmatch_lines([
"*def test_hello():*",
"*assert x == y*",
"*E*Extra items*left*",
"*E*50*",
])
def test_sequence_comparison_uses_repr(testdir):
testdir.makepyfile("""
def test_hello():
x = set("hello x")
y = set("hello y")
assert x == y
""")
result = testdir.runpytest()
result.stdout.fnmatch_lines([
"*def test_hello():*",
"*assert x == y*",
"*E*Extra items*left*",
"*E*'x'*",
"*E*Extra items*right*",
"*E*'y'*",
])
def test_assert_compare_truncate_longmessage(monkeypatch, testdir):
testdir.makepyfile(r"""
def test_long():
a = list(range(200))
b = a[::2]
a = '\n'.join(map(str, a))
b = '\n'.join(map(str, b))
assert a == b
""")
monkeypatch.delenv('CI', raising=False)
result = testdir.runpytest()
# without -vv, truncate the message showing a few diff lines only
result.stdout.fnmatch_lines([
"*- 1",
"*- 3",
"*- 5",
"*- 7",
"*truncated (191 more lines)*use*-vv*",
])
result = testdir.runpytest('-vv')
result.stdout.fnmatch_lines([
"*- 197",
])
monkeypatch.setenv('CI', '1')
result = testdir.runpytest()
result.stdout.fnmatch_lines([
"*- 197",
])
def test_assertrepr_loaded_per_dir(testdir):
testdir.makepyfile(test_base=['def test_base(): assert 1 == 2'])
a = testdir.mkdir('a')
a_test = a.join('test_a.py')
a_test.write('def test_a(): assert 1 == 2')
a_conftest = a.join('conftest.py')
a_conftest.write('def pytest_assertrepr_compare(): return ["summary a"]')
b = testdir.mkdir('b')
b_test = b.join('test_b.py')
b_test.write('def test_b(): assert 1 == 2')
b_conftest = b.join('conftest.py')
b_conftest.write('def pytest_assertrepr_compare(): return ["summary b"]')
result = testdir.runpytest()
result.stdout.fnmatch_lines([
'*def test_base():*',
'*E*assert 1 == 2*',
'*def test_a():*',
'*E*assert summary a*',
'*def test_b():*',
'*E*assert summary b*'])
def test_assertion_options(testdir):
testdir.makepyfile("""
def test_hello():
x = 3
assert x == 4
""")
result = testdir.runpytest()
assert "3 == 4" in result.stdout.str()
off_options = (("--no-assert",),
("--nomagic",),
("--no-assert", "--nomagic"),
("--assert=plain",),
("--assert=plain", "--no-assert"),
("--assert=plain", "--nomagic"),
("--assert=plain", "--no-assert", "--nomagic"))
for opt in off_options:
result = testdir.runpytest_subprocess(*opt)
assert "3 == 4" not in result.stdout.str()
def test_old_assert_mode(testdir):
testdir.makepyfile("""
def test_in_old_mode():
assert "@py_builtins" not in globals()
""")
result = testdir.runpytest_subprocess("--assert=reinterp")
assert result.ret == 0
def test_triple_quoted_string_issue113(testdir):
testdir.makepyfile("""
def test_hello():
assert "" == '''
'''""")
result = testdir.runpytest("--fulltrace")
result.stdout.fnmatch_lines([
"*1 failed*",
])
assert 'SyntaxError' not in result.stdout.str()
def test_traceback_failure(testdir):
p1 = testdir.makepyfile("""
def g():
return 2
def f(x):
assert x == g()
def test_onefails():
f(3)
""")
result = testdir.runpytest(p1, "--tb=long")
result.stdout.fnmatch_lines([
"*test_traceback_failure.py F",
"====* FAILURES *====",
"____*____",
"",
" def test_onefails():",
"> f(3)",
"",
"*test_*.py:6: ",
"_ _ _ *",
#"",
" def f(x):",
"> assert x == g()",
"E assert 3 == 2",
"E + where 2 = g()",
"",
"*test_traceback_failure.py:4: AssertionError"
])
result = testdir.runpytest(p1) # "auto"
result.stdout.fnmatch_lines([
"*test_traceback_failure.py F",
"====* FAILURES *====",
"____*____",
"",
" def test_onefails():",
"> f(3)",
"",
"*test_*.py:6: ",
"",
" def f(x):",
"> assert x == g()",
"E assert 3 == 2",
"E + where 2 = g()",
"",
"*test_traceback_failure.py:4: AssertionError"
])
@pytest.mark.skipif("'__pypy__' in sys.builtin_module_names or sys.platform.startswith('java')" )
def test_warn_missing(testdir):
testdir.makepyfile("")
result = testdir.run(sys.executable, "-OO", "-m", "pytest", "-h")
result.stderr.fnmatch_lines([
"*WARNING*assert statements are not executed*",
])
result = testdir.run(sys.executable, "-OO", "-m", "pytest", "--no-assert")
result.stderr.fnmatch_lines([
"*WARNING*assert statements are not executed*",
])
def test_recursion_source_decode(testdir):
testdir.makepyfile("""
def test_something():
pass
""")
testdir.makeini("""
[pytest]
python_files = *.py
""")
result = testdir.runpytest("--collect-only")
result.stdout.fnmatch_lines("""
<Module*>
""")
def test_AssertionError_message(testdir):
testdir.makepyfile("""
def test_hello():
x,y = 1,2
assert 0, (x,y)
""")
result = testdir.runpytest()
result.stdout.fnmatch_lines("""
*def test_hello*
*assert 0, (x,y)*
*AssertionError: (1, 2)*
""")
@pytest.mark.skipif(PY3, reason='This bug does not exist on PY3')
def test_set_with_unsortable_elements():
# issue #718
class UnsortableKey(object):
def __init__(self, name):
self.name = name
def __lt__(self, other):
raise RuntimeError()
def __repr__(self):
return 'repr({0})'.format(self.name)
def __eq__(self, other):
return self.name == other.name
def __hash__(self):
return hash(self.name)
left_set = set(UnsortableKey(str(i)) for i in range(1, 3))
right_set = set(UnsortableKey(str(i)) for i in range(2, 4))
expl = callequal(left_set, right_set, verbose=True)
# skip first line because it contains the "construction" of the set, which does not have a guaranteed order
expl = expl[1:]
dedent = textwrap.dedent("""
Extra items in the left set:
repr(1)
Extra items in the right set:
repr(3)
Full diff (fallback to calling repr on each item):
- repr(1)
repr(2)
+ repr(3)
""").strip()
assert '\n'.join(expl) == dedent
| mpl-2.0 |
azureplus/hue | apps/sqoop/src/sqoop/client/link.py | 33 | 3050 | # Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from desktop.lib.python_util import force_dict_to_strings
from exception import SqoopException
from config import Config
class Link(object):
  """Client-side representation of a Sqoop link resource.

  The Sqoop REST API spells its keys with hyphens ('connector-id',
  'link-config-values', ...) while the Python attributes use
  underscores; from_dict()/to_dict() translate between the two.
  """

  # Attributes update() must never copy from another Link: identity and
  # audit fields are owned by the server.
  SKIP = ('id', 'creation_date', 'creation_user', 'update_date', 'update_user')

  def __init__(self, name, connector_id, link_config_values=None, enabled=True, creation_user='hue', creation_date=0, update_user='hue', update_date=0, **kwargs):
    # id defaults to -1 for a link that has not been persisted yet.
    self.id = kwargs.setdefault('id', -1)
    self.creation_user = creation_user
    self.creation_date = creation_date
    self.update_user = update_user
    self.update_date = update_date
    self.enabled = enabled
    self.name = name
    self.connector_id = connector_id
    self.link_config_values = link_config_values

  @staticmethod
  def from_dict(link_dict):
    """Build a Link from a (hyphen-keyed) REST API dictionary."""
    # BUG FIX: default the *hyphenated* key, since that is the key read
    # below.  The original defaulted 'link_config_values' instead, so a
    # payload without 'link-config-values' raised KeyError.
    link_dict.setdefault('link-config-values', [])
    link_dict['link_config_values'] = [ Config.from_dict(link_config_value_dict) for link_config_value_dict in link_dict['link-config-values'] ]
    # Normalize the remaining hyphenated keys to underscore attributes.
    if 'connector_id' not in link_dict:
      link_dict['connector_id'] = link_dict.setdefault('connector-id', -1)
    if 'creation_user' not in link_dict:
      link_dict['creation_user'] = link_dict.setdefault('creation-user', 'hue')
    if 'creation_date' not in link_dict:
      link_dict['creation_date'] = link_dict.setdefault('creation-date', 0)
    if 'update_user' not in link_dict:
      link_dict['update_user'] = link_dict.setdefault('update-user', 'hue')
    if 'update_date' not in link_dict:
      link_dict['update_date'] = link_dict.setdefault('update-date', 0)
    return Link(**force_dict_to_strings(link_dict))

  def to_dict(self):
    """Serialize to the hyphen-keyed dictionary shape the REST API expects."""
    d = {
      'id': self.id,
      'name': self.name,
      'creation-user': self.creation_user,
      'creation-date': self.creation_date,
      'update-user': self.update_user,
      'update-date': self.update_date,
      'connector-id': self.connector_id,
      'link-config-values': [ config.to_dict() for config in self.link_config_values ],
      'enabled': self.enabled
    }
    return d

  def update_from_dict(self, link_dict):
    """Apply the fields of *link_dict* to this instance (SKIP excluded)."""
    self.update(Link.from_dict(link_dict))

  def update(self, link):
    """Copy every non-SKIP attribute present on *link* onto self."""
    for key in self.__dict__:
      if key not in Link.SKIP:
        if hasattr(link, key):
          setattr(self, key, getattr(link, key))
| apache-2.0 |
JonasAZou/hellodjango | hello/passwordsafe/views.py | 1 | 6213 | #-*- encoding: utf-8 -*-
from django.contrib.auth import authenticate, login as login_user, logout as logout_user
from django.contrib.auth.models import User
from django.contrib.auth.decorators import login_required
from django.shortcuts import render, get_object_or_404
from django.core.urlresolvers import reverse
from django.http import HttpResponse, HttpResponseRedirect, HttpResponseNotFound
from django.views.generic.base import View, TemplateView
from django.utils.http import urlquote_plus
from django.template import RequestContext, loader
import json
from pprint import pprint
import logging
from .forms import RegistryForm, LoginForm, PasswordForm
from .models import *
class JsonResponse(HttpResponse):
    """HttpResponse whose body is a small JSON envelope.

    A success response is serialized as ``{"data": ...}``; a failure as
    ``{"errno": <int>, "errmsg": <str>}``.  Passing a non-zero errno or
    a non-empty errmsg selects the failure envelope (data is ignored).
    """
    def __init__(self, data=None, errno=0, errmsg='', *args, **kwargs):
        super(JsonResponse, self).__init__(*args, **kwargs)
        if errno or errmsg:
            ret = {
                'errno': errno,
                'errmsg': errmsg,
            }
        else:
            ret = {
                'data': data
            }
        self.write( json.dumps(ret) )
class MyView(View):
    """Base class for this app's views.

    Adds two conveniences on top of Django's View:
    * ``require_auth`` -- subclasses set this True to redirect anonymous
      users to the login page, carrying a ``next`` parameter back to the
      originally requested path.
    * automatic ``template_name`` -- derived from the class name by
      stripping the trailing 'View' and lowercasing, e.g.
      LoginView -> 'passwordsafe/login.html'.
    """
    require_auth = False
    def page_redirect(self, url, msg=None, timeout=3000):
        # Render an interstitial page showing *msg* before redirecting to
        # *url*; timeout is presumably milliseconds -- confirm against
        # the redirect.html template.
        return render(self.request, 'passwordsafe/redirect.html', {
            'url': url,
            'msg': msg,
            'timeout': timeout,
        })
    def __init__(self, *args, **kwargs):
        # Only derive template_name when the subclass didn't set one
        # explicitly; [:-4] strips the 'View' suffix from the class name.
        if not getattr(self, 'template_name', None):
            self.template_name = 'passwordsafe/{}.html'.format(self.__class__.__name__[:-4].lower())
        super(MyView, self).__init__(*args, **kwargs)
    def dispatch(self, request, *args, **kwargs):
        # Gate every HTTP method behind authentication when require_auth
        # is set; the message string means "please log in first".
        if self.require_auth and not self.request.user.is_authenticated():
            return self.page_redirect(
                '{}?next={}'.format(reverse('passwordsafe:login'),urlquote_plus(request.path)),
                u'请先登录')
        return super(MyView, self).dispatch(request, *args, **kwargs)
class IndexView(MyView):
def get(self, request, *args, **kwargs):
# pwds = Password.default_list(request.user)
return render(request, self.template_name, {
#'passwords': pwds
})
class RegisterView(MyView):
def get(self, request, *args, **kwargs):
form = RegistryForm()
return render(request, 'passwordsafe/registry.html', {
'form': form,
})
def post(self, request, *args, **kwargs):
form = RegistryForm(request.POST)
if form.is_valid():
loginname = form.cleaned_data['loginname']
password = form.cleaned_data['password']
email = form.cleaned_data['email']
user = User.objects.create_user(loginname, email, password)
return self.page_redirect(reverse('passwordsafe:index'), u'注册成功', 2000)
else:
return render(request, 'passwordsafe/registry.html', {
'form': form,
})
class CheckNameView(MyView):
    """AJAX endpoint validating a prospective login name.

    Responds with errno 100 for a malformed name, errno 101 for a name
    already taken, and an empty success envelope otherwise.
    """
    def get(self, request, username):
        import re
        # Allowed characters: letters, digits, '.', '_', '$' and '-';
        # at least 4 of them.  Two fixes versus the original pattern
        # r'[0-9a-zA-Z.-_$]{4,}':
        #  * '.-_' is a character RANGE (0x2E-0x5F) that accidentally
        #    accepted '/', ':', ';', '@', '[', ']', '^', ... -- the '-'
        #    must sit at the end of the class to be literal;
        #  * the match was unanchored at the end, so 'abcd!!!' passed.
        #    \Z rejects any trailing junk.
        pat = re.compile(r'[0-9a-zA-Z._$-]{4,}\Z')
        if not pat.match(username):
            return JsonResponse(errno=100, errmsg=u'用户名不正确')
        user = User.objects.filter(username=username)
        if user:
            return JsonResponse(errno=101, errmsg=u'用户名已存在')
        return JsonResponse()
class LoginView(MyView):
def get(self, request, *args, **kwargs):
return render(request, self.template_name, {
'form': LoginForm(),
})
def post(self, request, *args, **kwargs):
form = LoginForm(request.POST)
if form.is_valid():
login_user(request, form.cleaned_data['user'])
url = request.GET.get('next', reverse('passwordsafe:index'))
return HttpResponseRedirect(url)
else:
return render(request, self.template_name, {
'form': form,
})
class LogoutView(MyView):
    """Log the current user out, then send them to the login page.

    Authenticated users get an interstitial "logout succeeded" page;
    anonymous users are redirected to login immediately.
    """
    def get(self, request, *args, **kwargs):
        if request.user.is_authenticated():
            logout_user(request)
            return self.page_redirect(reverse('passwordsafe:login'), u'登出成功')
        else:
            return HttpResponseRedirect(reverse('passwordsafe:login'))
class AddView(MyView):
require_auth = True
def get(self, request):
return render(request, self.template_name, {
'form': PasswordForm()
})
def post(self, request):
form = PasswordForm(request.POST)
if form.is_valid():
pwd = form.save(commit=False)
pwd.password = Password.make_password(pwd.password)
pwd.user = request.user
pwd.save()
return self.page_redirect(reverse('passwordsafe:index'), u'添加成功')
else:
return render(request, self.template_name, {
'form': form,
})
class EditView(MyView):
require_auth = True
template_name = 'passwordsafe/add.html'
def get(self, request, password_id):
pwd = get_object_or_404(Password, pk=password_id, user=request.user)
return render(request, self.template_name, {
'form': PasswordForm(instance=pwd),
})
def post(self, request, password_id):
original = get_object_or_404(Password, pk=password_id, user=request.user)
form = PasswordForm(request.POST, instance=original)
if form.is_valid():
pwd = form.save(commit=False)
pwd.password = Password.make_password(pwd.password)
pwd.save()
return self.page_redirect(reverse('passwordsafe:index'), u'修改成功')
else:
return render(request, self.template_name, {
'form': form,
})
class DeleteView(MyView):
    """Delete one of the current user's stored password records.

    NOTE(review): the deletion is performed on GET, so it is neither
    CSRF-protected nor safe against link prefetchers -- consider
    switching to POST.
    """
    require_auth = True
    def get(self, request, password_id):
        # 404 unless the record exists AND belongs to the requester.
        pwd = get_object_or_404(Password, pk=password_id, user=request.user)
        pwd.delete()
        return self.page_redirect(reverse('passwordsafe:index'), u'删除成功')
class TestView(MyView):
require_auth = False
def get(self, request, tpl):
return render(request, 'passwordsafe/{}.html'.format(tpl), {})
| mit |
40123148/40123148 | static/Brython3.1.1-20150328-091302/Lib/reprlib.py | 923 | 5110 | """Redo the builtin repr() (representation) but with limits on most sizes."""
__all__ = ["Repr", "repr", "recursive_repr"]
import builtins
from itertools import islice
try:
from _thread import get_ident
except ImportError:
from _dummy_thread import get_ident
def recursive_repr(fillvalue='...'):
    """Decorator factory: make a repr function return *fillvalue*
    instead of recursing when called re-entrantly on the same object
    in the same thread."""
    def decorating_function(user_function):
        # (object id, thread id) pairs currently being repr'd.
        active = set()

        def wrapper(self):
            token = id(self), get_ident()
            if token in active:
                return fillvalue
            active.add(token)
            try:
                return user_function(self)
            finally:
                active.discard(token)

        # Can't use functools.wraps() here because of bootstrap issues
        wrapper.__module__ = getattr(user_function, '__module__')
        wrapper.__doc__ = getattr(user_function, '__doc__')
        wrapper.__name__ = getattr(user_function, '__name__')
        wrapper.__annotations__ = getattr(user_function, '__annotations__', {})
        return wrapper

    return decorating_function
class Repr:
    """Produce size-limited repr strings.

    Each max* attribute bounds how much of the corresponding type is
    rendered before the output is elided with '...'; maxlevel bounds
    recursion into nested containers.
    """
    def __init__(self):
        # Container element limits (counts).
        self.maxlevel = 6
        self.maxtuple = 6
        self.maxlist = 6
        self.maxarray = 5
        self.maxdict = 4
        self.maxset = 6
        self.maxfrozenset = 6
        self.maxdeque = 6
        # Scalar limits (characters of the repr string).
        self.maxstring = 30
        self.maxlong = 40
        self.maxother = 30
    def repr(self, x):
        """Entry point: repr *x* with the full recursion budget."""
        return self.repr1(x, self.maxlevel)
    def repr1(self, x, level):
        # Dispatch on the type name to a repr_<typename> method; spaces
        # in the type name are replaced so names like 'wrapper descriptor'
        # still form a valid attribute name.  Unknown types fall back to
        # repr_instance.
        typename = type(x).__name__
        if ' ' in typename:
            parts = typename.split()
            typename = '_'.join(parts)
        if hasattr(self, 'repr_' + typename):
            return getattr(self, 'repr_' + typename)(x, level)
        else:
            return self.repr_instance(x, level)
    def _repr_iterable(self, x, level, left, right, maxiter, trail=''):
        """Shared renderer: *left* + up to *maxiter* elements + *right*.

        *trail* is appended before *right* for single-element output
        (the trailing comma of a 1-tuple).
        """
        n = len(x)
        if level <= 0 and n:
            # Recursion budget exhausted: elide the whole contents.
            s = '...'
        else:
            newlevel = level - 1
            repr1 = self.repr1
            pieces = [repr1(elem, newlevel) for elem in islice(x, maxiter)]
            if n > maxiter: pieces.append('...')
            s = ', '.join(pieces)
            if n == 1 and trail: right = trail + right
        return '%s%s%s' % (left, s, right)
    def repr_tuple(self, x, level):
        return self._repr_iterable(x, level, '(', ')', self.maxtuple, ',')
    def repr_list(self, x, level):
        return self._repr_iterable(x, level, '[', ']', self.maxlist)
    def repr_array(self, x, level):
        header = "array('%s', [" % x.typecode
        return self._repr_iterable(x, level, header, '])', self.maxarray)
    def repr_set(self, x, level):
        # Sort when possible so the output is deterministic.
        x = _possibly_sorted(x)
        return self._repr_iterable(x, level, 'set([', '])', self.maxset)
    def repr_frozenset(self, x, level):
        x = _possibly_sorted(x)
        return self._repr_iterable(x, level, 'frozenset([', '])',
                                   self.maxfrozenset)
    def repr_deque(self, x, level):
        return self._repr_iterable(x, level, 'deque([', '])', self.maxdeque)
    def repr_dict(self, x, level):
        # Dicts need key: value pairs, so they don't go through
        # _repr_iterable.
        n = len(x)
        if n == 0: return '{}'
        if level <= 0: return '{...}'
        newlevel = level - 1
        repr1 = self.repr1
        pieces = []
        for key in islice(_possibly_sorted(x), self.maxdict):
            keyrepr = repr1(key, newlevel)
            valrepr = repr1(x[key], newlevel)
            pieces.append('%s: %s' % (keyrepr, valrepr))
        if n > self.maxdict: pieces.append('...')
        s = ', '.join(pieces)
        return '{%s}' % (s,)
    def repr_str(self, x, level):
        s = builtins.repr(x[:self.maxstring])
        if len(s) > self.maxstring:
            # Keep roughly equal head and tail around a middle '...'
            # (3 chars reserved for the ellipsis itself).
            i = max(0, (self.maxstring-3)//2)
            j = max(0, self.maxstring-3-i)
            s = builtins.repr(x[:i] + x[len(x)-j:])
            s = s[:i] + '...' + s[len(s)-j:]
        return s
    def repr_int(self, x, level):
        s = builtins.repr(x) # XXX Hope this isn't too slow...
        if len(s) > self.maxlong:
            # Same head/tail elision as repr_str, applied to the digits.
            i = max(0, (self.maxlong-3)//2)
            j = max(0, self.maxlong-3-i)
            s = s[:i] + '...' + s[len(s)-j:]
        return s
    def repr_instance(self, x, level):
        """Fallback for types without a dedicated repr_* method."""
        try:
            s = builtins.repr(x)
            # Bugs in x.__repr__() can cause arbitrary
            # exceptions -- then make up something
        except Exception:
            return '<%s instance at %x>' % (x.__class__.__name__, id(x))
        if len(s) > self.maxother:
            i = max(0, (self.maxother-3)//2)
            j = max(0, self.maxother-3-i)
            s = s[:i] + '...' + s[len(s)-j:]
        return s
def _possibly_sorted(x):
# Since not all sequences of items can be sorted and comparison
# functions may raise arbitrary exceptions, return an unsorted
# sequence in that case.
try:
return sorted(x)
except Exception:
return list(x)
aRepr = Repr()
repr = aRepr.repr
| lgpl-3.0 |
hongruiqi/uboot | board/pxa255_idp/pxa_reg_calcs.py | 267 | 11108 | #!/usr/bin/python
# (C) Copyright 2004
# BEC Systems <http://bec-systems.com>
# Cliff Brake <cliff.brake@gmail.com>
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of
# the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307 USA
# calculations for PXA255 registers
class gpio:
    """Per-pin PXA255 GPIO configuration.

    dir/set/clr feed the GPDR/GPSR/GPCR register bits for the pin and
    alt selects the 2-bit GAFR alternate-function code.
    """
    # Class-level fallbacks (note: strings, unlike the ints __init__ stores).
    dir = '0'
    set = '0'
    clr = '0'
    alt = '0'
    desc = ''
    def __init__(self, dir=0, set=0, clr=0, alt=0, desc=''):
        """Store the per-pin settings on the instance."""
        self.dir, self.set, self.clr = dir, set, clr
        self.alt, self.desc = alt, desc
# the following is a dictionary of all GPIOs in the system
# the key is the GPIO number
# Each value is the list of the four alternate-function names for the pin,
# indexed by the 2-bit GAFR code (index 0 is always plain 'gpio').
pxa255_alt_func = {
    0: ['gpio', 'none', 'none', 'none'],
    1: ['gpio', 'gpio reset', 'none', 'none'],
    2: ['gpio', 'none', 'none', 'none'],
    3: ['gpio', 'none', 'none', 'none'],
    4: ['gpio', 'none', 'none', 'none'],
    5: ['gpio', 'none', 'none', 'none'],
    6: ['gpio', 'MMC clk', 'none', 'none'],
    7: ['gpio', '48MHz clock', 'none', 'none'],
    8: ['gpio', 'MMC CS0', 'none', 'none'],
    9: ['gpio', 'MMC CS1', 'none', 'none'],
    10: ['gpio', 'RTC Clock', 'none', 'none'],
    11: ['gpio', '3.6MHz', 'none', 'none'],
    12: ['gpio', '32KHz', 'none', 'none'],
    13: ['gpio', 'none', 'MBGNT', 'none'],
    14: ['gpio', 'MBREQ', 'none', 'none'],
    15: ['gpio', 'none', 'nCS_1', 'none'],
    16: ['gpio', 'none', 'PWM0', 'none'],
    17: ['gpio', 'none', 'PWM1', 'none'],
    18: ['gpio', 'RDY', 'none', 'none'],
    19: ['gpio', 'DREQ[1]', 'none', 'none'],
    20: ['gpio', 'DREQ[0]', 'none', 'none'],
    21: ['gpio', 'none', 'none', 'none'],
    22: ['gpio', 'none', 'none', 'none'],
    23: ['gpio', 'none', 'SSP SCLK', 'none'],
    24: ['gpio', 'none', 'SSP SFRM', 'none'],
    25: ['gpio', 'none', 'SSP TXD', 'none'],
    26: ['gpio', 'SSP RXD', 'none', 'none'],
    27: ['gpio', 'SSP EXTCLK', 'none', 'none'],
    28: ['gpio', 'AC97 bitclk in, I2S bitclock out', 'I2S bitclock in', 'none'],
    29: ['gpio', 'AC97 SDATA_IN0', 'I2S SDATA_IN', 'none'],
    30: ['gpio', 'I2S SDATA_OUT', 'AC97 SDATA_OUT', 'none'],
    31: ['gpio', 'I2S SYNC', 'AC97 SYNC', 'none'],
    32: ['gpio', 'AC97 SDATA_IN1', 'I2S SYSCLK', 'none'],
    33: ['gpio', 'none', 'nCS_5', 'none'],
    34: ['gpio', 'FF RXD', 'MMC CS0', 'none'],
    35: ['gpio', 'FF CTS', 'none', 'none'],
    36: ['gpio', 'FF DCD', 'none', 'none'],
    37: ['gpio', 'FF DSR', 'none', 'none'],
    38: ['gpio', 'FF RI', 'none', 'none'],
    39: ['gpio', 'MMC CS1', 'FF TXD', 'none'],
    40: ['gpio', 'none', 'FF DTR', 'none'],
    41: ['gpio', 'none', 'FF RTS', 'none'],
    42: ['gpio', 'BT RXD', 'none', 'HW RXD'],
    43: ['gpio', 'none', 'BT TXD', 'HW TXD'],
    44: ['gpio', 'BT CTS', 'none', 'HW CTS'],
    45: ['gpio', 'none', 'BT RTS', 'HW RTS'],
    46: ['gpio', 'ICP_RXD', 'STD RXD', 'none'],
    47: ['gpio', 'STD TXD', 'ICP_TXD', 'none'],
    48: ['gpio', 'HW TXD', 'nPOE', 'none'],
    49: ['gpio', 'HW RXD', 'nPWE', 'none'],
    50: ['gpio', 'HW CTS', 'nPIOR', 'none'],
    51: ['gpio', 'nPIOW', 'HW RTS', 'none'],
    52: ['gpio', 'none', 'nPCE[1]', 'none'],
    53: ['gpio', 'MMC CLK', 'nPCE[2]', 'none'],
    54: ['gpio', 'MMC CLK', 'nPSKSEL', 'none'],
    55: ['gpio', 'none', 'nPREG', 'none'],
    56: ['gpio', 'nPWAIT', 'none', 'none'],
    57: ['gpio', 'nIOIS16', 'none', 'none'],
    58: ['gpio', 'none', 'LDD[0]', 'none'],
    59: ['gpio', 'none', 'LDD[1]', 'none'],
    60: ['gpio', 'none', 'LDD[2]', 'none'],
    61: ['gpio', 'none', 'LDD[3]', 'none'],
    62: ['gpio', 'none', 'LDD[4]', 'none'],
    63: ['gpio', 'none', 'LDD[5]', 'none'],
    64: ['gpio', 'none', 'LDD[6]', 'none'],
    65: ['gpio', 'none', 'LDD[7]', 'none'],
    66: ['gpio', 'MBREQ', 'LDD[8]', 'none'],
    67: ['gpio', 'MMC CS0', 'LDD[9]', 'none'],
    68: ['gpio', 'MMC CS1', 'LDD[10]', 'none'],
    69: ['gpio', 'MMC CLK', 'LDD[11]', 'none'],
    70: ['gpio', 'RTC CLK', 'LDD[12]', 'none'],
    71: ['gpio', '3.6 MHz', 'LDD[13]', 'none'],
    72: ['gpio', '32 KHz', 'LDD[14]', 'none'],
    73: ['gpio', 'MBGNT', 'LDD[15]', 'none'],
    74: ['gpio', 'none', 'LCD_FCLK', 'none'],
    75: ['gpio', 'none', 'LCD_LCLK', 'none'],
    76: ['gpio', 'none', 'LCD_PCLK', 'none'],
    77: ['gpio', 'none', 'LCD_ACBIAS', 'none'],
    78: ['gpio', 'none', 'nCS_2', 'none'],
    79: ['gpio', 'none', 'nCS_3', 'none'],
    80: ['gpio', 'none', 'nCS_4', 'none'],
    81: ['gpio', 'NSSPSCLK', 'none', 'none'],
    82: ['gpio', 'NSSPSFRM', 'none', 'none'],
    83: ['gpio', 'NSSPTXD', 'NSSPRXD', 'none'],
    84: ['gpio', 'NSSPTXD', 'NSSPRXD', 'none'],
    }
#def __init__(self, dir=0, set=0, clr=0, alt=0, desc=''):
# Build one default gpio() entry per PXA255 pin (0..84), then override the
# pins this board actually configures.  Argument order is
# (dir, set, clr, alt, desc).
gpio_list = [gpio() for _ in range(85)]
#chip select GPIOs
gpio_list[18] = gpio(0, 0, 0, 1, 'RDY')
gpio_list[33] = gpio(1, 1, 0, 2, 'CS5#')
gpio_list[80] = gpio(1, 1, 0, 2, 'CS4#')
gpio_list[79] = gpio(1, 1, 0, 2, 'CS3#')
gpio_list[78] = gpio(1, 1, 0, 2, 'CS2#')
gpio_list[15] = gpio(1, 1, 0, 2, 'CS1#')
gpio_list[22] = gpio(0, 0, 0, 0, 'Consumer IR, PCC_S1_IRQ_O#')
gpio_list[21] = gpio(0, 0, 0, 0, 'IRQ_IDE, PFI')
gpio_list[19] = gpio(0, 0, 0, 0, 'XB_DREQ1, PCC_SO_IRQ_O#')
# (an exact duplicate of the following XB_DREQ0 assignment was removed --
# it was a no-op)
gpio_list[20] = gpio(0, 0, 0, 0, 'XB_DREQ0')
gpio_list[17] = gpio(0, 0, 0, 0, 'IRQ_AXB')
gpio_list[16] = gpio(1, 0, 0, 2, 'PWM0')
# PCMCIA stuff
gpio_list[57] = gpio(0, 0, 0, 1, 'PCC_IOIS16#')
gpio_list[56] = gpio(0, 0, 0, 1, 'PCC_WAIT#')
gpio_list[55] = gpio(1, 0, 0, 2, 'PCC_REG#')
gpio_list[54] = gpio(1, 0, 0, 2, 'PCC_SCKSEL')
gpio_list[53] = gpio(1, 1, 0, 2, 'PCC_CE2#')
gpio_list[52] = gpio(1, 1, 0, 2, 'PCC_CE1#')
gpio_list[51] = gpio(1, 1, 0, 1, 'PCC_IOW#')
gpio_list[50] = gpio(1, 1, 0, 2, 'PCC_IOR#')
gpio_list[49] = gpio(1, 1, 0, 2, 'PCC_WE#')
gpio_list[48] = gpio(1, 1, 0, 2, 'PCC_OE#')
# SSP port
gpio_list[26] = gpio(0, 0, 0, 1, 'SSP_RXD')
gpio_list[25] = gpio(0, 0, 0, 0, 'SSP_TXD')
gpio_list[24] = gpio(1, 0, 1, 2, 'SSP_SFRM')
gpio_list[23] = gpio(1, 0, 1, 2, 'SSP_SCLK')
gpio_list[27] = gpio(0, 0, 0, 0, 'SSP_EXTCLK')
# audio codec
gpio_list[32] = gpio(0, 0, 0, 0, 'AUD_SDIN1')
gpio_list[31] = gpio(1, 0, 0, 2, 'AC_SYNC')
gpio_list[30] = gpio(1, 0, 0, 2, 'AC_SDOUT')
gpio_list[29] = gpio(0, 0, 0, 1, 'AUD_SDIN0')
gpio_list[28] = gpio(0, 0, 0, 1, 'AC_BITCLK')
# serial ports
gpio_list[39] = gpio(1, 0, 0, 2, 'FF_TXD')
gpio_list[34] = gpio(0, 0, 0, 1, 'FF_RXD')
gpio_list[41] = gpio(1, 0, 0, 2, 'FF_RTS')
gpio_list[35] = gpio(0, 0, 0, 1, 'FF_CTS')
gpio_list[40] = gpio(1, 0, 0, 2, 'FF_DTR')
gpio_list[37] = gpio(0, 0, 0, 1, 'FF_DSR')
gpio_list[38] = gpio(0, 0, 0, 1, 'FF_RI')
gpio_list[36] = gpio(0, 0, 0, 1, 'FF_DCD')
gpio_list[43] = gpio(1, 0, 0, 2, 'BT_TXD')
gpio_list[42] = gpio(0, 0, 0, 1, 'BT_RXD')
gpio_list[45] = gpio(1, 0, 0, 2, 'BT_RTS')
gpio_list[44] = gpio(0, 0, 0, 1, 'BT_CTS')
gpio_list[47] = gpio(1, 0, 0, 1, 'IR_TXD')
gpio_list[46] = gpio(0, 0, 0, 2, 'IR_RXD')
# misc GPIO signals
gpio_list[14] = gpio(0, 0, 0, 0, 'MBREQ')
gpio_list[13] = gpio(0, 0, 0, 0, 'MBGNT')
gpio_list[12] = gpio(0, 0, 0, 0, 'GPIO_12/32K_CLK')
gpio_list[11] = gpio(0, 0, 0, 0, '3M6_CLK')
gpio_list[10] = gpio(1, 0, 1, 0, 'GPIO_10/RTC_CLK/debug LED')
gpio_list[9] = gpio(0, 0, 0, 0, 'MMC_CD#')
gpio_list[8] = gpio(0, 0, 0, 0, 'PCC_S1_CD#')
gpio_list[7] = gpio(0, 0, 0, 0, 'PCC_S0_CD#')
gpio_list[6] = gpio(1, 0, 0, 1, 'MMC_CLK')
gpio_list[5] = gpio(0, 0, 0, 0, 'IRQ_TOUCH#')
gpio_list[4] = gpio(0, 0, 0, 0, 'IRQ_ETH')
gpio_list[3] = gpio(0, 0, 0, 0, 'MQ_IRQ#')
gpio_list[2] = gpio(0, 0, 0, 0, 'BAT_DATA')
gpio_list[1] = gpio(0, 0, 0, 1, 'USER_RESET#')
# NOTE(review): pins 0 and 1 share the 'USER_RESET#' description -- confirm
# pin 0 is not a copy/paste slip.
gpio_list[0] = gpio(0, 0, 0, 1, 'USER_RESET#')
# LCD GPIOs
gpio_list[58] = gpio(1, 0, 0, 2, 'LDD0')
gpio_list[59] = gpio(1, 0, 0, 2, 'LDD1')
gpio_list[60] = gpio(1, 0, 0, 2, 'LDD2')
gpio_list[61] = gpio(1, 0, 0, 2, 'LDD3')
gpio_list[62] = gpio(1, 0, 0, 2, 'LDD4')
gpio_list[63] = gpio(1, 0, 0, 2, 'LDD5')
gpio_list[64] = gpio(1, 0, 0, 2, 'LDD6')
gpio_list[65] = gpio(1, 0, 0, 2, 'LDD7')
gpio_list[66] = gpio(1, 0, 0, 2, 'LDD8')
gpio_list[67] = gpio(1, 0, 0, 2, 'LDD9')
gpio_list[68] = gpio(1, 0, 0, 2, 'LDD10')
gpio_list[69] = gpio(1, 0, 0, 2, 'LDD11')
gpio_list[70] = gpio(1, 0, 0, 2, 'LDD12')
gpio_list[71] = gpio(1, 0, 0, 2, 'LDD13')
gpio_list[72] = gpio(1, 0, 0, 2, 'LDD14')
gpio_list[73] = gpio(1, 0, 0, 2, 'LDD15')
gpio_list[74] = gpio(1, 0, 0, 2, 'FCLK')
gpio_list[75] = gpio(1, 0, 0, 2, 'LCLK')
gpio_list[76] = gpio(1, 0, 0, 2, 'PCLK')
gpio_list[77] = gpio(1, 0, 0, 2, 'ACBIAS')
# calculate registers
# Accumulator for the computed GPIO register values; filled in by
# stuff_bits() below and printed at the end of the script.
pxa_regs = {
    'gpdr0':0, 'gpdr1':0, 'gpdr2':0,
    'gpsr0':0, 'gpsr1':0, 'gpsr2':0,
    'gpcr0':0, 'gpcr1':0, 'gpcr2':0,
    'gafr0_l':0, 'gafr0_u':0,
    'gafr1_l':0, 'gafr1_u':0,
    'gafr2_l':0, 'gafr2_u':0,
    }
# U-boot define names
# Maps each pxa_regs key to the CONFIG_SYS_* macro name emitted for U-Boot.
uboot_reg_names = {
    'gpdr0':'CONFIG_SYS_GPDR0_VAL', 'gpdr1':'CONFIG_SYS_GPDR1_VAL', 'gpdr2':'CONFIG_SYS_GPDR2_VAL',
    'gpsr0':'CONFIG_SYS_GPSR0_VAL', 'gpsr1':'CONFIG_SYS_GPSR1_VAL', 'gpsr2':'CONFIG_SYS_GPSR2_VAL',
    'gpcr0':'CONFIG_SYS_GPCR0_VAL', 'gpcr1':'CONFIG_SYS_GPCR1_VAL', 'gpcr2':'CONFIG_SYS_GPCR2_VAL',
    'gafr0_l':'CONFIG_SYS_GAFR0_L_VAL', 'gafr0_u':'CONFIG_SYS_GAFR0_U_VAL',
    'gafr1_l':'CONFIG_SYS_GAFR1_L_VAL', 'gafr1_u':'CONFIG_SYS_GAFR1_U_VAL',
    'gafr2_l':'CONFIG_SYS_GAFR2_L_VAL', 'gafr2_u':'CONFIG_SYS_GAFR2_U_VAL',
    }
# bit mappings
# Each entry maps a half-open span of GPIO numbers onto registers: 'shift'
# is the field width in bits per pin (1 for dir/set/clr registers, 2 for
# the GAFR alternate-function fields), and 'regs' names which gpio
# attribute feeds which pxa_regs key.
bit_mappings = [
    { 'gpio':(0,32), 'shift':1, 'regs':{'dir':'gpdr0', 'set':'gpsr0', 'clr':'gpcr0'} },
    { 'gpio':(32,64), 'shift':1, 'regs':{'dir':'gpdr1', 'set':'gpsr1', 'clr':'gpcr1'} },
    { 'gpio':(64,85), 'shift':1, 'regs':{'dir':'gpdr2', 'set':'gpsr2', 'clr':'gpcr2'} },
    { 'gpio':(0,16), 'shift':2, 'regs':{'alt':'gafr0_l'} },
    { 'gpio':(16,32), 'shift':2, 'regs':{'alt':'gafr0_u'} },
    { 'gpio':(32,48), 'shift':2, 'regs':{'alt':'gafr1_l'} },
    { 'gpio':(48,64), 'shift':2, 'regs':{'alt':'gafr1_u'} },
    { 'gpio':(64,80), 'shift':2, 'regs':{'alt':'gafr2_l'} },
    { 'gpio':(80,85), 'shift':2, 'regs':{'alt':'gafr2_u'} },
]
def stuff_bits(bit_mapping, gpio_list):
    """OR the per-pin values for one register group into the global pxa_regs.

    bit_mapping describes the GPIO span covered, the per-pin field width
    ('shift') and which gpio attribute ('dir'/'set'/'clr'/'alt') feeds
    which pxa_regs key.
    """
    gpios = range(bit_mapping['gpio'][0], bit_mapping['gpio'][1])
    for gpio in gpios:
        for reg in bit_mapping['regs'].keys():
            # getattr() instead of eval(): same attribute lookup without
            # compiling and executing dynamically-built source.
            value = getattr(gpio_list[gpio], reg)
            if value:
                # we have a high bit
                bit_shift = (gpio - bit_mapping['gpio'][0]) * bit_mapping['shift']
                bit = value << (bit_shift)
                pxa_regs[bit_mapping['regs'][reg]] |= bit
# Fill pxa_regs from the per-pin configuration.
for mapping in bit_mappings:
    stuff_bits(mapping, gpio_list)
# now print out all regs
# sorted() replaces keys()/list.sort(): identical order, but also works on
# Python 3 where dict.keys() is a view without .sort().  The print calls use
# the function form, which emits the same output on Python 2 and 3.
registers = sorted(pxa_regs.keys())
for reg in registers:
    print('%s: 0x%x' % (reg, pxa_regs[reg]))
# print define to past right into U-Boot source code
print('')
print('')
for reg in registers:
    print('#define %s 0x%x' % (uboot_reg_names[reg], pxa_regs[reg]))
# print all GPIOS
print('')
print('')
for i, gpio_i in enumerate(gpio_list):
    alt_func_desc = pxa255_alt_func[i][gpio_i.alt]
    print('GPIO: %i, dir=%i, set=%i, clr=%i, alt=%s, desc=%s' % (
        i, gpio_i.dir, gpio_i.set, gpio_i.clr, alt_func_desc, gpio_i.desc))
| gpl-2.0 |
camptocamp/odoo | addons/point_of_sale/controllers/main.py | 9 | 3010 | # -*- coding: utf-8 -*-
import logging
import simplejson
import os
import openerp
import time
import random
from openerp import http
from openerp.http import request
from openerp.addons.web.controllers.main import module_boot, login_redirect
_logger = logging.getLogger(__name__)  # Module-level logger (unused in the code shown here).
html_template = """<!DOCTYPE html>
<html>
<head>
<title>OpenERP POS</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1"/>
<meta http-equiv="content-type" content="text/html, charset=utf-8" />
<meta name="viewport" content=" width=1024, user-scalable=no">
<meta name="apple-mobile-web-app-capable" content="yes">
<meta name="mobile-web-app-capable" content="yes">
<link rel="shortcut icon" sizes="196x196" href="/point_of_sale/static/src/img/touch-icon-196.png">
<link rel="shortcut icon" sizes="128x128" href="/point_of_sale/static/src/img/touch-icon-128.png">
<link rel="apple-touch-icon" href="/point_of_sale/static/src/img/touch-icon-iphone.png">
<link rel="apple-touch-icon" sizes="76x76" href="/point_of_sale/static/src/img/touch-icon-ipad.png">
<link rel="apple-touch-icon" sizes="120x120" href="/point_of_sale/static/src/img/touch-icon-iphone-retina.png">
<link rel="apple-touch-icon" sizes="152x152" href="/point_of_sale/static/src/img/touch-icon-ipad-retina.png">
<link rel="shortcut icon" href="/web/static/src/img/favicon.ico" type="image/x-icon"/>
<link rel="stylesheet" href="/point_of_sale/static/src/fonts/lato/stylesheet.css" />
<link rel="stylesheet" href="/point_of_sale/static/src/fonts/font-awesome-4.0.3/css/font-awesome.min.css" />
<link rel="stylesheet" href="/point_of_sale/static/src/css/pos.css" />
<link rel="stylesheet" href="/point_of_sale/static/src/css/keyboard.css" />
<script type="text/javascript" src="/web/js/web.assets_backend"></script>
<script type="text/javascript">
$(function() {
var s = new openerp.init(%(modules)s);
%(init)s
});
</script>
</head>
<body>
<!--[if lte IE 8]>
<script src="//ajax.googleapis.com/ajax/libs/chrome-frame/1/CFInstall.min.js"></script>
<script>CFInstall.check({mode: "overlay"});</script>
<![endif]-->
</body>
</html>
"""
class PosController(http.Controller):
    """HTTP controller serving the Point Of Sale single-page client."""

    @http.route('/pos/web', type='http', auth='none')
    def a(self, debug=False, **k):
        """Render the POS client page, redirecting to login when there is
        no authenticated session."""
        if not request.session.uid:
            return login_redirect()
        # Fill the static page shell with the module list and boot script.
        return html_template % {
            'modules': simplejson.dumps(module_boot(request.db)),
            'init': """
            var wc = new s.web.WebClient();
            wc.show_application = function(){
                wc.action_manager.do_action("pos.ui");
            };
            wc.appendTo($(document.body));
            """
            }
| agpl-3.0 |
aktech/sympy | sympy/physics/quantum/tests/test_represent.py | 124 | 5124 | from sympy import Float, I, Integer, Matrix
from sympy.external import import_module
from sympy.utilities.pytest import skip
from sympy.physics.quantum.dagger import Dagger
from sympy.physics.quantum.represent import (represent, rep_innerproduct,
rep_expectation, enumerate_states)
from sympy.physics.quantum.state import Bra, Ket
from sympy.physics.quantum.operator import Operator, OuterProduct
from sympy.physics.quantum.tensorproduct import TensorProduct
from sympy.physics.quantum.tensorproduct import matrix_tensor_product
from sympy.physics.quantum.commutator import Commutator
from sympy.physics.quantum.anticommutator import AntiCommutator
from sympy.physics.quantum.innerproduct import InnerProduct
from sympy.physics.quantum.matrixutils import (numpy_ndarray,
scipy_sparse_matrix, to_numpy,
to_scipy_sparse, to_sympy)
from sympy.physics.quantum.cartesian import XKet, XOp, XBra
from sympy.physics.quantum.qapply import qapply
from sympy.physics.quantum.operatorset import operators_to_state
# Fixed matrices used as the known representations of the test operators
# and states defined below.
Amat = Matrix([[1, I], [-I, 1]])
Bmat = Matrix([[1, 2], [3, 4]])
Avec = Matrix([[1], [I]])
class AKet(Ket):
    """Test ket whose representation in the A-operator basis is Avec."""
    @classmethod
    def dual_class(cls):
        # Idiom fix: a classmethod's first argument is conventionally `cls`,
        # not `self` (it receives the class, not an instance).
        return ABra

    def _represent_default_basis(self, **options):
        """Fall back to the A-operator representation."""
        return self._represent_AOp(None, **options)

    def _represent_AOp(self, basis, **options):
        # Fixed column-vector representation used by the tests.
        return Avec
class ABra(Bra):
    """Dual bra of AKet."""
    @classmethod
    def dual_class(cls):
        # Idiom fix: a classmethod's first argument is conventionally `cls`,
        # not `self` (it receives the class, not an instance).
        return AKet
class AOp(Operator):
    """Test operator represented by the fixed matrix Amat."""
    def _represent_default_basis(self, **options):
        return self._represent_AOp(None, **options)

    def _represent_AOp(self, basis, **options):
        return Amat
class BOp(Operator):
    """Test operator represented by the fixed matrix Bmat.

    NOTE(review): this class overrides ``_represent_AOp`` (not a
    ``_represent_BOp``), apparently so representation in the A basis
    yields Bmat -- the format tests below all pass ``basis=A``.  Confirm
    this naming is intended.
    """
    def _represent_default_basis(self, **options):
        return self._represent_AOp(None, **options)

    def _represent_AOp(self, basis, **options):
        return Bmat
# Shared fixtures: a ket/bra pair and the two test operators.
k = AKet('a')
b = ABra('a')
A = AOp('A')
B = BOp('B')
# (expression, expected sympy matrix) pairs exercised by the format tests.
_tests = [
    # Bra
    (b, Dagger(Avec)),
    (Dagger(b), Avec),
    # Ket
    (k, Avec),
    (Dagger(k), Dagger(Avec)),
    # Operator
    (A, Amat),
    (Dagger(A), Dagger(Amat)),
    # OuterProduct
    (OuterProduct(k, b), Avec*Avec.H),
    # TensorProduct
    (TensorProduct(A, B), matrix_tensor_product(Amat, Bmat)),
    # Pow
    (A**2, Amat**2),
    # Add/Mul
    (A*B + 2*A, Amat*Bmat + 2*Amat),
    # Commutator
    (Commutator(A, B), Amat*Bmat - Bmat*Amat),
    # AntiCommutator
    (AntiCommutator(A, B), Amat*Bmat + Bmat*Amat),
    # InnerProduct
    (InnerProduct(b, k), (Avec.H*Avec)[0])
]
def test_format_sympy():
    """Every fixture expression must represent() to its known sympy matrix."""
    for expr, expected in _tests:
        assert represent(expr, basis=A, format='sympy') == to_sympy(expected)
def test_scalar_sympy():
    """Scalars pass through represent() unchanged in sympy format."""
    for scalar in (Integer(1), Float(1.0), 1.0 + I):
        assert represent(scalar) == scalar
np = import_module('numpy')  # None when numpy is unavailable; tests skip then.
def test_format_numpy():
    """Every fixture expression must represent() to its numpy counterpart."""
    if not np:
        skip("numpy not installed.")
    for expr, expected in _tests:
        actual = represent(expr, basis=A, format='numpy')
        reference = to_numpy(expected)
        if isinstance(actual, numpy_ndarray):
            # Arrays compare elementwise; require all entries equal.
            assert (actual == reference).all()
        else:
            assert actual == reference
def test_scalar_numpy():
    """Scalars convert to plain Python numbers in numpy format."""
    if not np:
        skip("numpy not installed.")
    cases = [(Integer(1), 1), (Float(1.0), 1.0), (1.0 + I, 1.0 + 1.0j)]
    for scalar, expected in cases:
        assert represent(scalar, format='numpy') == expected
scipy = import_module('scipy', __import__kwargs={'fromlist': ['sparse']})  # None when scipy is unavailable.
def test_format_scipy_sparse():
    """Every fixture expression must represent() to its scipy.sparse form."""
    if not np:
        skip("numpy not installed.")
    if not scipy:
        skip("scipy not installed.")
    for expr, expected in _tests:
        actual = represent(expr, basis=A, format='scipy.sparse')
        reference = to_scipy_sparse(expected)
        if isinstance(actual, scipy_sparse_matrix):
            # Sparse matrices have no elementwise ==; compare via the norm
            # of the dense difference.
            assert np.linalg.norm((actual - reference).todense()) == 0.0
        else:
            assert actual == reference
def test_scalar_scipy_sparse():
    """Scalars convert to plain Python numbers in scipy.sparse format."""
    if not np:
        skip("numpy not installed.")
    if not scipy:
        skip("scipy not installed.")
    cases = [(Integer(1), 1), (Float(1.0), 1.0), (1.0 + I, 1.0 + 1.0j)]
    for scalar, expected in cases:
        assert represent(scalar, format='scipy.sparse') == expected
# Position-basis fixtures for the innerproduct/expectation/enumerate tests.
x_ket = XKet('x')
x_bra = XBra('x')
x_op = XOp('X')
def test_innerprod_represent():
    """rep_innerproduct works for kets/bras and rejects operators."""
    assert rep_innerproduct(x_ket) == InnerProduct(XBra("x_1"), x_ket).doit()
    assert rep_innerproduct(x_bra) == InnerProduct(x_bra, XKet("x_1")).doit()
    # rep_innerproduct only makes sense for states, so an operator must
    # raise TypeError.  The previous version returned True from the except
    # block and None otherwise, so the test silently passed even when no
    # TypeError was raised.
    try:
        rep_innerproduct(x_op)
    except TypeError:
        pass
    else:
        raise AssertionError("rep_innerproduct(x_op) should raise TypeError")
def test_operator_represent():
    """rep_expectation(X) equals <x_2|X|x_1> in the enumerated basis."""
    basis_kets = enumerate_states(operators_to_state(x_op), 1, 2)
    expected = qapply(basis_kets[1].dual * x_op * basis_kets[0])
    assert rep_expectation(x_op) == expected
def test_enumerate_states():
    """enumerate_states supports both (start, count) and explicit index lists."""
    ket = XKet("foo")
    assert enumerate_states(ket, 1, 1) == [XKet("foo_1")]
    expected = [XKet("foo_1"), XKet("foo_2"), XKet("foo_4")]
    assert enumerate_states(ket, [1, 2, 4]) == expected
| bsd-3-clause |
ikoula/cloudstack | tools/ngui/config.py | 7 | 1025 | #Licensed to the Apache Software Foundation (ASF) under one
#or more contributor license agreements. See the NOTICE file
#distributed with this work for additional information
#regarding copyright ownership. The ASF licenses this file
#to you under the Apache License, Version 2.0 (the
#"License"); you may not use this file except in compliance
#with the License. You may obtain a copy of the License at
#http://www.apache.org/licenses/LICENSE-2.0
#Unless required by applicable law or agreed to in writing,
#software distributed under the License is distributed on an
#"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
#KIND, either express or implied. See the License for the
#specific language governing permissions and limitations
#under the License.
# Connection settings for the CloudStack management server API.
# NOTE(review): API credentials are hard-coded in source -- these look like
# sample/dev keys, but confirm no live secrets are committed here.
apikey='DNi_vTVLPNfTEFuqu5F9MrPI3iecf8iRQ3QtGUH1IM2Nd96wNwNlf7BzmF1W8aw6cE2ejZCgyE53wT5VpzauuA'
secretkey='x4jM12uE4LNho3ZNJa8J-Ve6WsgEXd8df1mGGfeuJHMtolkaSBkD5pLX0tvj8YrWhBgtZbKgYsTB00kb7z_3BA'
path='/client/api'  # API endpoint path on the management server
host='localhost'  # management server host
port='8080'  # management server port (kept as a string)
protocol='http'
| gpl-2.0 |
martinbuc/missionplanner | Lib/site-packages/scipy/optimize/tests/test_linesearch.py | 53 | 8856 | """
Tests for line search routines
"""
import sys
from numpy.testing import assert_, assert_equal, dec, \
assert_array_almost_equal, assert_array_almost_equal_nulp
import scipy.optimize.linesearch as ls
import numpy as np
def assert_wolfe(s, phi, derphi, c1=1e-4, c2=0.9, err_msg=""):
    """
    Check that strong Wolfe conditions apply
    """
    phi1, phi0 = phi(s), phi(0)
    derphi0, derphi1 = derphi(0), derphi(s)
    msg = "s = %s; phi(0) = %s; phi(s) = %s; phi'(0) = %s; phi'(s) = %s; %s" % (
            s, phi0, phi1, derphi0, derphi1, err_msg)
    # Sufficient-decrease (Armijo) part of the Wolfe conditions.
    assert_(phi1 <= phi0 + c1*s*derphi0, "Wolfe 1 failed: "+ msg)
    # Strong curvature condition.
    assert_(abs(derphi1) <= abs(c2*derphi0), "Wolfe 2 failed: "+ msg)
def assert_armijo(s, phi, c1=1e-4, err_msg=""):
    """
    Check that Armijo condition applies
    """
    phi0, phi1 = phi(0), phi(s)
    msg = "s = %s; phi(0) = %s; phi(s) = %s; %s" % (s, phi0, phi1, err_msg)
    # Sufficient-decrease test relative to phi(0).
    assert_(phi1 <= (1 - c1*s)*phi0, msg)
def assert_line_wolfe(x, p, s, f, fprime, **kw):
    """Check the strong Wolfe conditions for step s along the line x + t*p."""
    line_phi = lambda sp: f(x + p*sp)
    line_derphi = lambda sp: np.dot(fprime(x + p*sp), p)
    assert_wolfe(s, phi=line_phi, derphi=line_derphi, **kw)
def assert_line_armijo(x, p, s, f, **kw):
    """Check the Armijo condition for step s along the line x + t*p."""
    line_phi = lambda sp: f(x + p*sp)
    assert_armijo(s, phi=line_phi, **kw)
def assert_fp_equal(x, y, err_msg="", nulp=50):
    """Assert two arrays are equal, up to some floating-point rounding error"""
    try:
        assert_array_almost_equal_nulp(x, y, nulp)
    except AssertionError as e:
        # `except X as e` works on Python 2.6+ and Python 3; the old
        # `except X, e` form is a syntax error under Python 3.
        raise AssertionError("%s\n%s" % (e, err_msg))
class TestLineSearch(object):
    """Tests for the scipy.optimize.linesearch routines.

    Generator-style numpy.testing/nose-era suite: ``__init__`` harvests the
    ``_scalar_func_*`` and ``_line_func_*`` methods into (name, value-func,
    derivative-func) tuples, and ``scalar_iter``/``line_iter`` generate
    random test cases from them.  ``self.fcount`` counts objective/gradient
    evaluations so the searches' reported call counts can be checked.
    """
    # -- scalar functions; must have dphi(0.) < 0
    def _scalar_func_1(self, s):
        self.fcount += 1
        p = -s - s**3 + s**4
        dp = -1 - 3*s**2 + 4*s**3
        return p, dp
    def _scalar_func_2(self, s):
        self.fcount += 1
        p = np.exp(-4*s) + s**2
        dp = -4*np.exp(-4*s) + 2*s
        return p, dp
    def _scalar_func_3(self, s):
        self.fcount += 1
        p = -np.sin(10*s)
        dp = -10*np.cos(10*s)
        return p, dp
    # -- n-d functions
    def _line_func_1(self, x):
        self.fcount += 1
        f = np.dot(x, x)
        df = 2*x
        return f, df
    def _line_func_2(self, x):
        self.fcount += 1
        f = np.dot(x, np.dot(self.A, x)) + 1
        df = np.dot(self.A + self.A.T, x)
        return f, df
    # --
    def __init__(self):
        # Collect the value/derivative pairs once; bind_index splits each
        # (p, dp)-returning method into two single-valued callables.
        self.scalar_funcs = []
        self.line_funcs = []
        self.N = 20
        self.fcount = 0
        def bind_index(func, idx):
            # Remember Python's closure semantics!
            return lambda *a, **kw: func(*a, **kw)[idx]
        for name in sorted(dir(self)):
            if name.startswith('_scalar_func_'):
                value = getattr(self, name)
                self.scalar_funcs.append(
                    (name, bind_index(value, 0), bind_index(value, 1)))
            elif name.startswith('_line_func_'):
                value = getattr(self, name)
                self.line_funcs.append(
                    (name, bind_index(value, 0), bind_index(value, 1)))
    def setUp(self):
        # Fixed seed keeps the random test matrices/directions reproducible.
        np.random.seed(1234)
        self.A = np.random.randn(self.N, self.N)
    def scalar_iter(self):
        # Yield (name, phi, derphi, old_phi0) cases for the scalar searches.
        for name, phi, derphi in self.scalar_funcs:
            for old_phi0 in np.random.randn(3):
                yield name, phi, derphi, old_phi0
    def line_iter(self):
        # Yield 9 random descent-direction cases per n-d function.
        for name, f, fprime in self.line_funcs:
            k = 0
            while k < 9:
                x = np.random.randn(self.N)
                p = np.random.randn(self.N)
                if np.dot(p, fprime(x)) >= 0:
                    # always pick a descent direction
                    continue
                k += 1
                old_fv = float(np.random.randn())
                yield name, f, fprime, x, p, old_fv
    # -- Generic scalar searches
    @dec.knownfailureif(sys.platform == 'cli', "Line search isn't supported on IronPython due to issues w/ bytes vs strings (strings are unicode by default)")
    def test_scalar_search_wolfe1(self):
        c = 0
        for name, phi, derphi, old_phi0 in self.scalar_iter():
            c += 1
            s, phi1, phi0 = ls.scalar_search_wolfe1(phi, derphi, phi(0),
                                                    old_phi0, derphi(0))
            assert_fp_equal(phi0, phi(0), name)
            assert_fp_equal(phi1, phi(s), name)
            assert_wolfe(s, phi, derphi, err_msg=name)
        assert_(c > 3) # check that the iterator really works...
    def test_scalar_search_wolfe2(self):
        for name, phi, derphi, old_phi0 in self.scalar_iter():
            s, phi1, phi0, derphi1 = ls.scalar_search_wolfe2(
                phi, derphi, phi(0), old_phi0, derphi(0))
            assert_fp_equal(phi0, phi(0), name)
            assert_fp_equal(phi1, phi(s), name)
            if derphi1 is not None:
                assert_fp_equal(derphi1, derphi(s), name)
            assert_wolfe(s, phi, derphi, err_msg="%s %g" % (name, old_phi0))
    def test_scalar_search_armijo(self):
        for name, phi, derphi, old_phi0 in self.scalar_iter():
            s, phi1 = ls.scalar_search_armijo(phi, phi(0), derphi(0))
            assert_fp_equal(phi1, phi(s), name)
            assert_armijo(s, phi, err_msg="%s %g" % (name, old_phi0))
    # -- Generic line searches
    @dec.knownfailureif(sys.platform == 'cli', "Line search isn't supported on IronPython due to issues w/ bytes vs strings (strings are unicode by default)")
    def test_line_search_wolfe1(self):
        c = 0
        smax = 100
        for name, f, fprime, x, p, old_f in self.line_iter():
            f0 = f(x)
            g0 = fprime(x)
            self.fcount = 0
            s, fc, gc, fv, ofv, gv = ls.line_search_wolfe1(f, fprime, x, p,
                                                           g0, f0, old_f,
                                                           amax=smax)
            # fc+gc must match the observed number of evaluations.
            assert_equal(self.fcount, fc+gc)
            assert_fp_equal(ofv, f(x))
            if s is None:
                continue
            assert_fp_equal(fv, f(x + s*p))
            assert_array_almost_equal(gv, fprime(x + s*p), decimal=14)
            if s < smax:
                c += 1
                assert_line_wolfe(x, p, s, f, fprime, err_msg=name)
        assert_(c > 3) # check that the iterator really works...
    def test_line_search_wolfe2(self):
        c = 0
        smax = 100
        for name, f, fprime, x, p, old_f in self.line_iter():
            f0 = f(x)
            g0 = fprime(x)
            self.fcount = 0
            s, fc, gc, fv, ofv, gv = ls.line_search_wolfe2(f, fprime, x, p,
                                                           g0, f0, old_f,
                                                           amax=smax)
            assert_equal(self.fcount, fc+gc)
            assert_fp_equal(ofv, f(x))
            assert_fp_equal(fv, f(x + s*p))
            if gv is not None:
                assert_array_almost_equal(gv, fprime(x + s*p), decimal=14)
            if s < smax:
                c += 1
                assert_line_wolfe(x, p, s, f, fprime, err_msg=name)
        assert_(c > 3) # check that the iterator really works...
    def test_line_search_armijo(self):
        c = 0
        for name, f, fprime, x, p, old_f in self.line_iter():
            f0 = f(x)
            g0 = fprime(x)
            self.fcount = 0
            s, fc, fv = ls.line_search_armijo(f, x, p, g0, f0)
            c += 1
            assert_equal(self.fcount, fc)
            assert_fp_equal(fv, f(x + s*p))
            assert_line_armijo(x, p, s, f, err_msg=name)
        assert_(c >= 9)
    # -- More specific tests
    def test_armijo_terminate_1(self):
        # Armijo should evaluate the function only once if the trial step
        # is already suitable
        count = [0]
        def phi(s):
            count[0] += 1
            return -s + 0.01*s**2
        s, phi1 = ls.scalar_search_armijo(phi, phi(0), -1, alpha0=1)
        assert_equal(s, 1)
        assert_equal(count[0], 2)
        assert_armijo(s, phi)
    @dec.knownfailureif(sys.platform == 'cli', "Line search isn't supported on IronPython due to issues w/ bytes vs strings (strings are unicode by default)")
    def test_wolfe_terminate(self):
        # wolfe1 and wolfe2 should also evaluate the function only a few
        # times if the trial step is already suitable
        def phi(s):
            count[0] += 1
            return -s + 0.05*s**2
        def derphi(s):
            count[0] += 1
            return -1 + 0.05*2*s
        for func in [ls.scalar_search_wolfe1, ls.scalar_search_wolfe2]:
            count = [0]
            r = func(phi, derphi, phi(0), None, derphi(0))
            assert_(r[0] is not None, (r, func))
            assert_(count[0] <= 2 + 2, (count, func))
            assert_wolfe(r[0], phi, derphi, err_msg=str(func))
| gpl-3.0 |
vineodd/PIMSim | GEM5Simulation/gem5/src/dev/arm/RealView.py | 2 | 44262 | # Copyright (c) 2009-2018 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Copyright (c) 2006-2007 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Ali Saidi
# Gabe Black
# William Wang
# Glenn Bergmans
import m5
from m5.defines import buildEnv
from m5.params import *
from m5.proxy import *
from m5.util.fdthelper import *

from ClockDomain import ClockDomain
from VoltageDomain import VoltageDomain
from Device import BasicPioDevice, PioDevice, IsaFake, BadAddr, DmaDevice
from PciHost import *
from Ethernet import NSGigE, IGbE_igb, IGbE_e1000
from Ide import *
from Platform import Platform
from Terminal import Terminal
from Uart import Uart
from SimpleMemory import SimpleMemory
from Gic import *
from EnergyCtrl import EnergyCtrl
from ClockedObject import ClockedObject
from ClockDomain import SrcClockDomain
from SubSystem import SubSystem
from Graphics import ImageFormat
from ClockedObject import ClockedObject
from PS2 import *
from VirtIOMMIO import MmioVirtIO
# Platforms with KVM support should generally use in-kernel GIC
# emulation. Use a GIC model that automatically switches between
# gem5's GIC model and KVM's GIC model if KVM is available.
try:
    from KvmGic import MuxingKvmGic
    kvm_gicv2_class = MuxingKvmGic
except ImportError:
    # KVM support wasn't compiled into gem5. Fallback to a
    # software-only GIC.
    kvm_gicv2_class = GicV2
    pass  # NOTE(review): redundant after the assignment above.
class AmbaPioDevice(BasicPioDevice):
    """Abstract base for memory-mapped (PIO) AMBA peripherals."""
    type = 'AmbaPioDevice'
    abstract = True
    cxx_header = "dev/arm/amba_device.hh"
    amba_id = Param.UInt32("ID of AMBA device for kernel detection")
class AmbaIntDevice(AmbaPioDevice):
    """Abstract AMBA PIO device that can post an interrupt to the GIC."""
    type = 'AmbaIntDevice'
    abstract = True
    cxx_header = "dev/arm/amba_device.hh"
    gic = Param.BaseGic(Parent.any, "Gic to use for interrupting")
    int_num = Param.UInt32("Interrupt number that connects to GIC")
    int_delay = Param.Latency("100ns",
            "Time between action and interrupt generation by device")
class AmbaDmaDevice(DmaDevice):
    """Abstract DMA-capable AMBA device with a PIO slave interface and a
    GIC interrupt line."""
    type = 'AmbaDmaDevice'
    abstract = True
    cxx_header = "dev/arm/amba_device.hh"
    pio_addr = Param.Addr("Address for AMBA slave interface")
    pio_latency = Param.Latency("10ns", "Time between action and write/read result by AMBA DMA Device")
    gic = Param.BaseGic(Parent.any, "Gic to use for interrupting")
    int_num = Param.UInt32("Interrupt number that connects to GIC")
    amba_id = Param.UInt32("ID of AMBA device for kernel detection")
class A9SCU(BasicPioDevice):
    """Cortex-A9 SCU device model (see dev/arm/a9scu.hh)."""
    type = 'A9SCU'
    cxx_header = "dev/arm/a9scu.hh"
# PCI interrupt-routing policies understood by GenericArmPciHost.
class ArmPciIntRouting(Enum): vals = [
    'ARM_PCI_INT_STATIC',
    'ARM_PCI_INT_DEV',
    'ARM_PCI_INT_PIN',
    ]
class GenericArmPciHost(GenericPciHost):
    """Generic PCI host controller with ARM/device-tree specifics."""
    type = 'GenericArmPciHost'
    cxx_header = "dev/arm/pci_host.hh"
    int_policy = Param.ArmPciIntRouting("PCI interrupt routing policy")
    int_base = Param.Unsigned("PCI interrupt base")
    int_count = Param.Unsigned("Maximum number of interrupts used by this host")
    def generateDeviceTree(self, state):
        """Emit the 'pci' device-tree node for this host.

        Only the ARM_PCI_INT_DEV routing policy is supported here; other
        policies abort DTB generation.
        """
        local_state = FdtState(addr_cells=3, size_cells=2, cpu_cells=1)
        # NOTE(review): local is misspelled ("intterrupt_cells") -- harmless,
        # rename when convenient.
        intterrupt_cells = 1
        node = FdtNode("pci")
        # Compatibility string depends on the config-space addressing width.
        if int(self.conf_device_bits) == 8:
            node.appendCompatible("pci-host-cam-generic")
        elif int(self.conf_device_bits) == 12:
            node.appendCompatible("pci-host-ecam-generic")
        else:
            # NOTE(review): uses m5.fatal -- confirm the `m5` package itself
            # is imported in this file (only m5 submodules are imported above).
            m5.fatal("No compatibility string for the set conf_device_width")
        node.append(FdtPropertyStrings("device_type", ["pci"]))
        # Cell sizes of child nodes/peripherals
        node.append(local_state.addrCellsProperty())
        node.append(local_state.sizeCellsProperty())
        node.append(FdtPropertyWords("#interrupt-cells", intterrupt_cells))
        # PCI address for CPU
        node.append(FdtPropertyWords("reg",
            state.addrCells(self.conf_base) +
            state.sizeCells(self.conf_size) ))
        # Ranges mapping
        # For now some of this is hard coded, because the PCI module does not
        # have a proper full understanding of the memory map, but adapting the
        # PCI module is beyond the scope of what I'm trying to do here.
        # Values are taken from the VExpress_GEM5_V1 platform.
        ranges = []
        # Pio address range
        ranges += self.pciFdtAddr(space=1, addr=0)
        ranges += state.addrCells(self.pci_pio_base)
        ranges += local_state.sizeCells(0x10000)  # Fixed size
        # AXI memory address range
        ranges += self.pciFdtAddr(space=2, addr=0)
        ranges += state.addrCells(0x40000000) # Fixed offset
        ranges += local_state.sizeCells(0x40000000) # Fixed size
        node.append(FdtPropertyWords("ranges", ranges))
        if str(self.int_policy) == 'ARM_PCI_INT_DEV':
            int_phandle = state.phandle(self._parent.unproxy(self).gic)
            # Interrupt mapping
            interrupts = []
            for i in range(int(self.int_count)):
                interrupts += self.pciFdtAddr(device=i, addr=0) + \
                    [0x0, int_phandle, 0, int(self.int_base) - 32 + i, 1]
            node.append(FdtPropertyWords("interrupt-map", interrupts))
            int_count = int(self.int_count)
            if int_count & (int_count - 1):
                fatal("PCI interrupt count should be power of 2")
            intmask = self.pciFdtAddr(device=int_count - 1, addr=0) + [0x0]
            node.append(FdtPropertyWords("interrupt-map-mask", intmask))
        else:
            m5.fatal("Unsupported PCI interrupt policy " +
                     "for Device Tree generation")
        node.append(FdtProperty("dma-coherent"))
        yield node
class RealViewCtrl(BasicPioDevice):
    """RealView/Versatile Express system controller (SYS_* registers)."""
    type = 'RealViewCtrl'
    cxx_header = "dev/arm/rv_ctrl.hh"
    # Default ID register values; see the board TRM for the encodings.
    proc_id0 = Param.UInt32(0x0C000000, "Processor ID, SYS_PROCID")
    proc_id1 = Param.UInt32(0x0C000222, "Processor ID, SYS_PROCID1")
    idreg = Param.UInt32(0x00000000, "ID Register, SYS_ID")
    def generateDeviceTree(self, state):
        """Yield a 'sysreg' DT node for this controller.

        The node is also a GPIO controller and gets a phandle so that
        config-bus devices (oscillators, sensors) can reference it.
        """
        node = FdtNode("sysreg@%x" % long(self.pio_addr))
        node.appendCompatible("arm,vexpress-sysreg")
        node.append(FdtPropertyWords("reg",
                                     state.addrCells(self.pio_addr) +
                                     state.sizeCells(0x1000) ))
        node.append(FdtProperty("gpio-controller"))
        node.append(FdtPropertyWords("#gpio-cells", [2]))
        node.appendPhandle(self)
        yield node
class RealViewOsc(ClockDomain):
    """Programmable oscillator accessed through the RealView controller."""
    type = 'RealViewOsc'
    cxx_header = "dev/arm/rv_ctrl.hh"
    parent = Param.RealViewCtrl(Parent.any, "RealView controller")
    # TODO: We currently don't have the notion of a clock source,
    # which means we have to associate oscillators with a voltage
    # source.
    voltage_domain = Param.VoltageDomain(Parent.voltage_domain,
                                         "Voltage domain")
    # See ARM DUI 0447J (ARM Motherboard Express uATX -- V2M-P1) and
    # the individual core/logic tile reference manuals for details
    # about the site/position/dcc/device allocation.
    site = Param.UInt8("Board Site")
    position = Param.UInt8("Position in device stack")
    dcc = Param.UInt8("Daughterboard Configuration Controller")
    device = Param.UInt8("Device ID")
    freq = Param.Clock("Default frequency")
    def generateDeviceTree(self, state):
        """Yield a vexpress-osc DT node exposing this clock source."""
        phandle = state.phandle(self)
        node = FdtNode("osc@" + format(long(phandle), 'x'))
        node.appendCompatible("arm,vexpress-osc")
        node.append(FdtPropertyWords("arm,vexpress-sysreg,func",
                                     [0x1, int(self.device)]))
        node.append(FdtPropertyWords("#clock-cells", [0]))
        # Param.Clock stores a period, so invert to get the frequency in Hz.
        freq = int(1.0/self.freq.value) # Values are stored as a clock period
        node.append(FdtPropertyWords("freq-range", [freq, freq]))
        node.append(FdtPropertyStrings("clock-output-names",
                                       ["oscclk" + str(phandle)]))
        node.appendPhandle(self)
        yield node
class RealViewTemperatureSensor(SimObject):
    """Temperature sensor exposed through the RealView controller."""
    type = 'RealViewTemperatureSensor'
    cxx_header = "dev/arm/rv_ctrl.hh"
    parent = Param.RealViewCtrl(Parent.any, "RealView controller")
    system = Param.System(Parent.any, "system")
    # See ARM DUI 0447J (ARM Motherboard Express uATX -- V2M-P1) and
    # the individual core/logic tile reference manuals for details
    # about the site/position/dcc/device allocation.
    site = Param.UInt8("Board Site")
    position = Param.UInt8("Position in device stack")
    dcc = Param.UInt8("Daughterboard Configuration Controller")
    device = Param.UInt8("Device ID")
class VExpressMCC(SubSystem):
    """ARM V2M-P1 Motherboard Configuration Controller

    This subsystem describes a subset of the devices that sit behind the
    motherboard configuration controller on the ARM Motherboard
    Express (V2M-P1) motherboard. See ARM DUI 0447J for details.
    """
    class Osc(RealViewOsc):
        site, position, dcc = (0, 0, 0)
    class Temperature(RealViewTemperatureSensor):
        site, position, dcc = (0, 0, 0)
    osc_mcc = Osc(device=0, freq="50MHz")
    osc_clcd = Osc(device=1, freq="23.75MHz")
    osc_peripheral = Osc(device=2, freq="24MHz")
    osc_system_bus = Osc(device=4, freq="24MHz")
    # See Table 4.19 in ARM DUI 0447J (Motherboard Express uATX TRM).
    temp_crtl = Temperature(device=0)
    def generateDeviceTree(self, state):
        """Yield a config-bus DT node wrapping every child device."""
        node = FdtNode("mcc")
        node.appendCompatible("arm,vexpress,config-bus")
        node.append(FdtPropertyWords("arm,vexpress,site", [0]))
        for obj in self._children.values():
            # isinstance() is the idiomatic equivalent of
            # issubclass(type(obj), ...); matches CoreTile2A15DCC.
            if isinstance(obj, SimObject):
                node.append(obj.generateDeviceTree(state))
        io_phandle = state.phandle(self.osc_mcc.parent.unproxy(self))
        node.append(FdtPropertyWords("arm,vexpress,config-bridge", io_phandle))
        yield node
class CoreTile2A15DCC(SubSystem):
    """ARM CoreTile Express A15x2 Daughterboard Configuration Controller

    This subsystem describes a subset of the devices that sit behind the
    daughterboard configuration controller on a CoreTile Express A15x2. See
    ARM DUI 0604E for details.
    """
    class Osc(RealViewOsc):
        site, position, dcc = (1, 0, 0)
    # See Table 2.8 in ARM DUI 0604E (CoreTile Express A15x2 TRM)
    osc_cpu = Osc(device=0, freq="60MHz")
    osc_hsbm = Osc(device=4, freq="40MHz")
    osc_pxl = Osc(device=5, freq="23.75MHz")
    osc_smb = Osc(device=6, freq="50MHz")
    osc_sys = Osc(device=7, freq="60MHz")
    osc_ddr = Osc(device=8, freq="40MHz")
    def generateDeviceTree(self, state):
        """Yield a config-bus DT node wrapping every child device."""
        node = FdtNode("dcc")
        node.appendCompatible("arm,vexpress,config-bus")
        for obj in self._children.values():
            if isinstance(obj, SimObject):
                node.append(obj.generateDeviceTree(state))
        io_phandle = state.phandle(self.osc_cpu.parent.unproxy(self))
        node.append(FdtPropertyWords("arm,vexpress,config-bridge", io_phandle))
        yield node
class VGic(PioDevice):
    """GIC virtualization extensions (HV control + VCPU interfaces)."""
    type = 'VGic'
    cxx_header = "dev/arm/vgic.hh"
    gic = Param.BaseGic(Parent.any, "Gic to use for interrupting")
    platform = Param.Platform(Parent.any, "Platform this device is part of.")
    vcpu_addr = Param.Addr(0, "Address for vcpu interfaces")
    hv_addr = Param.Addr(0, "Address for hv control")
    pio_delay = Param.Latency('10ns', "Delay for PIO r/w")
    # The number of list registers is not currently configurable at runtime.
    ppint = Param.UInt32("HV maintenance interrupt number")
    def generateDeviceTree(self, state):
        """Yield an interrupt-controller node covering GIC + virt regions."""
        gic = self.gic.unproxy(self)
        node = FdtNode("interrupt-controller")
        node.appendCompatible(["gem5,gic", "arm,cortex-a15-gic",
                               "arm,cortex-a9-gic"])
        node.append(FdtPropertyWords("#interrupt-cells", [3]))
        node.append(FdtPropertyWords("#address-cells", [0]))
        node.append(FdtProperty("interrupt-controller"))
        # reg = distributor, CPU interface, HV control, VCPU interface.
        regs = (
            state.addrCells(gic.dist_addr) +
            state.sizeCells(0x1000) +
            state.addrCells(gic.cpu_addr) +
            state.sizeCells(0x1000) +
            state.addrCells(self.hv_addr) +
            state.sizeCells(0x2000) +
            state.addrCells(self.vcpu_addr) +
            state.sizeCells(0x2000) )
        node.append(FdtPropertyWords("reg", regs))
        # Maintenance interrupt: PPI encoding (cell 0 == 1), number - 16.
        node.append(FdtPropertyWords("interrupts",
                                     [1, int(self.ppint)-16, 0xf04]))
        node.appendPhandle(gic)
        yield node
class AmbaFake(AmbaPioDevice):
    """Stub AMBA device: claims an address range but implements nothing."""
    type = 'AmbaFake'
    cxx_header = "dev/arm/amba_fake.hh"
    ignore_access = Param.Bool(False, "Ignore reads/writes to this device, (e.g. IsaFake + AMBA)")
    # Fake devices report an AMBA ID of zero (stray C-style ';' removed).
    amba_id = 0
class Pl011(Uart):
    """ARM PL011 UART."""
    type = 'Pl011'
    cxx_header = "dev/arm/pl011.hh"
    gic = Param.BaseGic(Parent.any, "Gic to use for interrupting")
    int_num = Param.UInt32("Interrupt number that connects to GIC")
    end_on_eot = Param.Bool(False, "End the simulation when a EOT is received on the UART")
    int_delay = Param.Latency("100ns", "Time between action and interrupt generation by UART")
    def generateDeviceTree(self, state):
        """Yield an arm,pl011 DT node with clocks from the VE platform."""
        node = self.generateBasicPioDeviceNode(state, 'uart', self.pio_addr,
                                               0x1000, [int(self.int_num)])
        node.appendCompatible(["arm,pl011", "arm,primecell"])
        # Hardcoded reference to the realview platform clocks, because the
        # clk_domain can only store one clock (i.e. it is not a VectorParam)
        realview = self._parent.unproxy(self)
        node.append(FdtPropertyWords("clocks",
                                     [state.phandle(realview.mcc.osc_peripheral),
                                      state.phandle(realview.dcc.osc_smb)]))
        node.append(FdtPropertyStrings("clock-names", ["uartclk", "apb_pclk"]))
        yield node
class Sp804(AmbaPioDevice):
    """ARM SP804 dual-timer module (two independent timers)."""
    type = 'Sp804'
    cxx_header = "dev/arm/timer_sp804.hh"
    gic = Param.BaseGic(Parent.any, "Gic to use for interrupting")
    int_num0 = Param.UInt32("Interrupt number that connects to GIC")
    clock0 = Param.Clock('1MHz', "Clock speed of the input")
    int_num1 = Param.UInt32("Interrupt number that connects to GIC")
    clock1 = Param.Clock('1MHz', "Clock speed of the input")
    amba_id = 0x00141804
class A9GlobalTimer(BasicPioDevice):
    """Cortex-A9 MPCore global timer."""
    type = 'A9GlobalTimer'
    cxx_header = "dev/arm/timer_a9global.hh"
    gic = Param.BaseGic(Parent.any, "Gic to use for interrupting")
    # Fixed "Interrrupt" typo in the user-visible parameter description.
    int_num = Param.UInt32("Interrupt number that connects to GIC")
class CpuLocalTimer(BasicPioDevice):
    """Cortex-A9 MPCore per-CPU private timer and watchdog."""
    type = 'CpuLocalTimer'
    cxx_header = "dev/arm/timer_cpulocal.hh"
    # Fixed "Interrrupt" typo in the user-visible parameter description.
    int_timer = Param.ArmPPI("Interrupt used per-cpu to GIC")
    int_watchdog = Param.ArmPPI("Interrupt for per-cpu watchdog to GIC")
class GenericTimer(ClockedObject):
    """ARM architected (generic) timer: secure/non-secure phys, virt, hyp."""
    type = 'GenericTimer'
    cxx_header = "dev/arm/generic_timer.hh"
    system = Param.ArmSystem(Parent.any, "system")
    int_phys_s = Param.ArmPPI("Physical (S) timer interrupt")
    int_phys_ns = Param.ArmPPI("Physical (NS) timer interrupt")
    int_virt = Param.ArmPPI("Virtual timer interrupt")
    int_hyp = Param.ArmPPI("Hypervisor timer interrupt")
    def generateDeviceTree(self, state):
        """Yield an armv7/armv8 timer node with the four PPIs."""
        node = FdtNode("timer")
        node.appendCompatible(["arm,cortex-a15-timer",
                               "arm,armv7-timer",
                               "arm,armv8-timer"])
        # Each entry: 1 == PPI, PPI number - 16, flags (0xf08 per the
        # GIC DT binding: CPU mask in bits[15:8], trigger in bits[3:0]).
        node.append(FdtPropertyWords("interrupts", [
            1, int(self.int_phys_s.num) - 16, 0xf08,
            1, int(self.int_phys_ns.num) - 16, 0xf08,
            1, int(self.int_virt.num) - 16, 0xf08,
            1, int(self.int_hyp.num) - 16, 0xf08,
        ]))
        clock = state.phandle(self.clk_domain.unproxy(self))
        node.append(FdtPropertyWords("clocks", clock))
        yield node
class GenericTimerMem(PioDevice):
    """Memory-mapped frame of the ARM generic timer."""
    type = 'GenericTimerMem'
    cxx_header = "dev/arm/generic_timer.hh"
    base = Param.Addr(0, "Base address")
    int_phys = Param.ArmSPI("Physical Interrupt")
    int_virt = Param.ArmSPI("Virtual Interrupt")
class PL031(AmbaIntDevice):
    """ARM PL031 real-time clock."""
    type = 'PL031'
    cxx_header = "dev/arm/rtc_pl031.hh"
    time = Param.Time('01/01/2009', "System time to use ('Now' for actual time)")
    amba_id = 0x00341031
    def generateDeviceTree(self, state):
        """Yield an arm,pl031 DT node for the RTC."""
        node = self.generateBasicPioDeviceNode(state, 'rtc', self.pio_addr,
                                               0x1000, [int(self.int_num)])
        node.appendCompatible(["arm,pl031", "arm,primecell"])
        clock = state.phandle(self.clk_domain.unproxy(self))
        node.append(FdtPropertyWords("clocks", clock))
        yield node
class Pl050(AmbaIntDevice):
    """ARM PL050 keyboard/mouse (PS/2) interface."""
    type = 'Pl050'
    cxx_header = "dev/arm/kmi.hh"
    amba_id = 0x00141050
    ps2 = Param.PS2Device("PS/2 device")
    def generateDeviceTree(self, state):
        """Yield an arm,pl050 DT node for this KMI."""
        node = self.generateBasicPioDeviceNode(state, 'kmi', self.pio_addr,
                                               0x1000, [int(self.int_num)])
        node.appendCompatible(["arm,pl050", "arm,primecell"])
        clock = state.phandle(self.clk_domain.unproxy(self))
        node.append(FdtPropertyWords("clocks", clock))
        yield node
class Pl111(AmbaDmaDevice):
    """ARM PL111 color LCD controller (DMA framebuffer device)."""
    type = 'Pl111'
    cxx_header = "dev/arm/pl111.hh"
    pixel_clock = Param.Clock('24MHz', "Pixel clock")
    vnc = Param.VncInput(Parent.any, "Vnc server for remote frame buffer display")
    amba_id = 0x00141111
    enable_capture = Param.Bool(True, "capture frame to system.framebuffer.bmp")
class HDLcd(AmbaDmaDevice):
    """ARM HDLCD display controller."""
    type = 'HDLcd'
    cxx_header = "dev/arm/hdlcd.hh"
    vnc = Param.VncInput(Parent.any, "Vnc server for remote frame buffer "
                                     "display")
    amba_id = 0x00141000
    workaround_swap_rb = Param.Bool(False, "Workaround incorrect color "
                                    "selector order in some kernels")
    workaround_dma_line_count = Param.Bool(True, "Workaround incorrect "
                                           "DMA line count (off by 1)")
    enable_capture = Param.Bool(True, "capture frame to "
                                "system.framebuffer.{extension}")
    frame_format = Param.ImageFormat("Auto",
                                     "image format of the captured frame")
    pixel_buffer_size = Param.MemorySize32("2kB", "Size of address range")
    pxl_clk = Param.ClockDomain("Pixel clock source")
    pixel_chunk = Param.Unsigned(32, "Number of pixels to handle in one batch")
    virt_refresh_rate = Param.Frequency("20Hz", "Frame refresh rate "
                                        "in KVM mode")
    def generateDeviceTree(self, state):
        """Yield an arm,hdlcd DT node (disabled by default, see below)."""
        # Interrupt number is hardcoded; it is not a property of this class
        node = self.generateBasicPioDeviceNode(state, 'hdlcd',
                                               self.pio_addr, 0x1000, [63])
        node.appendCompatible(["arm,hdlcd"])
        node.append(FdtPropertyWords("clocks", state.phandle(self.pxl_clk)))
        node.append(FdtPropertyStrings("clock-names", ["pxlclk"]))
        # This driver is disabled by default since the required DT nodes
        # haven't been standardized yet. To use it, override this status to
        # "ok" and add the display configuration nodes required by the driver.
        # See the driver for more information.
        node.append(FdtPropertyStrings("status", ["disabled"]))
        yield node
class RealView(Platform):
    """Base class for ARM RealView/Versatile Express platform models."""
    type = 'RealView'
    cxx_header = "dev/arm/realview.hh"
    system = Param.System(Parent.any, "system")
    # Physical memory ranges usable by this platform (base, size).
    _mem_regions = [(Addr(0), Addr('256MB'))]
    def _on_chip_devices(self):
        """Devices attached to the on-chip (core-side) bus; none by default."""
        return []
    def _off_chip_devices(self):
        """Devices attached behind the I/O bridge; none by default."""
        return []
    _off_chip_ranges = []
    def _attach_device(self, device, bus, dma_ports=None):
        """Hook up a device's pio (and, if present, dma) ports to *bus*."""
        if hasattr(device, "pio"):
            device.pio = bus.master
        if hasattr(device, "dma"):
            # Collect DMA ports instead of wiring them when a list is given.
            if dma_ports is None:
                device.dma = bus.slave
            else:
                dma_ports.append(device.dma)
    def _attach_io(self, devices, *args, **kwargs):
        """Attach each device in *devices* via _attach_device."""
        for d in devices:
            self._attach_device(d, *args, **kwargs)
    def _attach_clk(self, devices, clkdomain):
        """Assign *clkdomain* to every device that has a clk_domain."""
        for d in devices:
            if hasattr(d, "clk_domain"):
                d.clk_domain = clkdomain
    def attachPciDevices(self):
        # Platforms without PCI support simply do nothing here.
        pass
    def enableMSIX(self):
        # Overridden by platforms that provide a GICv2m MSI frame.
        pass
    def onChipIOClkDomain(self, clkdomain):
        self._attach_clk(self._on_chip_devices(), clkdomain)
    def offChipIOClkDomain(self, clkdomain):
        self._attach_clk(self._off_chip_devices(), clkdomain)
    def attachOnChipIO(self, bus, bridge=None, *args, **kwargs):
        """Attach on-chip I/O and program the bridge's forwarded ranges."""
        self._attach_io(self._on_chip_devices(), bus, *args, **kwargs)
        if bridge:
            bridge.ranges = self._off_chip_ranges
    def attachIO(self, *args, **kwargs):
        """Attach all off-chip I/O devices."""
        self._attach_io(self._off_chip_devices(), *args, **kwargs)
    def setupBootLoader(self, mem_bus, cur_sys, loc):
        """Create boot memory, connect it, and select the boot loader."""
        cur_sys.bootmem = SimpleMemory(
            range = AddrRange('2GB', size = '64MB'),
            conf_table_reported = False)
        if mem_bus is not None:
            cur_sys.bootmem.port = mem_bus.master
        cur_sys.boot_loader = loc('boot.arm')
        cur_sys.atags_addr = 0x100
        cur_sys.load_offset = 0
    def generateDeviceTree(self, state):
        """Yield the DT root node with all child devices attached."""
        node = FdtNode("/") # Things in this module need to end up in the root
        node.append(FdtPropertyWords("interrupt-parent",
                                     state.phandle(self.gic)))
        for subnode in self.recurseDeviceTree(state):
            node.append(subnode)
        yield node
    def annotateCpuDeviceNode(self, cpu, state):
        """Add spin-table boot properties to a CPU DT node."""
        cpu.append(FdtPropertyStrings("enable-method", "spin-table"))
        cpu.append(FdtPropertyWords("cpu-release-addr", \
                                    state.addrCells(0x8000fff8)))
# Reference for memory map and interrupt number
# RealView Platform Baseboard Explore for Cortex-A9 User Guide(ARM DUI 0440A)
# Chapter 4: Programmer's Reference
class RealViewPBX(RealView):
    """RealView Platform Baseboard Explore for Cortex-A9 (PBX-A9).

    Memory map and interrupt numbers follow ARM DUI 0440A, Chapter 4
    (Programmer's Reference).
    """
    uart = Pl011(pio_addr=0x10009000, int_num=44)
    realview_io = RealViewCtrl(pio_addr=0x10000000)
    mcc = VExpressMCC()
    dcc = CoreTile2A15DCC()
    gic = GicV2(cpu_addr=0x1f000100, dist_addr=0x1f001000, cpu_size=0x100)
    pci_host = GenericPciHost(
        conf_base=0x30000000, conf_size='256MB', conf_device_bits=16,
        pci_pio_base=0)
    timer0 = Sp804(int_num0=36, int_num1=36, pio_addr=0x10011000)
    timer1 = Sp804(int_num0=37, int_num1=37, pio_addr=0x10012000)
    global_timer = A9GlobalTimer(int_num=27, pio_addr=0x1f000200)
    local_cpu_timer = CpuLocalTimer(int_timer=ArmPPI(num=29),
                                    int_watchdog=ArmPPI(num=30),
                                    pio_addr=0x1f000600)
    clcd = Pl111(pio_addr=0x10020000, int_num=55)
    kmi0 = Pl050(pio_addr=0x10006000, int_num=52, ps2=PS2Keyboard())
    kmi1 = Pl050(pio_addr=0x10007000, int_num=53, ps2=PS2TouchKit())
    a9scu = A9SCU(pio_addr=0x1f000000)
    cf_ctrl = IdeController(disks=[], pci_func=0, pci_dev=7, pci_bus=2,
                            io_shift = 1, ctrl_offset = 2, Command = 0x1,
                            BAR0 = 0x18000000, BAR0Size = '16B',
                            BAR1 = 0x18000100, BAR1Size = '1B',
                            BAR0LegacyIO = True, BAR1LegacyIO = True)
    l2x0_fake = IsaFake(pio_addr=0x1f002000, pio_size=0xfff)
    flash_fake = IsaFake(pio_addr=0x40000000, pio_size=0x20000000,
                         fake_mem=True)
    dmac_fake = AmbaFake(pio_addr=0x10030000)
    uart1_fake = AmbaFake(pio_addr=0x1000a000)
    uart2_fake = AmbaFake(pio_addr=0x1000b000)
    uart3_fake = AmbaFake(pio_addr=0x1000c000)
    smc_fake = AmbaFake(pio_addr=0x100e1000)
    sp810_fake = AmbaFake(pio_addr=0x10001000, ignore_access=True)
    watchdog_fake = AmbaFake(pio_addr=0x10010000)
    gpio0_fake = AmbaFake(pio_addr=0x10013000)
    gpio1_fake = AmbaFake(pio_addr=0x10014000)
    gpio2_fake = AmbaFake(pio_addr=0x10015000)
    ssp_fake = AmbaFake(pio_addr=0x1000d000)
    sci_fake = AmbaFake(pio_addr=0x1000e000)
    aaci_fake = AmbaFake(pio_addr=0x10004000)
    mmc_fake = AmbaFake(pio_addr=0x10005000)
    rtc = PL031(pio_addr=0x10017000, int_num=42)
    energy_ctrl = EnergyCtrl(pio_addr=0x1000f000)
    # Attach I/O devices that are on chip and also set the appropriate
    # ranges for the bridge
    def attachOnChipIO(self, bus, bridge):
        self.gic.pio = bus.master
        self.l2x0_fake.pio = bus.master
        self.a9scu.pio = bus.master
        self.global_timer.pio = bus.master
        self.local_cpu_timer.pio = bus.master
        # Bridge ranges based on excluding what is part of on-chip I/O
        # (gic, l2x0, a9scu, local_cpu_timer)
        bridge.ranges = [AddrRange(self.realview_io.pio_addr,
                                   self.a9scu.pio_addr - 1),
                         AddrRange(self.flash_fake.pio_addr,
                                   self.flash_fake.pio_addr + \
                                   self.flash_fake.pio_size - 1)]
    # Set the clock domain for IO objects that are considered
    # to be "close" to the cores.
    def onChipIOClkDomain(self, clkdomain):
        self.gic.clk_domain = clkdomain
        self.l2x0_fake.clk_domain = clkdomain
        # Bug fix: was 'self.a9scu.clkdomain' (missing underscore), which
        # assigned a nonexistent attribute instead of the clock domain.
        self.a9scu.clk_domain = clkdomain
        self.local_cpu_timer.clk_domain = clkdomain
    # Attach I/O devices to specified bus object.  Can't do this
    # earlier, since the bus object itself is typically defined at the
    # System level.
    def attachIO(self, bus):
        self.uart.pio = bus.master
        self.realview_io.pio = bus.master
        self.pci_host.pio = bus.master
        self.timer0.pio = bus.master
        self.timer1.pio = bus.master
        self.clcd.pio = bus.master
        self.clcd.dma = bus.slave
        self.kmi0.pio = bus.master
        self.kmi1.pio = bus.master
        self.cf_ctrl.pio = bus.master
        self.cf_ctrl.dma = bus.slave
        self.dmac_fake.pio = bus.master
        self.uart1_fake.pio = bus.master
        self.uart2_fake.pio = bus.master
        self.uart3_fake.pio = bus.master
        self.smc_fake.pio = bus.master
        self.sp810_fake.pio = bus.master
        self.watchdog_fake.pio = bus.master
        self.gpio0_fake.pio = bus.master
        self.gpio1_fake.pio = bus.master
        self.gpio2_fake.pio = bus.master
        self.ssp_fake.pio = bus.master
        self.sci_fake.pio = bus.master
        self.aaci_fake.pio = bus.master
        self.mmc_fake.pio = bus.master
        self.rtc.pio = bus.master
        self.flash_fake.pio = bus.master
        self.energy_ctrl.pio = bus.master
    # Set the clock domain for IO objects that are considered
    # to be "far" away from the cores.
    def offChipIOClkDomain(self, clkdomain):
        self.uart.clk_domain = clkdomain
        self.realview_io.clk_domain = clkdomain
        self.timer0.clk_domain = clkdomain
        self.timer1.clk_domain = clkdomain
        self.clcd.clk_domain = clkdomain
        self.kmi0.clk_domain = clkdomain
        self.kmi1.clk_domain = clkdomain
        self.cf_ctrl.clk_domain = clkdomain
        self.dmac_fake.clk_domain = clkdomain
        self.uart1_fake.clk_domain = clkdomain
        self.uart2_fake.clk_domain = clkdomain
        self.uart3_fake.clk_domain = clkdomain
        self.smc_fake.clk_domain = clkdomain
        self.sp810_fake.clk_domain = clkdomain
        self.watchdog_fake.clk_domain = clkdomain
        self.gpio0_fake.clk_domain = clkdomain
        self.gpio1_fake.clk_domain = clkdomain
        self.gpio2_fake.clk_domain = clkdomain
        self.ssp_fake.clk_domain = clkdomain
        self.sci_fake.clk_domain = clkdomain
        self.aaci_fake.clk_domain = clkdomain
        self.mmc_fake.clk_domain = clkdomain
        self.rtc.clk_domain = clkdomain
        self.flash_fake.clk_domain = clkdomain
        self.energy_ctrl.clk_domain = clkdomain
class VExpress_EMM(RealView):
    """Versatile Express with the gem5 'EMM' extended memory map."""
    _mem_regions = [(Addr('2GB'), Addr('2GB'))]
    # Ranges based on excluding what is part of on-chip I/O (gic,
    # a9scu)
    _off_chip_ranges = [AddrRange(0x2F000000, size='16MB'),
                        AddrRange(0x30000000, size='256MB'),
                        AddrRange(0x40000000, size='512MB'),
                        AddrRange(0x18000000, size='64MB'),
                        AddrRange(0x1C000000, size='64MB')]
    # Platform control device (off-chip)
    realview_io = RealViewCtrl(proc_id0=0x14000000, proc_id1=0x14000000,
                               idreg=0x02250000, pio_addr=0x1C010000)
    mcc = VExpressMCC()
    dcc = CoreTile2A15DCC()
    ### On-chip devices ###
    gic = GicV2(dist_addr=0x2C001000, cpu_addr=0x2C002000)
    vgic = VGic(vcpu_addr=0x2c006000, hv_addr=0x2c004000, ppint=25)
    local_cpu_timer = CpuLocalTimer(int_timer=ArmPPI(num=29),
                                    int_watchdog=ArmPPI(num=30),
                                    pio_addr=0x2C080000)
    hdlcd = HDLcd(pxl_clk=dcc.osc_pxl,
                  pio_addr=0x2b000000, int_num=117,
                  workaround_swap_rb=True)
    def _on_chip_devices(self):
        """On-chip devices; gicv2m is included only after enableMSIX()."""
        devices = [
            self.gic, self.vgic,
            self.local_cpu_timer
        ]
        if hasattr(self, "gicv2m"):
            devices.append(self.gicv2m)
        devices.append(self.hdlcd)
        return devices
    ### Off-chip devices ###
    uart = Pl011(pio_addr=0x1c090000, int_num=37)
    pci_host = GenericPciHost(
        conf_base=0x30000000, conf_size='256MB', conf_device_bits=16,
        pci_pio_base=0)
    generic_timer = GenericTimer(int_phys_s=ArmPPI(num=29),
                                 int_phys_ns=ArmPPI(num=30),
                                 int_virt=ArmPPI(num=27),
                                 int_hyp=ArmPPI(num=26))
    timer0 = Sp804(int_num0=34, int_num1=34, pio_addr=0x1C110000, clock0='1MHz', clock1='1MHz')
    timer1 = Sp804(int_num0=35, int_num1=35, pio_addr=0x1C120000, clock0='1MHz', clock1='1MHz')
    clcd = Pl111(pio_addr=0x1c1f0000, int_num=46)
    kmi0 = Pl050(pio_addr=0x1c060000, int_num=44, ps2=PS2Keyboard())
    kmi1 = Pl050(pio_addr=0x1c070000, int_num=45, ps2=PS2TouchKit())
    cf_ctrl = IdeController(disks=[], pci_func=0, pci_dev=0, pci_bus=2,
                            io_shift = 2, ctrl_offset = 2, Command = 0x1,
                            BAR0 = 0x1C1A0000, BAR0Size = '256B',
                            BAR1 = 0x1C1A0100, BAR1Size = '4096B',
                            BAR0LegacyIO = True, BAR1LegacyIO = True)
    vram = SimpleMemory(range = AddrRange(0x18000000, size='32MB'),
                        conf_table_reported = False)
    rtc = PL031(pio_addr=0x1C170000, int_num=36)
    l2x0_fake = IsaFake(pio_addr=0x2C100000, pio_size=0xfff)
    uart1_fake = AmbaFake(pio_addr=0x1C0A0000)
    uart2_fake = AmbaFake(pio_addr=0x1C0B0000)
    uart3_fake = AmbaFake(pio_addr=0x1C0C0000)
    sp810_fake = AmbaFake(pio_addr=0x1C020000, ignore_access=True)
    watchdog_fake = AmbaFake(pio_addr=0x1C0F0000)
    aaci_fake = AmbaFake(pio_addr=0x1C040000)
    lan_fake = IsaFake(pio_addr=0x1A000000, pio_size=0xffff)
    usb_fake = IsaFake(pio_addr=0x1B000000, pio_size=0x1ffff)
    mmc_fake = AmbaFake(pio_addr=0x1c050000)
    energy_ctrl = EnergyCtrl(pio_addr=0x1c080000)
    def _off_chip_devices(self):
        """Off-chip devices; PCI ide/ethernet added by attachPciDevices()."""
        devices = [
            self.uart,
            self.realview_io,
            self.pci_host,
            self.timer0,
            self.timer1,
            self.clcd,
            self.kmi0,
            self.kmi1,
            self.cf_ctrl,
            self.rtc,
            self.vram,
            self.l2x0_fake,
            self.uart1_fake,
            self.uart2_fake,
            self.uart3_fake,
            self.sp810_fake,
            self.watchdog_fake,
            self.aaci_fake,
            self.lan_fake,
            self.usb_fake,
            self.mmc_fake,
            self.energy_ctrl,
        ]
        # Try to attach the I/O if it exists
        if hasattr(self, "ide"):
            devices.append(self.ide)
        if hasattr(self, "ethernet"):
            devices.append(self.ethernet)
        return devices
    # Attach any PCI devices that are supported
    def attachPciDevices(self):
        self.ethernet = IGbE_e1000(pci_bus=0, pci_dev=0, pci_func=0,
                                   InterruptLine=1, InterruptPin=1)
        self.ide = IdeController(disks = [], pci_bus=0, pci_dev=1, pci_func=0,
                                 InterruptLine=2, InterruptPin=2)
    def enableMSIX(self):
        """Replace the GIC with one supporting MSI-X via a GICv2m frame."""
        self.gic = GicV2(dist_addr=0x2C001000, cpu_addr=0x2C002000, it_lines=512)
        self.gicv2m = Gicv2m()
        self.gicv2m.frames = [Gicv2mFrame(spi_base=256, spi_len=64, addr=0x2C1C0000)]
    def setupBootLoader(self, mem_bus, cur_sys, loc):
        """Create EMM boot memory and select the 32-bit boot loader."""
        cur_sys.bootmem = SimpleMemory(range = AddrRange('64MB'),
                                       conf_table_reported = False)
        if mem_bus is not None:
            cur_sys.bootmem.port = mem_bus.master
        if not cur_sys.boot_loader:
            cur_sys.boot_loader = loc('boot_emm.arm')
        cur_sys.atags_addr = 0x8000000
        cur_sys.load_offset = 0x80000000
class VExpress_EMM64(VExpress_EMM):
    """64-bit variant of VExpress_EMM with a larger memory map."""
    # Three memory regions are specified totalling 512GB
    _mem_regions = [(Addr('2GB'), Addr('2GB')), (Addr('34GB'), Addr('30GB')),
                    (Addr('512GB'), Addr('480GB'))]
    pci_host = GenericPciHost(
        conf_base=0x30000000, conf_size='256MB', conf_device_bits=12,
        pci_pio_base=0x2f000000)
    def setupBootLoader(self, mem_bus, cur_sys, loc):
        """Create boot memory and select the 64-bit boot loader."""
        cur_sys.bootmem = SimpleMemory(range=AddrRange(0, size='64MB'),
                                       conf_table_reported=False)
        if mem_bus is not None:
            cur_sys.bootmem.port = mem_bus.master
        if not cur_sys.boot_loader:
            cur_sys.boot_loader = loc('boot_emm.arm64')
        cur_sys.atags_addr = 0x8000000
        cur_sys.load_offset = 0x80000000
class VExpress_GEM5_V1_Base(RealView):
    """
    The VExpress gem5 memory map is loosely based on a modified
    Versatile Express RS1 memory map.

    The gem5 platform has been designed to implement a subset of the
    original Versatile Express RS1 memory map. Off-chip peripherals should,
    when possible, adhere to the Versatile Express memory map. Non-PCI
    off-chip devices that are gem5-specific should live in the CS5 memory
    space to avoid conflicts with existing devices that we might want to
    model in the future. Such devices should normally have interrupts in
    the gem5-specific SPI range.

    On-chip peripherals are loosely modeled after the ARM CoreTile Express
    A15x2 A7x3 memory and interrupt map. In particular, the GIC and
    Generic Timer have the same interrupt lines and base addresses. Other
    on-chip devices are gem5 specific.

    Unlike the original Versatile Express RS2 extended platform, gem5 implements a
    large contiguous DRAM space, without aliases or holes, starting at the
    2GiB boundary. This means that PCI memory is limited to 1GiB.

    Memory map:
       0x00000000-0x03ffffff: Boot memory (CS0)
       0x04000000-0x07ffffff: Reserved
       0x08000000-0x0bffffff: Reserved (CS0 alias)
       0x0c000000-0x0fffffff: Reserved (Off-chip, CS4)
       0x10000000-0x13ffffff: gem5-specific peripherals (Off-chip, CS5)
           0x10000000-0x1000ffff: gem5 energy controller
           0x10010000-0x1001ffff: gem5 pseudo-ops
       0x14000000-0x17ffffff: Reserved (Off-chip, PSRAM, CS1)
       0x18000000-0x1bffffff: Reserved (Off-chip, Peripherals, CS2)
       0x1c000000-0x1fffffff: Peripheral block 1 (Off-chip, CS3):
           0x1c010000-0x1c01ffff: realview_io (VE system control regs.)
           0x1c060000-0x1c06ffff: KMI0 (keyboard)
           0x1c070000-0x1c07ffff: KMI1 (mouse)
           0x1c090000-0x1c09ffff: UART0
           0x1c0a0000-0x1c0affff: UART1 (reserved)
           0x1c0b0000-0x1c0bffff: UART2 (reserved)
           0x1c0c0000-0x1c0cffff: UART3 (reserved)
           0x1c130000-0x1c13ffff: VirtIO (gem5/FM extension)
           0x1c140000-0x1c14ffff: VirtIO (gem5/FM extension)
           0x1c170000-0x1c17ffff: RTC
       0x20000000-0x3fffffff: On-chip peripherals:
           0x2b000000-0x2b00ffff: HDLCD
           0x2c001000-0x2c001fff: GIC (distributor)
           0x2c002000-0x2c003fff: GIC (CPU interface)
           0x2c004000-0x2c005fff: vGIC (HV)
           0x2c006000-0x2c007fff: vGIC (VCPU)
           0x2c1c0000-0x2c1cffff: GICv2m MSI frame 0
           0x2d000000-0x2d00ffff: GPU (reserved)
           0x2f000000-0x2fffffff: PCI IO space
           0x30000000-0x3fffffff: PCI config space
       0x40000000-0x7fffffff: Ext. AXI: Used as PCI memory
       0x80000000-X: DRAM

    Interrupts:
         0- 15: Software generated interrupts (SGIs)
        16- 31: On-chip private peripherals (PPIs)
            25   : vgic
            26   : generic_timer (hyp)
            27   : generic_timer (virt)
            28   : Reserved (Legacy FIQ)
            29   : generic_timer (phys, sec)
            30   : generic_timer (phys, non-sec)
            31   : Reserved (Legacy IRQ)
        32- 95: Mother board peripherals (SPIs)
            32   : Reserved (SP805)
            33   : Reserved (IOFPGA SW int)
            34-35: Reserved (SP804)
            36   : RTC
            37-40: uart0-uart3
            41-42: Reserved (PL180)
            43   : Reserved (AACI)
            44-45: kmi0-kmi1
            46   : Reserved (CLCD)
            47   : Reserved (Ethernet)
            48   : Reserved (USB)
        95-255: On-chip interrupt sources (we use these for
                gem5-specific devices, SPIs)
            74    : VirtIO (gem5/FM extension)
            75    : VirtIO (gem5/FM extension)
            95    : HDLCD
            96- 98: GPU (reserved)
           100-103: PCI
       256-319: MSI frame 0 (gem5-specific, SPIs)
       320-511: Unused
    """
    # Everything above 2GiB is memory
    _mem_regions = [(Addr('2GB'), Addr('510GB'))]
    _off_chip_ranges = [
        # CS1-CS5
        AddrRange(0x0c000000, 0x1fffffff),
        # External AXI interface (PCI)
        AddrRange(0x2f000000, 0x7fffffff),
    ]
    # Platform control device (off-chip)
    realview_io = RealViewCtrl(proc_id0=0x14000000, proc_id1=0x14000000,
                               idreg=0x02250000, pio_addr=0x1c010000)
    mcc = VExpressMCC()
    dcc = CoreTile2A15DCC()
    ### On-chip devices ###
    gic = kvm_gicv2_class(dist_addr=0x2c001000, cpu_addr=0x2c002000,
                          it_lines=512)
    vgic = VGic(vcpu_addr=0x2c006000, hv_addr=0x2c004000, ppint=25)
    gicv2m = Gicv2m()
    gicv2m.frames = [
        Gicv2mFrame(spi_base=256, spi_len=64, addr=0x2c1c0000),
    ]
    generic_timer = GenericTimer(int_phys_s=ArmPPI(num=29),
                                 int_phys_ns=ArmPPI(num=30),
                                 int_virt=ArmPPI(num=27),
                                 int_hyp=ArmPPI(num=26))
    def _on_chip_devices(self):
        return [
            self.gic, self.vgic, self.gicv2m,
            self.generic_timer,
        ]
    ### Off-chip devices ###
    clock24MHz = SrcClockDomain(clock="24MHz",
                                voltage_domain=VoltageDomain(voltage="3.3V"))
    uart = [
        Pl011(pio_addr=0x1c090000, int_num=37),
    ]
    kmi0 = Pl050(pio_addr=0x1c060000, int_num=44, ps2=PS2Keyboard())
    kmi1 = Pl050(pio_addr=0x1c070000, int_num=45, ps2=PS2TouchKit())
    rtc = PL031(pio_addr=0x1c170000, int_num=36)
    ### gem5-specific off-chip devices ###
    pci_host = GenericArmPciHost(
        conf_base=0x30000000, conf_size='256MB', conf_device_bits=12,
        pci_pio_base=0x2f000000,
        int_policy="ARM_PCI_INT_DEV", int_base=100, int_count=4)
    energy_ctrl = EnergyCtrl(pio_addr=0x10000000)
    vio = [
        MmioVirtIO(pio_addr=0x1c130000, pio_size=0x1000,
                   interrupt=ArmSPI(num=74)),
        MmioVirtIO(pio_addr=0x1c140000, pio_size=0x1000,
                   interrupt=ArmSPI(num=75)),
    ]
    def _off_chip_devices(self):
        return [
            self.realview_io,
            self.uart[0],
            self.kmi0,
            self.kmi1,
            self.rtc,
            self.pci_host,
            self.energy_ctrl,
            self.clock24MHz,
            self.vio[0],
            self.vio[1],
        ]
    def attachPciDevice(self, device, *args, **kwargs):
        """Point *device* at this platform's PCI host, then attach it."""
        device.host = self.pci_host
        self._attach_device(device, *args, **kwargs)
    def setupBootLoader(self, mem_bus, cur_sys, loc):
        """Create boot memory, pick a boot loader, and configure m5ops."""
        cur_sys.bootmem = SimpleMemory(range=AddrRange(0, size='64MB'),
                                       conf_table_reported=False)
        if mem_bus is not None:
            cur_sys.bootmem.port = mem_bus.master
        if not cur_sys.boot_loader:
            cur_sys.boot_loader = [ loc('boot_emm.arm64'), loc('boot_emm.arm') ]
        cur_sys.atags_addr = 0x8000000
        cur_sys.load_offset = 0x80000000
        # Setup m5ops. It's technically not a part of the boot
        # loader, but this is the only place we can configure the
        # system.
        cur_sys.m5ops_base = 0x10010000
    def generateDeviceTree(self, state):
        """Yield the root DT node with VExpress identification properties."""
        # Generate using standard RealView function
        dt = list(super(VExpress_GEM5_V1_Base, self).generateDeviceTree(state))
        if len(dt) > 1:
            raise Exception("System returned too many DT nodes")
        node = dt[0]
        node.appendCompatible(["arm,vexpress"])
        node.append(FdtPropertyStrings("model", ["V2P-CA15"]))
        node.append(FdtPropertyWords("arm,hbi", [0x0]))
        node.append(FdtPropertyWords("arm,vexpress,site", [0xf]))
        yield node
class VExpress_GEM5_V1(VExpress_GEM5_V1_Base):
    """gem5 VExpress platform with an HDLCD display controller."""
    # HDLCD on gem5-specific SPI 95; see the base class memory map.
    hdlcd = HDLcd(pxl_clk=VExpress_GEM5_V1_Base.dcc.osc_pxl,
                  pio_addr=0x2b000000, int_num=95)
    def _on_chip_devices(self):
        """Extend the base platform's on-chip devices with the HDLCD."""
        return super(VExpress_GEM5_V1,self)._on_chip_devices() + [
                self.hdlcd,
            ]
| gpl-3.0 |
dragondjf/md2html | Cheetah/ImportManager.py | 16 | 17547 | """
Provides an emulator/replacement for Python's standard import system.
@@TR: Be warned that Import Hooks are in the deepest, darkest corner of Python's
jungle. If you need to start hacking with this, be prepared to get lost for a
while. Also note, this module predates the newstyle import hooks in Python 2.3
http://www.python.org/peps/pep-0302.html.
This is a hacked/documented version of Gordon McMillan's iu.py. I have:
- made it a little less terse
   - added docstrings and explanations
- standardized the variable naming scheme
- reorganized the code layout to enhance readability
"""
import sys
import imp
import marshal
# Module-level flag; presumably set once the import hooks are installed
# later in the file — TODO confirm against the full module.
_installed = False
# _globalOwnerTypes is defined at the bottom of this file
# Bootstrap replacements for os.* helpers, populated by _os_bootstrap().
_os_stat = _os_path_join = _os_getcwd = _os_path_dirname = None
##################################################
## FUNCTIONS
def _os_bootstrap():
    """Set up 'os' module replacement functions for use during import bootstrap.

    Populates the module-level _os_stat, _os_path_join, _os_path_dirname
    and _os_getcwd globals from whichever platform-specific builtin module
    is available ('posix', 'nt', 'dos', 'os2' or 'mac').  We cannot import
    'os' itself here because this code runs while import is being replaced.
    """
    names = sys.builtin_module_names
    join = dirname = None
    if 'posix' in names:
        sep = '/'
        from posix import stat, getcwd
    elif 'nt' in names:
        sep = '\\'
        from nt import stat, getcwd
    elif 'dos' in names:
        sep = '\\'
        from dos import stat, getcwd
    elif 'os2' in names:
        sep = '\\'
        from os2 import stat, getcwd
    elif 'mac' in names:
        # Classic MacOS: colon-separated paths with their own join rules.
        # NOTE(review): this branch never assigns 'sep' or 'dirname'; the
        # fallback dirname below references 'sep' as a default argument and
        # would raise NameError on 'mac' — likely dead code, confirm.
        from mac import stat, getcwd
        def join(a, b):
            if a == '':
                return b
            if ':' not in a:
                a = ':' + a
            if a[-1:] != ':':
                a = a + ':'
            return a + b
    else:
        raise ImportError('no os specific module found')
    if join is None:
        # Generic join: accepts both '/' and the native separator.
        def join(a, b, sep=sep):
            if a == '':
                return b
            lastchar = a[-1:]
            if lastchar == '/' or lastchar == sep:
                return a + b
            return a + sep + b
    if dirname is None:
        # Generic dirname: strip everything after the last separator.
        def dirname(a, sep=sep):
            for i in range(len(a)-1, -1, -1):
                c = a[i]
                if c == '/' or c == sep:
                    return a[:i]
            return ''
    global _os_stat
    _os_stat = stat
    global _os_path_join
    _os_path_join = join
    global _os_path_dirname
    _os_path_dirname = dirname
    global _os_getcwd
    _os_getcwd = getcwd
# Run the bootstrap at import time so the helpers above are usable.
_os_bootstrap()
def packageName(s):
    """Return the package prefix of dotted module name *s*.

    E.g. 'a.b.c' -> 'a.b'; a name containing no dot yields ''.
    """
    # rpartition splits on the LAST '.'; with no dot the head is ''.
    # Replaces the original manual reverse scan with the same semantics.
    return s.rpartition('.')[0]
def nameSplit(s):
    """Split dotted name *s* on '.', omitting a trailing empty component."""
    parts = s.split('.')
    # The original index-based scanner never emitted the (empty) piece
    # after a trailing dot; mirror that by dropping a final '' element.
    if parts and parts[-1] == '':
        parts.pop()
    return parts
def getPathExt(fnm):
    """Return the extension of *fnm* (including the '.'), or '' if none.

    Note: scans the whole string, so a dot anywhere (not just in the last
    path component) is matched — same behavior as the original scanner.
    """
    dot = fnm.rfind('.')
    return fnm[dot:] if dot >= 0 else ''
def pathIsDir(pathname):
"Local replacement for os.path.isdir()."
try:
s = _os_stat(pathname)
except OSError:
return None
return (s[0] & 0170000) == 0040000
def getDescr(fnm):
    """Return the imp.get_suffixes() entry matching *fnm*'s extension.

    Each entry is a (suffix, mode, type) triple; None is returned (implicitly
    matching the original) when no registered suffix matches.
    """
    ext = getPathExt(fnm)
    for entry in imp.get_suffixes():
        if entry[0] == ext:
            return entry
    return None
##################################################
## CLASSES
class Owner:
    """Base class for objects that import from one piece of 'turf'.

    There is an Owner for each entry on sys.path: directories, .pyz files,
    and potentially zip files or URLs.  A shadowpath (a dict mapping each
    sys.path string to its Owner) keeps sys.path and package __path__ lists
    as plain strings while still dispatching to owner objects.
    """

    def __init__(self, path):
        self.path = path

    def __str__(self):
        return self.path

    def getmod(self, nm):
        """Return the module named *nm* if this owner can load it, else None."""
        return None
class DirOwner(Owner):
    # Owner that imports from one filesystem directory, mimicking the
    # classic builtin import: C extensions win, then the newer of .py/.pyc.
    def __init__(self, path):
        # '' means the current directory (as it does on sys.path).
        if path == '':
            path = _os_getcwd()
        if not pathIsDir(path):
            raise ValueError("%s is not a directory" % path)
        Owner.__init__(self, path)
    def getmod(self, nm,
            getsuffixes=imp.get_suffixes, loadco=marshal.loads, newmod=imp.new_module):
        # Candidates: (basename, is_package, package_path).  A subdirectory is
        # tried first as a package via its __init__ module.
        pth = _os_path_join(self.path, nm)
        possibles = [(pth, 0, None)]
        if pathIsDir(pth):
            possibles.insert(0, (_os_path_join(pth, '__init__'), 1, pth))
        py = pyc = None
        for pth, ispkg, pkgpth in possibles:
            for ext, mode, typ in getsuffixes():
                attempt = pth+ext
                try:
                    st = _os_stat(attempt)
                except:
                    # File does not exist (or is unreadable) -- try next suffix.
                    pass
                else:
                    if typ == imp.C_EXTENSION:
                        # C extensions are loaded immediately; nothing can
                        # shadow them within the same directory.
                        fp = open(attempt, 'rb')
                        mod = imp.load_module(nm, fp, attempt, (ext, mode, typ))
                        mod.__file__ = attempt
                        return mod
                    elif typ == imp.PY_SOURCE:
                        # Remember (path, stat) so mtimes can be compared below.
                        py = (attempt, st)
                    else:
                        pyc = (attempt, st)
            if py or pyc:
                break
        if py is None and pyc is None:
            return None
        # Prefer the .pyc unless it is missing or older than the source
        # (st[8] is st_mtime); fall back to compiling the source.
        while True:
            if pyc is None or py and pyc[1][8] < py[1][8]:
                try:
                    co = compile(open(py[0], 'r').read()+'\n', py[0], 'exec')
                    break
                except SyntaxError, e:
                    print("Invalid syntax in %s" % py[0])
                    print(e.args)
                    raise
            elif pyc:
                stuff = open(pyc[0], 'rb').read()
                try:
                    # Skip the 8-byte .pyc header (magic + timestamp) -- note
                    # this assumes the pre-PEP-552 header layout.
                    co = loadco(stuff[8:])
                    break
                except (ValueError, EOFError):
                    # Corrupt .pyc: discard it and retry with the source.
                    pyc = None
            else:
                return None
        # Build the module but do NOT execute its code yet: the code object is
        # stashed on __co__ and executed later by ImportManager.doimport.
        mod = newmod(nm)
        mod.__file__ = co.co_filename
        if ispkg:
            mod.__path__ = [pkgpth]
            subimporter = PathImportDirector(mod.__path__)
            mod.__importsub__ = subimporter.getmod
        mod.__co__ = co
        return mod
class ImportDirector(Owner):
    """An ImportDirector lives on the ImportManager's metapath.

    There is one for builtin modules, one for frozen modules and one for
    sys.path; Windows gets one for modules found in the Registry, and a Mac
    could have them for PY_RESOURCE modules etc.  A generalization of Owner:
    its concept of 'turf' is broader than a single sys.path entry.
    """
    pass
class BuiltinImportDirector(ImportDirector):
    """Directs imports of modules compiled into the interpreter."""
    def __init__(self):
        self.path = 'Builtins'

    def getmod(self, nm, isbuiltin=imp.is_builtin):
        """Load and return builtin module *nm*, or None if it is not builtin."""
        if not isbuiltin(nm):
            return None
        return imp.load_module(nm, None, nm, ('', '', imp.C_BUILTIN))
class FrozenImportDirector(ImportDirector):
    """Directs imports of modules frozen into the executable."""
    def __init__(self):
        self.path = 'FrozenModules'

    def getmod(self, nm, isFrozen=imp.is_frozen, loadMod=imp.load_module):
        """Load and return frozen module *nm*, or None if it is not frozen."""
        if not isFrozen(nm):
            return None
        mod = loadMod(nm, None, nm, ('', '', imp.PY_FROZEN))
        if hasattr(mod, '__path__'):
            # Frozen packages delegate submodule lookups back to this
            # director, using the fully qualified dotted name.
            mod.__importsub__ = lambda name, pname=nm, owner=self: owner.getmod(pname + '.' + name)
        return mod
class RegistryImportDirector(ImportDirector):
    """Directs imports of modules stored in the Windows Registry"""
    def __init__(self):
        self.path = "WindowsRegistry"
        # Maps module name -> (filename, imp description tuple), built once.
        self.map = {}
        try:
            import win32api
            ## import win32con
        except ImportError:
            # Not on Windows (or pywin32 missing): leave the map empty so
            # getmod() simply never matches.
            pass
        else:
            # Hard-coded HKEY_* / access constants so win32con is not needed.
            HKEY_CURRENT_USER = -2147483647
            HKEY_LOCAL_MACHINE = -2147483646
            KEY_ALL_ACCESS = 983103
            subkey = r"Software\Python\PythonCore\%s\Modules" % sys.winver
            # HKCU takes precedence over HKLM; the first root that opens wins.
            for root in (HKEY_CURRENT_USER, HKEY_LOCAL_MACHINE):
                try:
                    hkey = win32api.RegOpenKeyEx(root, subkey, 0, KEY_ALL_ACCESS)
                except:
                    pass
                else:
                    numsubkeys, numvalues, lastmodified = win32api.RegQueryInfoKey(hkey)
                    for i in range(numsubkeys):
                        subkeyname = win32api.RegEnumKey(hkey, i)
                        hskey = win32api.RegOpenKeyEx(hkey, subkeyname, 0, KEY_ALL_ACCESS)
                        # The default value of each subkey is the module's
                        # filename on disk.
                        val = win32api.RegQueryValueEx(hskey, '')
                        desc = getDescr(val[0])
                        self.map[subkeyname] = (val[0], desc)
                        hskey.Close()
                    hkey.Close()
                    break
    def getmod(self, nm):
        # Plain table lookup; load from the recorded filename on a hit.
        stuff = self.map.get(nm)
        if stuff:
            fnm, desc = stuff
            fp = open(fnm, 'rb')
            mod = imp.load_module(nm, fp, fnm, desc)
            mod.__file__ = fnm
            return mod
        return None
class PathImportDirector(ImportDirector):
    """Directs imports of modules stored on the filesystem.

    Walks a path list (sys.path by default), lazily building an Owner for
    each string entry and asking it for the module.
    """

    def __init__(self, pathlist=None, importers=None, ownertypes=None):
        self.path = sys.path if pathlist is None else pathlist
        # _globalOwnerTypes (module global, defined at the bottom of the
        # file) is the default list of Owner classes to try.
        self._ownertypes = _globalOwnerTypes if ownertypes is None else ownertypes
        self._shadowPath = importers if importers else {}
        self._inMakeOwner = False
        self._building = {}

    def getmod(self, nm):
        """Try each path entry's owner in order; return the first module found."""
        mod = None
        for entry in self.path:
            if isinstance(entry, basestring):
                # String entries are lazily mapped to Owner instances through
                # _shadowPath; -1 marks "never tried", None "no owner fits".
                owner = self._shadowPath.get(entry, -1)
                if owner == -1:
                    owner = self._shadowPath[entry] = self._makeOwner(entry)
                if owner:
                    mod = owner.getmod(nm)
            else:
                # Non-string entries are already importer-like objects.
                mod = entry.getmod(nm)
            if mod:
                break
        return mod

    def _makeOwner(self, path):
        """Instantiate the first Owner class that accepts *path* (else None)."""
        if self._building.get(path):
            # Already constructing an owner for this path -- recursive call.
            return None
        self._building[path] = 1
        owner = None
        for klass in self._ownertypes:
            try:
                # this may cause an import, which may cause recursion
                # hence the protection
                owner = klass(path)
            except:
                pass
            else:
                break
        del self._building[path]
        return owner
#=================ImportManager============================#
# The one-and-only ImportManager
# ie, the builtin import
# Sentinel for "sys.modules has no entry yet for this name" -- distinct from
# None, which marks a previously failed import.
UNTRIED = -1
class ImportManager:
    """Drop-in replacement for the builtin import machinery.

    Tries each ImportDirector on self.metapath in turn and caches results in
    sys.modules (None marks a failed import; the UNTRIED sentinel means the
    name has never been attempted).
    """
    # really the equivalent of builtin import
    def __init__(self):
        # Directors are consulted in order: builtins, frozen modules, the
        # Windows Registry, then the filesystem (sys.path).
        self.metapath = [
            BuiltinImportDirector(),
            FrozenImportDirector(),
            RegistryImportDirector(),
            PathImportDirector()
        ]
        self.threaded = 0
        self.rlock = None
        self.locker = None
        self.setThreaded()
    def setThreaded(self):
        """Switch to locked operation once the 'thread' module is available."""
        thread = sys.modules.get('thread', None)
        if thread and not self.threaded:
            self.threaded = 1
            self.rlock = thread.allocate_lock()
            self._get_ident = thread.get_ident
    def install(self):
        """Replace the builtin __import__ and reload with our hooks."""
        import __builtin__
        __builtin__.__import__ = self.importHook
        __builtin__.reload = self.reloadHook
    def importHook(self, name, globals=None, locals=None, fromlist=None, level=-1):
        '''
        Import *name* on behalf of the module described by *globals*.

        NOTE: Currently importHook will accept the keyword-argument "level"
        but it will *NOT* use it (currently). Details about the "level" keyword
        argument can be found here: http://www.python.org/doc/2.5.2/lib/built-in-funcs.html
        '''
        # first see if we could be importing a relative name
        #print "importHook(%s, %s, locals, %s)" % (name, globals['__name__'], fromlist)
        _sys_modules_get = sys.modules.get
        contexts = [None]
        if globals:
            importernm = globals.get('__name__', '')
            if importernm:
                if hasattr(_sys_modules_get(importernm), '__path__'):
                    # The importer is itself a package: relative imports are
                    # resolved inside it first.
                    contexts.insert(0, importernm)
                else:
                    pkgnm = packageName(importernm)
                    if pkgnm:
                        contexts.insert(0, pkgnm)
        # so contexts is [pkgnm, None] or just [None]
        # now break the name being imported up so we get:
        # a.b.c -> [a, b, c]
        nmparts = nameSplit(name)
        _self_doimport = self.doimport
        threaded = self.threaded
        # Import each dotted component in turn, first relative to the
        # package context, then absolutely (context None).
        for context in contexts:
            ctx = context
            for i in range(len(nmparts)):
                nm = nmparts[i]
                #print " importHook trying %s in %s" % (nm, ctx)
                if ctx:
                    fqname = ctx + '.' + nm
                else:
                    fqname = nm
                if threaded:
                    self._acquire()
                mod = _sys_modules_get(fqname, UNTRIED)
                if mod is UNTRIED:
                    mod = _self_doimport(nm, ctx, fqname)
                if threaded:
                    self._release()
                if mod:
                    ctx = fqname
                else:
                    break
            else:
                # no break, point i beyond end
                i = i + 1
            # Any progress in this context means we stop trying others.
            if i:
                break
        if i<len(nmparts):
            # Import stopped early: either the trailing part is a plain
            # attribute of the last package, or the import genuinely failed.
            if ctx and hasattr(sys.modules[ctx], nmparts[i]):
                #print "importHook done with %s %s %s (case 1)" % (name, globals['__name__'], fromlist)
                return sys.modules[nmparts[0]]
            del sys.modules[fqname]
            raise ImportError("No module named %s" % fqname)
        if fromlist is None:
            # Plain 'import a.b.c' binds the top-level package name.
            #print "importHook done with %s %s %s (case 2)" % (name, globals['__name__'], fromlist)
            if context:
                return sys.modules[context+'.'+nmparts[0]]
            return sys.modules[nmparts[0]]
        # 'from a.b import x, *': make sure each requested name exists on the
        # bottom module, importing submodules of a package as needed.
        bottommod = sys.modules[ctx]
        if hasattr(bottommod, '__path__'):
            fromlist = list(fromlist)
            i = 0
            while i < len(fromlist):
                nm = fromlist[i]
                if nm == '*':
                    # Splice in __all__ in place of the '*' entry.
                    fromlist[i:i+1] = list(getattr(bottommod, '__all__', []))
                    if i >= len(fromlist):
                        break
                    nm = fromlist[i]
                i = i + 1
                if not hasattr(bottommod, nm):
                    if self.threaded:
                        self._acquire()
                    mod = self.doimport(nm, ctx, ctx+'.'+nm)
                    if self.threaded:
                        self._release()
                    if not mod:
                        raise ImportError("%s not found in %s" % (nm, ctx))
        #print "importHook done with %s %s %s (case 3)" % (name, globals['__name__'], fromlist)
        return bottommod
    def doimport(self, nm, parentnm, fqname):
        """Import the single (undotted) name *nm*, optionally inside *parentnm*."""
        # Not that nm is NEVER a dotted name at this point
        #print "doimport(%s, %s, %s)" % (nm, parentnm, fqname)
        if parentnm:
            parent = sys.modules[parentnm]
            if hasattr(parent, '__path__'):
                # Packages carry their own sub-importer; create it on demand.
                importfunc = getattr(parent, '__importsub__', None)
                if not importfunc:
                    subimporter = PathImportDirector(parent.__path__)
                    importfunc = parent.__importsub__ = subimporter.getmod
                mod = importfunc(nm)
                if mod:
                    setattr(parent, nm, mod)
            else:
                #print "..parent not a package"
                return None
        else:
            # now we're dealing with an absolute import
            for director in self.metapath:
                mod = director.getmod(nm)
                if mod:
                    break
        if mod:
            mod.__name__ = fqname
            # Register BEFORE executing the module body, so circular imports
            # find the (partially initialized) module in sys.modules.
            sys.modules[fqname] = mod
            if hasattr(mod, '__co__'):
                co = mod.__co__
                del mod.__co__
                exec(co, mod.__dict__)
            if fqname == 'thread' and not self.threaded:
                ## print "thread detected!"
                self.setThreaded()
        else:
            # Cache the failure so repeated imports fail fast.
            sys.modules[fqname] = None
        #print "..found %s" % mod
        return mod
    def reloadHook(self, mod):
        """Replacement for builtin reload(): re-import and update in place."""
        fqnm = mod.__name__
        nm = nameSplit(fqnm)[-1]
        parentnm = packageName(fqnm)
        newmod = self.doimport(nm, parentnm, fqnm)
        mod.__dict__.update(newmod.__dict__)
        ## return newmod
    def _acquire(self):
        # Re-entrant acquire built on a plain lock: the owning thread only
        # bumps a counter instead of blocking on itself.
        if self.rlock.locked():
            if self.locker == self._get_ident():
                self.lockcount = self.lockcount + 1
                ## print "_acquire incrementing lockcount to", self.lockcount
                return
        self.rlock.acquire()
        self.locker = self._get_ident()
        self.lockcount = 0
        ## print "_acquire first time!"
    def _release(self):
        # Mirror of _acquire: unwind the count before releasing for real.
        if self.lockcount:
            self.lockcount = self.lockcount - 1
            ## print "_release decrementing lockcount to", self.lockcount
        else:
            self.rlock.release()
            ## print "_release releasing lock!"
##################################################
## MORE CONSTANTS & GLOBALS
# Default Owner classes tried (in order) by PathImportDirector._makeOwner;
# DirOwner first so plain directories win over the generic fallback.
_globalOwnerTypes = [
    DirOwner,
    Owner,
]
| mit |
awatts/boto | boto/cloudtrail/layer1.py | 125 | 17002 | # Copyright (c) 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import boto
from boto.connection import AWSQueryConnection
from boto.regioninfo import RegionInfo
from boto.exception import JSONResponseError
from boto.cloudtrail import exceptions
from boto.compat import json
class CloudTrailConnection(AWSQueryConnection):
    """
    AWS CloudTrail
    This is the CloudTrail API Reference. It provides descriptions of
    actions, data types, common parameters, and common errors for
    CloudTrail.

    CloudTrail is a web service that records AWS API calls for your
    AWS account and delivers log files to an Amazon S3 bucket. The
    recorded information includes the identity of the user, the start
    time of the AWS API call, the source IP address, the request
    parameters, and the response elements returned by the service.

    As an alternative to using the API, you can use one of the AWS
    SDKs, which consist of libraries and sample code for various
    programming languages and platforms (Java, Ruby, .NET, iOS,
    Android, etc.). The SDKs provide a convenient way to create
    programmatic access to AWSCloudTrail. For example, the SDKs take
    care of cryptographically signing requests, managing errors, and
    retrying requests automatically. For information about the AWS
    SDKs, including how to download and install them, see the `Tools
    for Amazon Web Services page`_.

    See the CloudTrail User Guide for information about the data that
    is included with each AWS API call listed in the log files.
    """
    APIVersion = "2013-11-01"
    DefaultRegionName = "us-east-1"
    DefaultRegionEndpoint = "cloudtrail.us-east-1.amazonaws.com"
    ServiceName = "CloudTrail"
    TargetPrefix = "com.amazonaws.cloudtrail.v20131101.CloudTrail_20131101"
    ResponseError = JSONResponseError

    # Maps the '__type' field of a JSON error response to the boto exception
    # class raised by make_request().
    _faults = {
        "InvalidMaxResultsException": exceptions.InvalidMaxResultsException,
        "InvalidSnsTopicNameException": exceptions.InvalidSnsTopicNameException,
        "InvalidS3BucketNameException": exceptions.InvalidS3BucketNameException,
        "TrailAlreadyExistsException": exceptions.TrailAlreadyExistsException,
        "InvalidTimeRangeException": exceptions.InvalidTimeRangeException,
        "InvalidLookupAttributesException": exceptions.InvalidLookupAttributesException,
        "InsufficientSnsTopicPolicyException": exceptions.InsufficientSnsTopicPolicyException,
        "InvalidCloudWatchLogsLogGroupArnException": exceptions.InvalidCloudWatchLogsLogGroupArnException,
        "InvalidCloudWatchLogsRoleArnException": exceptions.InvalidCloudWatchLogsRoleArnException,
        "InvalidTrailNameException": exceptions.InvalidTrailNameException,
        "CloudWatchLogsDeliveryUnavailableException": exceptions.CloudWatchLogsDeliveryUnavailableException,
        "TrailNotFoundException": exceptions.TrailNotFoundException,
        "S3BucketDoesNotExistException": exceptions.S3BucketDoesNotExistException,
        "InvalidNextTokenException": exceptions.InvalidNextTokenException,
        "InvalidS3PrefixException": exceptions.InvalidS3PrefixException,
        "MaximumNumberOfTrailsExceededException": exceptions.MaximumNumberOfTrailsExceededException,
        "InsufficientS3BucketPolicyException": exceptions.InsufficientS3BucketPolicyException,
    }

    def __init__(self, **kwargs):
        # Default to us-east-1 unless a region (or explicit host) is given.
        region = kwargs.pop('region', None)
        if not region:
            region = RegionInfo(self, self.DefaultRegionName,
                                self.DefaultRegionEndpoint)
        if 'host' not in kwargs or kwargs['host'] is None:
            kwargs['host'] = region.endpoint
        super(CloudTrailConnection, self).__init__(**kwargs)
        self.region = region

    def _required_auth_capability(self):
        # CloudTrail requires SigV4 request signing.
        return ['hmac-v4']

    def create_trail(self, name, s3_bucket_name, s3_key_prefix=None,
                     sns_topic_name=None, include_global_service_events=None,
                     cloud_watch_logs_log_group_arn=None,
                     cloud_watch_logs_role_arn=None):
        """
        From the command line, use `create-subscription`.

        Creates a trail that specifies the settings for delivery of
        log data to an Amazon S3 bucket.

        :type name: string
        :param name: Specifies the name of the trail.

        :type s3_bucket_name: string
        :param s3_bucket_name: Specifies the name of the Amazon S3 bucket
            designated for publishing log files.

        :type s3_key_prefix: string
        :param s3_key_prefix: Specifies the Amazon S3 key prefix that precedes
            the name of the bucket you have designated for log file delivery.

        :type sns_topic_name: string
        :param sns_topic_name: Specifies the name of the Amazon SNS topic
            defined for notification of log file delivery.

        :type include_global_service_events: boolean
        :param include_global_service_events: Specifies whether the trail is
            publishing events from global services such as IAM to the log
            files.

        :type cloud_watch_logs_log_group_arn: string
        :param cloud_watch_logs_log_group_arn: Specifies a log group name using
            an Amazon Resource Name (ARN), a unique identifier that represents
            the log group to which CloudTrail logs will be delivered. Not
            required unless you specify CloudWatchLogsRoleArn.

        :type cloud_watch_logs_role_arn: string
        :param cloud_watch_logs_role_arn: Specifies the role for the CloudWatch
            Logs endpoint to assume to write to a users log group.
        """
        # Only include optional parameters that were actually supplied.
        params = {'Name': name, 'S3BucketName': s3_bucket_name, }
        if s3_key_prefix is not None:
            params['S3KeyPrefix'] = s3_key_prefix
        if sns_topic_name is not None:
            params['SnsTopicName'] = sns_topic_name
        if include_global_service_events is not None:
            params['IncludeGlobalServiceEvents'] = include_global_service_events
        if cloud_watch_logs_log_group_arn is not None:
            params['CloudWatchLogsLogGroupArn'] = cloud_watch_logs_log_group_arn
        if cloud_watch_logs_role_arn is not None:
            params['CloudWatchLogsRoleArn'] = cloud_watch_logs_role_arn
        return self.make_request(action='CreateTrail',
                                 body=json.dumps(params))

    def delete_trail(self, name):
        """
        Deletes a trail.

        :type name: string
        :param name: The name of a trail to be deleted.
        """
        params = {'Name': name, }
        return self.make_request(action='DeleteTrail',
                                 body=json.dumps(params))

    def describe_trails(self, trail_name_list=None):
        """
        Retrieves settings for the trail associated with the current
        region for your account.

        :type trail_name_list: list
        :param trail_name_list: The trail returned.
        """
        params = {}
        if trail_name_list is not None:
            params['trailNameList'] = trail_name_list
        return self.make_request(action='DescribeTrails',
                                 body=json.dumps(params))

    def get_trail_status(self, name):
        """
        Returns a JSON-formatted list of information about the
        specified trail. Fields include information on delivery
        errors, Amazon SNS and Amazon S3 errors, and start and stop
        logging times for each trail.

        :type name: string
        :param name: The name of the trail for which you are requesting the
            current status.
        """
        params = {'Name': name, }
        return self.make_request(action='GetTrailStatus',
                                 body=json.dumps(params))

    def lookup_events(self, lookup_attributes=None, start_time=None,
                      end_time=None, max_results=None, next_token=None):
        """
        Looks up API activity events captured by CloudTrail that
        create, update, or delete resources in your account. Events
        for a region can be looked up for the times in which you had
        CloudTrail turned on in that region during the last seven
        days. Lookup supports five different attributes: time range
        (defined by a start time and end time), user name, event name,
        resource type, and resource name. All attributes are optional.
        The maximum number of attributes that can be specified in any
        one lookup request are time range and one other attribute. The
        default number of results returned is 10, with a maximum of 50
        possible. The response includes a token that you can use to
        get the next page of results.

        The rate of lookup requests is limited to one per second per
        account. If this limit is exceeded, a throttling error occurs.

        Events that occurred during the selected time range will not
        be available for lookup if CloudTrail logging was not enabled
        when the events occurred.

        :type lookup_attributes: list
        :param lookup_attributes: Contains a list of lookup attributes.
            Currently the list can contain only one item.

        :type start_time: timestamp
        :param start_time: Specifies that only events that occur after or at
            the specified time are returned. If the specified start time is
            after the specified end time, an error is returned.

        :type end_time: timestamp
        :param end_time: Specifies that only events that occur before or at the
            specified time are returned. If the specified end time is before
            the specified start time, an error is returned.

        :type max_results: integer
        :param max_results: The number of events to return. Possible values are
            1 through 50. The default is 10.

        :type next_token: string
        :param next_token: The token to use to get the next page of results
            after a previous API call. This token must be passed in with the
            same parameters that were specified in the original call. For
            example, if the original call specified an AttributeKey of
            'Username' with a value of 'root', the call with NextToken should
            include those same parameters.
        """
        params = {}
        if lookup_attributes is not None:
            params['LookupAttributes'] = lookup_attributes
        if start_time is not None:
            params['StartTime'] = start_time
        if end_time is not None:
            params['EndTime'] = end_time
        if max_results is not None:
            params['MaxResults'] = max_results
        if next_token is not None:
            params['NextToken'] = next_token
        return self.make_request(action='LookupEvents',
                                 body=json.dumps(params))

    def start_logging(self, name):
        """
        Starts the recording of AWS API calls and log file delivery
        for a trail.

        :type name: string
        :param name: The name of the trail for which CloudTrail logs AWS API
            calls.
        """
        params = {'Name': name, }
        return self.make_request(action='StartLogging',
                                 body=json.dumps(params))

    def stop_logging(self, name):
        """
        Suspends the recording of AWS API calls and log file delivery
        for the specified trail. Under most circumstances, there is no
        need to use this action. You can update a trail without
        stopping it first. This action is the only way to stop
        recording.

        :type name: string
        :param name: Communicates to CloudTrail the name of the trail for which
            to stop logging AWS API calls.
        """
        params = {'Name': name, }
        return self.make_request(action='StopLogging',
                                 body=json.dumps(params))

    def update_trail(self, name, s3_bucket_name=None, s3_key_prefix=None,
                     sns_topic_name=None, include_global_service_events=None,
                     cloud_watch_logs_log_group_arn=None,
                     cloud_watch_logs_role_arn=None):
        """
        From the command line, use `update-subscription`.

        Updates the settings that specify delivery of log files.
        Changes to a trail do not require stopping the CloudTrail
        service. Use this action to designate an existing bucket for
        log delivery. If the existing bucket has previously been a
        target for CloudTrail log files, an IAM policy exists for the
        bucket.

        :type name: string
        :param name: Specifies the name of the trail.

        :type s3_bucket_name: string
        :param s3_bucket_name: Specifies the name of the Amazon S3 bucket
            designated for publishing log files.

        :type s3_key_prefix: string
        :param s3_key_prefix: Specifies the Amazon S3 key prefix that precedes
            the name of the bucket you have designated for log file delivery.

        :type sns_topic_name: string
        :param sns_topic_name: Specifies the name of the Amazon SNS topic
            defined for notification of log file delivery.

        :type include_global_service_events: boolean
        :param include_global_service_events: Specifies whether the trail is
            publishing events from global services such as IAM to the log
            files.

        :type cloud_watch_logs_log_group_arn: string
        :param cloud_watch_logs_log_group_arn: Specifies a log group name using
            an Amazon Resource Name (ARN), a unique identifier that represents
            the log group to which CloudTrail logs will be delivered. Not
            required unless you specify CloudWatchLogsRoleArn.

        :type cloud_watch_logs_role_arn: string
        :param cloud_watch_logs_role_arn: Specifies the role for the CloudWatch
            Logs endpoint to assume to write to a users log group.
        """
        # Same optional-parameter handling as create_trail.
        params = {'Name': name, }
        if s3_bucket_name is not None:
            params['S3BucketName'] = s3_bucket_name
        if s3_key_prefix is not None:
            params['S3KeyPrefix'] = s3_key_prefix
        if sns_topic_name is not None:
            params['SnsTopicName'] = sns_topic_name
        if include_global_service_events is not None:
            params['IncludeGlobalServiceEvents'] = include_global_service_events
        if cloud_watch_logs_log_group_arn is not None:
            params['CloudWatchLogsLogGroupArn'] = cloud_watch_logs_log_group_arn
        if cloud_watch_logs_role_arn is not None:
            params['CloudWatchLogsRoleArn'] = cloud_watch_logs_role_arn
        return self.make_request(action='UpdateTrail',
                                 body=json.dumps(params))

    def make_request(self, action, body):
        """
        POST a JSON (x-amz-json-1.1) request to the CloudTrail endpoint.

        :type action: string
        :param action: The CloudTrail action name (used in X-Amz-Target).

        :type body: string
        :param body: JSON-encoded request parameters.
        """
        headers = {
            'X-Amz-Target': '%s.%s' % (self.TargetPrefix, action),
            'Host': self.region.endpoint,
            'Content-Type': 'application/x-amz-json-1.1',
            'Content-Length': str(len(body)),
        }
        http_request = self.build_base_http_request(
            method='POST', path='/', auth_path='/', params={},
            headers=headers, data=body)
        response = self._mexe(http_request, sender=None,
                              override_num_retries=10)
        response_body = response.read().decode('utf-8')
        boto.log.debug(response_body)
        if response.status == 200:
            # An empty 200 body falls through and returns None implicitly.
            if response_body:
                return json.loads(response_body)
        else:
            # Map the service fault name to a specific exception class,
            # falling back to the generic ResponseError.
            json_body = json.loads(response_body)
            fault_name = json_body.get('__type', None)
            exception_class = self._faults.get(fault_name, self.ResponseError)
            raise exception_class(response.status, response.reason,
                                  body=json_body)
| mit |
JT5D/Alfred-Popclip-Sublime | Sublime Text 2/Python PEP8 Autoformat/libs/lib2to3/fixes/fix_dict.py | 7 | 3798 | # Copyright 2007 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Fixer for dict methods.
d.keys() -> list(d.keys())
d.items() -> list(d.items())
d.values() -> list(d.values())
d.iterkeys() -> iter(d.keys())
d.iteritems() -> iter(d.items())
d.itervalues() -> iter(d.values())
d.viewkeys() -> d.keys()
d.viewitems() -> d.items()
d.viewvalues() -> d.values()
Except in certain very specific contexts: the iter() can be dropped
when the context is list(), sorted(), iter() or for...in; the list()
can be dropped when the context is list() or sorted() (but not iter()
or for...in!). Special contexts that apply to both: list(), sorted(), tuple()
set(), any(), all(), sum().
Note: iter(d.keys()) could be written as iter(d) but since the
original d.iterkeys() was also redundant we don't fix this. And there
are (rare) contexts where it makes a difference (e.g. when passing it
as an argument to a function that introspects the argument).
"""
# Local imports
from .. import pytree
from .. import patcomp
from ..pgen2 import token
from .. import fixer_base
from ..fixer_util import Name, Call, LParen, RParen, ArgList, Dot
from .. import fixer_util
# Calls that consume an iterable wholesale; wrapping their argument in iter()
# would be redundant, so such contexts are exempt from the iter() fixup.
iter_exempt = fixer_util.consuming_calls | set(["iter"])
class FixDict(fixer_base.BaseFix):
    # Matches d.keys()/items()/values() and their iter*/view* variants;
    # 'head' is the expression before the trailer, 'tail' anything after.
    PATTERN = """
    power< head=any+
         trailer< '.' method=('keys'|'items'|'values'|
                              'iterkeys'|'iteritems'|'itervalues'|
                              'viewkeys'|'viewitems'|'viewvalues') >
          parens=trailer< '(' ')' >
         tail=any*
    >
    """

    def transform(self, node, results):
        head = results["head"]
        method = results["method"][0] # Extract node for method name
        tail = results["tail"]
        syms = self.syms
        method_name = method.value
        isiter = method_name.startswith(u"iter")
        isview = method_name.startswith(u"view")
        if isiter or isview:
            # Strip the 'iter'/'view' prefix: iterkeys -> keys, etc.
            method_name = method_name[4:]
        assert method_name in (u"keys", u"items", u"values"), repr(method)
        head = [n.clone() for n in head]
        tail = [n.clone() for n in tail]
        # 'special' contexts (list()/sorted()/for-in/...) make the extra
        # wrapper redundant, so it is skipped there.
        special = not tail and self.in_special_context(node, isiter)
        args = head + [pytree.Node(syms.trailer,
                                   [Dot(),
                                    Name(method_name,
                                         prefix=method.prefix)]),
                       results["parens"].clone()]
        new = pytree.Node(syms.power, args)
        if not (special or isview):
            # Wrap: iter*() -> iter(d.meth()), plain -> list(d.meth()).
            new.prefix = u""
            new = Call(Name(u"iter" if isiter else u"list"), [new])
        if tail:
            new = pytree.Node(syms.power, [new] + tail)
        new.prefix = node.prefix
        return new

    # P1 matches func(node); P2 matches 'for ... in node' contexts.
    P1 = "power< func=NAME trailer< '(' node=any ')' > any* >"
    p1 = patcomp.compile_pattern(P1)

    P2 = """for_stmt< 'for' any 'in' node=any ':' any* >
            | comp_for< 'for' any 'in' node=any any* >
         """
    p2 = patcomp.compile_pattern(P2)

    def in_special_context(self, node, isiter):
        # Decide whether the surrounding context already consumes the result,
        # making the iter()/list() wrapper unnecessary.
        if node.parent is None:
            return False
        results = {}
        if (node.parent.parent is not None and
               self.p1.match(node.parent.parent, results) and
               results["node"] is node):
            if isiter:
                # iter(d.iterkeys()) -> iter(d.keys()), etc.
                return results["func"].value in iter_exempt
            else:
                # list(d.keys()) -> list(d.keys()), etc.
                return results["func"].value in fixer_util.consuming_calls
        if not isiter:
            return False
        # for ... in d.iterkeys() -> for ... in d.keys(), etc.
        return self.p2.match(node.parent, results) and results["node"] is node
| gpl-2.0 |
Varentsov/servo | tests/wpt/web-platform-tests/tools/wptrunner/wptrunner/browsers/servo.py | 8 | 2582 | import os
from .base import NullBrowser, ExecutorBrowser, require_arg
from ..executors import executor_kwargs as base_executor_kwargs
from ..executors.executorservo import ServoTestharnessExecutor, ServoRefTestExecutor, ServoWdspecExecutor
# Directory containing this module.  NOTE(review): os.path.join with a single
# argument just returns it unchanged -- presumably os.path.dirname(__file__)
# was intended; the result is the same, so behavior is unaffected.
here = os.path.join(os.path.split(__file__)[0])
# Entry-point table wptrunner uses to drive the Servo product: maps hook
# names to the names of the callables/classes defined in this module.
__wptrunner__ = {
    "product": "servo",
    "check_args": "check_args",
    "browser": "ServoBrowser",
    "executor": {
        "testharness": "ServoTestharnessExecutor",
        "reftest": "ServoRefTestExecutor",
        "wdspec": "ServoWdspecExecutor",
    },
    "browser_kwargs": "browser_kwargs",
    "executor_kwargs": "executor_kwargs",
    "env_extras": "env_extras",
    "env_options": "env_options",
    "update_properties": "update_properties",
}
def check_args(**kwargs):
    """Validate product-specific command-line arguments; 'binary' is required."""
    require_arg(kwargs, "binary")
def browser_kwargs(test_type, run_info_data, **kwargs):
    """Build the keyword arguments used to construct ServoBrowser."""
    return dict(
        binary=kwargs["binary"],
        debug_info=kwargs["debug_info"],
        binary_args=kwargs["binary_args"],
        user_stylesheets=kwargs.get("user_stylesheets"),
        ca_certificate_path=kwargs["ssl_env"].ca_cert_path(),
    )
def executor_kwargs(test_type, server_config, cache_manager, run_info_data,
                    **kwargs):
    """Build the keyword arguments handed to the Servo executors."""
    exec_kwargs = base_executor_kwargs(test_type, server_config,
                                       cache_manager, **kwargs)
    exec_kwargs["pause_after_test"] = kwargs["pause_after_test"]
    # wdspec sessions need a capabilities dict, even an empty one.
    if test_type == "wdspec":
        exec_kwargs["capabilities"] = {}
    return exec_kwargs
def env_extras(**kwargs):
    """No extra test-environment pieces are needed for Servo."""
    return []
def env_options():
    """Test-environment settings used when the product is Servo."""
    options = {}
    options["server_host"] = "127.0.0.1"
    options["bind_address"] = False
    # Servo ships its own testharness reporting stub.
    options["testharnessreport"] = "testharnessreport-servo.js"
    options["supports_debugger"] = True
    return options
def update_properties():
    """Run-info properties used when updating test expectation metadata."""
    primary_properties = ["debug", "os", "version", "processor", "bits"]
    # No dependent-property mapping for Servo.
    return primary_properties, None
class ServoBrowser(NullBrowser):
    """Null browser driver for Servo.

    The executor launches the binary itself, so this object only records the
    configuration needed to do that.
    """

    def __init__(self, logger, binary, debug_info=None, binary_args=None,
                 user_stylesheets=None, ca_certificate_path=None):
        NullBrowser.__init__(self, logger)
        self.binary = binary
        self.debug_info = debug_info
        self.binary_args = binary_args if binary_args else []
        self.user_stylesheets = user_stylesheets if user_stylesheets else []
        self.ca_certificate_path = ca_certificate_path

    def executor_browser(self):
        """Return the ExecutorBrowser class and the kwargs used to build it."""
        settings = {
            "binary": self.binary,
            "debug_info": self.debug_info,
            "binary_args": self.binary_args,
            "user_stylesheets": self.user_stylesheets,
            "ca_certificate_path": self.ca_certificate_path,
        }
        return ExecutorBrowser, settings
nitin-cherian/LifeLongLearning | Web_Development_Python/RealPython/flask-hello-world/env/lib/python3.5/site-packages/werkzeug/urls.py | 216 | 36710 | # -*- coding: utf-8 -*-
"""
werkzeug.urls
~~~~~~~~~~~~~
``werkzeug.urls`` used to provide several wrapper functions for Python 2
urlparse, whose main purpose were to work around the behavior of the Py2
stdlib and its lack of unicode support. While this was already a somewhat
inconvenient situation, it got even more complicated because Python 3's
``urllib.parse`` actually does handle unicode properly. In other words,
this module would wrap two libraries with completely different behavior. So
now this module contains a 2-and-3-compatible backport of Python 3's
``urllib.parse``, which is mostly API-compatible.
:copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import os
import re
from werkzeug._compat import text_type, PY2, to_unicode, \
to_native, implements_to_string, try_coerce_native, \
normalize_string_tuple, make_literal_wrapper, \
fix_tuple_repr
from werkzeug._internal import _encode_idna, _decode_idna
from werkzeug.datastructures import MultiDict, iter_multi_items
from collections import namedtuple
# A regular expression for what a valid schema looks like
_scheme_re = re.compile(r'^[a-zA-Z0-9+-.]+$')
# Characters that are safe in any part of an URL.
_always_safe = (b'abcdefghijklmnopqrstuvwxyz'
                b'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_.-+')
# Lookup table mapping every two-hex-digit byte pair (e.g. b'2F') to its
# integer value, so percent-escapes decode without per-call int(..., 16).
_hexdigits = '0123456789ABCDEFabcdef'
_hextobyte = dict(
    ((a + b).encode(), int(a + b, 16))
    for a in _hexdigits for b in _hexdigits
)
# Base tuple type for URL objects; fix_tuple_repr gives the namedtuple a
# repr that is consistent across Python 2 and 3.
_URLTuple = fix_tuple_repr(namedtuple(
    '_URLTuple',
    ['scheme', 'netloc', 'path', 'query', 'fragment']
))
class BaseURL(_URLTuple):
'''Superclass of :py:class:`URL` and :py:class:`BytesURL`.'''
__slots__ = ()
    def replace(self, **kwargs):
        """Return an URL with the same values, except for those parameters
        given new values by whichever keyword arguments are specified."""
        # _URLTuple is a namedtuple, so _replace builds a new instance.
        return self._replace(**kwargs)
    @property
    def host(self):
        """The host part of the URL if available, otherwise `None`. The
        host is either the hostname or the IP address mentioned in the
        URL. It will not contain the port.
        """
        # Relies on the _split_host() helper defined further down the class.
        return self._split_host()[0]
@property
def ascii_host(self):
"""Works exactly like :attr:`host` but will return a result that
is restricted to ASCII. If it finds a netloc that is not ASCII
it will attempt to idna decode it. This is useful for socket
operations when the URL might include internationalized characters.
"""
rv = self.host
if rv is not None and isinstance(rv, text_type):
try:
rv = _encode_idna(rv)
except UnicodeError:
rv = rv.encode('ascii', 'ignore')
return to_native(rv, 'ascii', 'ignore')
@property
def port(self):
"""The port in the URL as an integer if it was present, `None`
otherwise. This does not fill in default ports.
"""
try:
rv = int(to_native(self._split_host()[1]))
if 0 <= rv <= 65535:
return rv
except (ValueError, TypeError):
pass
@property
def auth(self):
"""The authentication part in the URL if available, `None`
otherwise.
"""
return self._split_netloc()[0]
@property
def username(self):
"""The username if it was part of the URL, `None` otherwise.
This undergoes URL decoding and will always be a unicode string.
"""
rv = self._split_auth()[0]
if rv is not None:
return _url_unquote_legacy(rv)
@property
def raw_username(self):
"""The username if it was part of the URL, `None` otherwise.
Unlike :attr:`username` this one is not being decoded.
"""
return self._split_auth()[0]
@property
def password(self):
"""The password if it was part of the URL, `None` otherwise.
This undergoes URL decoding and will always be a unicode string.
"""
rv = self._split_auth()[1]
if rv is not None:
return _url_unquote_legacy(rv)
@property
def raw_password(self):
"""The password if it was part of the URL, `None` otherwise.
Unlike :attr:`password` this one is not being decoded.
"""
return self._split_auth()[1]
def decode_query(self, *args, **kwargs):
"""Decodes the query part of the URL. Ths is a shortcut for
calling :func:`url_decode` on the query argument. The arguments and
keyword arguments are forwarded to :func:`url_decode` unchanged.
"""
return url_decode(self.query, *args, **kwargs)
def join(self, *args, **kwargs):
"""Joins this URL with another one. This is just a convenience
function for calling into :meth:`url_join` and then parsing the
return value again.
"""
return url_parse(url_join(self, *args, **kwargs))
def to_url(self):
"""Returns a URL string or bytes depending on the type of the
information stored. This is just a convenience function
for calling :meth:`url_unparse` for this URL.
"""
return url_unparse(self)
def decode_netloc(self):
"""Decodes the netloc part into a string."""
rv = _decode_idna(self.host or '')
if ':' in rv:
rv = '[%s]' % rv
port = self.port
if port is not None:
rv = '%s:%d' % (rv, port)
auth = ':'.join(filter(None, [
_url_unquote_legacy(self.raw_username or '', '/:%@'),
_url_unquote_legacy(self.raw_password or '', '/:%@'),
]))
if auth:
rv = '%s@%s' % (auth, rv)
return rv
def to_uri_tuple(self):
"""Returns a :class:`BytesURL` tuple that holds a URI. This will
encode all the information in the URL properly to ASCII using the
rules a web browser would follow.
It's usually more interesting to directly call :meth:`iri_to_uri` which
will return a string.
"""
return url_parse(iri_to_uri(self).encode('ascii'))
def to_iri_tuple(self):
"""Returns a :class:`URL` tuple that holds a IRI. This will try
to decode as much information as possible in the URL without
losing information similar to how a web browser does it for the
URL bar.
It's usually more interesting to directly call :meth:`uri_to_iri` which
will return a string.
"""
return url_parse(uri_to_iri(self))
def get_file_location(self, pathformat=None):
"""Returns a tuple with the location of the file in the form
``(server, location)``. If the netloc is empty in the URL or
points to localhost, it's represented as ``None``.
The `pathformat` by default is autodetection but needs to be set
when working with URLs of a specific system. The supported values
are ``'windows'`` when working with Windows or DOS paths and
``'posix'`` when working with posix paths.
If the URL does not point to to a local file, the server and location
are both represented as ``None``.
:param pathformat: The expected format of the path component.
Currently ``'windows'`` and ``'posix'`` are
supported. Defaults to ``None`` which is
autodetect.
"""
if self.scheme != 'file':
return None, None
path = url_unquote(self.path)
host = self.netloc or None
if pathformat is None:
if os.name == 'nt':
pathformat = 'windows'
else:
pathformat = 'posix'
if pathformat == 'windows':
if path[:1] == '/' and path[1:2].isalpha() and path[2:3] in '|:':
path = path[1:2] + ':' + path[3:]
windows_share = path[:3] in ('\\' * 3, '/' * 3)
import ntpath
path = ntpath.normpath(path)
# Windows shared drives are represented as ``\\host\\directory``.
# That results in a URL like ``file://///host/directory``, and a
# path like ``///host/directory``. We need to special-case this
# because the path contains the hostname.
if windows_share and host is None:
parts = path.lstrip('\\').split('\\', 1)
if len(parts) == 2:
host, path = parts
else:
host = parts[0]
path = ''
elif pathformat == 'posix':
import posixpath
path = posixpath.normpath(path)
else:
raise TypeError('Invalid path format %s' % repr(pathformat))
if host in ('127.0.0.1', '::1', 'localhost'):
host = None
return host, path
def _split_netloc(self):
if self._at in self.netloc:
return self.netloc.split(self._at, 1)
return None, self.netloc
def _split_auth(self):
auth = self._split_netloc()[0]
if not auth:
return None, None
if self._colon not in auth:
return auth, None
return auth.split(self._colon, 1)
def _split_host(self):
rv = self._split_netloc()[1]
if not rv:
return None, None
if not rv.startswith(self._lbracket):
if self._colon in rv:
return rv.split(self._colon, 1)
return rv, None
idx = rv.find(self._rbracket)
if idx < 0:
return rv, None
host = rv[1:idx]
rest = rv[idx + 1:]
if rest.startswith(self._colon):
return host, rest[1:]
return host, None
@implements_to_string
class URL(BaseURL):
    """Represents a parsed URL. This behaves like a regular tuple but
    also has some extra attributes that give further insight into the
    URL.
    """
    __slots__ = ()
    # Text-mode delimiters consumed by the BaseURL._split_* helpers.
    _at = '@'
    _colon = ':'
    _lbracket = '['
    _rbracket = ']'
    def __str__(self):
        return self.to_url()
    def encode_netloc(self):
        """Encodes the netloc part to an ASCII safe URL as a native
        string (``to_native`` is applied to the result)."""
        rv = self.ascii_host or ''
        if ':' in rv:
            # Bracket IPv6 literals so the port separator stays unambiguous.
            rv = '[%s]' % rv
        port = self.port
        if port is not None:
            rv = '%s:%d' % (rv, port)
        # Re-attach percent-quoted userinfo if present.
        auth = ':'.join(filter(None, [
            url_quote(self.raw_username or '', 'utf-8', 'strict', '/:%'),
            url_quote(self.raw_password or '', 'utf-8', 'strict', '/:%'),
        ]))
        if auth:
            rv = '%s@%s' % (auth, rv)
        return to_native(rv)
    def encode(self, charset='utf-8', errors='replace'):
        """Encodes the URL to a tuple made out of bytes. The charset is
        only being used for the path, query and fragment.
        """
        return BytesURL(
            self.scheme.encode('ascii'),
            self.encode_netloc(),
            self.path.encode(charset, errors),
            self.query.encode(charset, errors),
            self.fragment.encode(charset, errors)
        )
class BytesURL(BaseURL):
    """Represents a parsed URL in bytes."""
    __slots__ = ()
    # Bytes-mode delimiters consumed by the BaseURL._split_* helpers.
    _at = b'@'
    _colon = b':'
    _lbracket = b'['
    _rbracket = b']'
    def __str__(self):
        return self.to_url().decode('utf-8', 'replace')
    def encode_netloc(self):
        """Returns the netloc unchanged as bytes."""
        return self.netloc
    def decode(self, charset='utf-8', errors='replace'):
        """Decodes the URL to a tuple made out of strings. The charset is
        only being used for the path, query and fragment.
        """
        return URL(
            self.scheme.decode('ascii'),
            self.decode_netloc(),
            self.path.decode(charset, errors),
            self.query.decode(charset, errors),
            self.fragment.decode(charset, errors)
        )
def _unquote_to_bytes(string, unsafe=''):
    """Percent-decode *string* and return the raw bytes.

    Escapes that are malformed, or that would decode to a byte listed in
    *unsafe*, are left in the output verbatim (including the ``%``).
    """
    if isinstance(string, text_type):
        string = string.encode('utf-8')
    if isinstance(unsafe, text_type):
        unsafe = unsafe.encode('utf-8')
    unsafe = frozenset(bytearray(unsafe))
    # Split on '%': every chunk after the first starts with a (potential)
    # two-hex-digit escape.
    bits = iter(string.split(b'%'))
    result = bytearray(next(bits, b''))
    for item in bits:
        try:
            char = _hextobyte[item[:2]]
            if char in unsafe:
                # Treat unsafe bytes like invalid escapes: keep them quoted.
                raise KeyError()
            result.append(char)
            result.extend(item[2:])
        except KeyError:
            # Invalid or unsafe escape: restore the literal '%' and text.
            result.extend(b'%')
            result.extend(item)
    return bytes(result)
def _url_encode_impl(obj, charset, encode_keys, sort, key):
    """Yield quoted ``key=value`` strings for the items of *obj*.

    Items whose value is ``None`` are skipped; non-bytes keys/values are
    stringified and encoded in *charset* first.  ``encode_keys`` is accepted
    for API symmetry but not used here — keys are always encoded.
    """
    iterable = iter_multi_items(obj)
    if sort:
        iterable = sorted(iterable, key=key)
    # Note: the loop variable shadows the *key* sort function, which is
    # harmless because sorting already happened above.
    for key, value in iterable:
        if value is None:
            continue
        if not isinstance(key, bytes):
            key = text_type(key).encode(charset)
        if not isinstance(value, bytes):
            value = text_type(value).encode(charset)
        yield url_quote_plus(key) + '=' + url_quote_plus(value)
def _url_unquote_legacy(value, unsafe=''):
    """Unquote *value* as strict UTF-8, falling back to latin1 when the
    percent-decoded bytes are not valid UTF-8."""
    try:
        return url_unquote(value, charset='utf-8', errors='strict',
                           unsafe=unsafe)
    except UnicodeError:
        pass
    return url_unquote(value, charset='latin1', unsafe=unsafe)
def url_parse(url, scheme=None, allow_fragments=True):
    """Parses a URL from a string into a :class:`URL` tuple. If the URL
    is lacking a scheme it can be provided as second argument. Otherwise,
    it is ignored. Optionally fragments can be stripped from the URL
    by setting `allow_fragments` to `False`.
    The inverse of this function is :func:`url_unparse`.
    :param url: the URL to parse.
    :param scheme: the default schema to use if the URL is schemaless.
    :param allow_fragments: if set to `False` a fragment will be removed
                            from the URL.
    """
    # ``s`` turns literals into the input's type (text or bytes).
    s = make_literal_wrapper(url)
    is_text_based = isinstance(url, text_type)
    if scheme is None:
        scheme = s('')
    netloc = query = fragment = s('')
    i = url.find(s(':'))
    if i > 0 and _scheme_re.match(to_native(url[:i], errors='replace')):
        # make sure "iri" is not actually a port number (in which case
        # "scheme" is really part of the path)
        rest = url[i + 1:]
        if not rest or any(c not in s('0123456789') for c in rest):
            # not a port number
            scheme, url = url[:i].lower(), rest
    if url[:2] == s('//'):
        # Netloc runs until the first '/', '?' or '#' after the slashes.
        delim = len(url)
        for c in s('/?#'):
            wdelim = url.find(c, 2)
            if wdelim >= 0:
                delim = min(delim, wdelim)
        netloc, url = url[2:delim], url[delim:]
        # An unmatched bracket means a malformed IPv6 literal.
        if (s('[') in netloc and s(']') not in netloc) or \
           (s(']') in netloc and s('[') not in netloc):
            raise ValueError('Invalid IPv6 URL')
    if allow_fragments and s('#') in url:
        url, fragment = url.split(s('#'), 1)
    if s('?') in url:
        url, query = url.split(s('?'), 1)
    # Pre-2.5-style conditional: URL for text input, BytesURL for bytes.
    result_type = is_text_based and URL or BytesURL
    return result_type(scheme, netloc, url, query, fragment)
def url_quote(string, charset='utf-8', errors='strict', safe='/:', unsafe=''):
    """URL encode a single string with a given encoding.
    :param s: the string to quote.
    :param charset: the charset to be used.
    :param safe: an optional sequence of safe characters.
    :param unsafe: an optional sequence of unsafe characters.
    .. versionadded:: 0.9.2
       The `unsafe` parameter was added.
    """
    # Coerce arbitrary objects to text, then to bytes in the target charset;
    # quoting operates on bytes only.
    if not isinstance(string, (text_type, bytes, bytearray)):
        string = text_type(string)
    if isinstance(string, text_type):
        string = string.encode(charset, errors)
    if isinstance(safe, text_type):
        safe = safe.encode(charset, errors)
    if isinstance(unsafe, text_type):
        unsafe = unsafe.encode(charset, errors)
    # ``unsafe`` wins over ``safe`` and the always-safe alphabet.
    safe = frozenset(bytearray(safe) + _always_safe) - frozenset(bytearray(unsafe))
    rv = bytearray()
    for char in bytearray(string):
        if char in safe:
            rv.append(char)
        else:
            # Percent-encode everything else as uppercase hex.
            rv.extend(('%%%02X' % char).encode('ascii'))
    return to_native(bytes(rv))
def url_quote_plus(string, charset='utf-8', errors='strict', safe=''):
    """URL encode a single string with the given encoding, turning
    spaces into ``+`` (form-encoding style).
    :param s: The string to quote.
    :param charset: The charset to be used.
    :param safe: An optional sequence of safe characters.
    """
    # Keep ' ' safe (so it survives quoting) and mark '+' unsafe (so a
    # literal plus gets escaped), then swap the surviving spaces for '+'.
    quoted = url_quote(string, charset, errors, safe + ' ', '+')
    return quoted.replace(' ', '+')
def url_unparse(components):
    """The reverse operation to :meth:`url_parse`. This accepts arbitrary
    as well as :class:`URL` tuples and returns a URL as a string.
    :param components: the parsed URL as tuple which should be converted
                       into a URL string.
    """
    scheme, netloc, path, query, fragment = \
        normalize_string_tuple(components)
    # ``s`` turns literals into the components' type (text or bytes).
    s = make_literal_wrapper(scheme)
    url = s('')
    # We generally treat file:///x and file:/x the same which is also
    # what browsers seem to do. This also allows us to ignore a schema
    # register for netloc utilization or having to differenciate between
    # empty and missing netloc.
    if netloc or (scheme and path.startswith(s('/'))):
        if path and path[:1] != s('/'):
            path = s('/') + path
        url = s('//') + (netloc or s('')) + path
    elif path:
        url += path
    if scheme:
        url = scheme + s(':') + url
    if query:
        url = url + s('?') + query
    if fragment:
        url = url + s('#') + fragment
    return url
def url_unquote(string, charset='utf-8', errors='replace', unsafe=''):
    """URL decode a single string with a given encoding. If the charset
    is set to `None` no unicode decoding is performed and raw bytes
    are returned.
    :param s: the string to unquote.
    :param charset: the charset of the query string. If set to `None`
                    no unicode decoding will take place.
    :param errors: the error handling for the charset decoding.
    """
    raw = _unquote_to_bytes(string, unsafe)
    if charset is None:
        return raw
    return raw.decode(charset, errors)
def url_unquote_plus(s, charset='utf-8', errors='replace'):
    """URL decode a single string with the given `charset` and decode "+" to
    whitespace.
    Per default encoding errors are ignored. If you want a different behavior
    you can set `errors` to ``'replace'`` or ``'strict'``. In strict mode a
    :exc:`HTTPUnicodeError` is raised.
    :param s: The string to unquote.
    :param charset: the charset of the query string. If set to `None`
                    no unicode decoding will take place.
    :param errors: The error handling for the `charset` decoding.
    """
    # Pick plus/space literals matching the input's type, then unquote.
    if isinstance(s, text_type):
        plus, space = u'+', u' '
    else:
        plus, space = b'+', b' '
    return url_unquote(s.replace(plus, space), charset, errors)
def url_fix(s, charset='utf-8'):
    r"""Sometimes you get an URL by a user that just isn't a real URL because
    it contains unsafe characters like ' ' and so on. This function can fix
    some of the problems in a similar way browsers handle data entered by the
    user:
    >>> url_fix(u'http://de.wikipedia.org/wiki/Elf (Begriffskl\xe4rung)')
    'http://de.wikipedia.org/wiki/Elf%20(Begriffskl%C3%A4rung)'
    :param s: the string with the URL to fix.
    :param charset: The target charset for the URL if the url was given as
                    unicode string.
    """
    # First step is to switch to unicode processing and to convert
    # backslashes (which are invalid in URLs anyways) to slashes. This is
    # consistent with what Chrome does.
    s = to_unicode(s, charset, 'replace').replace('\\', '/')
    # For the specific case that we look like a malformed windows URL
    # we want to fix this up manually:
    if s.startswith('file://') and s[7:8].isalpha() and s[8:10] in (':/', '|/'):
        s = 'file:///' + s[7:]
    url = url_parse(s)
    # Re-quote each component with its own set of additionally-safe
    # characters ('%' stays safe so existing escapes survive).
    path = url_quote(url.path, charset, safe='/%+$!*\'(),')
    qs = url_quote_plus(url.query, charset, safe=':&%=+$!*\'(),')
    anchor = url_quote_plus(url.fragment, charset, safe=':&%=+$!*\'(),')
    return to_native(url_unparse((url.scheme, url.encode_netloc(),
                                  path, qs, anchor)))
def uri_to_iri(uri, charset='utf-8', errors='replace'):
    r"""
    Converts a URI in a given charset to a IRI.
    Examples for URI versus IRI:
    >>> uri_to_iri(b'http://xn--n3h.net/')
    u'http://\u2603.net/'
    >>> uri_to_iri(b'http://%C3%BCser:p%C3%A4ssword@xn--n3h.net/p%C3%A5th')
    u'http://\xfcser:p\xe4ssword@\u2603.net/p\xe5th'
    Query strings are left unchanged:
    >>> uri_to_iri('/?foo=24&x=%26%2f')
    u'/?foo=24&x=%26%2f'
    .. versionadded:: 0.6
    :param uri: The URI to convert.
    :param charset: The charset of the URI.
    :param errors: The error handling on decode.
    """
    if isinstance(uri, tuple):
        uri = url_unparse(uri)
    uri = url_parse(to_unicode(uri, charset))
    # The ``unsafe`` strings keep reserved delimiters (and '%') quoted so
    # the component structure is not changed by unquoting.
    path = url_unquote(uri.path, charset, errors, '%/;?')
    query = url_unquote(uri.query, charset, errors, '%;/?:@&=+,$#')
    fragment = url_unquote(uri.fragment, charset, errors, '%;/?:@&=+,$#')
    return url_unparse((uri.scheme, uri.decode_netloc(),
                        path, query, fragment))
def iri_to_uri(iri, charset='utf-8', errors='strict', safe_conversion=False):
    r"""
    Converts any unicode based IRI to an acceptable ASCII URI. Werkzeug always
    uses utf-8 URLs internally because this is what browsers and HTTP do as
    well. In some places where it accepts an URL it also accepts a unicode IRI
    and converts it into a URI.
    Examples for IRI versus URI:
    >>> iri_to_uri(u'http://☃.net/')
    'http://xn--n3h.net/'
    >>> iri_to_uri(u'http://üser:pässword@☃.net/påth')
    'http://%C3%BCser:p%C3%A4ssword@xn--n3h.net/p%C3%A5th'
    There is a general problem with IRI and URI conversion with some
    protocols that appear in the wild that are in violation of the URI
    specification. In places where Werkzeug goes through a forced IRI to
    URI conversion it will set the `safe_conversion` flag which will
    not perform a conversion if the end result is already ASCII. This
    can mean that the return value is not an entirely correct URI but
    it will not destroy such invalid URLs in the process.
    As an example consider the following two IRIs::
      magnet:?xt=uri:whatever
      itms-services://?action=download-manifest
    The internal representation after parsing of those URLs is the same
    and there is no way to reconstruct the original one. If safe
    conversion is enabled however this function becomes a noop for both of
    those strings as they both can be considered URIs.
    .. versionadded:: 0.6
    .. versionchanged:: 0.9.6
       The `safe_conversion` parameter was added.
    :param iri: The IRI to convert.
    :param charset: The charset for the URI.
    :param safe_conversion: indicates if a safe conversion should take place.
                            For more information see the explanation above.
    """
    if isinstance(iri, tuple):
        iri = url_unparse(iri)
    if safe_conversion:
        try:
            native_iri = to_native(iri)
            ascii_iri = to_native(iri).encode('ascii')
            # ``split() == [ascii_iri]`` holds only when the ASCII form
            # contains no whitespace; such input is already a usable URI.
            if ascii_iri.split() == [ascii_iri]:
                return native_iri
        except UnicodeError:
            # Not pure ASCII; fall through to the real conversion.
            pass
    iri = url_parse(to_unicode(iri, charset, errors))
    # Quote each component, keeping its reserved delimiters safe.
    netloc = iri.encode_netloc()
    path = url_quote(iri.path, charset, errors, '/:~+%')
    query = url_quote(iri.query, charset, errors, '%&[]:;$*()+,!?*/=')
    fragment = url_quote(iri.fragment, charset, errors, '=%&[]:;$()+,!?*/')
    return to_native(url_unparse((iri.scheme, netloc,
                                  path, query, fragment)))
def url_decode(s, charset='utf-8', decode_keys=False, include_empty=True,
               errors='replace', separator='&', cls=None):
    """
    Parse a querystring and return it as :class:`MultiDict`. There is a
    difference in key decoding on different Python versions. On Python 3
    keys will always be fully decoded whereas on Python 2, keys will
    remain bytestrings if they fit into ASCII. On 2.x keys can be forced
    to be unicode by setting `decode_keys` to `True`.
    If the charset is set to `None` no unicode decoding will happen and
    raw bytes will be returned.
    Per default a missing value for a key will default to an empty key. If
    you don't want that behavior you can set `include_empty` to `False`.
    Per default encoding errors are ignored. If you want a different behavior
    you can set `errors` to ``'replace'`` or ``'strict'``. In strict mode a
    `HTTPUnicodeError` is raised.
    .. versionchanged:: 0.5
       In previous versions ";" and "&" could be used for url decoding.
       This changed in 0.5 where only "&" is supported. If you want to
       use ";" instead a different `separator` can be provided.
       The `cls` parameter was added.
    :param s: a string with the query string to decode.
    :param charset: the charset of the query string. If set to `None`
                    no unicode decoding will take place.
    :param decode_keys: Used on Python 2.x to control whether keys should
                        be forced to be unicode objects. If set to `True`
                        then keys will be unicode in all cases. Otherwise,
                        they remain `str` if they fit into ASCII.
    :param include_empty: Set to `False` if you don't want empty values to
                          appear in the dict.
    :param errors: the decoding error behavior.
    :param separator: the pair separator to be used, defaults to ``&``
    :param cls: an optional dict class to use. If this is not specified
                       or `None` the default :class:`MultiDict` is used.
    """
    if cls is None:
        cls = MultiDict
    # Coerce the separator to the input's type so ``split`` works for both
    # text and bytes query strings.
    if isinstance(s, text_type) and not isinstance(separator, text_type):
        separator = separator.decode(charset or 'ascii')
    elif isinstance(s, bytes) and not isinstance(separator, bytes):
        separator = separator.encode(charset or 'ascii')
    return cls(_url_decode_impl(s.split(separator), charset, decode_keys,
                                include_empty, errors))
def url_decode_stream(stream, charset='utf-8', decode_keys=False,
                      include_empty=True, errors='replace', separator='&',
                      cls=None, limit=None, return_iterator=False):
    """Works like :func:`url_decode` but decodes a stream. The behavior
    of stream and limit follows functions like
    :func:`~werkzeug.wsgi.make_line_iter`. The generator of pairs is
    directly fed to the `cls` so you can consume the data while it's
    parsed.
    .. versionadded:: 0.8
    :param stream: a stream with the encoded querystring
    :param charset: the charset of the query string. If set to `None`
                    no unicode decoding will take place.
    :param decode_keys: Used on Python 2.x to control whether keys should
                        be forced to be unicode objects. If set to `True`,
                        keys will be unicode in all cases. Otherwise, they
                        remain `str` if they fit into ASCII.
    :param include_empty: Set to `False` if you don't want empty values to
                          appear in the dict.
    :param errors: the decoding error behavior.
    :param separator: the pair separator to be used, defaults to ``&``
    :param cls: an optional dict class to use. If this is not specified
                       or `None` the default :class:`MultiDict` is used.
    :param limit: the content length of the URL data. Not necessary if
                  a limited stream is provided.
    :param return_iterator: if set to `True` the `cls` argument is ignored
                            and an iterator over all decoded pairs is
                            returned
    """
    # Imported lazily to avoid a circular import with werkzeug.wsgi.
    from werkzeug.wsgi import make_chunk_iter
    if return_iterator:
        # Identity "container": hands back the lazy pair iterator as-is.
        cls = lambda x: x
    elif cls is None:
        cls = MultiDict
    pair_iter = make_chunk_iter(stream, separator, limit)
    return cls(_url_decode_impl(pair_iter, charset, decode_keys,
                                include_empty, errors))
def _url_decode_impl(pair_iter, charset, decode_keys, include_empty, errors):
    """Yield unquoted ``(key, value)`` tuples for every non-empty pair in
    *pair_iter*.  Pairs without ``=`` get an empty value unless
    *include_empty* is false, in which case they are dropped."""
    for pair in pair_iter:
        if not pair:
            continue
        # ``s`` turns literals into the pair's type (text or bytes).
        s = make_literal_wrapper(pair)
        equal = s('=')
        if equal in pair:
            key, value = pair.split(equal, 1)
        else:
            if not include_empty:
                continue
            key = pair
            value = s('')
        key = url_unquote_plus(key, charset, errors)
        if charset is not None and PY2 and not decode_keys:
            # Python 2 only: keep ASCII-safe keys as native ``str``.
            key = try_coerce_native(key)
        yield key, url_unquote_plus(value, charset, errors)
def url_encode(obj, charset='utf-8', encode_keys=False, sort=False, key=None,
               separator=b'&'):
    """URL encode a dict/`MultiDict`. If a value is `None` it will not appear
    in the result string. Per default only values are encoded into the target
    charset strings. If `encode_keys` is set to ``True`` unicode keys are
    supported too.
    If `sort` is set to `True` the items are sorted by `key` or the default
    sorting algorithm.
    .. versionadded:: 0.5
        `sort`, `key`, and `separator` were added.
    :param obj: the object to encode into a query string.
    :param charset: the charset of the query string.
    :param encode_keys: set to `True` if you have unicode keys. (Ignored on
                        Python 3.x)
    :param sort: set to `True` if you want parameters to be sorted by `key`.
    :param separator: the separator to be used for the pairs.
    :param key: an optional function to be used for sorting. For more details
                check out the :func:`sorted` documentation.
    """
    # Generate the quoted ``key=value`` pairs lazily and glue them together
    # with a native-string separator.
    pairs = _url_encode_impl(obj, charset, encode_keys, sort, key)
    return to_native(separator, 'ascii').join(pairs)
def url_encode_stream(obj, stream=None, charset='utf-8', encode_keys=False,
                      sort=False, key=None, separator=b'&'):
    """Like :meth:`url_encode` but writes the results to a stream
    object. If the stream is `None` a generator over all encoded
    pairs is returned.
    .. versionadded:: 0.8
    :param obj: the object to encode into a query string.
    :param stream: a stream to write the encoded object into or `None` if
                   an iterator over the encoded pairs should be returned. In
                   that case the separator argument is ignored.
    :param charset: the charset of the query string.
    :param encode_keys: set to `True` if you have unicode keys. (Ignored on
                        Python 3.x)
    :param sort: set to `True` if you want parameters to be sorted by `key`.
    :param separator: the separator to be used for the pairs.
    :param key: an optional function to be used for sorting. For more details
                check out the :func:`sorted` documentation.
    """
    separator = to_native(separator, 'ascii')
    gen = _url_encode_impl(obj, charset, encode_keys, sort, key)
    if stream is None:
        return gen
    # Write the separator before every chunk except the first one.
    for idx, chunk in enumerate(gen):
        if idx:
            stream.write(separator)
        stream.write(chunk)
def url_join(base, url, allow_fragments=True):
    """Join a base URL and a possibly relative URL to form an absolute
    interpretation of the latter.
    :param base: the base URL for the join operation.
    :param url: the URL to join.
    :param allow_fragments: indicates whether fragments should be allowed.
    """
    if isinstance(base, tuple):
        base = url_unparse(base)
    if isinstance(url, tuple):
        url = url_unparse(url)
    base, url = normalize_string_tuple((base, url))
    s = make_literal_wrapper(base)
    if not base:
        return url
    if not url:
        return base
    bscheme, bnetloc, bpath, bquery, bfragment = \
        url_parse(base, allow_fragments=allow_fragments)
    scheme, netloc, path, query, fragment = \
        url_parse(url, bscheme, allow_fragments)
    # A different scheme or an explicit netloc means *url* is absolute.
    if scheme != bscheme:
        return url
    if netloc:
        return url_unparse((scheme, netloc, path, query, fragment))
    netloc = bnetloc
    if path[:1] == s('/'):
        # Absolute path: replaces the base path entirely.
        segments = path.split(s('/'))
    elif not path:
        # Empty path: keep the base path; the query overrides only if set.
        segments = bpath.split(s('/'))
        if not query:
            query = bquery
    else:
        # Relative path: merge with the base path's directory part.
        segments = bpath.split(s('/'))[:-1] + path.split(s('/'))
    # If the rightmost part is "./" we want to keep the slash but
    # remove the dot.
    if segments[-1] == s('.'):
        segments[-1] = s('')
    # Resolve ".." and "."
    segments = [segment for segment in segments if segment != s('.')]
    while 1:
        i = 1
        n = len(segments) - 1
        while i < n:
            # Collapse a "parent/.." pair; restart scanning afterwards.
            if segments[i] == s('..') and \
               segments[i - 1] not in (s(''), s('..')):
                del segments[i - 1:i + 1]
                break
            i += 1
        else:
            break
    # Remove trailing ".." if the URL is absolute
    unwanted_marker = [s(''), s('..')]
    while segments[:2] == unwanted_marker:
        del segments[1]
    path = s('/').join(segments)
    return url_unparse((scheme, netloc, path, query, fragment))
class Href(object):
    """Implements a callable that constructs URLs with the given base. The
    function can be called with any number of positional and keyword
    arguments which than are used to assemble the URL. Works with URLs
    and posix paths.
    Positional arguments are appended as individual segments to
    the path of the URL:
    >>> href = Href('/foo')
    >>> href('bar', 23)
    '/foo/bar/23'
    >>> href('foo', bar=23)
    '/foo/foo?bar=23'
    If any of the arguments (positional or keyword) evaluates to `None` it
    will be skipped. If no keyword arguments are given the last argument
    can be a :class:`dict` or :class:`MultiDict` (or any other dict subclass),
    otherwise the keyword arguments are used for the query parameters, cutting
    off the first trailing underscore of the parameter name:
    >>> href(is_=42)
    '/foo?is=42'
    >>> href({'foo': 'bar'})
    '/foo?foo=bar'
    Combining of both methods is not allowed:
    >>> href({'foo': 'bar'}, bar=42)
    Traceback (most recent call last):
      ...
    TypeError: keyword arguments and query-dicts can't be combined
    Accessing attributes on the href object creates a new href object with
    the attribute name as prefix:
    >>> bar_href = href.bar
    >>> bar_href("blub")
    '/foo/bar/blub'
    If `sort` is set to `True` the items are sorted by `key` or the default
    sorting algorithm:
    >>> href = Href("/", sort=True)
    >>> href(a=1, b=2, c=3)
    '/?a=1&b=2&c=3'
    .. versionadded:: 0.5
        `sort` and `key` were added.
    """
    def __init__(self, base='./', charset='utf-8', sort=False, key=None):
        # A falsy base means "relative to the current directory".
        if not base:
            base = './'
        self.base = base
        self.charset = charset
        self.sort = sort
        self.key = key
    def __getattr__(self, name):
        # Refuse dunder-style names — presumably so special-method lookups
        # (copy, pickle, repr machinery) don't produce child Href objects;
        # confirm against callers before relying on this.
        if name[:2] == '__':
            raise AttributeError(name)
        base = self.base
        if base[-1:] != '/':
            base += '/'
        return Href(url_join(base, name), self.charset, self.sort, self.key)
    def __call__(self, *path, **query):
        # A trailing dict positional argument is the query; it is mutually
        # exclusive with keyword arguments.
        if path and isinstance(path[-1], dict):
            if query:
                raise TypeError('keyword arguments and query-dicts '
                                'can\'t be combined')
            query, path = path[-1], path[:-1]
        elif query:
            # Strip one trailing underscore (``is_`` -> ``is``).
            query = dict([(k.endswith('_') and k[:-1] or k, v)
                          for k, v in query.items()])
        # Quote each non-None segment and join them into a relative path.
        path = '/'.join([to_unicode(url_quote(x, self.charset), 'ascii')
                         for x in path if x is not None]).lstrip('/')
        rv = self.base
        if path:
            if not rv.endswith('/'):
                rv += '/'
            rv = url_join(rv, './' + path)
        if query:
            rv += '?' + to_unicode(url_encode(query, self.charset, sort=self.sort,
                                              key=self.key), 'ascii')
        return to_native(rv)
| mit |
escorciav/linux-utils | hacks/compress_folder.py | 1 | 2153 | #!/bin/env python
"""
Compress and delete a folder if 50% of the files inside it are less than 2MB.
It only considers files in the first level of the folder.
"""
import os
import shutil
import sys
import tarfile
from contextlib import contextmanager
@contextmanager
def cd(newdir):
    """Context manager that chdirs into *newdir* (``~`` expanded) and always
    restores the previous working directory on exit, even on error."""
    prevdir = os.getcwd()
    os.chdir(os.path.expanduser(newdir))
    try:
        yield
    finally:
        os.chdir(prevdir)
def tar_dir(dest, source):
    """Create a gzipped tar *dest* holding the first-level entries of the
    folder *source*, archived relative to the folder's parent directory."""
    dirname = os.path.dirname(source)
    with cd(dirname):
        basename = os.path.basename(source)
        with tarfile.open(dest, mode='w:gz') as archive:
            for entry in os.listdir(source):
                archive.add(os.path.join(basename, entry))
def main(dirname, mb_threshold=2):
    """Compress *dirname* into a sibling ``<dirname>.tar.gz`` and delete the
    original folder when the median size of its first-level files is at most
    *mb_threshold* megabytes.

    :param dirname: path of the folder to (maybe) compress.
    :param mb_threshold: threshold in MB applied to the median file size.
    :returns: ``None`` in every case; progress is reported on stdout.
    """
    # os.path.isdir is already False for missing paths, so one check suffices.
    if not os.path.isdir(dirname):
        print('Unexistent folder: {}. Bye!'.format(dirname))
        return None
    # Grab first-level file sizes in whole MB.  Floor division keeps the
    # original Python 2 integer semantics and also works on Python 3.
    files_size = [os.path.getsize(os.path.join(dirname, f)) // (1024 * 1024)
                  for f in os.listdir(dirname)
                  if os.path.isfile(os.path.join(dirname, f))]
    if not files_size:
        print('Skip folder compression. No files inside folder.')
        return None
    # Compute median of file sizes (upper median for even counts).
    n_files = len(files_size)
    files_size.sort()
    # ``//`` is required: plain ``/`` yields a float index on Python 3.
    mb_median = files_size[n_files // 2]
    if mb_median > mb_threshold:
        print('Skip folder compression. Good amount of big files.')
        return None
    source, basename = os.path.abspath(dirname), os.path.basename(dirname)
    destination = os.path.join(os.path.dirname(source), basename + '.tar.gz')
    print('Compressing file onto {}'.format(destination))
    tar_dir(destination, source)
    # Sanity-check the archive before destroying the original folder.
    try:
        with tarfile.open(destination, 'r'):
            successful_compression = True
    except (tarfile.TarError, EnvironmentError):
        # Only archive/IO errors are expected here; anything else is a bug
        # and should propagate rather than be silently swallowed (the
        # original used a bare ``except:``).
        successful_compression = False
        print('Unknown error. Deleting compressed file:')
        os.remove(destination)
    if successful_compression:
        shutil.rmtree(dirname)
if __name__ == '__main__':
    if len(sys.argv) != 2:
        # Print usage and abort.  The original fell through and still called
        # main(sys.argv[1]), crashing with an IndexError when no argument
        # was given (and silently using the first one when several were).
        print('compress_folder.py [folder]')
        sys.exit(1)
    main(sys.argv[1])
| mit |
gurneyalex/OpenUpgrade | addons/sales_team/res_config.py | 366 | 1922 | # -*- coding: utf-8 -*-
from openerp.osv import fields, osv
class sales_team_configuration(osv.TransientModel):
    # Extends the existing 'sale.config.settings' transient model rather
    # than creating a new one (_name equals the inherited model).
    _name = 'sale.config.settings'
    _inherit = ['sale.config.settings']
    def set_group_multi_salesteams(self, cr, uid, ids, context=None):
        """ This method is automatically called by res_config as it begins
            with set. It is used to implement the 'one group or another'
            behavior. We have to perform some group manipulation by hand
            because in res_config.execute(), set_* methods are called
            after group_*; therefore writing on an hidden res_config file
            could not work.
            If group_multi_salesteams is checked: remove group_mono_salesteams
            from group_user, remove the users. Otherwise, just add
            group_mono_salesteams in group_user.
            The inverse logic about group_multi_salesteams is managed by the
            normal behavior of 'group_multi_salesteams' field.
        """
        def ref(xml_id):
            # Resolve a ``module.xml_id`` reference to its browse record.
            mod, xml = xml_id.split('.', 1)
            return self.pool['ir.model.data'].get_object(cr, uid, mod, xml, context)
        for obj in self.browse(cr, uid, ids, context=context):
            config_group = ref('base.group_mono_salesteams')
            base_group = ref('base.group_user')
            if obj.group_multi_salesteams:
                # Multi-team mode: unlink the mono-team group from the base
                # employee group and drop its current members.
                # (3, id) = unlink, keep record; see ORM write() command codes.
                base_group.write({'implied_ids': [(3, config_group.id)]})
                config_group.write({'users': [(3, u.id) for u in base_group.users]})
            else:
                # Mono-team mode: (4, id) links the group back as implied.
                base_group.write({'implied_ids': [(4, config_group.id)]})
        return True
    _columns = {
        # Boolean toggle backing the implied-group mechanism above.
        'group_multi_salesteams': fields.boolean("Organize Sales activities into multiple Sales Teams",
            implied_group='base.group_multi_salesteams',
            help="""Allows you to use Sales Teams to manage your leads and opportunities."""),
    }
| agpl-3.0 |
loco-odoo/localizacion_co | openerp/addons/hr_timesheet_sheet/wizard/__init__.py | 443 | 1075 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import hr_timesheet_current
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
drufat/vispy | vispy/visuals/sphere.py | 7 | 3002 | # -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2015, Vispy Development Team. All Rights Reserved.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
# -----------------------------------------------------------------------------
from ..geometry import create_sphere
from .mesh import MeshVisual
from .visual import CompoundVisual
class SphereVisual(CompoundVisual):
    """Visual that displays a sphere.

    Parameters
    ----------
    radius : float
        The size of the sphere.
    cols : int
        Number of cols that make up the sphere mesh
        (for method='latitude' and 'cube').
    rows : int
        Number of rows that make up the sphere mesh
        (for method='latitude' and 'cube').
    depth : int
        Number of depth segments that make up the sphere mesh
        (for method='cube').
    subdivisions : int
        Number of subdivisions to perform (for method='ico').
    method : str
        Method for generating sphere. Accepts 'latitude' for
        latitude-longitude, 'ico' for icosahedron, and 'cube'
        for cube based tessellation.
    vertex_colors : ndarray
        Same as for `MeshVisual` class.
        See `create_sphere` for vertex ordering.
    face_colors : ndarray
        Same as for `MeshVisual` class.
        See `create_sphere` for vertex ordering.
    color : Color
        The `Color` to use when drawing the sphere faces.
    edge_color : tuple or Color
        The `Color` to use when drawing the sphere edges. If `None`, then no
        sphere edges are drawn.
    """

    def __init__(self, radius=1.0, cols=30, rows=30, depth=30, subdivisions=3,
                 method='latitude', vertex_colors=None, face_colors=None,
                 color=(0.5, 0.5, 1, 1), edge_color=None, **kwargs):
        # Generate the sphere geometry once; it feeds both sub-visuals.
        meshdata = create_sphere(cols, rows, depth, radius=radius,
                                 subdivisions=subdivisions, method=method)

        # Filled faces of the sphere.
        self._mesh = MeshVisual(vertices=meshdata.get_vertices(),
                                faces=meshdata.get_faces(),
                                vertex_colors=vertex_colors,
                                face_colors=face_colors, color=color)

        # Wireframe, only when an edge color was requested; otherwise an
        # empty mesh keeps the compound's sub-visual list uniform.
        if edge_color:
            self._border = MeshVisual(vertices=meshdata.get_vertices(),
                                      faces=meshdata.get_edges(),
                                      color=edge_color, mode='lines')
        else:
            self._border = MeshVisual()

        CompoundVisual.__init__(self, [self._mesh, self._border], **kwargs)
        # Offset the filled faces so the wireframe is not z-fighting them.
        self.mesh.set_gl_state(polygon_offset_fill=True,
                               polygon_offset=(1, 1), depth_test=True)

    @property
    def mesh(self):
        """The vispy.visuals.MeshVisual used to fill in the sphere."""
        return self._mesh

    @property
    def border(self):
        """The vispy.visuals.MeshVisual used to draw the border."""
        return self._border
| bsd-3-clause |
WillisXChen/django-oscar | oscar/lib/python2.7/site-packages/IPython/qt/console/completion_lexer.py | 13 | 2495 | # System library imports
from pygments.token import Token, is_token_subtype
class CompletionLexer(object):
    """ Uses Pygments and some auxiliary information to lex code snippets for
        symbol contexts.
    """

    # Maps lexer names to a list of possible name separators.
    separator_map = { 'C' : [ '.', '->' ],
                      'C++' : [ '.', '->', '::' ],
                      'Python' : [ '.' ] }

    def __init__(self, lexer):
        """ Create a CompletionLexer using the specified Pygments lexer.
        """
        self.lexer = lexer

    def get_context(self, string):
        """ Assuming the cursor is at the end of the specified string, get the
            context (a list of names) for the symbol at cursor position.
        """
        context = []
        reversed_tokens = list(self._lexer.get_tokens(string))
        reversed_tokens.reverse()

        # Pygments often tacks on a newline when none is specified in the input.
        # Remove this newline.
        if reversed_tokens and reversed_tokens[0][1].endswith('\n') and \
                not string.endswith('\n'):
            reversed_tokens.pop(0)

        current_op = ''
        for token, text in reversed_tokens:

            if is_token_subtype(token, Token.Name):
                # Handle a trailing separator, e.g 'foo.bar.'
                if current_op in self._name_separators:
                    if not context:
                        context.insert(0, '')
                # Handle non-separator operators and punctuation.
                elif current_op:
                    break
                context.insert(0, text)
                current_op = ''

            # Pygments doesn't understand that, e.g., '->' is a single operator
            # in C++. This is why we have to build up an operator from
            # potentially several tokens.
            elif token is Token.Operator or token is Token.Punctuation:
                current_op = text + current_op

            # Break on anything that is not a Operator, Punctuation, or Name.
            else:
                break

        return context

    def get_lexer(self, lexer=None):
        """ Getter for the 'lexer' property.

        BUG FIX: ``property(get_lexer, set_lexer)`` invokes the getter as
        ``get_lexer(self)`` only; the previous required ``lexer`` argument
        made every read of the ``lexer`` property raise TypeError. The
        parameter is kept (now with a default) for backward compatibility
        and is ignored.
        """
        return self._lexer

    def set_lexer(self, lexer, name_separators=None):
        """ Setter for the 'lexer' property; also derives the name
        separators from the lexer name unless given explicitly.
        """
        self._lexer = lexer
        if name_separators is None:
            self._name_separators = self.separator_map.get(lexer.name, ['.'])
        else:
            self._name_separators = list(name_separators)

    lexer = property(get_lexer, set_lexer)
| bsd-3-clause |
cocoon-project/restful-workshop | src/server/bottle.py | 1 | 127304 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Bottle is a fast and simple micro-framework for small web applications. It
offers request dispatching (Routes) with url parameter support, templates,
a built-in HTTP Server and adapters for many third party WSGI/HTTP-server and
template engines - all in a single file and with no dependencies other than the
Python Standard Library.
Homepage and documentation: http://bottlepy.org/
Copyright (c) 2011, Marcel Hellkamp.
License: MIT (see LICENSE for details)
"""
from __future__ import with_statement
__author__ = 'Marcel Hellkamp'
__version__ = '0.11.dev'
__license__ = 'MIT'
# The gevent server adapter needs to patch some modules before they are imported
# This is why we parse the commandline parameters here but handle them later
if __name__ == '__main__':
    from optparse import OptionParser
    _cmd_parser = OptionParser(usage="usage: %prog [options] package.module:app")
    _opt = _cmd_parser.add_option  # shorthand for the repeated calls below
    _opt("--version", action="store_true", help="show version number.")
    _opt("-b", "--bind", metavar="ADDRESS", help="bind socket to ADDRESS.")
    _opt("-s", "--server", default='wsgiref', help="use SERVER as backend.")
    _opt("-p", "--plugin", action="append", help="install additional plugin/s.")
    _opt("--debug", action="store_true", help="start server in debug mode.")
    _opt("--reload", action="store_true", help="auto-reload on file changes.")
    _cmd_options, _cmd_args = _cmd_parser.parse_args()
    # gevent must monkey-patch the stdlib before anything else imports it,
    # which is why the command line is parsed this early.
    if _cmd_options.server and _cmd_options.server.startswith('gevent'):
        import gevent.monkey; gevent.monkey.patch_all()
import base64, cgi, email.utils, functools, hmac, imp, itertools, mimetypes,\
os, re, subprocess, sys, tempfile, threading, time, urllib, warnings
from datetime import date as datedate, datetime, timedelta
from tempfile import TemporaryFile
from traceback import format_exc, print_exc
try: from json import dumps as json_dumps, loads as json_lds
except ImportError: # pragma: no cover
try: from simplejson import dumps as json_dumps, loads as json_lds
except ImportError:
try: from django.utils.simplejson import dumps as json_dumps, loads as json_lds
except ImportError:
def json_dumps(data):
raise ImportError("JSON support requires Python 2.6 or simplejson.")
json_lds = json_dumps
# We now try to fix 2.5/2.6/3.1/3.2 incompatibilities.
# It ain't pretty but it works... Sorry for the mess.
py = sys.version_info
py3k = py >= (3,0,0)
py25 = py < (2,6,0)
# Workaround for the missing "as" keyword in py3k.
def _e(): return sys.exc_info()[1]
# Workaround for the "print is a keyword/function" dilemma.
_stdout, _stderr = sys.stdout.write, sys.stderr.write
# Lots of stdlib and builtin differences.
if py3k:
import http.client as httplib
import _thread as thread
from urllib.parse import urljoin, parse_qsl, SplitResult as UrlSplitResult
from urllib.parse import urlencode, quote as urlquote, unquote as urlunquote
from http.cookies import SimpleCookie
from collections import MutableMapping as DictMixin
import pickle
from io import BytesIO
basestring = str
unicode = str
json_loads = lambda s: json_lds(touni(s))
callable = lambda x: hasattr(x, '__call__')
imap = map
else: # 2.x
import httplib
import thread
from urlparse import urljoin, SplitResult as UrlSplitResult
from urllib import urlencode, quote as urlquote, unquote as urlunquote
from Cookie import SimpleCookie
from itertools import imap
import cPickle as pickle
from StringIO import StringIO as BytesIO
if py25:
msg = "Python 2.5 support may be dropped in future versions of Bottle."
warnings.warn(msg, DeprecationWarning)
from cgi import parse_qsl
from UserDict import DictMixin
def next(it): return it.next()
bytes = str
else: # 2.6, 2.7
from urlparse import parse_qsl
from collections import MutableMapping as DictMixin
json_loads = json_lds
# Some helpers for string/byte handling
def tob(s, enc='utf8'):
    """Convert *s* to a byte string, encoding text with *enc* (utf8)."""
    if isinstance(s, unicode):
        return s.encode(enc)
    return bytes(s)
def touni(s, enc='utf8', err='strict'):
    """Convert *s* to a unicode string, decoding byte strings with *enc*."""
    if isinstance(s, bytes):
        return s.decode(enc, err)
    return unicode(s)
tonat = touni if py3k else tob
# 3.2 fixes cgi.FieldStorage to accept bytes (which makes a lot of sense).
# 3.1 needs a workaround.
NCTextIOWrapper = None
if (3,0,0) < py < (3,2,0):
    from io import TextIOWrapper
    class NCTextIOWrapper(TextIOWrapper):
        """TextIOWrapper whose close() leaves the wrapped buffer open
        (Non-Closing), needed only on Python 3.0/3.1."""
        def close(self): pass # Keep wrapped buffer open.
# A bug in functools causes it to break if the wrapper is an instance method
def update_wrapper(wrapper, wrapped, *a, **ka):
    """Copy metadata from *wrapped* onto *wrapper*, swallowing failures.

    functools.update_wrapper breaks when the wrapper is an instance
    method (it cannot set attributes on it); such AttributeErrors are
    silently ignored here.
    """
    try:
        functools.update_wrapper(wrapper, wrapped, *a, **ka)
    except AttributeError:
        pass
# These helpers are used at module level and need to be defined first.
# And yes, I know PEP-8, but sometimes a lower-case classname makes more sense.
def depr(message):
    """Issue a DeprecationWarning; stacklevel=3 attributes the warning to
    the caller of the (bottle-internal) function that invoked depr()."""
    warnings.warn(message, DeprecationWarning, stacklevel=3)
def makelist(data):
    """Coerce *data* into a list: containers are copied element-wise,
    other truthy values are wrapped in a list, falsy values yield []."""
    if isinstance(data, (tuple, list, set, dict)):
        return list(data)
    return [data] if data else []
class DictProperty(object):
    """Property that stores its value under a key of a dict-like
    instance attribute. Used as a decorator factory; the decorated
    function computes the value on first access, after which it is
    cached in the dict. Optionally write-protected.
    """

    def __init__(self, attr, key=None, read_only=False):
        # Name of the dict attribute, key within it, and write protection.
        self.attr = attr
        self.key = key
        self.read_only = read_only

    def __call__(self, func):
        # Decorator usage: remember the getter, default the key to its name.
        functools.update_wrapper(self, func, updated=[])
        self.getter = func
        self.key = self.key or func.__name__
        return self

    def __get__(self, obj, cls):
        # Class-level access returns the descriptor itself.
        if obj is None:
            return self
        storage = getattr(obj, self.attr)
        # Compute and cache on first access.
        if self.key not in storage:
            storage[self.key] = self.getter(obj)
        return storage[self.key]

    def __set__(self, obj, value):
        if self.read_only:
            raise AttributeError("Read-Only property.")
        getattr(obj, self.attr)[self.key] = value

    def __delete__(self, obj):
        if self.read_only:
            raise AttributeError("Read-Only property.")
        del getattr(obj, self.attr)[self.key]
class cached_property(object):
    """Non-data descriptor that computes a value once per instance.

    On first access the result is stored in the instance ``__dict__``
    under the same name, shadowing the descriptor. Deleting the
    attribute resets the cache.
    """

    def __init__(self, func):
        self.func = func

    def __get__(self, obj, cls):
        # Class-level access returns the descriptor itself.
        if obj is None:
            return self
        result = self.func(obj)
        obj.__dict__[self.func.__name__] = result
        return result
class lazy_attribute(object):
    """Descriptor computing a class attribute once; the computed value
    then replaces the descriptor on the class itself."""

    def __init__(self, func):
        # updated=[] copies __name__ etc. onto the descriptor itself.
        functools.update_wrapper(self, func, updated=[])
        self.getter = func

    def __get__(self, obj, cls):
        result = self.getter(cls)
        setattr(cls, self.__name__, result)
        return result
###############################################################################
# Exceptions and Events ########################################################
###############################################################################
class BottleException(Exception):
    """ A base class for all exceptions used by bottle. """
    pass
#TODO: This should subclass BaseRequest
class HTTPResponse(BottleException):
    """ Used to break execution and immediately finish the response """
    def __init__(self, output='', status=200, header=None):
        # NOTE(review): super() is deliberately called with BottleException
        # (the parent), which for this single-inheritance chain dispatches
        # to Exception.__init__ with the status message.
        super(BottleException, self).__init__("HTTP Response %d" % status)
        self.status = int(status)   # numeric HTTP status code
        self.output = output        # response body (any castable value)
        # Optional extra headers, or None if no headers were given.
        self.headers = HeaderDict(header) if header else None

    def apply(self, response):
        """Copy this response's status and headers onto *response*."""
        if self.headers:
            for key, value in self.headers.allitems():
                response.headers[key] = value
        response.status = self.status
class HTTPError(HTTPResponse):
    """ Used to generate an error page """
    def __init__(self, code=500, output='Unknown Error', exception=None,
                 traceback=None, header=None):
        super(HTTPError, self).__init__(output, code, header)
        self.exception = exception   # original exception instance, if any
        self.traceback = traceback   # formatted traceback string, if any

    def __repr__(self):
        # Render the debug error page through the module's template helper.
        return tonat(template(ERROR_PAGE_TEMPLATE, e=self))
###############################################################################
# Routing ######################################################################
###############################################################################
class RouteError(BottleException):
    """ This is a base class for all routing related exceptions """


class RouteReset(BottleException):
    """ If raised by a plugin or request handler, the route is reset and all
        plugins are re-applied. """

class RouterUnknownModeError(RouteError):
    # NOTE(review): not raised anywhere in the visible code.
    pass


class RouteSyntaxError(RouteError):
    """ The route parser found something not supported by this router """


class RouteBuildError(RouteError):
    """ The route could not be built """
class Router(object):
    ''' A Router is an ordered collection of route->target pairs. It is used to
        efficiently match WSGI requests against a number of routes and return
        the first target that satisfies the request. The target may be anything,
        usually a string, ID or callable object. A route consists of a path-rule
        and a HTTP method.

        The path-rule is either a static path (e.g. `/contact`) or a dynamic
        path that contains wildcards (e.g. `/wiki/<page>`). The wildcard syntax
        and details on the matching order are described in docs:`routing`.
    '''

    # Fallback regular expression and filter name for wildcards.
    default_pattern = '[^/]+'
    default_filter = 're'
    #: Sorry for the mess. It works. Trust me.
    #: Matches the two wildcard notations (':name#regex#' and
    #: '<name:filter:config>'), honoring backslash-escapes.
    rule_syntax = re.compile('(\\\\*)'\
        '(?:(?::([a-zA-Z_][a-zA-Z_0-9]*)?()(?:#(.*?)#)?)'\
        '|(?:<([a-zA-Z_][a-zA-Z_0-9]*)?(?::([a-zA-Z_]*)'\
        '(?::((?:\\\\.|[^\\\\>]+)+)?)?)?>))')

    def __init__(self, strict=False):
        self.rules = {}    # A {rule: Rule} mapping
        self.builder = {}  # A rule/name->build_info mapping
        self.static = {}   # Cache for static routes: {path: {method: target}}
        self.dynamic = []  # Cache for dynamic routes. See _compile()
        #: If true, static routes are no longer checked first.
        self.strict_order = strict
        self.filters = {'re': self.re_filter, 'int': self.int_filter,
                        'float': self.float_filter, 'path': self.path_filter}

    def re_filter(self, conf):
        """Wildcard filter: raw regular expression (the default)."""
        return conf or self.default_pattern, None, None

    def int_filter(self, conf):
        """Wildcard filter: signed integers, converted with int()."""
        return r'-?\d+', int, lambda x: str(int(x))

    def float_filter(self, conf):
        """Wildcard filter: signed floats, converted with float()."""
        return r'-?[\d.]+', float, lambda x: str(float(x))

    def path_filter(self, conf):
        """Wildcard filter: non-greedy match that may include slashes."""
        return r'.+?', None, None

    def add_filter(self, name, func):
        ''' Add a filter. The provided function is called with the configuration
            string as parameter and must return a (regexp, to_python, to_url) tuple.
            The first element is a string, the last two are callables or None. '''
        self.filters[name] = func

    def parse_rule(self, rule):
        ''' Parses a rule into a (name, filter, conf) token stream. If mode is
            None, name contains a static rule part. '''
        offset, prefix = 0, ''
        for match in self.rule_syntax.finditer(rule):
            prefix += rule[offset:match.start()]
            g = match.groups()
            if len(g[0])%2: # Escaped wildcard
                prefix += match.group(0)[len(g[0]):]
                offset = match.end()
                continue
            if prefix: yield prefix, None, None
            # Groups 1-3 hold the old ':name' syntax, 4-6 the '<...>' syntax.
            name, filtr, conf = g[1:4] if not g[2] is None else g[4:7]
            if not filtr: filtr = self.default_filter
            yield name, filtr, conf or None
            offset, prefix = match.end(), ''
        # Trailing static part of the rule.
        if offset <= len(rule) or prefix:
            yield prefix+rule[offset:], None, None

    def add(self, rule, method, target, name=None):
        ''' Add a new route or replace the target for an existing route. '''
        # Known rule: just bind the target to the (possibly new) method.
        if rule in self.rules:
            self.rules[rule][method] = target
            if name: self.builder[name] = self.builder[rule]
            return

        target = self.rules[rule] = {method: target}

        # Build pattern and other structures for dynamic routes
        anons = 0     # Number of anonymous wildcards
        pattern = ''  # Regular expression pattern
        filters = []  # Lists of wildcard input filters
        builder = []  # Data structure for the URL builder
        is_static = True
        for key, mode, conf in self.parse_rule(rule):
            if mode:
                is_static = False
                mask, in_filter, out_filter = self.filters[mode](conf)
                if key:
                    pattern += '(?P<%s>%s)' % (key, mask)
                else:
                    pattern += '(?:%s)' % mask
                    key = 'anon%d' % anons; anons += 1
                if in_filter: filters.append((key, in_filter))
                builder.append((key, out_filter or str))
            elif key:
                pattern += re.escape(key)
                builder.append((None, key))
        self.builder[rule] = builder
        if name: self.builder[name] = builder

        # Fully static rules go into the fast lookup dict (unless disabled).
        if is_static and not self.strict_order:
            self.static[self.build(rule)] = target
            return

        # Flatten named/capturing groups so several patterns can be OR-ed
        # together into one combined regex below.
        def fpat_sub(m):
            return m.group(0) if len(m.group(1)) % 2 else m.group(1) + '(?:'
        flat_pattern = re.sub(r'(\\*)(\(\?P<[^>]*>|\((?!\?))', fpat_sub, pattern)

        try:
            re_match = re.compile('^(%s)$' % pattern).match
        except re.error:
            raise RouteSyntaxError("Could not add Route: %s (%s)" % (rule, _e()))

        def match(path):
            """ Return an url-argument dictionary. """
            url_args = re_match(path).groupdict()
            for name, wildcard_filter in filters:
                try:
                    url_args[name] = wildcard_filter(url_args[name])
                except ValueError:
                    raise HTTPError(400, 'Path has wrong format.')
            return url_args

        # Merge into the last combined regex if it still has group capacity;
        # re.compile raises AssertionError past the group limit.
        try:
            combined = '%s|(^%s$)' % (self.dynamic[-1][0].pattern, flat_pattern)
            self.dynamic[-1] = (re.compile(combined), self.dynamic[-1][1])
            self.dynamic[-1][1].append((match, target))
        except (AssertionError, IndexError): # AssertionError: Too many groups
            self.dynamic.append((re.compile('(^%s$)' % flat_pattern),
                                [(match, target)]))
        return match

    def build(self, _name, *anons, **query):
        ''' Build an URL by filling the wildcards in a rule. '''
        builder = self.builder.get(_name)
        if not builder: raise RouteBuildError("No route with that name.", _name)
        try:
            for i, value in enumerate(anons): query['anon%d'%i] = value
            url = ''.join([f(query.pop(n)) if n else f for (n,f) in builder])
            # Leftover keyword arguments become the query string.
            return url if not query else url+'?'+urlencode(query)
        except KeyError:
            raise RouteBuildError('Missing URL argument: %r' % _e().args[0])

    def match(self, environ):
        ''' Return a (target, url_args) tuple or raise HTTPError(400/404/405). '''
        path, targets, urlargs = environ['PATH_INFO'] or '/', None, {}
        if path in self.static:
            targets = self.static[path]
        else:
            for combined, rules in self.dynamic:
                match = combined.match(path)
                if not match: continue
                # lastindex identifies which OR-ed sub-pattern matched.
                getargs, targets = rules[match.lastindex - 1]
                urlargs = getargs(path) if getargs else {}
                break

        if not targets:
            raise HTTPError(404, "Not found: " + repr(environ['PATH_INFO']))
        method = environ['REQUEST_METHOD'].upper()
        if method in targets:
            return targets[method], urlargs
        # HEAD falls back to GET; 'ANY' matches every method.
        if method == 'HEAD' and 'GET' in targets:
            return targets['GET'], urlargs
        if 'ANY' in targets:
            return targets['ANY'], urlargs
        allowed = [verb for verb in targets if verb != 'ANY']
        if 'GET' in allowed and 'HEAD' not in allowed:
            allowed.append('HEAD')
        raise HTTPError(405, "Method not allowed.",
                        header=[('Allow',",".join(allowed))])
class Route(object):
    ''' This class wraps a route callback along with route specific metadata and
        configuration and applies Plugins on demand. It is also responsible for
        turning an URL path rule into a regular expression usable by the Router.
    '''

    def __init__(self, app, rule, method, callback, name=None,
                 plugins=None, skiplist=None, **config):
        #: The application this route is installed to.
        self.app = app
        #: The path-rule string (e.g. ``/wiki/:page``).
        self.rule = rule
        #: The HTTP method as a string (e.g. ``GET``).
        self.method = method
        #: The original callback with no plugins applied. Useful for introspection.
        self.callback = callback
        #: The name of the route (if specified) or ``None``.
        self.name = name or None
        #: A list of route-specific plugins (see :meth:`Bottle.route`).
        self.plugins = plugins or []
        #: A list of plugins to not apply to this route (see :meth:`Bottle.route`).
        self.skiplist = skiplist or []
        #: Additional keyword arguments passed to the :meth:`Bottle.route`
        #: decorator are stored in this dictionary. Used for route-specific
        #: plugin configuration and meta-data.
        self.config = ConfigDict(config)

    def __call__(self, *a, **ka):
        # Deprecated: calling the Route directly forwards to self.call.
        depr("Some APIs changed to return Route() instances instead of"\
             " callables. Make sure to use the Route.call method and not to"\
             " call Route instances directly.")
        return self.call(*a, **ka)

    @cached_property
    def call(self):
        ''' The route callback with all plugins applied. This property is
            created on demand and then cached to speed up subsequent requests.'''
        return self._make_callback()

    def reset(self):
        ''' Forget any cached values. The next time :attr:`call` is accessed,
            all plugins are re-applied. '''
        self.__dict__.pop('call', None)

    def prepare(self):
        ''' Do all on-demand work immediately (useful for debugging).'''
        self.call

    @property
    def _context(self):
        # Legacy (Plugin API v1) context dictionary.
        depr('Switch to Plugin API v2 and access the Route object directly.')
        return dict(rule=self.rule, method=self.method, callback=self.callback,
                    name=self.name, app=self.app, config=self.config,
                    apply=self.plugins, skip=self.skiplist)

    def all_plugins(self):
        ''' Yield all Plugins affecting this route. '''
        unique = set()
        # App-level plugins first (reversed), then route-specific ones;
        # the skiplist may contain True, names, instances or types.
        for p in reversed(self.app.plugins + self.plugins):
            if True in self.skiplist: break
            name = getattr(p, 'name', False)
            if name and (name in self.skiplist or name in unique): continue
            if p in self.skiplist or type(p) in self.skiplist: continue
            if name: unique.add(name)
            yield p

    def _make_callback(self):
        """Apply all relevant plugins to the raw callback and return it."""
        callback = self.callback
        for plugin in self.all_plugins():
            try:
                if hasattr(plugin, 'apply'):
                    api = getattr(plugin, 'api', 1)
                    context = self if api > 1 else self._context
                    callback = plugin.apply(callback, context)
                else:
                    callback = plugin(callback)
            except RouteReset: # Try again with changed configuration.
                return self._make_callback()
            if not callback is self.callback:
                update_wrapper(callback, self.callback)
        return callback

    def __repr__(self):
        return '<%s %r %r>' % (self.method, self.rule, self.callback)
###############################################################################
# Application Object ###########################################################
###############################################################################
class Bottle(object):
""" Each Bottle object represents a single, distinct web application and
consists of routes, callbacks, plugins, resources and configuration.
Instances are callable WSGI applications.
:param catchall: If true (default), handle all exceptions. Turn off to
let debugging middleware handle exceptions.
"""
def __init__(self, catchall=True, autojson=True):
#: If true, most exceptions are catched and returned as :exc:`HTTPError`
self.catchall = catchall
#: A :cls:`ResourceManager` for application files
self.resources = ResourceManager()
#: A :cls:`ConfigDict` for app specific configuration.
self.config = ConfigDict()
self.config.autojson = autojson
self.routes = [] # List of installed :class:`Route` instances.
self.router = Router() # Maps requests to :class:`Route` instances.
self.error_handler = {}
# Core plugins
self.plugins = [] # List of installed plugins.
self.hooks = HooksPlugin()
self.install(self.hooks)
if self.config.autojson:
self.install(JSONPlugin())
self.install(TemplatePlugin())
def mount(self, prefix, app, **options):
''' Mount an application (:class:`Bottle` or plain WSGI) to a specific
URL prefix. Example::
root_app.mount('/admin/', admin_app)
:param prefix: path prefix or `mount-point`. If it ends in a slash,
that slash is mandatory.
:param app: an instance of :class:`Bottle` or a WSGI application.
All other parameters are passed to the underlying :meth:`route` call.
'''
if isinstance(app, basestring):
prefix, app = app, prefix
depr('Parameter order of Bottle.mount() changed.') # 0.10
parts = [p for p in prefix.split('/') if p]
if not parts: raise ValueError('Empty path prefix.')
path_depth = len(parts)
options.setdefault('skip', True)
options.setdefault('method', 'ANY')
@self.route('/%s/:#.*#' % '/'.join(parts), **options)
def mountpoint():
try:
request.path_shift(path_depth)
rs = BaseResponse([], 200)
def start_response(status, header):
rs.status = status
for name, value in header: rs.add_header(name, value)
return rs.body.append
rs.body = itertools.chain(rs.body, app(request.environ, start_response))
return HTTPResponse(rs.body, rs.status_code, rs.headers)
finally:
request.path_shift(-path_depth)
if not prefix.endswith('/'):
self.route('/' + '/'.join(parts), callback=mountpoint, **options)
def merge(self, routes):
''' Merge the routes of another :cls:`Bottle` application or a list of
:class:`Route` objects into this application. The routes keep their
'owner', meaning that the :data:`Route.app` attribute is not
changed. '''
if isinstance(routes, Bottle):
routes = routes.routes
for route in routes:
self.add_route(route)
def install(self, plugin):
''' Add a plugin to the list of plugins and prepare it for being
applied to all routes of this application. A plugin may be a simple
decorator or an object that implements the :class:`Plugin` API.
'''
if hasattr(plugin, 'setup'): plugin.setup(self)
if not callable(plugin) and not hasattr(plugin, 'apply'):
raise TypeError("Plugins must be callable or implement .apply()")
self.plugins.append(plugin)
self.reset()
return plugin
def uninstall(self, plugin):
''' Uninstall plugins. Pass an instance to remove a specific plugin, a type
object to remove all plugins that match that type, a string to remove
all plugins with a matching ``name`` attribute or ``True`` to remove all
plugins. Return the list of removed plugins. '''
removed, remove = [], plugin
for i, plugin in list(enumerate(self.plugins))[::-1]:
if remove is True or remove is plugin or remove is type(plugin) \
or getattr(plugin, 'name', True) == remove:
removed.append(plugin)
del self.plugins[i]
if hasattr(plugin, 'close'): plugin.close()
if removed: self.reset()
return removed
def run(self, **kwargs):
''' Calls :func:`run` with the same parameters. '''
run(self, **kwargs)
def reset(self, route=None):
''' Reset all routes (force plugins to be re-applied) and clear all
caches. If an ID or route object is given, only that specific route
is affected. '''
if route is None: routes = self.routes
elif isinstance(route, Route): routes = [route]
else: routes = [self.routes[route]]
for route in routes: route.reset()
if DEBUG:
for route in routes: route.prepare()
self.hooks.trigger('app_reset')
def close(self):
''' Close the application and all installed plugins. '''
for plugin in self.plugins:
if hasattr(plugin, 'close'): plugin.close()
self.stopped = True
def match(self, environ):
""" Search for a matching route and return a (:class:`Route` , urlargs)
tuple. The second value is a dictionary with parameters extracted
from the URL. Raise :exc:`HTTPError` (404/405) on a non-match."""
return self.router.match(environ)
def get_url(self, routename, **kargs):
""" Return a string that matches a named route """
scriptname = request.environ.get('SCRIPT_NAME', '').strip('/') + '/'
location = self.router.build(routename, **kargs).lstrip('/')
return urljoin(urljoin('/', scriptname), location)
def add_route(self, route):
''' Add a route object, but do not change the :data:`Route.app`
attribute.'''
self.routes.append(route)
self.router.add(route.rule, route.method, route, name=route.name)
if DEBUG: route.prepare()
def route(self, path=None, method='GET', callback=None, name=None,
apply=None, skip=None, **config):
""" A decorator to bind a function to a request URL. Example::
@app.route('/hello/:name')
def hello(name):
return 'Hello %s' % name
The ``:name`` part is a wildcard. See :class:`Router` for syntax
details.
:param path: Request path or a list of paths to listen to. If no
path is specified, it is automatically generated from the
signature of the function.
:param method: HTTP method (`GET`, `POST`, `PUT`, ...) or a list of
methods to listen to. (default: `GET`)
:param callback: An optional shortcut to avoid the decorator
syntax. ``route(..., callback=func)`` equals ``route(...)(func)``
:param name: The name for this route. (default: None)
:param apply: A decorator or plugin or a list of plugins. These are
applied to the route callback in addition to installed plugins.
:param skip: A list of plugins, plugin classes or names. Matching
plugins are not installed to this route. ``True`` skips all.
Any additional keyword arguments are stored as route-specific
configuration and passed to plugins (see :meth:`Plugin.apply`).
"""
if callable(path): path, callback = None, path
plugins = makelist(apply)
skiplist = makelist(skip)
def decorator(callback):
# TODO: Documentation and tests
if isinstance(callback, basestring): callback = load(callback)
for rule in makelist(path) or yieldroutes(callback):
for verb in makelist(method):
verb = verb.upper()
route = Route(self, rule, verb, callback, name=name,
plugins=plugins, skiplist=skiplist, **config)
self.add_route(route)
return callback
return decorator(callback) if callback else decorator
def get(self, path=None, method='GET', **options):
""" Equals :meth:`route`. """
return self.route(path, method, **options)
def post(self, path=None, method='POST', **options):
""" Equals :meth:`route` with a ``POST`` method parameter. """
return self.route(path, method, **options)
def put(self, path=None, method='PUT', **options):
""" Equals :meth:`route` with a ``PUT`` method parameter. """
return self.route(path, method, **options)
def delete(self, path=None, method='DELETE', **options):
""" Equals :meth:`route` with a ``DELETE`` method parameter. """
return self.route(path, method, **options)
def error(self, code=500):
""" Decorator: Register an output handler for a HTTP error code"""
def wrapper(handler):
self.error_handler[int(code)] = handler
return handler
return wrapper
def hook(self, name):
""" Return a decorator that attaches a callback to a hook. Three hooks
are currently implemented:
- before_request: Executed once before each request
- after_request: Executed once after each request
- app_reset: Called whenever :meth:`reset` is called.
"""
def wrapper(func):
self.hooks.add(name, func)
return func
return wrapper
def handle(self, path, method='GET'):
""" (deprecated) Execute the first matching route callback and return
the result. :exc:`HTTPResponse` exceptions are catched and returned.
If :attr:`Bottle.catchall` is true, other exceptions are catched as
well and returned as :exc:`HTTPError` instances (500).
"""
depr("This method will change semantics in 0.10. Try to avoid it.")
if isinstance(path, dict):
return self._handle(path)
return self._handle({'PATH_INFO': path, 'REQUEST_METHOD': method.upper()})
def _handle(self, environ):
try:
environ['bottle.app'] = self
request.bind(environ)
response.bind()
route, args = self.router.match(environ)
environ['route.handle'] = environ['bottle.route'] = route
environ['route.url_args'] = args
return route.call(**args)
except HTTPResponse:
return _e()
except RouteReset:
route.reset()
return self._handle(environ)
except (KeyboardInterrupt, SystemExit, MemoryError):
raise
except Exception:
if not self.catchall: raise
stacktrace = format_exc(10)
environ['wsgi.errors'].write(stacktrace)
return HTTPError(500, "Internal Server Error", _e(), stacktrace)
def _cast(self, out, peek=None):
    """ Try to convert the parameter into something WSGI compatible and set
        correct HTTP headers when possible.
        Support: False, str, unicode, dict, HTTPResponse, HTTPError, file-like,
        iterable of strings and iterable of unicodes
    """
    # Empty output is done here
    if not out:
        response['Content-Length'] = 0
        return []
    # Join lists of byte or unicode strings. Mixed lists are NOT supported
    if isinstance(out, (tuple, list))\
    and isinstance(out[0], (bytes, unicode)):
        out = out[0][0:0].join(out) # b'abc'[0:0] -> b''
    # Encode unicode strings
    if isinstance(out, unicode):
        out = out.encode(response.charset)
    # Byte Strings are just returned
    if isinstance(out, bytes):
        response['Content-Length'] = len(out)
        return [out]
    # HTTPError or HTTPException (recursive, because they may wrap anything)
    # TODO: Handle these explicitly in handle() or make them iterable.
    if isinstance(out, HTTPError):
        out.apply(response)
        # Fall back to repr() if no handler is registered for this status.
        out = self.error_handler.get(out.status, repr)(out)
        if isinstance(out, HTTPResponse):
            depr('Error handlers must not return :exc:`HTTPResponse`.') #0.9
        return self._cast(out)
    if isinstance(out, HTTPResponse):
        out.apply(response)
        return self._cast(out.output)
    # File-like objects.
    if hasattr(out, 'read'):
        # Prefer the server-provided wrapper (may use sendfile()).
        if 'wsgi.file_wrapper' in request.environ:
            return request.environ['wsgi.file_wrapper'](out)
        elif hasattr(out, 'close') or not hasattr(out, '__iter__'):
            return WSGIFileWrapper(out)
    # Handle Iterables. We peek into them to detect their inner type.
    try:
        out = iter(out)
        first = next(out)
        # Skip leading falsy chunks (e.g. empty strings) until real data.
        while not first:
            first = next(out)
    except StopIteration:
        return self._cast('')
    except HTTPResponse:
        first = _e()
    except (KeyboardInterrupt, SystemExit, MemoryError):
        raise
    except Exception:
        if not self.catchall: raise
        first = HTTPError(500, 'Unhandled exception', _e(), format_exc(10))
    # These are the inner types allowed in iterator or generator objects.
    if isinstance(first, HTTPResponse):
        return self._cast(first)
    if isinstance(first, bytes):
        # Re-attach the consumed first chunk in front of the iterator.
        return itertools.chain([first], out)
    if isinstance(first, unicode):
        return imap(lambda x: x.encode(response.charset),
                    itertools.chain([first], out))
    return self._cast(HTTPError(500, 'Unsupported response type: %s'\
                                     % type(first)))
def wsgi(self, environ, start_response):
    """ The bottle WSGI-interface. """
    try:
        out = self._cast(self._handle(environ))
        # rfc2616 section 4.3: these responses MUST NOT include a body,
        # and neither may responses to HEAD requests.
        if response._status_code in (100, 101, 204, 304)\
        or request.method == 'HEAD':
            if hasattr(out, 'close'): out.close()
            out = []
        # WSGI requires the status line to be a native str instance.
        if isinstance(response._status_line, unicode):
            response._status_line = str(response._status_line)
        start_response(response._status_line, list(response.iter_headers()))
        return out
    except (KeyboardInterrupt, SystemExit, MemoryError):
        raise
    except Exception:
        # Last-resort error page: _handle()/_cast() themselves failed.
        if not self.catchall: raise
        err = '<h1>Critical error while processing request: %s</h1>' \
              % html_escape(environ.get('PATH_INFO', '/'))
        if DEBUG:
            err += '<h2>Error:</h2>\n<pre>\n%s\n</pre>\n' \
                   '<h2>Traceback:</h2>\n<pre>\n%s\n</pre>\n' \
                   % (html_escape(repr(_e())), html_escape(format_exc(10)))
        environ['wsgi.errors'].write(err)
        headers = [('Content-Type', 'text/html; charset=UTF-8')]
        start_response('500 INTERNAL SERVER ERROR', headers)
        return [tob(err)]
def __call__(self, environ, start_response):
    """ Each instance of :class:`Bottle` is a WSGI application. """
    # Delegate directly to the WSGI entry point.
    return self.wsgi(environ, start_response)
###############################################################################
# HTTP and WSGI Tools ##########################################################
###############################################################################
class BaseRequest(object):
    """ A wrapper for WSGI environment dictionaries that adds a lot of
        convenient access methods and properties. Most of them are read-only.

        Adding new attributes to a request actually adds them to the environ
        dictionary (as 'bottle.request.ext.<name>'). This is the recommended
        way to store and access request-specific data.
    """

    __slots__ = ('environ')

    #: Maximum size of memory buffer for :attr:`body` in bytes.
    MEMFILE_MAX = 102400
    #: Maximum number of GET or POST parameters per request.
    MAX_PARAMS  = 100

    def __init__(self, environ=None):
        """ Wrap a WSGI environ dictionary. """
        #: The wrapped WSGI environ dictionary. This is the only real attribute.
        #: All other attributes actually are read-only properties.
        self.environ = {} if environ is None else environ
        self.environ['bottle.request'] = self

    @DictProperty('environ', 'bottle.app', read_only=True)
    def app(self):
        ''' Bottle application handling this request. '''
        raise RuntimeError('This request is not connected to an application.')

    @property
    def path(self):
        ''' The value of ``PATH_INFO`` with exactly one prefixed slash (to fix
            broken clients and avoid the "empty path" edge case). '''
        return '/' + self.environ.get('PATH_INFO','').lstrip('/')

    @property
    def method(self):
        ''' The ``REQUEST_METHOD`` value as an uppercase string. '''
        return self.environ.get('REQUEST_METHOD', 'GET').upper()

    @DictProperty('environ', 'bottle.request.headers', read_only=True)
    def headers(self):
        ''' A :class:`WSGIHeaderDict` that provides case-insensitive access to
            HTTP request headers. '''
        return WSGIHeaderDict(self.environ)

    def get_header(self, name, default=None):
        ''' Return the value of a request header, or a given default value. '''
        return self.headers.get(name, default)

    @DictProperty('environ', 'bottle.request.cookies', read_only=True)
    def cookies(self):
        """ Cookies parsed into a :class:`FormsDict`. Signed cookies are NOT
            decoded. Use :meth:`get_cookie` if you expect signed cookies. """
        cookies = SimpleCookie(self.environ.get('HTTP_COOKIE',''))
        # Cap the number of cookies as a denial-of-service guard.
        cookies = list(cookies.values())[:self.MAX_PARAMS]
        return FormsDict((c.key, c.value) for c in cookies)

    def get_cookie(self, key, default=None, secret=None):
        """ Return the content of a cookie. To read a `Signed Cookie`, the
            `secret` must match the one used to create the cookie (see
            :meth:`BaseResponse.set_cookie`). If anything goes wrong (missing
            cookie or wrong signature), return a default value. """
        value = self.cookies.get(key)
        if secret and value:
            dec = cookie_decode(value, secret) # (key, value) tuple or None
            return dec[1] if dec and dec[0] == key else default
        return value or default

    @DictProperty('environ', 'bottle.request.query', read_only=True)
    def query(self):
        ''' The :attr:`query_string` parsed into a :class:`FormsDict`. These
            values are sometimes called "URL arguments" or "GET parameters", but
            not to be confused with "URL wildcards" as they are provided by the
            :class:`Router`. '''
        pairs = parse_qsl(self.query_string, keep_blank_values=True)
        # NOTE(review): also cached under the legacy 'bottle.get' environ key,
        # separate from the 'bottle.request.query' DictProperty cache.
        get = self.environ['bottle.get'] = FormsDict()
        for key, value in pairs[:self.MAX_PARAMS]:
            get[key] = value
        return get

    @DictProperty('environ', 'bottle.request.forms', read_only=True)
    def forms(self):
        """ Form values parsed from an `url-encoded` or `multipart/form-data`
            encoded POST or PUT request body. The result is returned as a
            :class:`FormsDict`. All keys and values are strings. File uploads
            are stored separately in :attr:`files`. """
        forms = FormsDict()
        for name, item in self.POST.allitems():
            # Items with a filename attribute are uploads; see :attr:`files`.
            if not hasattr(item, 'filename'):
                forms[name] = item
        return forms

    @DictProperty('environ', 'bottle.request.params', read_only=True)
    def params(self):
        """ A :class:`FormsDict` with the combined values of :attr:`query` and
            :attr:`forms`. File uploads are stored in :attr:`files`. """
        params = FormsDict()
        # Form values are added last and therefore shadow query values.
        for key, value in self.query.allitems():
            params[key] = value
        for key, value in self.forms.allitems():
            params[key] = value
        return params

    @DictProperty('environ', 'bottle.request.files', read_only=True)
    def files(self):
        """ File uploads parsed from an `url-encoded` or `multipart/form-data`
            encoded POST or PUT request body. The values are instances of
            :class:`cgi.FieldStorage`. The most important attributes are:

            filename
                The filename, if specified; otherwise None; this is the client
                side filename, *not* the file name on which it is stored (that's
                a temporary file you don't deal with)
            file
                The file(-like) object from which you can read the data.
            value
                The value as a *string*; for file uploads, this transparently
                reads the file every time you request the value. Do not do this
                on big files.
        """
        files = FormsDict()
        for name, item in self.POST.allitems():
            if hasattr(item, 'filename'):
                files[name] = item
        return files

    @DictProperty('environ', 'bottle.request.json', read_only=True)
    def json(self):
        ''' If the ``Content-Type`` header is ``application/json``, this
            property holds the parsed content of the request body. Only requests
            smaller than :attr:`MEMFILE_MAX` are processed to avoid memory
            exhaustion. '''
        if 'application/json' in self.environ.get('CONTENT_TYPE', '') \
        and 0 < self.content_length < self.MEMFILE_MAX:
            return json_loads(self.body.read(self.MEMFILE_MAX))
        return None

    @DictProperty('environ', 'bottle.request.body', read_only=True)
    def _body(self):
        # Spool the body into memory, or to a temporary file if it exceeds
        # MEMFILE_MAX, then replace wsgi.input so the body can be re-read.
        maxread = max(0, self.content_length)
        stream = self.environ['wsgi.input']
        body = BytesIO() if maxread < self.MEMFILE_MAX else TemporaryFile(mode='w+b')
        while maxread > 0:
            part = stream.read(min(maxread, self.MEMFILE_MAX))
            if not part: break
            body.write(part)
            maxread -= len(part)
        self.environ['wsgi.input'] = body
        body.seek(0)
        return body

    @property
    def body(self):
        """ The HTTP request body as a seek-able file-like object. Depending on
            :attr:`MEMFILE_MAX`, this is either a temporary file or a
            :class:`io.BytesIO` instance. Accessing this property for the first
            time reads and replaces the ``wsgi.input`` environ variable.
            Subsequent accesses just do a `seek(0)` on the file object. """
        self._body.seek(0)
        return self._body

    #: An alias for :attr:`query`.
    GET = query

    @DictProperty('environ', 'bottle.request.post', read_only=True)
    def POST(self):
        """ The values of :attr:`forms` and :attr:`files` combined into a single
            :class:`FormsDict`. Values are either strings (form values) or
            instances of :class:`cgi.FieldStorage` (file uploads).
        """
        post = FormsDict()
        safe_env = {'QUERY_STRING':''} # Build a safe environment for cgi
        for key in ('REQUEST_METHOD', 'CONTENT_TYPE', 'CONTENT_LENGTH'):
            if key in self.environ: safe_env[key] = self.environ[key]
        # On Python 3, cgi.FieldStorage wants a text stream; wrap the byte
        # body as latin1 so every byte round-trips unchanged.
        if NCTextIOWrapper:
            fb = NCTextIOWrapper(self.body, encoding='ISO-8859-1', newline='\n')
        else:
            fb = self.body
        data = cgi.FieldStorage(fp=fb, environ=safe_env, keep_blank_values=True)
        for item in (data.list or [])[:self.MAX_PARAMS]:
            post[item.name] = item if item.filename else item.value
        return post

    @property
    def COOKIES(self):
        ''' Alias for :attr:`cookies` (deprecated). '''
        depr('BaseRequest.COOKIES was renamed to BaseRequest.cookies (lowercase).')
        return self.cookies

    @property
    def url(self):
        """ The full request URI including hostname and scheme. If your app
            lives behind a reverse proxy or load balancer and you get confusing
            results, make sure that the ``X-Forwarded-Host`` header is set
            correctly. """
        return self.urlparts.geturl()

    @DictProperty('environ', 'bottle.request.urlparts', read_only=True)
    def urlparts(self):
        ''' The :attr:`url` string as an :class:`urlparse.SplitResult` tuple.
            The tuple contains (scheme, host, path, query_string and fragment),
            but the fragment is always empty because it is not visible to the
            server. '''
        env = self.environ
        http = env.get('wsgi.url_scheme', 'http')
        host = env.get('HTTP_X_FORWARDED_HOST') or env.get('HTTP_HOST')
        if not host:
            # HTTP 1.1 requires a Host-header. This is for HTTP/1.0 clients.
            host = env.get('SERVER_NAME', '127.0.0.1')
            port = env.get('SERVER_PORT')
            # Omit the port only if it is the default for the scheme.
            if port and port != ('80' if http == 'http' else '443'):
                host += ':' + port
        path = urlquote(self.fullpath)
        return UrlSplitResult(http, host, path, env.get('QUERY_STRING'), '')

    @property
    def fullpath(self):
        """ Request path including :attr:`script_name` (if present). """
        return urljoin(self.script_name, self.path.lstrip('/'))

    @property
    def query_string(self):
        """ The raw :attr:`query` part of the URL (everything in between ``?``
            and ``#``) as a string. """
        return self.environ.get('QUERY_STRING', '')

    @property
    def script_name(self):
        ''' The initial portion of the URL's `path` that was removed by a higher
            level (server or routing middleware) before the application was
            called. This script path is returned with leading and tailing
            slashes. '''
        script_name = self.environ.get('SCRIPT_NAME', '').strip('/')
        return '/' + script_name + '/' if script_name else '/'

    def path_shift(self, shift=1):
        ''' Shift path segments from :attr:`path` to :attr:`script_name` and
            vice versa.

            :param shift: The number of path segments to shift. May be negative
                          to change the shift direction. (default: 1)
        '''
        # Mutates SCRIPT_NAME/PATH_INFO through __setitem__, which also
        # invalidates any caches that depend on those keys.
        script = self.environ.get('SCRIPT_NAME','/')
        self['SCRIPT_NAME'], self['PATH_INFO'] = path_shift(script, self.path, shift)

    @property
    def content_length(self):
        ''' The request body length as an integer. The client is responsible to
            set this header. Otherwise, the real length of the body is unknown
            and -1 is returned. In this case, :attr:`body` will be empty. '''
        return int(self.environ.get('CONTENT_LENGTH') or -1)

    @property
    def is_xhr(self):
        ''' True if the request was triggered by a XMLHttpRequest. This only
            works with JavaScript libraries that support the `X-Requested-With`
            header (most of the popular libraries do). '''
        requested_with = self.environ.get('HTTP_X_REQUESTED_WITH','')
        return requested_with.lower() == 'xmlhttprequest'

    @property
    def is_ajax(self):
        ''' Alias for :attr:`is_xhr`. "Ajax" is not the right term. '''
        return self.is_xhr

    @property
    def auth(self):
        """ HTTP authentication data as a (user, password) tuple. This
            implementation currently supports basic (not digest) authentication
            only. If the authentication happened at a higher level (e.g. in the
            front web-server or a middleware), the password field is None, but
            the user field is looked up from the ``REMOTE_USER`` environ
            variable. On any errors, None is returned. """
        basic = parse_auth(self.environ.get('HTTP_AUTHORIZATION',''))
        if basic: return basic
        ruser = self.environ.get('REMOTE_USER')
        if ruser: return (ruser, None)
        return None

    @property
    def remote_route(self):
        """ A list of all IPs that were involved in this request, starting with
            the client IP and followed by zero or more proxies. This does only
            work if all proxies support the ```X-Forwarded-For`` header. Note
            that this information can be forged by malicious clients. """
        proxy = self.environ.get('HTTP_X_FORWARDED_FOR')
        if proxy: return [ip.strip() for ip in proxy.split(',')]
        remote = self.environ.get('REMOTE_ADDR')
        return [remote] if remote else []

    @property
    def remote_addr(self):
        """ The client IP as a string. Note that this information can be forged
            by malicious clients. """
        route = self.remote_route
        return route[0] if route else None

    def copy(self):
        """ Return a new :class:`Request` with a shallow :attr:`environ` copy. """
        return Request(self.environ.copy())

    def get(self, value, default=None): return self.environ.get(value, default)
    def __getitem__(self, key): return self.environ[key]
    # Setting to "" first routes through __setitem__ so dependent caches are
    # invalidated before the key disappears.
    def __delitem__(self, key): self[key] = ""; del(self.environ[key])
    def __iter__(self): return iter(self.environ)
    def __len__(self): return len(self.environ)
    def keys(self): return self.environ.keys()

    def __setitem__(self, key, value):
        """ Change an environ value and clear all caches that depend on it. """

        if self.environ.get('bottle.request.readonly'):
            raise KeyError('The environ dictionary is read-only.')

        self.environ[key] = value
        todelete = ()

        # Map each environ key to the DictProperty caches derived from it.
        if key == 'wsgi.input':
            todelete = ('body', 'forms', 'files', 'params', 'post', 'json')
        elif key == 'QUERY_STRING':
            todelete = ('query', 'params')
        elif key.startswith('HTTP_'):
            todelete = ('headers', 'cookies')

        for key in todelete:
            self.environ.pop('bottle.request.'+key, None)

    def __repr__(self):
        return '<%s: %s %s>' % (self.__class__.__name__, self.method, self.url)

    def __getattr__(self, name):
        ''' Search in self.environ for additional user defined attributes. '''
        try:
            var = self.environ['bottle.request.ext.%s'%name]
            # Honor the descriptor protocol for stored attributes.
            return var.__get__(self) if hasattr(var, '__get__') else var
        except KeyError:
            raise AttributeError('Attribute %r not defined.' % name)

    def __setattr__(self, name, value):
        # 'environ' is the only real slot; everything else lives in environ.
        if name == 'environ': return object.__setattr__(self, name, value)
        self.environ['bottle.request.ext.%s'%name] = value
def _hkey(s):
    # Normalize a header name: use dashes as separators and title-case each
    # word ('content_type' -> 'Content-Type'). Since '_' and '-' are both
    # word boundaries for str.title(), replacing before or after title-casing
    # yields the same result.
    return s.replace('_', '-').title()
class HeaderProperty(object):
def __init__(self, name, reader=None, writer=str, default=''):
self.name, self.reader, self.writer, self.default = name, reader, writer, default
self.__doc__ = 'Current value of the %r header.' % name.title()
def __get__(self, obj, cls):
if obj is None: return self
value = obj.headers.get(self.name)
return self.reader(value) if (value and self.reader) else (value or self.default)
def __set__(self, obj, value):
if self.writer: value = self.writer(value)
obj.headers[self.name] = value
def __delete__(self, obj):
if self.name in obj.headers:
del obj.headers[self.name]
class BaseResponse(object):
    """ Storage class for a response body as well as headers and cookies.

        This class does support dict-like case-insensitive item-access to
        headers, but is NOT a dict. Most notably, iterating over a response
        yields parts of the body and not the headers.
    """

    default_status = 200
    default_content_type = 'text/html; charset=UTF-8'

    # Header blacklist for specific response codes
    # (rfc2616 section 10.2.3 and 10.3.5)
    bad_headers = {
        204: set(('Content-Type',)),
        304: set(('Allow', 'Content-Encoding', 'Content-Language',
                  'Content-Length', 'Content-Range', 'Content-Type',
                  'Content-Md5', 'Last-Modified'))}

    def __init__(self, body='', status=None, **headers):
        # _headers maps title-cased names to *lists* of values.
        self._status_line = None
        self._status_code = None
        self._cookies = None
        self._headers = {'Content-Type': [self.default_content_type]}
        self.body = body
        self.status = status or self.default_status
        if headers:
            for name, value in headers.items():
                self[name] = value

    def copy(self):
        ''' Returns a copy of self. '''
        # The body and cookies are NOT copied; only status and headers.
        copy = Response()
        copy.status = self.status
        copy._headers = dict((k, v[:]) for (k, v) in self._headers.items())
        return copy

    def __iter__(self):
        return iter(self.body)

    def close(self):
        if hasattr(self.body, 'close'):
            self.body.close()

    @property
    def status_line(self):
        ''' The HTTP status line as a string (e.g. ``404 Not Found``).'''
        return self._status_line

    @property
    def status_code(self):
        ''' The HTTP status code as an integer (e.g. 404).'''
        return self._status_code

    def _set_status(self, status):
        # Accept either an int code or a full "code reason" status string.
        if isinstance(status, int):
            code, status = status, _HTTP_STATUS_LINES.get(status)
        elif ' ' in status:
            status = status.strip()
            code = int(status.split()[0])
        else:
            raise ValueError('String status line without a reason phrase.')
        if not 100 <= code <= 999: raise ValueError('Status code out of range.')
        self._status_code = code
        self._status_line = status or ('%d Unknown' % code)

    def _get_status(self):
        return self._status_line

    status = property(_get_status, _set_status, None,
        ''' A writeable property to change the HTTP response status. It accepts
            either a numeric code (100-999) or a string with a custom reason
            phrase (e.g. "404 Brain not found"). Both :data:`status_line` and
            :data:`status_code` are updated accordingly. The return value is
            always a status string. ''')
    del _get_status, _set_status

    @property
    def headers(self):
        ''' An instance of :class:`HeaderDict`, a case-insensitive dict-like
            view on the response headers. '''
        # NOTE(review): property is a data descriptor, so this __dict__ entry
        # never shadows it — a fresh HeaderDict view is built on each access
        # (all views share self._headers, so behavior is still correct).
        self.__dict__['headers'] = hdict = HeaderDict()
        hdict.dict = self._headers
        return hdict

    def __contains__(self, name): return _hkey(name) in self._headers
    def __delitem__(self, name): del self._headers[_hkey(name)]
    def __getitem__(self, name): return self._headers[_hkey(name)][-1]
    def __setitem__(self, name, value): self._headers[_hkey(name)] = [str(value)]

    def get_header(self, name, default=None):
        ''' Return the value of a previously defined header. If there is no
            header with that name, return a default value. '''
        return self._headers.get(_hkey(name), [default])[-1]

    def set_header(self, name, value, append=False):
        ''' Create a new response header, replacing any previously defined
            headers with the same name. '''
        if append:
            self.add_header(name, value)
        else:
            self._headers[_hkey(name)] = [str(value)]

    def add_header(self, name, value):
        ''' Add an additional response header, not removing duplicates. '''
        self._headers.setdefault(_hkey(name), []).append(str(value))

    def iter_headers(self):
        ''' Yield (header, value) tuples, skipping headers that are not
            allowed with the current response status code. '''
        headers = self._headers.items()
        bad_headers = self.bad_headers.get(self._status_code)
        if bad_headers:
            headers = [h for h in headers if h[0] not in bad_headers]
        for name, values in headers:
            for value in values:
                yield name, value
        if self._cookies:
            for c in self._cookies.values():
                yield 'Set-Cookie', c.OutputString()

    def wsgiheader(self):
        depr('The wsgiheader method is deprecated. See headerlist.') #0.10
        return self.headerlist

    @property
    def headerlist(self):
        ''' WSGI conform list of (header, value) tuples. '''
        return list(self.iter_headers())

    content_type = HeaderProperty('Content-Type')
    content_length = HeaderProperty('Content-Length', reader=int)

    @property
    def charset(self):
        """ Return the charset specified in the content-type header (default: utf8). """
        if 'charset=' in self.content_type:
            return self.content_type.split('charset=')[-1].split(';')[0].strip()
        return 'UTF-8'

    @property
    def COOKIES(self):
        """ A dict-like SimpleCookie instance. This should not be used directly.
            See :meth:`set_cookie`. """
        depr('The COOKIES dict is deprecated. Use `set_cookie()` instead.') # 0.10
        if not self._cookies:
            self._cookies = SimpleCookie()
        return self._cookies

    def set_cookie(self, name, value, secret=None, **options):
        ''' Create a new cookie or replace an old one. If the `secret` parameter is
            set, create a `Signed Cookie` (described below).

            :param name: the name of the cookie.
            :param value: the value of the cookie.
            :param secret: a signature key required for signed cookies.

            Additionally, this method accepts all RFC 2109 attributes that are
            supported by :class:`cookie.Morsel`, including:

            :param max_age: maximum age in seconds. (default: None)
            :param expires: a datetime object or UNIX timestamp. (default: None)
            :param domain: the domain that is allowed to read the cookie.
              (default: current domain)
            :param path: limits the cookie to a given path (default: current path)
            :param secure: limit the cookie to HTTPS connections (default: off).
            :param httponly: prevents client-side javascript to read this cookie
              (default: off, requires Python 2.6 or newer).

            If neither `expires` nor `max_age` is set (default), the cookie will
            expire at the end of the browser session (as soon as the browser
            window is closed).

            Signed cookies may store any pickle-able object and are
            cryptographically signed to prevent manipulation. Keep in mind that
            cookies are limited to 4kb in most browsers.

            Warning: Signed cookies are not encrypted (the client can still see
            the content) and not copy-protected (the client can restore an old
            cookie). The main intention is to make pickling and unpickling
            safe, not to store secret information at client side.
        '''
        if not self._cookies:
            self._cookies = SimpleCookie()

        if secret:
            value = touni(cookie_encode((name, value), secret))
        elif not isinstance(value, basestring):
            raise TypeError('Secret key missing for non-string Cookie.')

        # NOTE(review): message typo — should read "too long".
        if len(value) > 4096: raise ValueError('Cookie value to long.')
        self._cookies[name] = value

        for key, value in options.items():
            if key == 'max_age':
                if isinstance(value, timedelta):
                    value = value.seconds + value.days * 24 * 3600
            if key == 'expires':
                # Normalize datetime/date/timestamp to an RFC 1123 string.
                if isinstance(value, (datedate, datetime)):
                    value = value.timetuple()
                elif isinstance(value, (int, float)):
                    value = time.gmtime(value)
                value = time.strftime("%a, %d %b %Y %H:%M:%S GMT", value)
            # Morsel attribute names use dashes, not underscores.
            self._cookies[name][key.replace('_', '-')] = value

    def delete_cookie(self, key, **kwargs):
        ''' Delete a cookie. Be sure to use the same `domain` and `path`
            settings as used to create the cookie. '''
        # Expiring the cookie in the past makes the browser drop it.
        kwargs['max_age'] = -1
        kwargs['expires'] = 0
        self.set_cookie(key, '', **kwargs)

    def __repr__(self):
        out = ''
        for name, value in self.headerlist:
            out += '%s: %s\n' % (name.title(), value.strip())
        return out
#: Thread-local storage for :class:`LocalRequest` and :class:`LocalResponse`
#: attributes. Each worker thread sees its own independent request/response
#: state through this object.
_lctx = threading.local()
def local_property(name):
    """ Build a property whose value lives in the thread-local :data:`_lctx`
        under the attribute *name*, so every thread sees its own value. """
    def fget(self):
        return getattr(_lctx, name)
    def fset(self, value):
        setattr(_lctx, name, value)
    def fdel(self):
        delattr(_lctx, name)
    return property(fget, fset, fdel,
                    'Thread-local property stored in :data:`_lctx.%s`' % name)
class LocalRequest(BaseRequest):
    ''' A thread-local subclass of :class:`BaseRequest` with a different
        set of attributes for each thread. There is usually only one global
        instance of this class (:data:`request`). If accessed during a
        request/response cycle, this instance always refers to the *current*
        request (even on a multithreaded server). '''
    # Re-use BaseRequest.__init__ as a rebind method: request.bind(environ)
    # swaps in a new environ for the current thread only.
    bind = BaseRequest.__init__
    #: The environ dict is stored thread-locally instead of on the instance.
    environ = local_property('request_environ')
class LocalResponse(BaseResponse):
    ''' A thread-local subclass of :class:`BaseResponse` with a different
        set of attributes for each thread. There is usually only one global
        instance of this class (:data:`response`). Its attributes are used
        to build the HTTP response at the end of the request/response cycle.
    '''
    # Re-use BaseResponse.__init__ to reset the response for a new request.
    bind = BaseResponse.__init__
    # Every mutable piece of response state lives in thread-local storage.
    _status_line = local_property('response_status_line')
    _status_code = local_property('response_status_code')
    _cookies = local_property('response_cookies')
    _headers = local_property('response_headers')
    body = local_property('response_body')
#: Backwards-compatible aliases for the pre-0.9 class names.
Response = LocalResponse # BC 0.9
Request = LocalRequest # BC 0.9
###############################################################################
# Plugins ######################################################################
###############################################################################
# Raised by the plugin framework when a plugin cannot be installed or applied.
class PluginError(BottleException): pass
class JSONPlugin(object):
    """ Plugin that serializes ``dict`` return values to JSON and sets the
        ``application/json`` content type. """
    name = 'json'
    api = 2

    def __init__(self, json_dumps=json_dumps):
        self.json_dumps = json_dumps

    def apply(self, callback, context):
        dumps = self.json_dumps
        if not dumps:
            return callback
        def wrapper(*a, **ka):
            rv = callback(*a, **ka)
            if not isinstance(rv, dict):
                return rv
            # Serialize first: a failure here must not change the content
            # type of the (error) response that follows.
            json_response = dumps(rv)
            response.content_type = 'application/json'
            return json_response
        return wrapper
class HooksPlugin(object):
    """ Plugin that runs registered callbacks before and after each request
        and exposes the :meth:`Bottle.hook` registry. """
    name = 'hooks'
    api = 2

    _names = 'before_request', 'after_request', 'app_reset'

    def __init__(self):
        self.hooks = dict((name, []) for name in self._names)
        self.app = None

    def _empty(self):
        # Only the two per-request hooks require wrapping route callbacks.
        before = self.hooks['before_request']
        after = self.hooks['after_request']
        return not (before or after)

    def setup(self, app):
        self.app = app

    def add(self, name, func):
        ''' Attach a callback to a hook. '''
        had_hooks = not self._empty()
        self.hooks.setdefault(name, []).append(func)
        # The first request hook invalidates previously built callbacks.
        if self.app and not had_hooks and not self._empty():
            self.app.reset()

    def remove(self, name, func):
        ''' Remove a callback from a hook. '''
        had_hooks = not self._empty()
        callbacks = self.hooks.get(name)
        if callbacks and func in callbacks:
            callbacks.remove(func)
        # Dropping the last request hook lets routes unwrap again.
        if self.app and had_hooks and self._empty():
            self.app.reset()

    def trigger(self, name, *a, **ka):
        ''' Trigger a hook and return a list of results. '''
        hooks = self.hooks[name]
        if ka.pop('reversed', False):
            hooks = hooks[::-1]
        results = []
        for hook in hooks:
            results.append(hook(*a, **ka))
        return results

    def apply(self, callback, context):
        if self._empty():
            return callback
        def wrapper(*a, **ka):
            self.trigger('before_request')
            rv = callback(*a, **ka)
            # after_request hooks run in reverse registration order.
            self.trigger('after_request', reversed=True)
            return rv
        return wrapper
class TemplatePlugin(object):
    ''' This plugin applies the :func:`view` decorator to all routes with a
        `template` config parameter. If the parameter is a tuple, the second
        element must be a dict with additional options (e.g. `template_engine`)
        or default variables for the template. '''
    name = 'template'
    api = 2

    def apply(self, callback, route):
        conf = route.config.get('template')
        if isinstance(conf, (tuple, list)) and len(conf) == 2:
            tpl_name, defaults = conf
            return view(tpl_name, **defaults)(callback)
        if isinstance(conf, str):
            if 'template_opts' in route.config:
                depr('The `template_opts` parameter is deprecated.') #0.9
                return view(conf, **route.config['template_opts'])(callback)
            return view(conf)(callback)
        # No (usable) template configured: leave the callback untouched.
        return callback
#: Not a plugin, but part of the plugin API. TODO: Find a better place.
class _ImportRedirect(object):
    def __init__(self, name, impmask):
        ''' Create a virtual package that redirects imports (see PEP 302). '''
        self.name = name
        #: Format string with one %s slot for the submodule name, e.g.
        #: 'bottle_%s' to map 'bottle.ext.foo' onto 'bottle_foo'.
        self.impmask = impmask
        # Create (or reuse) the virtual package and register it.
        self.module = sys.modules.setdefault(name, imp.new_module(name))
        self.module.__dict__.update({'__file__': __file__, '__path__': [],
                                    '__all__': [], '__loader__': self})
        # Side effect: installs this object as a global import hook.
        sys.meta_path.append(self)

    def find_module(self, fullname, path=None):
        # PEP 302 finder: only claim direct submodules of our package.
        if '.' not in fullname: return
        packname, modname = fullname.rsplit('.', 1)
        if packname != self.name: return
        return self

    def load_module(self, fullname):
        # PEP 302 loader: import the real module and alias it under the
        # virtual package name.
        if fullname in sys.modules: return sys.modules[fullname]
        packname, modname = fullname.rsplit('.', 1)
        realname = self.impmask % modname
        __import__(realname)
        module = sys.modules[fullname] = sys.modules[realname]
        setattr(self.module, modname, module)
        module.__loader__ = self
        return module
###############################################################################
# Common Utilities #############################################################
###############################################################################
class MultiDict(DictMixin):
    """ This dict stores multiple values per key, but behaves exactly like a
        normal dict in that it returns only the newest value for any given key.
        There are special methods available to access the full list of values.
    """

    def __init__(self, *a, **k):
        # Internal storage maps each key to a *list* of values; the last
        # element of the list is the "current" value.
        self.dict = dict((k, [v]) for (k, v) in dict(*a, **k).items())

    def __len__(self): return len(self.dict)
    def __iter__(self): return iter(self.dict)
    def __contains__(self, key): return key in self.dict
    def __delitem__(self, key): del self.dict[key]
    def __getitem__(self, key): return self.dict[key][-1]
    # Item assignment appends instead of replacing (see :meth:`replace`).
    def __setitem__(self, key, value): self.append(key, value)
    def keys(self): return self.dict.keys()

    # Provide iterator-based views on Python 3 and list/iterator pairs on
    # Python 2 so the public API matches each version's dict conventions.
    if py3k:
        def values(self): return (v[-1] for v in self.dict.values())
        def items(self): return ((k, v[-1]) for k, v in self.dict.items())
        def allitems(self):
            return ((k, v) for k, vl in self.dict.items() for v in vl)
        iterkeys = keys
        itervalues = values
        iteritems = items
        iterallitems = allitems
    else:
        def values(self): return [v[-1] for v in self.dict.values()]
        def items(self): return [(k, v[-1]) for k, v in self.dict.items()]
        def iterkeys(self): return self.dict.iterkeys()
        def itervalues(self): return (v[-1] for v in self.dict.itervalues())
        def iteritems(self):
            return ((k, v[-1]) for k, v in self.dict.iteritems())
        def iterallitems(self):
            return ((k, v) for k, vl in self.dict.iteritems() for v in vl)
        def allitems(self):
            return [(k, v) for k, vl in self.dict.iteritems() for v in vl]

    def get(self, key, default=None, index=-1, type=None):
        ''' Return the most recent value for a key.

            :param default: The default value to be returned if the key is not
                   present or the type conversion fails.
            :param index: An index for the list of available values.
            :param type: If defined, this callable is used to cast the value
                    into a specific type. Exception are suppressed and result in
                    the default value to be returned.
        '''
        try:
            val = self.dict[key][index]
            return type(val) if type else val
        except Exception:
            # Missing key, bad index or failed conversion all yield default.
            pass
        return default

    def append(self, key, value):
        ''' Add a new value to the list of values for this key. '''
        self.dict.setdefault(key, []).append(value)

    def replace(self, key, value):
        ''' Replace the list of values with a single value. '''
        self.dict[key] = [value]

    def getall(self, key):
        ''' Return a (possibly empty) list of values for a key. '''
        return self.dict.get(key) or []

    #: Aliases for WTForms to mimic other multi-dict APIs (Django)
    getone = get
    getlist = getall
class FormsDict(MultiDict):
    ''' A :class:`MultiDict` subclass used to store request form data.

        Item access returns values unmodified (native strings). In addition,
        values can be read as attributes: these are automatically de- or
        recoded to match :attr:`input_encoding` (default: 'utf8'), and
        missing attributes default to an empty string. '''

    #: Encoding applied to attribute-style access.
    input_encoding = 'utf8'
    #: If true (default), unicode strings are first encoded with `latin1`
    #: and then decoded to match :attr:`input_encoding`.
    recode_unicode = True

    def _fix(self, s, encoding=None):
        # Re-code a single key or value to the target encoding.
        target = encoding or self.input_encoding
        if isinstance(s, unicode) and self.recode_unicode: # Python 3 WSGI
            s = s.encode('latin1')
        if isinstance(s, bytes): # Python 2 WSGI
            return s.decode(target)
        return s

    def decode(self, encoding=None):
        ''' Return a copy with all keys and values de- or recoded to match
            :attr:`input_encoding`. Some libraries (e.g. WTForms) want a
            unicode dictionary. '''
        result = FormsDict()
        target = result.input_encoding = encoding or self.input_encoding
        result.recode_unicode = False
        for key, value in self.allitems():
            result.append(self._fix(key, target), self._fix(value, target))
        return result

    def getunicode(self, name, default=None, encoding=None):
        ''' Return the value as a (re-coded) string, or *default* when the
            key is missing or decoding fails. '''
        try:
            return self._fix(self[name], encoding)
        except (UnicodeError, KeyError):
            return default

    def __getattr__(self, name, default=unicode()):
        # Attribute access falls back to an empty string for missing keys.
        return self.getunicode(name, default=default)
class HeaderDict(MultiDict):
    """ A case-insensitive version of :class:`MultiDict` that defaults to
        replace the old value instead of appending it. """

    def __init__(self, *a, **ka):
        self.dict = {}
        if a or ka:
            self.update(*a, **ka)

    def __contains__(self, key):
        return _hkey(key) in self.dict

    def __delitem__(self, key):
        del self.dict[_hkey(key)]

    def __getitem__(self, key):
        return self.dict[_hkey(key)][-1]

    def __setitem__(self, key, value):
        # Assignment replaces; values are always stored as strings.
        self.dict[_hkey(key)] = [str(value)]

    def append(self, key, value):
        self.dict.setdefault(_hkey(key), []).append(str(value))

    def replace(self, key, value):
        self.dict[_hkey(key)] = [str(value)]

    def getall(self, key):
        return self.dict.get(_hkey(key)) or []

    def get(self, key, default=None, index=-1):
        return MultiDict.get(self, _hkey(key), default, index)

    def filter(self, names):
        ''' Remove all headers listed in *names* (case-insensitive). '''
        for name in map(_hkey, names):
            if name in self.dict:
                del self.dict[name]
class WSGIHeaderDict(DictMixin):
    ''' A read-only, dict-like, case-insensitive view on the HTTP_* fields of
        a WSGI environ dict. Keys and values are native strings (2.x bytes or
        3.x unicode); non-native string values are de- or encoded using the
        lossless 'latin1' character set.

        The API will remain stable even on changes to the relevant PEPs.
        Currently PEP 333, 444 and 3333 are supported. (PEP 444 is the only
        one that uses non-native strings.)
    '''
    #: Environ keys that are not prefixed with 'HTTP_'.
    cgikeys = ('CONTENT_TYPE', 'CONTENT_LENGTH')

    def __init__(self, environ):
        self.environ = environ

    def _ekey(self, key):
        ''' Translate a header field name into its CGI/WSGI environ key. '''
        key = key.replace('-', '_').upper()
        return key if key in self.cgikeys else 'HTTP_' + key

    def raw(self, key, default=None):
        ''' Return the header value as is (may be bytes or unicode). '''
        return self.environ.get(self._ekey(key), default)

    def __getitem__(self, key):
        return tonat(self.environ[self._ekey(key)], 'latin1')

    def __setitem__(self, key, value):
        raise TypeError("%s is read-only." % self.__class__)

    def __delitem__(self, key):
        raise TypeError("%s is read-only." % self.__class__)

    def __iter__(self):
        # Yield header names in their canonical Dashed-Title form.
        for key in self.environ:
            if key[:5] == 'HTTP_':
                yield key[5:].replace('_', '-').title()
            elif key in self.cgikeys:
                yield key.replace('_', '-').title()

    def keys(self):
        return list(self)

    def __len__(self):
        return len(self.keys())

    def __contains__(self, key):
        return self._ekey(key) in self.environ
class ConfigDict(dict):
    ''' A dict subclass with some extras: keys can be read like attributes.
        Reading a missing uppercase attribute creates (and returns) a nested
        ConfigDict, so uppercase names act as name-spaces; other missing
        attributes return None. Calling a ConfigDict updates its values and
        returns itself.

        >>> cfg = ConfigDict()
        >>> cfg.Namespace.value = 5
        >>> cfg.OtherNamespace(a=1, b=2)
        >>> cfg
        {'Namespace': {'value': 5}, 'OtherNamespace': {'a': 1, 'b': 2}}
    '''
    def __getattr__(self, key):
        # Auto-create a namespace for unknown uppercase attributes.
        if key not in self and key[0].isupper():
            self[key] = ConfigDict()
        return self.get(key)

    def __setattr__(self, key, value):
        # Refuse to shadow dict methods or overwrite a populated namespace.
        if hasattr(dict, key):
            raise AttributeError('Read-only attribute.')
        if key in self and self[key] and isinstance(self[key], ConfigDict):
            raise AttributeError('Non-empty namespace attribute.')
        self[key] = value

    def __delattr__(self, key):
        if key in self:
            del self[key]

    def __call__(self, *a, **ka):
        # Accepts the same arguments as dict() and merges them in.
        for key, value in dict(*a, **ka).items():
            setattr(self, key, value)
        return self
class AppStack(list):
    """ A stack-like list. Calling the stack returns its head (the current
        default application). """

    def __call__(self):
        """ Return the current default application (top of the stack). """
        return self[-1]

    def push(self, value=None):
        """ Add a new :class:`Bottle` instance to the stack and return it.
            Non-Bottle values (including None) are replaced by a fresh app. """
        if not isinstance(value, Bottle):
            value = Bottle()
        self.append(value)
        return value
class WSGIFileWrapper(object):
    """ Wrap a file-like object so it can be used as a WSGI response
        iterable. Common file methods are copied onto the wrapper so
        servers can call them directly (e.g. ``close`` or ``fileno``). """

    def __init__(self, fp, buffer_size=1024*64):
        self.fp = fp
        self.buffer_size = buffer_size
        # Mirror the file-object API for methods the wrapped object offers.
        for name in ('fileno', 'close', 'read', 'readlines', 'tell', 'seek'):
            if hasattr(fp, name):
                setattr(self, name, getattr(fp, name))

    def __iter__(self):
        # Stream fixed-size chunks until the file is exhausted.
        chunk_size = self.buffer_size
        read = self.read
        while True:
            chunk = read(chunk_size)
            if not chunk:
                return
            yield chunk
class ResourceManager(object):
    ''' This class manages a list of search paths and helps to find and open
        application-bound resources (files).

        :param base: path used to resolve relative search paths. It works as a
          default for :meth:`add_path`.
        :param opener: callable used to open resources.
        :param cachemode: controls which lookups are cached. One of 'all',
          'found' or 'none'.
    '''
    def __init__(self, base='./', opener=open, cachemode='all'):
        # BUGFIX: the parameters were previously ignored -- `opener` was
        # hard-coded to the builtin `open` and `base` to './'.
        self.opener = opener
        self.base = base
        self.cachemode = cachemode
        #: A list of search paths. See :meth:`add_path` for details.
        self.path = []
        #: A list of file masks. See :meth:`add_mask` for details.
        self.mask = ['%s']
        #: A cache for resolved paths. ``res.cache.clear()`` clears the cache.
        self.cache = {}

    def add_path(self, path, base=None, index=None):
        ''' Add a path to the :attr:`path` list.

            The path is turned into an absolute and normalized form. If it
            looks like a file (not ending in `/`), the filename is stripped
            off. The path is not required to exist.

            Relative paths are joined with `base` or :attr:`self.base`, which
            defaults to the current working directory. This comes in handy if
            your resources live in a sub-folder of your module or package::

                res.add_path('./resources/', __file__)

            The :attr:`path` list is searched in order and new paths are
            added to the end of the list. The *index* parameter can change
            the position (e.g. ``0`` to prepend). Adding a path a second time
            moves it to the new position.
        '''
        base = os.path.abspath(os.path.dirname(base or self.base))
        path = os.path.abspath(os.path.join(base, os.path.dirname(path)))
        path += os.sep
        # Re-adding an existing path moves it to the requested position.
        if path in self.path:
            self.path.remove(path)
        if index is None:
            self.path.append(path)
        else:
            self.path.insert(index, path)
        self.cache.clear()

    def add_mask(self, mask, index=None):
        ''' Add a new format string to the :attr:`mask` list.

            Masks are used to turn resource names into actual filenames. The
            mask string must contain exactly one occurrence of ``%s``, which
            is replaced by the supplied resource name on lookup. This can be
            used to auto-append file extensions (e.g. ``%s.ext``).
        '''
        # BUGFIX: this previously wrote to the non-existent `self.masks`
        # attribute and raised AttributeError.
        if index is None:
            self.mask.append(mask)
        else:
            self.mask.insert(index, mask)
        self.cache.clear()

    def lookup(self, name):
        ''' Search for a resource and return an absolute file path, or `None`.

            The :attr:`path` list is searched in order. For each path, the
            :attr:`mask` entries are tried in order. The first combination
            that points to an existing file is returned. Symlinks are
            followed. The result is cached to speed up future lookups. '''
        # DEBUG is the module-level debug flag: never trust the cache while
        # debugging so edits to resources are picked up immediately.
        if name not in self.cache or DEBUG:
            for path in self.path:
                for mask in self.mask:
                    fpath = os.path.join(path, mask % name)
                    if os.path.isfile(fpath):
                        if self.cachemode in ('all', 'found'):
                            self.cache[name] = fpath
                        return fpath
            if self.cachemode == 'all':
                self.cache[name] = None
        return self.cache[name]

    def open(self, name, *args, **kwargs):
        ''' Find a resource and return an opened file object, or raise IOError.
            Additional parameters are passed to the configured opener.
        '''
        fname = self.lookup(name)
        if not fname:
            raise IOError("Resource %r not found." % name)
        # BUGFIX: previously passed the unresolved `name` to the opener
        # instead of the resolved absolute path `fname`.
        return self.opener(fname, *args, **kwargs)
###############################################################################
# Application Helper ###########################################################
###############################################################################
def abort(code=500, text='Unknown Error: Application stopped.'):
    """ Aborts execution and causes a HTTP error.

        :param code: HTTP status code to report (default: 500).
        :param text: Error message passed on to :exc:`HTTPError`.
        :raises HTTPError: always.
    """
    raise HTTPError(code, text)
def redirect(url, code=None):
    """ Aborts execution and causes a 303 or 302 redirect, depending on
        the HTTP protocol version (303 for HTTP/1.1, 302 otherwise). """
    if code is None:
        protocol = request.get('SERVER_PROTOCOL')
        code = 303 if protocol == "HTTP/1.1" else 302
    # Resolve the target relative to the current request URL.
    location = urljoin(request.url, url)
    raise HTTPResponse("", status=code, header=dict(Location=location))
def _file_iter_range(fp, offset, bytes, maxread=1024*1024):
''' Yield chunks from a range in a file. No chunk is bigger than maxread.'''
fp.seek(offset)
while bytes > 0:
part = fp.read(min(bytes, maxread))
if not part: break
bytes -= len(part)
yield part
def static_file(filename, root, mimetype='auto', download=False):
    """ Open a file in a safe way and return :exc:`HTTPResponse` with status
        code 200, 305, 401 or 404. Set Content-Type, Content-Encoding,
        Content-Length and Last-Modified header. Obey If-Modified-Since header
        and HEAD requests.
    """
    # Resolve the requested name below *root* to block directory traversal.
    root = os.path.abspath(root) + os.sep
    filename = os.path.abspath(os.path.join(root, filename.strip('/\\')))
    header = dict()

    if not filename.startswith(root):
        return HTTPError(403, "Access denied.")
    if not os.path.exists(filename) or not os.path.isfile(filename):
        return HTTPError(404, "File does not exist.")
    if not os.access(filename, os.R_OK):
        return HTTPError(403, "You do not have permission to access this file.")

    if mimetype == 'auto':
        # Guess content type and transfer encoding from the file extension.
        mimetype, encoding = mimetypes.guess_type(filename)
        if mimetype: header['Content-Type'] = mimetype
        if encoding: header['Content-Encoding'] = encoding
    elif mimetype:
        header['Content-Type'] = mimetype

    if download:
        # download=True keeps the real basename; a string value overrides it.
        download = os.path.basename(filename if download == True else download)
        header['Content-Disposition'] = 'attachment; filename="%s"' % download

    stats = os.stat(filename)
    header['Content-Length'] = clen = stats.st_size
    lm = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(stats.st_mtime))
    header['Last-Modified'] = lm

    # Conditional GET: answer 304 Not Modified if the client copy is fresh.
    ims = request.environ.get('HTTP_IF_MODIFIED_SINCE')
    if ims:
        ims = parse_date(ims.split(";")[0].strip())
        if ims is not None and ims >= int(stats.st_mtime):
            header['Date'] = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime())
            return HTTPResponse(status=304, header=header)

    # HEAD requests get headers only; GET streams the file body.
    body = '' if request.method == 'HEAD' else open(filename, 'rb')

    header["Accept-Ranges"] = "bytes"
    ranges = request.environ.get('HTTP_RANGE')
    if 'HTTP_RANGE' in request.environ:
        # Only the first requested range is served (206 Partial Content).
        ranges = list(parse_range_header(request.environ['HTTP_RANGE'], clen))
        if not ranges:
            return HTTPError(416, "Requested Range Not Satisfiable")
        offset, end = ranges[0]
        header["Content-Range"] = "bytes %d-%d/%d" % (offset, end-1, clen)
        header["Content-Length"] = str(end-offset)
        if body: body = _file_iter_range(body, offset, end-offset)
        return HTTPResponse(body, header=header, status=206)
    return HTTPResponse(body, header=header)
###############################################################################
# HTTP Utilities and MISC (TODO) ###############################################
###############################################################################
def debug(mode=True):
    """ Change the debug level.
        There is only one debug level supported at the moment.

        :param mode: truthy enables debug mode, falsy disables it.
    """
    global DEBUG
    # Normalize to a real bool so `DEBUG is True` style checks work.
    DEBUG = bool(mode)
def parse_date(ims):
    """ Parse rfc1123, rfc850 and asctime timestamps and return UTC epoch,
        or None if the string cannot be parsed. """
    try:
        parts = email.utils.parsedate_tz(ims)
        # parts[9] is the timezone offset (may be None for naive stamps).
        offset = parts[9] or 0
        local = time.mktime(parts[:8] + (0,))
        return local - offset - time.timezone
    except (TypeError, ValueError, IndexError, OverflowError):
        return None
def parse_auth(header):
    """ Parse rfc2617 HTTP authentication header string (basic) and return (user,pass) tuple or None"""
    try:
        scheme, data = header.split(None, 1)
        if scheme.lower() == 'basic':
            # Credentials are base64("user:password"); only the first colon
            # separates user from password.
            user, pwd = touni(base64.b64decode(tob(data))).split(':', 1)
            return user, pwd
    except (KeyError, ValueError):
        return None
def parse_range_header(header, maxlen=0):
    ''' Yield (start, end) index pairs parsed from an HTTP Range header.
        Unsatisfiable ranges are skipped silently. The end index is
        exclusive. '''
    if not header or header[:6] != 'bytes=':
        return
    for spec in header[6:].split(','):
        if '-' not in spec:
            continue
        first, last = spec.split('-', 1)
        try:
            if not first:           # bytes=-100 -> last 100 bytes
                start, end = max(0, maxlen - int(last)), maxlen
            elif not last:          # bytes=100- -> all but the first 99 bytes
                start, end = int(first), maxlen
            else:                   # bytes=100-200 -> bytes 100-200 (inclusive)
                start, end = int(first), min(int(last) + 1, maxlen)
            if 0 <= start < end <= maxlen:
                yield start, end
        except ValueError:
            pass
def _lscmp(a, b):
''' Compares two strings in a cryptographically save way:
Runtime is not affected by length of common prefix. '''
return not sum(0 if x==y else 1 for x, y in zip(a, b)) and len(a) == len(b)
def cookie_encode(data, key):
    ''' Encode and sign a pickle-able object. Return a (byte) string of the
        form ``!signature?payload``. '''
    import hashlib  # local import: keeps the module's import block untouched
    msg = base64.b64encode(pickle.dumps(data, -1))
    # Pin MD5 explicitly: it was the implicit hmac default before Python 3.8
    # and is a required argument on 3.8+; explicit MD5 stays signature-
    # compatible with cookies produced by older versions of this code.
    sig = base64.b64encode(hmac.new(tob(key), msg, digestmod=hashlib.md5).digest())
    return tob('!') + sig + tob('?') + msg
def cookie_decode(data, key):
    ''' Verify and decode an encoded string. Return an object or None.'''
    import hashlib  # local import: keeps the module's import block untouched
    data = tob(data)
    if cookie_is_encoded(data):
        sig, msg = data.split(tob('?'), 1)
        # Pin MD5 explicitly: the implicit hmac default was removed in
        # Python 3.8; explicit MD5 keeps old signatures verifiable.
        expected = base64.b64encode(hmac.new(tob(key), msg, digestmod=hashlib.md5).digest())
        if _lscmp(sig[1:], expected):
            # SECURITY NOTE: pickle.loads only runs after the HMAC signature
            # verified -- never share `key` with untrusted parties.
            return pickle.loads(base64.b64decode(msg))
    return None
def cookie_is_encoded(data):
    ''' Return True if the argument looks like a signed/encoded cookie
        (``!`` prefix and a ``?`` separator somewhere after it). '''
    has_prefix = data.startswith(tob('!'))
    return bool(has_prefix and tob('?') in data)
def html_escape(string):
    ''' Escape HTML special characters ``&<>`` and quotes ``'"``.

        The ampersand is replaced first so already-produced entities are
        not double-escaped. '''
    # BUGFIX: the replacement strings were garbled no-ops (e.g.
    # ``replace('&','&')``); restore the proper HTML entities.
    return string.replace('&','&amp;').replace('<','&lt;').replace('>','&gt;')\
                 .replace('"','&quot;').replace("'",'&#039;')
def html_quote(string):
    ''' Escape and quote a string to be used as an HTTP attribute.

        Newlines, carriage returns and tabs are replaced by numeric
        character references so the value survives inside a quoted
        attribute. '''
    # BUGFIX: the replacement strings were garbled (e.g. ``'%#10;'`` and a
    # literal tab); restore the proper numeric character references.
    return '"%s"' % html_escape(string).replace('\n','&#10;')\
                    .replace('\r','&#13;').replace('\t','&#9;')
def yieldroutes(func):
    """ Return a generator for routes that match the signature (name, args)
        of the func parameter. This may yield more than one route if the
        function takes optional keyword arguments. The output is best
        described by example::

            a() -> '/a'
            b(x, y) -> '/b/:x/:y'
            c(x, y=5) -> '/c/:x' and '/c/:x/:y'
            d(x=5, y=6) -> '/d' and '/d/:x' and '/d/:x/:y'
    """
    import inspect # Expensive module. Only import if necessary.
    path = '/' + func.__name__.replace('__','/').lstrip('/')
    # BUGFIX/compat: inspect.getargspec was removed in Python 3.11.
    # getfullargspec is a drop-in here: index 0 is the argument names and
    # index 3 the default values in both variants.
    getspec = getattr(inspect, 'getfullargspec', None) or inspect.getargspec
    spec = getspec(func)
    argc = len(spec[0]) - len(spec[3] or [])
    # Required arguments form the base route ...
    path += ('/:%s' * argc) % tuple(spec[0][:argc])
    yield path
    # ... and each optional argument adds one progressively longer route.
    for arg in spec[0][argc:]:
        path += '/:%s' % arg
        yield path
def path_shift(script_name, path_info, shift=1):
    ''' Shift path fragments from PATH_INFO to SCRIPT_NAME and vice versa.

        :param script_name: The SCRIPT_NAME path.
        :param path_info: The PATH_INFO path.
        :param shift: The number of path fragments to shift. May be negative
          to change the shift direction. (default: 1)
        :return: The modified (script_name, path_info) pair.
        :raises AssertionError: if there are not enough fragments to shift.
    '''
    if not shift:
        return script_name, path_info
    fragments = path_info.strip('/').split('/')
    if fragments and fragments[0] == '':
        fragments = []
    script_frags = script_name.strip('/').split('/')
    if script_frags and script_frags[0] == '':
        script_frags = []
    if 0 < shift <= len(fragments):
        # Move the leading PATH_INFO fragments onto SCRIPT_NAME.
        script_frags = script_frags + fragments[:shift]
        fragments = fragments[shift:]
    elif 0 > shift >= -len(script_frags):
        # Move the trailing SCRIPT_NAME fragments back onto PATH_INFO.
        fragments = script_frags[shift:] + fragments
        script_frags = script_frags[:shift]
    else:
        empty = 'SCRIPT_NAME' if shift < 0 else 'PATH_INFO'
        raise AssertionError("Cannot shift. Nothing left from %s" % empty)
    new_script_name = '/' + '/'.join(script_frags)
    new_path_info = '/' + '/'.join(fragments)
    # Preserve a trailing slash on PATH_INFO when fragments remain.
    if path_info.endswith('/') and fragments:
        new_path_info += '/'
    return new_script_name, new_path_info
def validate(**vkargs):
    """
    Validates and manipulates keyword arguments by user defined callables.
    Handles ValueError and missing arguments by raising HTTPError(403).
    """
    depr('Use route wildcard filters instead.')
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kargs):
            for key, check in vkargs.items():
                # Every validated parameter must be present ...
                if key not in kargs:
                    abort(403, 'Missing parameter: %s' % key)
                # ... and convertible by its validator callable.
                try:
                    kargs[key] = check(kargs[key])
                except ValueError:
                    abort(403, 'Wrong parameter format for: %s' % key)
            return func(*args, **kargs)
        return wrapper
    return decorator
def auth_basic(check, realm="private", text="Access denied"):
    ''' Callback decorator to require HTTP auth (basic).

        :param check: callable(user, password) -> bool that validates the
          supplied credentials.
        :param realm: realm reported in the WWW-Authenticate challenge.
        :param text: error text for the 401 response.
        TODO: Add route(check_auth=...) parameter. '''
    def decorator(func):
        # FIX: preserve the wrapped callback's name/docs (matches the
        # functools.wraps usage elsewhere in this module).
        @functools.wraps(func)
        def wrapper(*a, **ka):
            user, password = request.auth or (None, None)
            if user is None or not check(user, password):
                # Challenge the client to authenticate.
                response.headers['WWW-Authenticate'] = 'Basic realm="%s"' % realm
                return HTTPError(401, text)
            return func(*a, **ka)
        return wrapper
    return decorator
# Shortcuts for common Bottle methods.
# They all refer to the current default application.
def make_default_app_wrapper(name):
    ''' Return a callable that relays calls to the current default app.

        :param name: name of the :class:`Bottle` method to proxy.
    '''
    @functools.wraps(getattr(Bottle, name))
    def wrapper(*a, **ka):
        # Resolve app() on every call so the proxy follows the stack head.
        return getattr(app(), name)(*a, **ka)
    return wrapper
# Module-level shortcuts that proxy each call to the current default app.
route     = make_default_app_wrapper('route')
get       = make_default_app_wrapper('get')
post      = make_default_app_wrapper('post')
put       = make_default_app_wrapper('put')
delete    = make_default_app_wrapper('delete')
error     = make_default_app_wrapper('error')
mount     = make_default_app_wrapper('mount')
hook      = make_default_app_wrapper('hook')
install   = make_default_app_wrapper('install')
uninstall = make_default_app_wrapper('uninstall')
url       = make_default_app_wrapper('get_url')
###############################################################################
# Server Adapter ###############################################################
###############################################################################
class ServerAdapter(object):
    """ Base class for server adapters: stores host, port and extra options
        and defines the ``run(handler)`` interface subclasses implement. """
    #: Suppress startup messages when true.
    quiet = False

    def __init__(self, host='127.0.0.1', port=8080, **config):
        self.options = config
        self.host = host
        self.port = int(port)

    def run(self, handler): # pragma: no cover
        # Overridden by concrete adapters; the base class does nothing.
        pass

    def __repr__(self):
        options = ['%s=%s' % (key, repr(val)) for key, val in self.options.items()]
        return "%s(%s)" % (self.__class__.__name__, ', '.join(options))
class CGIServer(ServerAdapter):
    """ Run the handler through wsgiref's CGIHandler (one request per
        process). """
    quiet = True # CGI responses go to stdout; keep startup chatter out of it
    def run(self, handler): # pragma: no cover
        from wsgiref.handlers import CGIHandler
        def fixed_environ(environ, start_response):
            # Some CGI environments omit PATH_INFO; WSGI expects it to exist.
            environ.setdefault('PATH_INFO', '')
            return handler(environ, start_response)
        CGIHandler().run(fixed_environ)
class FlupFCGIServer(ServerAdapter):
    """ FastCGI adapter based on the flup package. """
    def run(self, handler): # pragma: no cover
        import flup.server.fcgi
        # Default to a TCP bind address; callers may override with a socket.
        self.options.setdefault('bindAddress', (self.host, self.port))
        flup.server.fcgi.WSGIServer(handler, **self.options).run()
class WSGIRefServer(ServerAdapter):
    """ Single-threaded server from the stdlib wsgiref module (the default
        adapter). """
    def run(self, handler): # pragma: no cover
        from wsgiref.simple_server import make_server, WSGIRequestHandler
        if self.quiet:
            # Silence the per-request log lines in quiet mode.
            class QuietHandler(WSGIRequestHandler):
                def log_request(*args, **kw): pass
            self.options['handler_class'] = QuietHandler
        srv = make_server(self.host, self.port, handler, **self.options)
        srv.serve_forever()
class CherryPyServer(ServerAdapter):
    """ Multi-threaded server based on CherryPy's wsgiserver. """
    def run(self, handler): # pragma: no cover
        from cherrypy import wsgiserver
        server = wsgiserver.CherryPyWSGIServer((self.host, self.port), handler)
        try:
            server.start()
        finally:
            # Ensure worker threads are stopped even if start() raises.
            server.stop()
class WaitressServer(ServerAdapter):
    """ Pure-python, multi-threaded server based on the waitress package. """
    def run(self, handler):
        from waitress import serve
        serve(handler, host=self.host, port=self.port)
class PasteServer(ServerAdapter):
    """ Server based on paste.httpserver. """
    def run(self, handler): # pragma: no cover
        from paste import httpserver
        if not self.quiet:
            # Wrap the app in a request logger unless quiet mode is on.
            from paste.translogger import TransLogger
            handler = TransLogger(handler)
        httpserver.serve(handler, host=self.host, port=str(self.port),
                         **self.options)
class MeinheldServer(ServerAdapter):
    """ Server based on the meinheld package (C event loop). """
    def run(self, handler):
        from meinheld import server
        server.listen((self.host, self.port))
        server.run(handler)
class FapwsServer(ServerAdapter):
    """ Extremely fast webserver using libev. See http://www.fapws.org/ """
    def run(self, handler): # pragma: no cover
        import fapws._evwsgi as evwsgi
        from fapws import base, config
        port = self.port
        if float(config.SERVER_IDENT[-2:]) > 0.4:
            # fapws3 silently changed its API in 0.5
            port = str(port)
        evwsgi.start(self.host, port)
        # fapws3 never releases the GIL. Complain upstream. I tried. No luck.
        if 'BOTTLE_CHILD' in os.environ and not self.quiet:
            _stderr("WARNING: Auto-reloading does not work with Fapws3.\n")
            _stderr("         (Fapws3 breaks python thread support)\n")
        evwsgi.set_base_module(base)
        def app(environ, start_response):
            # fapws3 runs in a single process; advertise that to the app.
            environ['wsgi.multiprocess'] = False
            return handler(environ, start_response)
        evwsgi.wsgi_cb(('', app))
        evwsgi.run()
class TornadoServer(ServerAdapter):
    """ The super hyped asynchronous server by facebook. Untested. """
    def run(self, handler): # pragma: no cover
        import tornado.wsgi, tornado.httpserver, tornado.ioloop
        # Adapt the WSGI app to tornado's async HTTP server, then block.
        container = tornado.wsgi.WSGIContainer(handler)
        server = tornado.httpserver.HTTPServer(container)
        server.listen(port=self.port)
        tornado.ioloop.IOLoop.instance().start()
class AppEngineServer(ServerAdapter):
    """ Adapter for Google App Engine. """
    quiet = True
    def run(self, handler):
        from google.appengine.ext.webapp import util
        # A main() function in the handler script enables 'App Caching'.
        # Lets makes sure it is there. This _really_ improves performance.
        module = sys.modules.get('__main__')
        if module and not hasattr(module, 'main'):
            module.main = lambda: util.run_wsgi_app(handler)
        util.run_wsgi_app(handler)
class TwistedServer(ServerAdapter):
    """ Untested. """
    def run(self, handler):
        from twisted.web import server, wsgi
        from twisted.python.threadpool import ThreadPool
        from twisted.internet import reactor
        # WSGI apps are synchronous; run them on a thread pool that is torn
        # down when the reactor shuts down.
        thread_pool = ThreadPool()
        thread_pool.start()
        reactor.addSystemEventTrigger('after', 'shutdown', thread_pool.stop)
        factory = server.Site(wsgi.WSGIResource(reactor, thread_pool, handler))
        reactor.listenTCP(self.port, factory, interface=self.host)
        reactor.run()
class DieselServer(ServerAdapter):
    """ Untested. """
    def run(self, handler):
        from diesel.protocols.wsgi import WSGIApplication
        # NOTE(review): diesel's WSGIApplication ignores `host`; only the
        # port is forwarded here.
        app = WSGIApplication(handler, port=self.port)
        app.run()
class GeventServer(ServerAdapter):
    """ Untested. Options:

        * `monkey` (default: True) fixes the stdlib to use greenthreads.
        * `fast` (default: False) uses libevent's http server, but has some
          issues: No streaming, no pipelining, no SSL.
    """
    def run(self, handler):
        from gevent import wsgi as wsgi_fast, pywsgi, monkey, local
        if self.options.get('monkey', True):
            # Only patch once: skip if threading.local is already gevent's.
            if not threading.local is local.local: monkey.patch_all()
        wsgi = wsgi_fast if self.options.get('fast') else pywsgi
        log = None if self.quiet else 'default'
        wsgi.WSGIServer((self.host, self.port), handler, log=log).serve_forever()
class GunicornServer(ServerAdapter):
    """ Untested. See http://gunicorn.org/configure.html for options. """
    def run(self, handler):
        from gunicorn.app.base import Application

        # Extra adapter options are forwarded as gunicorn settings.
        config = {'bind': "%s:%d" % (self.host, int(self.port))}
        config.update(self.options)

        class GunicornApplication(Application):
            def init(self, parser, opts, args):
                return config

            def load(self):
                return handler

        GunicornApplication().run()
class EventletServer(ServerAdapter):
    """ Untested """
    def run(self, handler):
        from eventlet import wsgi, listen
        try:
            wsgi.server(listen((self.host, self.port)), handler,
                        log_output=(not self.quiet))
        except TypeError:
            # Fallback, if we have old version of eventlet
            wsgi.server(listen((self.host, self.port)), handler)
class RocketServer(ServerAdapter):
    """ Untested. """
    def run(self, handler):
        from rocket import Rocket
        # Rocket takes the app via its method_dict second/third arguments.
        server = Rocket((self.host, self.port), 'wsgi', { 'wsgi_app' : handler })
        server.start()
class BjoernServer(ServerAdapter):
    """ Fast server written in C: https://github.com/jonashaag/bjoern """
    def run(self, handler):
        from bjoern import run
        run(handler, self.host, self.port)
class AutoServer(ServerAdapter):
    """ Untested. """
    #: Candidate adapters, tried in order; the first importable one wins.
    adapters = [WaitressServer, PasteServer, TwistedServer, CherryPyServer, WSGIRefServer]
    def run(self, handler):
        for sa in self.adapters:
            try:
                return sa(self.host, self.port, **self.options).run(handler)
            except ImportError:
                # Backend not installed; try the next one.
                pass
#: Maps the string names accepted by run(server=...) to adapter classes.
server_names = {
    'cgi': CGIServer,
    'flup': FlupFCGIServer,
    'wsgiref': WSGIRefServer,
    'waitress': WaitressServer,
    'cherrypy': CherryPyServer,
    'paste': PasteServer,
    'fapws3': FapwsServer,
    'tornado': TornadoServer,
    'gae': AppEngineServer,
    'twisted': TwistedServer,
    'diesel': DieselServer,
    'meinheld': MeinheldServer,
    'gunicorn': GunicornServer,
    'eventlet': EventletServer,
    'gevent': GeventServer,
    'rocket': RocketServer,
    'bjoern' : BjoernServer,
    'auto': AutoServer,
}
###############################################################################
# Application Control ##########################################################
###############################################################################
def load(target, **namespace):
    """ Import a module or fetch an object from a module.

        * ``package.module`` returns `module` as a module object.
        * ``pack.mod:name`` returns the module variable `name` from `pack.mod`.
        * ``pack.mod:func()`` calls `pack.mod.func()` and returns the result.

        The last form accepts not only function calls, but any type of
        expression. Keyword arguments passed to this function are available as
        local variables. Example: ``import_string('re:compile(x)', x='[a-z]')``
    """
    if ':' in target:
        module, target = target.split(":", 1)
    else:
        module, target = target, None
    if module not in sys.modules:
        __import__(module)
    if not target:
        return sys.modules[module]
    if target.isalnum():
        return getattr(sys.modules[module], target)
    # SECURITY NOTE: the expression form goes through eval() -- only ever
    # pass trusted target strings.
    package_name = module.split('.')[0]
    namespace[package_name] = sys.modules[package_name]
    return eval('%s.%s' % (module, target), namespace)
def load_app(target):
    """ Load a bottle application from a module and make sure that the import
        does not affect the current default application, but returns a separate
        application object. See :func:`load` for the target parameter. """
    # Temporarily raise NORUN so a run() call inside the imported module is
    # suppressed; restore the previous value afterwards.
    global NORUN; NORUN, nr_old = True, NORUN
    try:
        tmp = default_app.push() # Create a new "default application"
        rv = load(target) # Import the target module
        # Prefer an explicitly returned callable; otherwise use whatever the
        # module registered on the temporary default app.
        return rv if callable(rv) else tmp
    finally:
        # NOTE(review): if push() itself raised, `tmp` is unbound here.
        default_app.remove(tmp) # Remove the temporary added default application
        NORUN = nr_old
_debug = debug  # keep a reference: run() shadows `debug` with its parameter
def run(app=None, server='wsgiref', host='127.0.0.1', port=8080,
        interval=1, reloader=False, quiet=False, plugins=None,
        debug=False, **kargs):
    """ Start a server instance. This method blocks until the server terminates.

        :param app: WSGI application or target string supported by
               :func:`load_app`. (default: :func:`default_app`)
        :param server: Server adapter to use. See :data:`server_names` keys
               for valid names or pass a :class:`ServerAdapter` subclass.
               (default: `wsgiref`)
        :param host: Server address to bind to. Pass ``0.0.0.0`` to listens on
               all interfaces including the external one. (default: 127.0.0.1)
        :param port: Server port to bind to. Values below 1024 require root
               privileges. (default: 8080)
        :param reloader: Start auto-reloading server? (default: False)
        :param interval: Auto-reloader interval in seconds (default: 1)
        :param quiet: Suppress output to stdout and stderr? (default: False)
        :param options: Options passed to the server adapter.
     """
    if NORUN: return
    # Reloader parent process: spawn a child with BOTTLE_CHILD set, keep the
    # lockfile's mtime fresh while the child lives, and restart the child
    # whenever it exits with status 3 (the "please reload" signal).
    if reloader and not os.environ.get('BOTTLE_CHILD'):
        try:
            lockfile = None
            fd, lockfile = tempfile.mkstemp(prefix='bottle.', suffix='.lock')
            os.close(fd) # We only need this file to exist. We never write to it
            while os.path.exists(lockfile):
                args = [sys.executable] + sys.argv
                environ = os.environ.copy()
                environ['BOTTLE_CHILD'] = 'true'
                environ['BOTTLE_LOCKFILE'] = lockfile
                p = subprocess.Popen(args, env=environ)
                while p.poll() is None: # Busy wait...
                    os.utime(lockfile, None) # I am alive!
                    time.sleep(interval)
                if p.poll() != 3:
                    # Child exited for real (not a reload request): mirror it.
                    if os.path.exists(lockfile): os.unlink(lockfile)
                    sys.exit(p.poll())
        except KeyboardInterrupt:
            pass
        finally:
            if os.path.exists(lockfile):
                os.unlink(lockfile)
        return
    # Normal (or reloader-child) path: resolve app and server, then serve.
    try:
        _debug(debug)
        app = app or default_app()
        # Accept "module:app" strings as well as application objects.
        if isinstance(app, basestring):
            app = load_app(app)
        if not callable(app):
            raise ValueError("Application is not callable: %r" % app)

        for plugin in plugins or []:
            app.install(plugin)

        # `server` may be a known name, an import string, an adapter class
        # or an adapter instance -- normalize to an instance.
        if server in server_names:
            server = server_names.get(server)
        if isinstance(server, basestring):
            server = load(server)
        if isinstance(server, type):
            server = server(host=host, port=port, **kargs)
        if not isinstance(server, ServerAdapter):
            raise ValueError("Unknown or unsupported server: %r" % server)

        server.quiet = server.quiet or quiet
        if not server.quiet:
            _stderr("Bottle v%s server starting up (using %s)...\n" % (__version__, repr(server)))
            _stderr("Listening on http://%s:%d/\n" % (server.host, server.port))
            _stderr("Hit Ctrl-C to quit.\n\n")

        if reloader:
            # Child process: watch module files and exit(3) to request reload.
            lockfile = os.environ.get('BOTTLE_LOCKFILE')
            bgcheck = FileCheckerThread(lockfile, interval)
            with bgcheck:
                server.run(app)
            if bgcheck.status == 'reload':
                sys.exit(3)
        else:
            server.run(app)
    except KeyboardInterrupt:
        pass
    except (SystemExit, MemoryError):
        raise
    except:
        if not reloader: raise
        if not getattr(server, 'quiet', quiet):
            print_exc()
        # Give the developer a moment to fix the error, then request reload.
        time.sleep(interval)
        sys.exit(3)
class FileCheckerThread(threading.Thread):
    ''' Background thread that interrupts the main thread as soon as a loaded
        module file changes, the lockfile disappears, or the lockfile gets
        too old. '''

    def __init__(self, lockfile, interval):
        threading.Thread.__init__(self)
        self.lockfile = lockfile
        self.interval = interval
        #: One of None (still running), 'reload', 'error' or 'exit'.
        self.status = None

    def run(self):
        exists = os.path.exists
        mtime = lambda path: os.stat(path).st_mtime
        # Snapshot the modification time of every loaded module file,
        # mapping compiled .pyc/.pyo names back to their source files.
        watched = dict()
        for module in list(sys.modules.values()):
            fname = getattr(module, '__file__', '')
            if fname[-4:] in ('.pyo', '.pyc'):
                fname = fname[:-1]
            if fname and exists(fname):
                watched[fname] = mtime(fname)

        while not self.status:
            # A missing or stale lockfile means the parent process is gone.
            if not exists(self.lockfile)\
            or mtime(self.lockfile) < time.time() - self.interval - 5:
                self.status = 'error'
                thread.interrupt_main()
            for fname, last_seen in list(watched.items()):
                if not exists(fname) or mtime(fname) > last_seen:
                    self.status = 'reload'
                    thread.interrupt_main()
                    break
            time.sleep(self.interval)

    def __enter__(self):
        self.start()

    def __exit__(self, exc_type, exc_val, exc_tb):
        if not self.status:
            self.status = 'exit' # silent exit
        self.join()
        # Swallow the KeyboardInterrupt raised by interrupt_main().
        return exc_type is not None and issubclass(exc_type, KeyboardInterrupt)
###############################################################################
# Template Adapters ############################################################
###############################################################################
class TemplateError(HTTPError):
    """ Raised when a template cannot be found or fails to prepare;
        surfaces to the client as an HTTP 500 error. """
    def __init__(self, message):
        HTTPError.__init__(self, 500, message)
class BaseTemplate(object):
    """ Base class and minimal API for template adapters """
    # File extensions tried (in order) by search() when guessing filenames.
    extensions = ['tpl','html','thtml','stpl']
    settings = {} #used in prepare()
    defaults = {} #used in render()
def __init__(self, source=None, name=None, lookup=[], encoding='utf8', **settings):
""" Create a new template.
If the source parameter (str or buffer) is missing, the name argument
is used to guess a template filename. Subclasses can assume that
self.source and/or self.filename are set. Both are strings.
The lookup, encoding and settings parameters are stored as instance
variables.
The lookup parameter stores a list containing directory paths.
The encoding parameter should be used to decode byte strings or files.
The settings parameter contains a dict for engine-specific settings.
"""
self.name = name
self.source = source.read() if hasattr(source, 'read') else source
self.filename = source.filename if hasattr(source, 'filename') else None
self.lookup = [os.path.abspath(x) for x in lookup]
self.encoding = encoding
self.settings = self.settings.copy() # Copy from class variable
self.settings.update(settings) # Apply
if not self.source and self.name:
self.filename = self.search(self.name, self.lookup)
if not self.filename:
raise TemplateError('Template %s not found.' % repr(name))
if not self.source and not self.filename:
raise TemplateError('No template specified.')
self.prepare(**self.settings)
@classmethod
def search(cls, name, lookup=[]):
""" Search name in all directories specified in lookup.
First without, then with common extensions. Return first hit. """
if os.path.isfile(name): return name
for spath in lookup:
fname = os.path.join(spath, name)
if os.path.isfile(fname):
return fname
for ext in cls.extensions:
if os.path.isfile('%s.%s' % (fname, ext)):
return '%s.%s' % (fname, ext)
@classmethod
def global_config(cls, key, *args):
''' This reads or sets the global settings stored in class.settings. '''
if args:
cls.settings = cls.settings.copy() # Make settings local to class
cls.settings[key] = args[0]
else:
return cls.settings[key]
def prepare(self, **options):
""" Run preparations (parsing, caching, ...).
It should be possible to call this again to refresh a template or to
update settings.
"""
raise NotImplementedError
def render(self, *args, **kwargs):
""" Render the template with the specified local variables and return
a single byte or unicode string. If it is a byte string, the encoding
must match self.encoding. This method must be thread-safe!
Local variables may be provided in dictionaries (*args)
or directly, as keywords (**kwargs).
"""
raise NotImplementedError
class MakoTemplate(BaseTemplate):
    """ Adapter for the Mako template engine. """

    def prepare(self, **options):
        # Imported lazily so Mako is only required if this adapter is used.
        from mako.template import Template
        from mako.lookup import TemplateLookup
        options.update({'input_encoding':self.encoding})
        options.setdefault('format_exceptions', bool(DEBUG))
        lookup = TemplateLookup(directories=self.lookup, **options)
        if self.source:
            self.tpl = Template(self.source, lookup=lookup, **options)
        else:
            self.tpl = Template(uri=self.name, filename=self.filename, lookup=lookup, **options)

    def render(self, *args, **kwargs):
        # Merge positional dicts and keyword args into one variable
        # namespace; later sources win over class-level defaults.
        for dictarg in args: kwargs.update(dictarg)
        _defaults = self.defaults.copy()
        _defaults.update(kwargs)
        return self.tpl.render(**_defaults)
class CheetahTemplate(BaseTemplate):
    """ Adapter for the Cheetah template engine. """

    def prepare(self, **options):
        from Cheetah.Template import Template
        # NOTE(review): self.context.vars is created in the thread that runs
        # prepare(); because this is a threading.local, render() called from
        # a different thread would not see it -- confirm intended usage.
        self.context = threading.local()
        self.context.vars = {}
        options['searchList'] = [self.context.vars]
        if self.source:
            self.tpl = Template(source=self.source, **options)
        else:
            self.tpl = Template(file=self.filename, **options)

    def render(self, *args, **kwargs):
        for dictarg in args: kwargs.update(dictarg)
        # Fill the shared searchList namespace, render, then clear it so
        # variables do not leak into the next render() call.
        self.context.vars.update(self.defaults)
        self.context.vars.update(kwargs)
        out = str(self.tpl)
        self.context.vars.clear()
        return out
class Jinja2Template(BaseTemplate):
    """ Adapter for the Jinja2 template engine. """

    def prepare(self, filters=None, tests=None, **kwargs):
        from jinja2 import Environment, FunctionLoader
        if 'prefix' in kwargs: # TODO: to be removed after a while
            raise RuntimeError('The keyword argument `prefix` has been removed. '
                'Use the full jinja2 environment name line_statement_prefix instead.')
        # FunctionLoader delegates template lookup to self.loader() below.
        self.env = Environment(loader=FunctionLoader(self.loader), **kwargs)
        if filters: self.env.filters.update(filters)
        if tests: self.env.tests.update(tests)
        if self.source:
            self.tpl = self.env.from_string(self.source)
        else:
            self.tpl = self.env.get_template(self.filename)

    def render(self, *args, **kwargs):
        for dictarg in args: kwargs.update(dictarg)
        _defaults = self.defaults.copy()
        _defaults.update(kwargs)
        return self.tpl.render(**_defaults)

    def loader(self, name):
        # Resolve a template name via the lookup path and return its decoded
        # source for jinja2's FunctionLoader (None means "not found").
        fname = self.search(name, self.lookup)
        if not fname: return
        with open(fname, "rb") as f:
            return f.read().decode(self.encoding)
class SimpleTALTemplate(BaseTemplate):
    ''' Deprecated, do not use. '''
    def prepare(self, **options):
        depr('The SimpleTAL template handler is deprecated'\
             ' and will be removed in 0.12')
        from simpletal import simpleTAL
        if self.source:
            self.tpl = simpleTAL.compileHTMLTemplate(self.source)
        else:
            with open(self.filename, 'rb') as fp:
                self.tpl = simpleTAL.compileHTMLTemplate(tonat(fp.read()))

    def render(self, *args, **kwargs):
        from simpletal import simpleTALES
        for dictarg in args: kwargs.update(dictarg)
        # Defaults first, then call-time variables (which may override them).
        context = simpleTALES.Context()
        for k,v in self.defaults.items():
            context.addGlobal(k, v)
        for k,v in kwargs.items():
            context.addGlobal(k, v)
        output = StringIO()
        self.tpl.expand(context, output)
        return output.getvalue()
class SimpleTemplate(BaseTemplate):
    """ Bottle's built-in template engine: compiles '%'-prefixed python
        lines and {{...}} expressions into a python code object. """

    # Python keywords that open an indented block in the generated code.
    blocks = ('if', 'elif', 'else', 'try', 'except', 'finally', 'for', 'while',
              'with', 'def', 'class')
    # Keywords that close the previous block before opening their own.
    dedent_blocks = ('elif', 'else', 'except', 'finally')

    @lazy_attribute
    def re_pytokens(cls):
        ''' This matches comments and all kinds of quoted strings but does
            NOT match comments (#...) within quoted strings. (trust me) '''
        return re.compile(r'''
            (''(?!')|""(?!")|'{6}|"{6}    # Empty strings (all 4 types)
             |'(?:[^\\']|\\.)+?'          # Single quotes (')
             |"(?:[^\\"]|\\.)+?"          # Double quotes (")
             |'{3}(?:[^\\]|\\.|\n)+?'{3}  # Triple-quoted strings (')
             |"{3}(?:[^\\]|\\.|\n)+?"{3}  # Triple-quoted strings (")
             |\#.*                        # Comments
            )''', re.VERBOSE)

    def prepare(self, escape_func=html_escape, noescape=False, **kwargs):
        self.cache = {}  # subtemplate cache, filled by subtemplate()
        enc = self.encoding
        self._str = lambda x: touni(x, enc)
        self._escape = lambda x: escape_func(touni(x, enc))
        if noescape:
            # Swap the two so {{x}} is raw and {{!x}} is escaped.
            self._str, self._escape = self._escape, self._str

    @classmethod
    def split_comment(cls, code):
        """ Removes comments (#...) from python code. """
        if '#' not in code: return code
        #: Remove comments only (leave quoted strings as they are)
        subf = lambda m: '' if m.group(0)[0]=='#' else m.group(0)
        return re.sub(cls.re_pytokens, subf, code)

    @cached_property
    def co(self):
        # Compiled code object for the generated python source (see `code`).
        return compile(self.code, self.filename or '<string>', 'exec')

    @cached_property
    def code(self):
        """ Translate the template source into python source code. """
        stack = [] # Current Code indentation
        lineno = 0 # Current line of code
        ptrbuffer = [] # Buffer for printable strings and token tuple instances
        codebuffer = [] # Buffer for generated python code
        multiline = dedent = oneline = False
        # NOTE(review): the file handle opened here is never closed
        # explicitly; it is reclaimed by garbage collection.
        template = self.source or open(self.filename, 'rb').read()

        def yield_tokens(line):
            # Split a text line into ('TXT'|'RAW'|'CMD', value) tokens on
            # {{...}} boundaries; a leading '!' marks a raw (unescaped) expr.
            for i, part in enumerate(re.split(r'\{\{(.*?)\}\}', line)):
                if i % 2:
                    if part.startswith('!'): yield 'RAW', part[1:]
                    else: yield 'CMD', part
                else: yield 'TXT', part

        def flush(): # Flush the ptrbuffer
            if not ptrbuffer: return
            cline = ''
            for line in ptrbuffer:
                for token, value in line:
                    if token == 'TXT': cline += repr(value)
                    elif token == 'RAW': cline += '_str(%s)' % value
                    elif token == 'CMD': cline += '_escape(%s)' % value
                    cline += ', '
                cline = cline[:-2] + '\\\n'
            cline = cline[:-2]
            if cline[:-1].endswith('\\\\\\\\\\n'):
                cline = cline[:-7] + cline[-1] # 'nobr\\\\\n' --> 'nobr'
            cline = '_printlist([' + cline + '])'
            del ptrbuffer[:] # Do this before calling code() again
            code(cline)

        def code(stmt):
            # Emit one or more statements at the current indentation depth.
            for line in stmt.splitlines():
                codebuffer.append(' ' * len(stack) + line.strip())

        for line in template.splitlines(True):
            lineno += 1
            line = touni(line, self.encoding)
            sline = line.lstrip()
            if lineno <= 2:
                # Honor a PEP 263 style coding cookie in the first two lines.
                m = re.match(r"%\s*#.*coding[:=]\s*([-\w.]+)", sline)
                if m: self.encoding = m.group(1)
                if m: line = line.replace('coding','coding (removed)')
            if sline and sline[0] == '%' and sline[:2] != '%%':
                line = line.split('%',1)[1].lstrip() # Full line following the %
                cline = self.split_comment(line).strip()
                cmd = re.split(r'[^a-zA-Z0-9_]', cline)[0]
                flush() # You are actually reading this? Good luck, it's a mess :)
                if cmd in self.blocks or multiline:
                    cmd = multiline or cmd
                    dedent = cmd in self.dedent_blocks # "else:"
                    if dedent and not oneline and not multiline:
                        cmd = stack.pop()
                    code(line)
                    oneline = not cline.endswith(':') # "if 1: pass"
                    multiline = cmd if cline.endswith('\\') else False
                    if not oneline and not multiline:
                        stack.append(cmd)
                elif cmd == 'end' and stack:
                    code('#end(%s) %s' % (stack.pop(), line.strip()[3:]))
                elif cmd == 'include':
                    p = cline.split(None, 2)[1:]
                    if len(p) == 2:
                        code("_=_include(%s, _stdout, %s)" % (repr(p[0]), p[1]))
                    elif p:
                        code("_=_include(%s, _stdout)" % repr(p[0]))
                    else: # Empty %include -> reverse of %rebase
                        code("_printlist(_base)")
                elif cmd == 'rebase':
                    p = cline.split(None, 2)[1:]
                    if len(p) == 2:
                        code("globals()['_rebase']=(%s, dict(%s))" % (repr(p[0]), p[1]))
                    elif p:
                        code("globals()['_rebase']=(%s, {})" % repr(p[0]))
                else:
                    code(line)
            else: # Line starting with text (not '%') or '%%' (escaped)
                if line.strip().startswith('%%'):
                    line = line.replace('%%', '%', 1)
                ptrbuffer.append(yield_tokens(line))
        flush()
        return '\n'.join(codebuffer) + '\n'

    def subtemplate(self, _name, _stdout, *args, **kwargs):
        # Compile-once cache for %include'd templates.
        for dictarg in args: kwargs.update(dictarg)
        if _name not in self.cache:
            self.cache[_name] = self.__class__(name=_name, lookup=self.lookup)
        return self.cache[_name].execute(_stdout, kwargs)

    def execute(self, _stdout, *args, **kwargs):
        # Run the compiled template; output fragments accumulate in _stdout.
        for dictarg in args: kwargs.update(dictarg)
        env = self.defaults.copy()
        env.update({'_stdout': _stdout, '_printlist': _stdout.extend,
               '_include': self.subtemplate, '_str': self._str,
               '_escape': self._escape, 'get': env.get,
               'setdefault': env.setdefault, 'defined': env.__contains__})
        env.update(kwargs)
        eval(self.co, env)
        if '_rebase' in env:
            # %rebase: re-render the base template with the current output
            # available as _base.
            subtpl, rargs = env['_rebase']
            rargs['_base'] = _stdout[:] #copy stdout
            del _stdout[:] # clear stdout
            return self.subtemplate(subtpl,_stdout,rargs)
        return env

    def render(self, *args, **kwargs):
        """ Render the template using keyword arguments as local variables. """
        for dictarg in args: kwargs.update(dictarg)
        stdout = []
        self.execute(stdout, kwargs)
        return ''.join(stdout)
def template(*args, **kwargs):
    '''
    Get a rendered template as a string iterator.
    You can use a name, a filename or a template string as first parameter.
    Template rendering arguments can be passed as dictionaries
    or directly (as keyword arguments).
    '''
    tpl = args[0] if args else None
    template_adapter = kwargs.pop('template_adapter', SimpleTemplate)
    if tpl not in TEMPLATES or DEBUG:
        # (Re-)build the adapter; in DEBUG mode templates are rebuilt on
        # every call so source edits are picked up without a restart.
        settings = kwargs.pop('template_settings', {})
        lookup = kwargs.pop('template_lookup', TEMPLATE_PATH)
        if isinstance(tpl, template_adapter):
            TEMPLATES[tpl] = tpl
            if settings: TEMPLATES[tpl].prepare(**settings)
        elif "\n" in tpl or "{" in tpl or "%" in tpl or '$' in tpl:
            # Looks like a template string rather than a name or filename.
            TEMPLATES[tpl] = template_adapter(source=tpl, lookup=lookup, **settings)
        else:
            TEMPLATES[tpl] = template_adapter(name=tpl, lookup=lookup, **settings)
    if not TEMPLATES[tpl]:
        abort(500, 'Template (%s) not found' % tpl)
    # NOTE(review): when the cached branch is taken, template_settings and
    # template_lookup remain in kwargs and are passed to render() as template
    # variables -- confirm this is intended.
    for dictarg in args[1:]: kwargs.update(dictarg)
    return TEMPLATES[tpl].render(kwargs)
# Convenience wrappers: template() pre-bound to a specific engine adapter.
mako_template = functools.partial(template, template_adapter=MakoTemplate)
cheetah_template = functools.partial(template, template_adapter=CheetahTemplate)
jinja2_template = functools.partial(template, template_adapter=Jinja2Template)
simpletal_template = functools.partial(template, template_adapter=SimpleTALTemplate)
def view(tpl_name, **defaults):
    ''' Decorator: renders a template for a handler.
        The handler can control its behavior like that:

          - return a dict of template vars to fill out the template
          - return something other than a dict and the view decorator will not
            process the template, but return the handler result as is.
            This includes returning a HTTPResponse(dict) to get,
            for instance, JSON with autojson or other castfilters.
    '''
    def decorator(callback):
        @functools.wraps(callback)
        def wrapper(*args, **kwargs):
            rv = callback(*args, **kwargs)
            if not isinstance(rv, (dict, DictMixin)):
                # Non-dict results (strings, HTTPResponse, ...) pass through.
                return rv
            tplvars = dict(defaults)
            tplvars.update(rv)
            return template(tpl_name, **tplvars)
        return wrapper
    return decorator
# View decorators pre-bound to a specific template engine adapter.
mako_view = functools.partial(view, template_adapter=MakoTemplate)
cheetah_view = functools.partial(view, template_adapter=CheetahTemplate)
jinja2_view = functools.partial(view, template_adapter=Jinja2Template)
simpletal_view = functools.partial(view, template_adapter=SimpleTALTemplate)
###############################################################################
# Constants and Globals ########################################################
###############################################################################
TEMPLATE_PATH = ['./', './views/']
TEMPLATES = {}
DEBUG = False
NORUN = False # If set, run() does nothing. Used by load_app()
#: A dict to map HTTP status codes (e.g. 404) to phrases (e.g. 'Not Found')
HTTP_CODES = httplib.responses
HTTP_CODES[418] = "I'm a teapot" # RFC 2324
HTTP_CODES[428] = "Precondition Required"
HTTP_CODES[429] = "Too Many Requests"
HTTP_CODES[431] = "Request Header Fields Too Large"
HTTP_CODES[511] = "Network Authentication Required"
_HTTP_STATUS_LINES = dict((k, '%d %s'%(k,v)) for (k,v) in HTTP_CODES.items())
#: The default template used for error pages. Override with @error()
ERROR_PAGE_TEMPLATE = """
%%try:
%%from %s import DEBUG, HTTP_CODES, request, touni
%%status_name = HTTP_CODES.get(e.status, 'Unknown').title()
<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML 2.0//EN">
<html>
<head>
<title>Error {{e.status}}: {{status_name}}</title>
<style type="text/css">
html {background-color: #eee; font-family: sans;}
body {background-color: #fff; border: 1px solid #ddd;
padding: 15px; margin: 15px;}
pre {background-color: #eee; border: 1px solid #ddd; padding: 5px;}
</style>
</head>
<body>
<h1>Error {{e.status}}: {{status_name}}</h1>
<p>Sorry, the requested URL <tt>{{repr(request.url)}}</tt>
caused an error:</p>
<pre>{{e.output}}</pre>
%%if DEBUG and e.exception:
<h2>Exception:</h2>
<pre>{{repr(e.exception)}}</pre>
%%end
%%if DEBUG and e.traceback:
<h2>Traceback:</h2>
<pre>{{e.traceback}}</pre>
%%end
</body>
</html>
%%except ImportError:
<b>ImportError:</b> Could not generate the error page. Please add bottle to
the import path.
%%end
""" % __name__
#: A thread-safe instance of :class:`LocalRequest`. If accessed from within a
#: request callback, this instance always refers to the *current* request
#: (even on a multithreaded server).
request = LocalRequest()
#: A thread-safe instance of :class:`LocalResponse`. It is used to change the
#: HTTP response for the *current* request.
response = LocalResponse()
#: A thread-safe namespace. Not used by Bottle.
local = threading.local()
# Initialize app stack (create first empty Bottle app)
# BC: 0.6.4 and needed for run()
app = default_app = AppStack()
app.push()
#: A virtual package that redirects import statements.
#: Example: ``import bottle.ext.sqlite`` actually imports `bottle_sqlite`.
ext = _ImportRedirect(__name__+'.ext', 'bottle_%s').module
if __name__ == '__main__':
    # Command line interface: parse pre-collected options and start a server.
    opt, args, parser = _cmd_options, _cmd_args, _cmd_parser
    if opt.version:
        _stdout('Bottle %s\n'%__version__)
        sys.exit(0)
    if not args:
        parser.print_help()
        _stderr('\nError: No application specified.\n')
        sys.exit(1)

    # Allow 'app.py' style imports from the current directory and make the
    # running module importable under the name 'bottle' as well.
    sys.path.insert(0, '.')
    sys.modules.setdefault('bottle', sys.modules['__main__'])

    host, port = (opt.bind or 'localhost'), 8080
    if ':' in host:
        # NOTE(review): port stays a string after rsplit(); run()/the server
        # adapter presumably coerces it to int -- confirm.
        host, port = host.rsplit(':', 1)

    run(args[0], host=host, port=port, server=opt.server,
        reloader=opt.reload, plugins=opt.plugin, debug=opt.debug)
# THE END
| gpl-2.0 |
Fusion-Rom/android_external_chromium_org | tools/telemetry/telemetry/core/backends/chrome/ios_browser_finder.py | 25 | 4184 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Finds iOS browsers that can be controlled by telemetry."""
import logging
import re
import subprocess
from telemetry import decorators
from telemetry.core import browser
from telemetry.core import platform
from telemetry.core import possible_browser
from telemetry.core.backends.chrome import inspector_backend
from telemetry.core.backends.chrome import ios_browser_backend
from telemetry.core.platform import ios_platform_backend
# Key matches output from ios-webkit-debug-proxy and the value is a readable
# description of the browser.
IOS_BROWSERS = {'CriOS': 'ios-chrome', 'Version': 'ios-safari'}

# Endpoint where ios-webkit-debug-proxy lists attached devices as JSON.
DEVICE_LIST_URL = 'http://127.0.0.1:9221/json'

# Name of the proxy process that bridges to the devices' remote debugging
# protocol.
IOS_WEBKIT_DEBUG_PROXY = 'ios_webkit_debug_proxy'
class PossibleIOSBrowser(possible_browser.PossibleBrowser):
  """A running iOS browser instance."""

  def __init__(self, browser_type, finder_options):
    super(PossibleIOSBrowser, self).__init__(browser_type, 'ios',
        finder_options, True)

  # TODO(baxley): Implement the following methods for iOS.
  def Create(self):
    # Wrap the already-running iOS browser in a telemetry Browser object.
    backend = ios_browser_backend.IosBrowserBackend(
        self.finder_options.browser_options)
    return browser.Browser(backend,
                           self._platform_backend,
                           self._archive_path,
                           self._append_to_existing_wpr,
                           self._make_javascript_deterministic,
                           self._credentials_path)

  def SupportsOptions(self, finder_options):
    #TODO(baxley): Implement me.
    return True

  def UpdateExecutableIfNeeded(self):
    #TODO(baxley): Implement me.
    pass

  def _InitPlatformIfNeeded(self):
    # Lazily create the platform backend the first time it is needed.
    if self._platform:
      return

    self._platform_backend = ios_platform_backend.IosPlatformBackend()
    self._platform = platform.Platform(self._platform_backend)
def SelectDefaultBrowser(_):
  """Return the preferred browser among those found (none yet for iOS)."""
  # TODO(baxley): Implement me.
  return None
def CanFindAvailableBrowsers():
  """Return True if this host can look for iOS browsers (Mac hosts only)."""
  # TODO(baxley): Add support for all platforms possible. Probably Linux,
  # probably not Windows.
  return platform.GetHostPlatform().GetOSName() == 'mac'
def FindAllBrowserTypes(_):
  """Return the readable names of every iOS browser type we know about."""
  return IOS_BROWSERS.values()
@decorators.Cache
def _IsIosDeviceAttached():
  """Return True if a USB-attached iOS device appears in system_profiler.

  Cached (via decorators.Cache): the USB probe is slow and the result is
  stable for the duration of a run.
  """
  devices = subprocess.check_output('system_profiler SPUSBDataType', shell=True)
  for line in devices.split('\n'):
    # Raw string: '\s' is an invalid escape sequence in a plain literal.
    if line and re.match(r'\s*(iPod|iPhone|iPad):', line):
      return True
  return False
def FindAllAvailableBrowsers(finder_options):
  """Find all running iOS browsers on connected devices."""
  if not CanFindAvailableBrowsers():
    return []

  if not _IsIosDeviceAttached():
    return []

  options = finder_options.browser_options
  options.browser_type = 'ios-chrome'
  backend = ios_browser_backend.IosBrowserBackend(options)
  host = platform.GetHostPlatform()
  # Launch the debug proxy if it is not already running; give up if it
  # still is not up afterwards.
  # TODO(baxley): Use idevice to wake up device or log debug statement.
  if not host.IsApplicationRunning(IOS_WEBKIT_DEBUG_PROXY):
    host.LaunchApplication(IOS_WEBKIT_DEBUG_PROXY)
    if not host.IsApplicationRunning(IOS_WEBKIT_DEBUG_PROXY):
      return []

  device_urls = backend.GetDeviceUrls()
  if not device_urls:
    logging.debug('Could not find any devices over %s.'
                  % IOS_WEBKIT_DEBUG_PROXY)
    return []

  debug_urls = backend.GetWebSocketDebuggerUrls(device_urls)

  # Get the userAgent for each UIWebView to find the browsers.
  browser_pattern = ('\)\s(%s)\/(\d+[\.\d]*)\sMobile'
                     % '|'.join(IOS_BROWSERS.keys()))
  browser_types = set()
  for url in debug_urls:
    context = {'webSocketDebuggerUrl':url , 'id':1}
    inspector = inspector_backend.InspectorBackend(backend, context)
    res = inspector.EvaluateJavaScript("navigator.userAgent")
    match_browsers = re.search(browser_pattern, res)
    if match_browsers:
      browser_types.add(match_browsers.group(1))

  # One PossibleIOSBrowser per distinct browser type seen on any device.
  browsers = []
  for browser_type in browser_types:
    browsers.append(PossibleIOSBrowser(IOS_BROWSERS[browser_type],
                                       finder_options))
  return list(browsers)
| bsd-3-clause |
msabramo/ansible | lib/ansible/plugins/shell/fish.py | 45 | 4770 | # (c) 2014, Chris Church <chris@ninemoreminutes.com>
#
# This file is part of Ansible.
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.shell.sh import ShellModule as ShModule
from ansible.compat.six import text_type
from ansible.compat.six.moves import shlex_quote
class ShellModule(ShModule):
    """ Shell plugin for the fish shell.

    Inherits the POSIX sh behavior and overrides the syntax bits that
    differ in fish: logical operators, grouping, and variable export.
    """

    # Common shell filenames that this plugin handles
    COMPATIBLE_SHELLS = frozenset(('fish',))
    # Family of shells this has. Must match the filename without extension
    SHELL_FAMILY = 'fish'

    _SHELL_EMBEDDED_PY_EOL = '\n'
    _SHELL_REDIRECT_ALLNULL = '> /dev/null 2>&1'
    _SHELL_AND = '; and'
    _SHELL_OR = '; or'
    _SHELL_SUB_LEFT = '('
    _SHELL_SUB_RIGHT = ')'
    _SHELL_GROUP_LEFT = ''
    _SHELL_GROUP_RIGHT = ''

    def env_prefix(self, **kwargs):
        # fish exports variables with 'set -lx NAME VALUE', not 'NAME=VALUE'.
        env = self.env.copy()
        env.update(kwargs)
        return ' '.join(['set -lx %s %s;' % (k, shlex_quote(text_type(v))) for k,v in env.items()])

    def build_module_command(self, env_string, shebang, cmd, arg_path=None, rm_tmp=None):
        # don't quote the cmd if it's an empty string, because this will break pipelining mode
        if cmd.strip() != '':
            cmd = shlex_quote(cmd)
        cmd_parts = [env_string.strip(), shebang.replace("#!", "").strip(), cmd]
        if arg_path is not None:
            cmd_parts.append(arg_path)
        new_cmd = " ".join(cmd_parts)
        if rm_tmp:
            # fish uses 'begin ... end' instead of '{ ... }' for grouping.
            new_cmd = 'begin ; %s; rm -rf "%s" %s ; end' % (new_cmd, rm_tmp, self._SHELL_REDIRECT_ALLNULL)
        return new_cmd

    def checksum(self, path, python_interp):
        # The following test is fish-compliant.
        #
        # In the following test, each condition is a check and logical
        # comparison (or or and) that sets the rc value. Every check is run so
        # the last check in the series to fail will be the rc that is
        # returned.
        #
        # If a check fails we error before invoking the hash functions because
        # hash functions may successfully take the hash of a directory on BSDs
        # (UFS filesystem?) which is not what the rest of the ansible code
        # expects
        #
        # If all of the available hashing methods fail we fail with an rc of
        # 0. This logic is added to the end of the cmd at the bottom of this
        # function.

        # Return codes:
        # checksum: success!
        # 0: Unknown error
        # 1: Remote file does not exist
        # 2: No read permissions on the file
        # 3: File is a directory
        # 4: No python interpreter

        # Quoting gets complex here. We're writing a python string that's
        # used by a variety of shells on the remote host to invoke a python
        # "one-liner".
        shell_escaped_path = shlex_quote(path)
        test = "set rc flag; [ -r %(p)s ] %(shell_or)s set rc 2; [ -f %(p)s ] %(shell_or)s set rc 1; [ -d %(p)s ] %(shell_and)s set rc 3; %(i)s -V 2>/dev/null %(shell_or)s set rc 4; [ x\"$rc\" != \"xflag\" ] %(shell_and)s echo \"$rc \"%(p)s %(shell_and)s exit 0" % dict(p=shell_escaped_path, i=python_interp, shell_and=self._SHELL_AND, shell_or=self._SHELL_OR)
        csums = [
            u"({0} -c 'import hashlib; BLOCKSIZE = 65536; hasher = hashlib.sha1();{2}afile = open(\"'{1}'\", \"rb\"){2}buf = afile.read(BLOCKSIZE){2}while len(buf) > 0:{2}\thasher.update(buf){2}\tbuf = afile.read(BLOCKSIZE){2}afile.close(){2}print(hasher.hexdigest())' 2>/dev/null)".format(python_interp, shell_escaped_path, self._SHELL_EMBEDDED_PY_EOL),      # Python > 2.4 (including python3)
            u"({0} -c 'import sha; BLOCKSIZE = 65536; hasher = sha.sha();{2}afile = open(\"'{1}'\", \"rb\"){2}buf = afile.read(BLOCKSIZE){2}while len(buf) > 0:{2}\thasher.update(buf){2}\tbuf = afile.read(BLOCKSIZE){2}afile.close(){2}print(hasher.hexdigest())' 2>/dev/null)".format(python_interp, shell_escaped_path, self._SHELL_EMBEDDED_PY_EOL),      # Python == 2.4
        ]

        cmd = (" %s " % self._SHELL_OR).join(csums)
        cmd = "%s; %s %s (echo \'0 \'%s)" % (test, cmd, self._SHELL_OR, shell_escaped_path)
        return cmd
| gpl-3.0 |
knittledan/imageResizer | PIL/mac/PIL/PcfFontFile.py | 26 | 6192 | #
# THIS IS WORK IN PROGRESS
#
# The Python Imaging Library
# $Id$
#
# portable compiled font file parser
#
# history:
# 1997-08-19 fl created
# 2003-09-13 fl fixed loading of unicode fonts
#
# Copyright (c) 1997-2003 by Secret Labs AB.
# Copyright (c) 1997-2003 by Fredrik Lundh.
#
# See the README file for information on usage and redistribution.
#
from PIL import Image
from PIL import FontFile
from PIL import _binary
# --------------------------------------------------------------------
# declarations

PCF_MAGIC = 0x70636601 # "\x01fcp"

# Table types that may appear in a PCF table of contents (bit flags).
PCF_PROPERTIES = (1 << 0)
PCF_ACCELERATORS = (1 << 1)
PCF_METRICS = (1 << 2)
PCF_BITMAPS = (1 << 3)
PCF_INK_METRICS = (1 << 4)
PCF_BDF_ENCODINGS = (1 << 5)
PCF_SWIDTHS = (1 << 6)
PCF_GLYPH_NAMES = (1 << 7)
PCF_BDF_ACCELERATORS = (1 << 8)

# Bytes per bitmap row for each of the four padding modes (pad rows to
# 1, 2, 4 or 8 byte boundaries), indexed by 'format & 3'.
BYTES_PER_ROW = [
    lambda bits: ((bits+7) >> 3),
    lambda bits: ((bits+15) >> 3) & ~1,
    lambda bits: ((bits+31) >> 3) & ~3,
    lambda bits: ((bits+63) >> 3) & ~7,
]

# Short aliases for the binary readers: i8 = unsigned byte,
# l16/l32 = little-endian, b16/b32 = big-endian integers.
i8 = _binary.i8
l16 = _binary.i16le
l32 = _binary.i32le
b16 = _binary.i16be
b32 = _binary.i32be
def sz(s, o):
    """Return the NUL-terminated byte string that starts at offset *o* in *s*."""
    end = s.index(b"\0", o)
    return s[o:end]
##
# Font file plugin for the X11 PCF format.
class PcfFontFile(FontFile.FontFile):
    """ Parser for X11 PCF (portable compiled font) files. """

    name = "name"

    def __init__(self, fp):
        # A PCF file starts with a 4-byte little-endian magic number.
        magic = l32(fp.read(4))
        if magic != PCF_MAGIC:
            raise SyntaxError("not a PCF file")

        FontFile.FontFile.__init__(self)

        # Read the table of contents: (format, size, offset) per table type.
        count = l32(fp.read(4))
        self.toc = {}
        for i in range(count):
            type = l32(fp.read(4))
            self.toc[type] = l32(fp.read(4)), l32(fp.read(4)), l32(fp.read(4))

        self.fp = fp

        self.info = self._load_properties()

        metrics = self._load_metrics()
        bitmaps = self._load_bitmaps(metrics)
        encoding = self._load_encoding()

        #
        # create glyph structure

        for ch in range(256):
            ix = encoding[ch]
            if ix is not None:
                x, y, l, r, w, a, d, f = metrics[ix]
                glyph = (w, 0), (l, d-y, x+l, d), (0, 0, x, y), bitmaps[ix]
                self.glyph[ch] = glyph

    def _getformat(self, tag):
        # Seek to a table and return (fp, format word, int readers) where the
        # readers match the byte order declared in the format word.
        format, size, offset = self.toc[tag]

        fp = self.fp
        fp.seek(offset)

        format = l32(fp.read(4))

        if format & 4:
            i16, i32 = b16, b32
        else:
            i16, i32 = l16, l32

        return fp, format, i16, i32

    def _load_properties(self):

        #
        # font properties

        properties = {}

        fp, format, i16, i32 = self._getformat(PCF_PROPERTIES)

        nprops = i32(fp.read(4))

        # read property description
        p = []
        for i in range(nprops):
            p.append((i32(fp.read(4)), i8(fp.read(1)), i32(fp.read(4))))
        if nprops & 3:
            fp.seek(4 - (nprops & 3), 1) # pad

        # Names (and string values) are offsets into a shared string pool.
        data = fp.read(i32(fp.read(4)))

        for k, s, v in p:
            k = sz(data, k)
            if s:
                v = sz(data, v)
            properties[k] = v

        return properties

    def _load_metrics(self):

        #
        # font metrics

        metrics = []

        fp, format, i16, i32 = self._getformat(PCF_METRICS)

        append = metrics.append

        if (format & 0xff00) == 0x100:

            # "compressed" metrics: one unsigned byte per field, offset by 128
            for i in range(i16(fp.read(2))):
                left = i8(fp.read(1)) - 128
                right = i8(fp.read(1)) - 128
                width = i8(fp.read(1)) - 128
                ascent = i8(fp.read(1)) - 128
                descent = i8(fp.read(1)) - 128
                xsize = right - left
                ysize = ascent + descent
                append(
                    (xsize, ysize, left, right, width,
                     ascent, descent, 0)
                    )

        else:

            # "jumbo" metrics: full 16-bit fields plus an attributes word
            for i in range(i32(fp.read(4))):
                left = i16(fp.read(2))
                right = i16(fp.read(2))
                width = i16(fp.read(2))
                ascent = i16(fp.read(2))
                descent = i16(fp.read(2))
                attributes = i16(fp.read(2))
                xsize = right - left
                ysize = ascent + descent
                append(
                    (xsize, ysize, left, right, width,
                     ascent, descent, attributes)
                    )

        return metrics

    def _load_bitmaps(self, metrics):

        #
        # bitmap data

        bitmaps = []

        fp, format, i16, i32 = self._getformat(PCF_BITMAPS)

        nbitmaps = i32(fp.read(4))

        if nbitmaps != len(metrics):
            raise IOError("Wrong number of bitmaps")

        offsets = []
        for i in range(nbitmaps):
            offsets.append(i32(fp.read(4)))

        # Four alternative total sizes, one per row-padding mode.
        bitmapSizes = []
        for i in range(4):
            bitmapSizes.append(i32(fp.read(4)))

        byteorder = format & 4 # non-zero => MSB
        bitorder = format & 8 # non-zero => MSB
        padindex = format & 3

        bitmapsize = bitmapSizes[padindex]
        offsets.append(bitmapsize)

        data = fp.read(bitmapsize)

        pad = BYTES_PER_ROW[padindex]
        mode = "1;R"
        if bitorder:
            mode = "1"

        # Slice each glyph's raw bits out of the blob using adjacent offsets.
        for i in range(nbitmaps):
            x, y, l, r, w, a, d, f = metrics[i]
            b, e = offsets[i], offsets[i+1]
            bitmaps.append(
                Image.frombytes("1", (x, y), data[b:e], "raw", mode, pad(x))
                )

        return bitmaps

    def _load_encoding(self):

        # map character code to bitmap index
        encoding = [None] * 256

        fp, format, i16, i32 = self._getformat(PCF_BDF_ENCODINGS)

        firstCol, lastCol = i16(fp.read(2)), i16(fp.read(2))
        firstRow, lastRow = i16(fp.read(2)), i16(fp.read(2))

        default = i16(fp.read(2))

        nencoding = (lastCol - firstCol + 1) * (lastRow - firstRow + 1)

        # 0xFFFF marks "no glyph" for a code point.
        for i in range(nencoding):
            encodingOffset = i16(fp.read(2))
            if encodingOffset != 0xFFFF:
                try:
                    encoding[i+firstCol] = encodingOffset
                except IndexError:
                    break # only load ISO-8859-1 glyphs

        return encoding
| mit |
OSVR/UIforETWbins | bin/IdentifyChromeProcesses.py | 1 | 5681 | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import sys
import os
import re
def main():
if len(sys.argv) < 2:
print("Usage: %s tracename" % sys.argv[0])
sys.exit(0)
# Typical output of -a process -withcmdline looks like:
# MIN, 24656403, Process, 0XA1141C60, chrome.exe ( 748), 10760, 1, 0x11e8c260, "C:\...\chrome.exe" --type=renderer ...
# Find the PID and ParentPID
pidsRe = re.compile(r".*\(([\d ]*)\), *(\d*),.*")
# Find the space-terminated word after 'type='. This used to require that it
# be the first command-line option, but that is likely to not always be true.
processTypeRe = re.compile(r".* --type=([^ ]*) .*")
tracename = sys.argv[1]
#-tle = tolerate lost events
#-tti = tolerate time ivnersions
#-a process = show process, thread, image information (see xperf -help processing)
#-withcmdline = show command line in process reports (see xperf -help process)
command = 'xperf -i "%s" -tle -tti -a process -withcmdline' % tracename
# Group all of the chrome.exe processes by browser Pid, then by type.
# pathByBrowserPid just maps from the browser Pid to the disk path to chrome.exe
pathByBrowserPid = {}
# pidsByParent is a dictionary that is indexed by the browser Pid. It contains
# a dictionary that is indexed by process type with each entry's payload
# being a list of Pids (for example, a list of renderer processes).
pidsByParent = {}
# Dictionary of Pids and their lines of data
lineByPid = {}
for line in os.popen(command).readlines():
# Split the commandline from the .csv data and then extract the exePath.
# It may or may not be quoted, and may or not have the .exe suffix.
parts = line.split(", ")
if len(parts) > 8:
processName = parts[4]
commandLine = parts[8]
if commandLine[0] == '"':
exePath = commandLine[1:commandLine.find('"', 1)]
else:
exePath = commandLine.split(" ")[0]
# The exepath may omit the ".exe" suffix so we need to look at processName
# instead.
if processName.count("chrome.exe") > 0:
pids = pidsRe.match(line)
pid = int(pids.groups()[0])
parentPid = int(pids.groups()[1])
lineByPid[pid] = line
match = processTypeRe.match(commandLine)
if match:
type = match.groups()[0]
browserPid = parentPid
else:
type = "browser"
browserPid = pid
pathByBrowserPid[browserPid] = exePath
# Retrieve or create the list of processes associated with this
# browser (parent) pid.
pidsByType = pidsByParent.get(browserPid, {})
pidList = list(pidsByType.get(type, []))
pidList.append(pid)
pidsByType[type] = pidList
pidsByParent[browserPid] = pidsByType
# Scan a copy of the list of browser Pids looking for those with parents
# in the list and no children. These represent child processes whose --type=
# option was too far along in the command line for ETW's 512-character capture
# to get. See crbug.com/614502 for how this happened.
# This should probably be deleted at some point, along with the declaration and
# initialization of lineByPid.
for pid in pathByBrowserPid.keys()[:]:
# Checking that there is only one entry (itself) in the list is important
# to avoid problems caused by Pid reuse that could cause one browser process
# to appear to be another browser process' parent.
if len(pidsByParent[pid]) == 1: # The 'browser' appears in its own list
line = lineByPid[pid]
pids = pidsRe.match(line)
pid = int(pids.groups()[0])
parentPid = int(pids.groups()[1])
if pathByBrowserPid.has_key(parentPid):
browserPid = parentPid
# Retrieve the list of processes associated with this
# browser (parent) pid.
pidsByType = pidsByParent[browserPid]
type = "gpu???"
pidList = list(pidsByType.get(type, []))
pidList.append(pid)
pidsByType[type] = pidList
pidsByParent[browserPid] = pidsByType
# Delete the references to the process that we now know isn't a browser
# process.
del pathByBrowserPid[pid]
del pidsByParent[pid]
print("Chrome PIDs by process type:\r")
for browserPid in pidsByParent.keys():
exePath = pathByBrowserPid[browserPid]
# Any paths with no entries in them should be ignored.
pidsByType = pidsByParent[browserPid]
if len(pidsByType) == 0:
assert False
continue
print("%s (%d)\r" % (exePath, browserPid))
keys = list(pidsByType.keys())
keys.sort()
# Note the importance of printing the '\r' so that the
# output will be compatible with Windows edit controls.
for type in keys:
print(" %-11s : " % type, end="")
for pid in pidsByType[type]:
print("%d " % pid, end="")
print("\r")
print("\r")
if __name__ == "__main__":
main()
| apache-2.0 |
ar4s/django | django/core/files/uploadhandler.py | 3 | 6820 | """
Base file upload handler classes, and the built-in concrete subclasses
"""
from __future__ import unicode_literals
from io import BytesIO
from django.conf import settings
from django.core.files.uploadedfile import TemporaryUploadedFile, InMemoryUploadedFile
from django.utils.encoding import python_2_unicode_compatible
from django.utils.module_loading import import_by_path
__all__ = ['UploadFileException','StopUpload', 'SkipFile', 'FileUploadHandler',
'TemporaryFileUploadHandler', 'MemoryFileUploadHandler',
'load_handler', 'StopFutureHandlers']
class UploadFileException(Exception):
    """
    Base class for every error raised while handling a file upload.
    """
    pass
@python_2_unicode_compatible
class StopUpload(UploadFileException):
    """
    This exception is raised when an upload must abort.
    """
    def __init__(self, connection_reset=False):
        """
        If ``connection_reset`` is ``True``, Django will halt the upload
        without consuming the rest of the upload. This will cause the browser
        to show a "connection reset" error.
        """
        self.connection_reset = connection_reset
    def __str__(self):
        # The message distinguishes the two abort strategies (hard halt vs
        # consume-then-halt) for logs and debugging.
        if self.connection_reset:
            return 'StopUpload: Halt current upload.'
        else:
            return 'StopUpload: Consume request data, then halt.'
class SkipFile(UploadFileException):
    """
    Raised by an upload handler that wants the current file to be skipped.
    """
    pass
class StopFutureHandlers(UploadFileException):
    """
    Upload handlers that have handled a file and do not want future handlers
    to run should raise this exception instead of returning None.
    """
    pass
class FileUploadHandler(object):
    """
    Base class for streaming upload handlers.

    Subclasses must implement ``receive_data_chunk()`` and
    ``file_complete()``; the remaining hooks are optional.
    """
    chunk_size = 64 * 2 ** 10  #: The default chunk size is 64 KB.

    def __init__(self, request=None):
        # Per-file state; populated by new_file() for each uploaded file.
        self.file_name = None
        self.content_type = None
        self.content_length = None
        self.charset = None
        self.content_type_extra = None
        self.request = request

    def handle_raw_input(self, input_data, META, content_length, boundary, encoding=None):
        """
        Handle the raw input from the client.

        Parameters:

            :input_data:
                An object that supports reading via .read().
            :META:
                ``request.META``.
            :content_length:
                The (integer) value of the Content-Length header from the
                client.
            :boundary: The boundary from the Content-Type header. Be sure to
                prepend two '--'.
        """
        pass

    def new_file(self, field_name, file_name, content_type, content_length, charset=None, content_type_extra=None):
        """
        Signal that a new file has been started.

        Warning: As with any data from the client, you should not trust
        content_length (and sometimes won't even get it).
        """
        self.field_name = field_name
        self.file_name = file_name
        self.content_type = content_type
        self.content_length = content_length
        self.charset = charset
        self.content_type_extra = content_type_extra

    def receive_data_chunk(self, raw_data, start):
        """
        Receive data from the streamed upload parser. ``start`` is the position
        in the file of the chunk.
        """
        # BUGFIX: the error message previously misspelled the method name
        # as 'recieve_data_chunk'.
        raise NotImplementedError('subclasses of FileUploadHandler must provide a receive_data_chunk() method')

    def file_complete(self, file_size):
        """
        Signal that a file has completed. File size corresponds to the actual
        size accumulated by all the chunks.

        Subclasses should return a valid ``UploadedFile`` object.
        """
        raise NotImplementedError('subclasses of FileUploadHandler must provide a file_complete() method')

    def upload_complete(self):
        """
        Signal that the upload is complete. Subclasses should perform cleanup
        that is necessary for this handler.
        """
        pass
class TemporaryFileUploadHandler(FileUploadHandler):
    """
    Upload handler that streams incoming data straight into a temporary file.
    """
    def __init__(self, *args, **kwargs):
        super(TemporaryFileUploadHandler, self).__init__(*args, **kwargs)

    def new_file(self, file_name, *args, **kwargs):
        """Open a fresh temporary file to which incoming chunks are appended."""
        super(TemporaryFileUploadHandler, self).new_file(file_name, *args, **kwargs)
        self.file = TemporaryUploadedFile(self.file_name, self.content_type, 0,
                                          self.charset, self.content_type_extra)

    def receive_data_chunk(self, raw_data, start):
        # Append each parsed chunk to the backing temporary file.
        self.file.write(raw_data)

    def file_complete(self, file_size):
        # Rewind so consumers read from the start, record the final size,
        # and hand the temporary file back as the uploaded file object.
        self.file.seek(0)
        self.file.size = file_size
        return self.file
class MemoryFileUploadHandler(FileUploadHandler):
    """
    File upload handler to stream uploads into memory (used for small files).
    """
    def handle_raw_input(self, input_data, META, content_length, boundary, encoding=None):
        """
        Use the content_length to signal whether or not this handler should be in use.
        """
        # Only buffer the upload in memory when it fits under the cap;
        # larger posts must fall through to a disk-based handler.
        self.activated = content_length <= settings.FILE_UPLOAD_MAX_MEMORY_SIZE

    def new_file(self, *args, **kwargs):
        super(MemoryFileUploadHandler, self).new_file(*args, **kwargs)
        if self.activated:
            self.file = BytesIO()
            # We will handle this file entirely; stop the handler chain.
            raise StopFutureHandlers()

    def receive_data_chunk(self, raw_data, start):
        """
        Buffer the chunk in memory, or pass it through when not activated.
        """
        if not self.activated:
            # Returning the data lets the next handler process it.
            return raw_data
        self.file.write(raw_data)

    def file_complete(self, file_size):
        """
        Return a file object if we're activated.
        """
        if not self.activated:
            return
        self.file.seek(0)
        return InMemoryUploadedFile(
            file=self.file,
            field_name=self.field_name,
            name=self.file_name,
            content_type=self.content_type,
            size=file_size,
            charset=self.charset,
            content_type_extra=self.content_type_extra
        )
def load_handler(path, *args, **kwargs):
    """
    Given a path to a handler, return an instance of that handler.

    E.g.::
        >>> load_handler('django.core.files.uploadhandler.TemporaryFileUploadHandler', request)
        <TemporaryFileUploadHandler object at 0x...>
    """
    handler_class = import_by_path(path)
    return handler_class(*args, **kwargs)
| bsd-3-clause |
RedbackThomson/LoLShadow | sleekxmpp/plugins/xep_0224/attention.py | 14 | 2180 | """
SleekXMPP: The Sleek XMPP Library
Copyright (C) 2011 Nathanael C. Fritz, Lance J.T. Stout
This file is part of SleekXMPP.
See the file LICENSE for copying permission.
"""
import logging
from sleekxmpp.stanza import Message
from sleekxmpp.xmlstream import register_stanza_plugin
from sleekxmpp.xmlstream.handler import Callback
from sleekxmpp.xmlstream.matcher import StanzaPath
from sleekxmpp.plugins import BasePlugin
from sleekxmpp.plugins.xep_0224 import stanza
log = logging.getLogger(__name__)
class XEP_0224(BasePlugin):
    """
    XEP-0224: Attention
    """
    # Plugin registration metadata consumed by the SleekXMPP plugin loader.
    name = 'xep_0224'
    description = 'XEP-0224: Attention'
    dependencies = set(['xep_0030'])
    stanza = stanza
    def plugin_init(self):
        """Start the XEP-0224 plugin."""
        # Allow <attention/> payloads to be read/set on Message stanzas.
        register_stanza_plugin(Message, stanza.Attention)
        # Route incoming messages carrying an attention payload to our handler.
        self.xmpp.register_handler(
                Callback('Attention',
                         StanzaPath('message/attention'),
                         self._handle_attention))
    def plugin_end(self):
        # Stop advertising the feature and remove the stanza handler.
        self.xmpp['xep_0030'].del_feature(feature=stanza.Attention.namespace)
        self.xmpp.remove_handler('Attention')
    def session_bind(self, jid):
        # Advertise attention support via service discovery (XEP-0030).
        self.xmpp['xep_0030'].add_feature(stanza.Attention.namespace)
    def request_attention(self, to, mfrom=None, mbody=''):
        """
        Send an attention message with an optional body.

        Arguments:
            to    -- The attention request recipient's JID.
            mfrom -- Optionally specify the sender of the attention request.
            mbody -- An optional message body to include in the request.
        """
        m = self.xmpp.Message()
        m['to'] = to
        m['type'] = 'headline'
        m['attention'] = True
        if mfrom:
            m['from'] = mfrom
        m['body'] = mbody
        m.send()
    def _handle_attention(self, msg):
        """
        Raise an event after receiving a message with an attention request.

        Arguments:
            msg -- A message stanza with an attention element.
        """
        log.debug("Received attention request from: %s", msg['from'])
        # Re-publish as a top-level 'attention' event for application code.
        self.xmpp.event('attention', msg)
| mit |
tectronics/prometeo-erp | hr/views/leaverequests.py | 3 | 5705 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""This file is part of the prometeo project.
This program is free software: you can redistribute it and/or modify it
under the terms of the GNU Lesser General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your
option) any later version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU Lesser General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>
"""
__author__ = 'Emanuele Bertoldi <emanuele.bertoldi@gmail.com>'
__copyright__ = 'Copyright (c) 2011 Emanuele Bertoldi'
__version__ = '0.0.5'
from datetime import datetime
from django.shortcuts import render_to_response, get_object_or_404
from django.utils.translation import ugettext_lazy as _
from django.views.generic import list_detail, create_update
from django.views.generic.simple import redirect_to
from django.core.urlresolvers import reverse
from django.template import RequestContext
from django.contrib.auth.models import User
from django.contrib import messages
from django.conf import settings
from prometeo.core.auth.decorators import obj_permission_required as permission_required
from prometeo.core.views import filtered_list_detail
from prometeo.documents.models import *
from prometeo.documents.forms import *
from prometeo.documents.views import *
from ..forms import *
def _get_leaverequest(request, *args, **kwargs):
    """Fetch the LeaveRequest identified by the ``id`` kwarg, or raise 404."""
    leaverequest_id = kwargs.get('id', None)
    return get_object_or_404(LeaveRequest, id=leaverequest_id)
@permission_required('hr.view_leaverequest')
def leaverequest_list(request, page=0, paginate_by=10, **kwargs):
    """Displays the list of all filtered leave requests.
    """
    # Leave requests are stored wrapped in generic Documents, so the list is
    # built from the documents whose content object is a LeaveRequest.
    return filtered_list_detail(
        request,
        Document.objects.get_for_content(LeaveRequest),
        fields=['code', 'author', 'created', 'owner', 'status'],
        paginate_by=paginate_by,
        page=page,
        template_name='hr/leaverequest_list.html',
        **kwargs
    )
@permission_required('hr.view_leaverequest', _get_leaverequest)
def leaverequest_detail(request, id, **kwargs):
    """Displays a leave request.
    """
    # The generic detail view is keyed on the wrapping Document's pk, so the
    # incoming leave request id is first translated to the document record.
    object_list = Document.objects.get_for_content(LeaveRequest)
    return list_detail.object_detail(
        request,
        object_id=object_list.get(object_id=id).pk,
        queryset=object_list,
        template_name=kwargs.pop('template_name', 'hr/leaverequest_detail.html'),
        extra_context={'object_list': object_list},
        **kwargs
    )
@permission_required('hr.add_leaverequest')
def leaverequest_add(request, **kwargs):
    """Adds a new leave request.
    """
    # Every leave request is wrapped in a Document carrying authorship and
    # workflow metadata; both objects are edited through paired forms.
    leaverequest = LeaveRequest()
    doc = Document(author=request.user, content_object=leaverequest)
    if request.method == 'POST':
        dform = DocumentForm(request.POST, instance=doc)
        form = LeaveRequestForm(request.POST, instance=leaverequest)
        if form.is_valid() and dform.is_valid():
            form.save()
            # Re-point the document at the now-saved leave request before
            # saving, so the generic relation references a persisted object.
            doc.content_object = leaverequest
            dform.save()
            messages.success(request, _("The leave request was created successfully."))
            return redirect_to(request, url=doc.get_absolute_url())
    else:
        dform = DocumentForm(instance=doc)
        form = LeaveRequestForm(instance=leaverequest)
    # On validation failure this falls through and re-renders the bound forms.
    return render_to_response('hr/leaverequest_edit.html', RequestContext(request, {'form': form, 'dform': dform, 'object': doc}))
@permission_required('hr.change_leaverequest', _get_leaverequest)
def leaverequest_edit(request, id, **kwargs):
    """Edits a leave request.
    """
    document = Document.objects.get_for_content(LeaveRequest).get(object_id=id)
    leave_request = document.content_object
    if request.method == 'POST':
        document_form = DocumentForm(request.POST, instance=document)
        leave_form = LeaveRequestForm(request.POST, instance=leave_request)
        if leave_form.is_valid() and document_form.is_valid():
            leave_form.save()
            document_form.save()
            messages.success(request, _("The leave request was updated successfully."))
            return redirect_to(request, url=document.get_absolute_url())
    else:
        document_form = DocumentForm(instance=document)
        leave_form = LeaveRequestForm(instance=leave_request)
    # Invalid POST data falls through and re-renders the bound forms.
    return render_to_response('hr/leaverequest_edit.html', RequestContext(request, {'form': leave_form, 'dform': document_form, 'object': document}))
@permission_required('hr.delete_leaverequest', _get_leaverequest)
def leaverequest_delete(request, id, **kwargs):
    """Deletes a leave request.
    """
    # Deletion goes through the wrapping Document, so the generic delete view
    # is pointed at the document pk rather than the leave request id.
    return create_update.delete_object(
        request,
        model=Document,
        object_id=Document.objects.get_for_content(LeaveRequest).get(object_id=id).pk,
        post_delete_redirect=reverse('leaverequest_list'),
        template_name='hr/leaverequest_delete.html',
        **kwargs
    )
@permission_required('hr.change_leaverequest', _get_leaverequest)
def leaverequest_hardcopies(request, id, page=0, paginate_by=10, **kwargs):
    """Shows leave request hard copies.
    """
    # Delegates to the generic documents hardcopy_list view, translating the
    # leave request id into the wrapping Document's pk.
    return hardcopy_list(request, Document.objects.get_for_content(LeaveRequest).get(object_id=id).pk, page, paginate_by, **kwargs)
@permission_required('hr.change_leaverequest', _get_leaverequest)
def leaverequest_add_hardcopy(request, id, **kwargs):
    """Adds an hard copy to the given document.
    """
    # Delegates to the generic documents hardcopy_add view with the Document pk.
    return hardcopy_add(request, Document.objects.get_for_content(LeaveRequest).get(object_id=id).pk, **kwargs)
| lgpl-3.0 |
maithreyee/python-koans | python3/koans/about_methods.py | 82 | 5312 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Partially based on AboutMethods in the Ruby Koans
#
from runner.koan import *
def my_global_function(a, b):
    """Return the sum of *a* and *b* (exercised by the koans below)."""
    total = a + b
    return total
class AboutMethods(Koan):
    """Koan exercises about defining and calling methods and functions.

    The ``__`` placeholders are intentionally left for the student to fill in.
    """
    def test_calling_a_global_function(self):
        self.assertEqual(__, my_global_function(2,3))
    # NOTE: Wrong number of arguments is not a SYNTAX error, but a
    # runtime error.
    def test_calling_functions_with_wrong_number_of_arguments(self):
        try:
            my_global_function()
        except TypeError as exception:
            msg = exception.args[0]
        # Note, the text comparison works for Python 3.2
        # It has changed in the past and may change in the future
        self.assertRegexpMatches(msg,
            r'my_global_function\(\) missing 2 required positional arguments')
        try:
            my_global_function(1, 2, 3)
        except Exception as e:
            msg = e.args[0]
            # Note, watch out for parenthesis. They need slashes in front!
            self.assertRegexpMatches(msg, __)
    # ------------------------------------------------------------------
    def pointless_method(self, a, b):
        sum = a + b
    def test_which_does_not_return_anything(self):
        self.assertEqual(__, self.pointless_method(1, 2))
        # Notice that methods accessed from class scope do not require
        # you to pass the first "self" argument?
    # ------------------------------------------------------------------
    def method_with_defaults(self, a, b='default_value'):
        return [a, b]
    def test_calling_with_default_values(self):
        self.assertEqual(__, self.method_with_defaults(1))
        self.assertEqual(__, self.method_with_defaults(1, 2))
    # ------------------------------------------------------------------
    def method_with_var_args(self, *args):
        return args
    def test_calling_with_variable_arguments(self):
        self.assertEqual(__, self.method_with_var_args())
        self.assertEqual(('one',), self.method_with_var_args('one'))
        self.assertEqual(__, self.method_with_var_args('one', 'two'))
    # ------------------------------------------------------------------
    def function_with_the_same_name(self, a, b):
        return a + b
    def test_functions_without_self_arg_are_global_functions(self):
        def function_with_the_same_name(a, b):
            return a * b
        self.assertEqual(__, function_with_the_same_name(3,4))
    def test_calling_methods_in_same_class_with_explicit_receiver(self):
        def function_with_the_same_name(a, b):
            return a * b
        self.assertEqual(__, self.function_with_the_same_name(3,4))
    # ------------------------------------------------------------------
    def another_method_with_the_same_name(self):
        return 10
    # The assignment below captures the method object BEFORE it is redefined.
    link_to_overlapped_method = another_method_with_the_same_name
    def another_method_with_the_same_name(self):
        return 42
    def test_that_old_methods_are_hidden_by_redefinitions(self):
        self.assertEqual(__, self.another_method_with_the_same_name())
    def test_that_overlapped_method_is_still_there(self):
        self.assertEqual(__, self.link_to_overlapped_method())
    # ------------------------------------------------------------------
    def empty_method(self):
        pass
    def test_methods_that_do_nothing_need_to_use_pass_as_a_filler(self):
        self.assertEqual(__, self.empty_method())
    def test_pass_does_nothing_at_all(self):
        "You"
        "shall"
        "not"
        pass
        self.assertEqual(____, "Still got to this line" != None)
    # ------------------------------------------------------------------
    def one_line_method(self): return 'Madagascar'
    def test_no_indentation_required_for_one_line_statement_bodies(self):
        self.assertEqual(__, self.one_line_method())
    # ------------------------------------------------------------------
    def method_with_documentation(self):
        "A string placed at the beginning of a function is used for documentation"
        return "ok"
    def test_the_documentation_can_be_viewed_with_the_doc_method(self):
        self.assertRegexpMatches(self.method_with_documentation.__doc__, __)
    # ------------------------------------------------------------------
    class Dog:
        def name(self):
            return "Fido"
        def _tail(self):
            # Prefixing a method with an underscore implies private scope
            return "wagging"
        def __password(self):
            return 'password' # Genius!
    def test_calling_methods_in_other_objects(self):
        rover = self.Dog()
        self.assertEqual(__, rover.name())
    def test_private_access_is_implied_but_not_enforced(self):
        rover = self.Dog()
        # This is a little rude, but legal
        self.assertEqual(__, rover._tail())
    def test_attributes_with_double_underscore_prefixes_are_subject_to_name_mangling(self):
        rover = self.Dog()
        with self.assertRaises(___): password = rover.__password()
        # But this still is!
        self.assertEqual(__, rover._Dog__password())
        # Name mangling exists to avoid name clash issues when subclassing.
        # It is not for providing effective access protection
| mit |
petrutlucian94/nova | nova/virt/vmwareapi/images.py | 7 | 19145 | # Copyright (c) 2012 VMware, Inc.
# Copyright (c) 2011 Citrix Systems, Inc.
# Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Utility functions for Image transfer and manipulation.
"""
import os
import tarfile
import tempfile
from lxml import etree
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import strutils
from oslo_utils import units
from oslo_vmware import rw_handles
from nova import exception
from nova.i18n import _, _LE, _LI
from nova import image
from nova.virt.vmwareapi import constants
from nova.virt.vmwareapi import io_util
# NOTE(mdbooth): We use use_linked_clone below, but don't have to import it
# because nova.virt.vmwareapi.driver is imported first. In fact, it is not
# possible to import it here, as nova.virt.vmwareapi.driver calls
# CONF.register_opts() after the import chain which imports this module. This
# is not a problem as long as the import order doesn't change.
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
IMAGE_API = image.API()
QUEUE_BUFFER_SIZE = 10
LINKED_CLONE_PROPERTY = 'vmware_linked_clone'
class VMwareImage(object):
    """Subset of image properties the VMware driver uses to build VMs."""

    def __init__(self, image_id,
                 file_size=0,
                 os_type=constants.DEFAULT_OS_TYPE,
                 adapter_type=constants.DEFAULT_ADAPTER_TYPE,
                 disk_type=constants.DEFAULT_DISK_TYPE,
                 container_format=constants.CONTAINER_FORMAT_BARE,
                 file_type=constants.DEFAULT_DISK_FORMAT,
                 linked_clone=None,
                 vif_model=constants.DEFAULT_VIF_MODEL):
        """VMwareImage holds values for use in building VMs.

        image_id (str): uuid of the image
        file_size (int): size of file in bytes
        os_type (str): name of guest os (use vSphere names only)
        adapter_type (str): name of the adapter's type
        disk_type (str): type of disk in thin, thick, etc
        container_format (str): container format (bare or ova)
        file_type (str): vmdk or iso
        linked_clone(bool): use linked clone, or don't

        :raises exception.InvalidDiskFormat: if file_type is not a
            recognized disk format
        """
        self.image_id = image_id
        self.file_size = file_size
        self.os_type = os_type
        self.adapter_type = adapter_type
        self.container_format = container_format
        self.disk_type = disk_type
        self.file_type = file_type

        # NOTE(vui): This should be removed when we restore the
        # descriptor-based validation.
        if (self.file_type is not None and
            self.file_type not in constants.DISK_FORMATS_ALL):
            raise exception.InvalidDiskFormat(disk_format=self.file_type)

        if linked_clone is not None:
            self.linked_clone = linked_clone
        else:
            # Fall back to the deployment-wide default when the image does
            # not specify a preference.
            self.linked_clone = CONF.vmware.use_linked_clone
        self.vif_model = vif_model

    @property
    def file_size_in_kb(self):
        # Image file size expressed in KB.
        return self.file_size / units.Ki

    @property
    def is_sparse(self):
        # True for sparse-format vmdk images.
        return self.disk_type == constants.DISK_TYPE_SPARSE

    @property
    def is_iso(self):
        # True when the image is an ISO rather than a vmdk.
        return self.file_type == constants.DISK_FORMAT_ISO

    @property
    def is_ova(self):
        # True when the image is packaged as an OVA archive.
        return self.container_format == constants.CONTAINER_FORMAT_OVA

    @classmethod
    def from_image(cls, image_id, image_meta=None):
        """Returns VMwareImage, the subset of properties the driver uses.

        :param image_id - image id of image
        :param image_meta - image metadata we are working with
        :return: vmware image object
        :rtype: nova.virt.vmwareapi.images.VmwareImage
        """
        if image_meta is None:
            image_meta = {}

        properties = image_meta.get("properties", {})

        # calculate linked_clone flag, allow image properties to override the
        # global property set in the configurations.
        image_linked_clone = properties.get(LINKED_CLONE_PROPERTY,
                                            CONF.vmware.use_linked_clone)

        # catch any string values that need to be interpreted as boolean values
        linked_clone = strutils.bool_from_string(image_linked_clone)

        props = {
            'image_id': image_id,
            'linked_clone': linked_clone,
            'container_format': image_meta.get('container_format')
        }

        if 'size' in image_meta:
            props['file_size'] = image_meta['size']
        if 'disk_format' in image_meta:
            props['file_type'] = image_meta['disk_format']

        props_map = {
            'vmware_ostype': 'os_type',
            'vmware_adaptertype': 'adapter_type',
            'vmware_disktype': 'disk_type',
            'hw_vif_model': 'vif_model'
        }

        # NOTE: items() (not the Python 2-only iteritems()) keeps this code
        # working on both Python 2 and Python 3.
        for k, v in props_map.items():
            if k in properties:
                props[v] = properties[k]

        return cls(**props)
def start_transfer(context, read_file_handle, data_size,
                   write_file_handle=None, image_id=None, image_meta=None):
    """Start the data transfer from the reader to the writer.

    Reader writes to the pipe and the writer reads from the pipe. This means
    that the total transfer time boils down to the slower of the read/write
    and not the addition of the two times.

    :param context: request context
    :param read_file_handle: source handle supplying data via ``read()``
    :param data_size: total number of bytes expected to be transferred
    :param write_file_handle: when given, destination handle (datastore write)
    :param image_id: when given (and no write handle), target Glance image id
    :param image_meta: metadata to apply to the Glance image when uploading
    :raises exception.NovaException: if the read or write thread fails
    """
    if not image_meta:
        image_meta = {}

    # The pipe that acts as an intermediate store of data for reader to write
    # to and writer to grab from.
    thread_safe_pipe = io_util.ThreadSafePipe(QUEUE_BUFFER_SIZE, data_size)
    # The read thread. In case of glance it is the instance of the
    # GlanceFileRead class. The glance client read returns an iterator
    # and this class wraps that iterator to provide datachunks in calls
    # to read.
    read_thread = io_util.IOThread(read_file_handle, thread_safe_pipe)

    # In case of Glance - VMware transfer, we just need a handle to the
    # HTTP Connection that is to send transfer data to the VMware datastore.
    if write_file_handle:
        write_thread = io_util.IOThread(thread_safe_pipe, write_file_handle)
    # In case of VMware - Glance transfer, we relinquish VMware HTTP file read
    # handle to Glance Client instance, but to be sure of the transfer we need
    # to be sure of the status of the image on glance changing to active.
    # The GlanceWriteThread handles the same for us.
    elif image_id:
        write_thread = io_util.GlanceWriteThread(context, thread_safe_pipe,
                                                 image_id, image_meta)
    # Start the read and write threads.
    read_event = read_thread.start()
    write_event = write_thread.start()
    try:
        # Wait on the read and write events to signal their end
        read_event.wait()
        write_event.wait()
    except Exception as exc:
        # In case of any of the reads or writes raising an exception,
        # stop the threads so that we un-necessarily don't keep the other one
        # waiting.
        read_thread.stop()
        write_thread.stop()

        # Log and raise the exception.
        LOG.exception(_LE('Transfer data failed'))
        raise exception.NovaException(exc)
    finally:
        # No matter what, try closing the read and write handles, if it so
        # applies.
        read_file_handle.close()
        if write_file_handle:
            write_file_handle.close()
def upload_iso_to_datastore(iso_path, instance, **kwargs):
    """Upload a local ISO file to the VMware datastore.

    :param iso_path: path of the ISO file on the local filesystem
    :param instance: instance the ISO belongs to (used for log context)
    :param kwargs: host, port, data_center_name, datastore_name, cookies and
        file_path used to build the datastore HTTP write handle
    """
    LOG.debug("Uploading iso %s to datastore", iso_path,
              instance=instance)
    # BUGFIX: open in binary mode -- an ISO is binary data and text mode
    # would corrupt it on platforms that translate line endings.
    with open(iso_path, 'rb') as iso_file:
        # Compute the size once and reuse it for the handle and the log.
        iso_size = os.fstat(iso_file.fileno()).st_size
        write_file_handle = rw_handles.FileWriteHandle(
            kwargs.get("host"),
            kwargs.get("port"),
            kwargs.get("data_center_name"),
            kwargs.get("datastore_name"),
            kwargs.get("cookies"),
            kwargs.get("file_path"),
            iso_size)
        LOG.debug("Uploading iso of size : %s ", iso_size)
        # Stream the file to the datastore in 64 KB chunks.
        block_size = 0x10000
        data = iso_file.read(block_size)
        while len(data) > 0:
            write_file_handle.write(data)
            data = iso_file.read(block_size)
        write_file_handle.close()
    LOG.debug("Uploaded iso %s to datastore", iso_path,
              instance=instance)
def fetch_image(context, instance, host, port, dc_name, ds_name, file_path,
                cookies=None):
    """Download image from the glance image server.

    Streams the instance's image from Glance to ``file_path`` on the given
    datastore through an HTTP file-write handle.
    """
    image_ref = instance.image_ref
    LOG.debug("Downloading image file data %(image_ref)s to the "
              "data store %(data_store_name)s",
              {'image_ref': image_ref,
               'data_store_name': ds_name},
              instance=instance)

    # The reported Glance size drives both the write handle and the transfer.
    metadata = IMAGE_API.get(context, image_ref)
    file_size = int(metadata['size'])
    read_iter = IMAGE_API.download(context, image_ref)
    read_file_handle = rw_handles.ImageReadHandle(read_iter)
    write_file_handle = rw_handles.FileWriteHandle(
        host, port, dc_name, ds_name, cookies, file_path, file_size)
    start_transfer(context, read_file_handle, file_size,
                   write_file_handle=write_file_handle)
    LOG.debug("Downloaded image file data %(image_ref)s to "
              "%(upload_name)s on the data store "
              "%(data_store_name)s",
              {'image_ref': image_ref,
               'upload_name': 'n/a' if file_path is None else file_path,
               'data_store_name': 'n/a' if ds_name is None else ds_name},
              instance=instance)
def _build_shadow_vm_config_spec(session, name, size_kb, disk_type, ds_name):
    """Return spec for creating a shadow VM for image disk.

    The VM is never meant to be powered on. When used in importing
    a disk it governs the directory name created for the VM
    and the disk type of the disk image to convert to.

    :param name: Name of the backing
    :param size_kb: Size in KB of the backing
    :param disk_type: VMDK type for the disk
    :param ds_name: Datastore name where the disk is to be provisioned
    :return: Spec for creation
    """
    cf = session.vim.client.factory
    controller_device = cf.create('ns0:VirtualLsiLogicController')
    # Negative device keys act as placeholders for newly added devices; the
    # server assigns real keys on creation.
    controller_device.key = -100
    controller_device.busNumber = 0
    controller_device.sharedBus = 'noSharing'
    controller_spec = cf.create('ns0:VirtualDeviceConfigSpec')
    controller_spec.operation = 'add'
    controller_spec.device = controller_device

    disk_device = cf.create('ns0:VirtualDisk')
    # for very small disks allocate at least 1KB
    disk_device.capacityInKB = max(1, int(size_kb))
    disk_device.key = -101
    disk_device.unitNumber = 0
    # Attach the disk to the controller declared above (placeholder key).
    disk_device.controllerKey = -100
    disk_device_bkng = cf.create('ns0:VirtualDiskFlatVer2BackingInfo')
    if disk_type == constants.DISK_TYPE_EAGER_ZEROED_THICK:
        disk_device_bkng.eagerlyScrub = True
    elif disk_type == constants.DISK_TYPE_THIN:
        disk_device_bkng.thinProvisioned = True
    disk_device_bkng.fileName = '[%s]' % ds_name
    disk_device_bkng.diskMode = 'persistent'
    disk_device.backing = disk_device_bkng
    disk_spec = cf.create('ns0:VirtualDeviceConfigSpec')
    disk_spec.operation = 'add'
    disk_spec.fileOperation = 'create'
    disk_spec.device = disk_device

    vm_file_info = cf.create('ns0:VirtualMachineFileInfo')
    vm_file_info.vmPathName = '[%s]' % ds_name

    create_spec = cf.create('ns0:VirtualMachineConfigSpec')
    create_spec.name = name
    create_spec.guestId = 'otherGuest'
    create_spec.numCPUs = 1
    create_spec.memoryMB = 128
    create_spec.deviceChange = [controller_spec, disk_spec]
    create_spec.files = vm_file_info

    return create_spec
def _build_import_spec_for_import_vapp(session, vm_name, datastore_name):
    """Build a VirtualMachineImportSpec wrapping a minimal shadow VM spec."""
    shadow_vm_spec = _build_shadow_vm_config_spec(
        session, vm_name, 0, constants.DISK_TYPE_THIN, datastore_name)
    import_spec = session.vim.client.factory.create(
        'ns0:VirtualMachineImportSpec')
    import_spec.configSpec = shadow_vm_spec
    return import_spec
def fetch_image_stream_optimized(context, instance, session, vm_name,
                                 ds_name, vm_folder_ref, res_pool_ref):
    """Fetch image from Glance to ESX datastore.

    Streams the image into the hypervisor through an ImportVApp-backed vmdk
    write handle, then unregisters the temporary (shadow) VM created by the
    import so only the disk remains available.
    """
    image_ref = instance.image_ref
    LOG.debug("Downloading image file data %(image_ref)s to the ESX "
              "as VM named '%(vm_name)s'",
              {'image_ref': image_ref, 'vm_name': vm_name},
              instance=instance)

    metadata = IMAGE_API.get(context, image_ref)
    file_size = int(metadata['size'])

    vm_import_spec = _build_import_spec_for_import_vapp(
        session, vm_name, ds_name)

    read_iter = IMAGE_API.download(context, image_ref)
    read_handle = rw_handles.ImageReadHandle(read_iter)

    write_handle = rw_handles.VmdkWriteHandle(session,
                                              session._host,
                                              session._port,
                                              res_pool_ref,
                                              vm_folder_ref,
                                              vm_import_spec,
                                              file_size)
    start_transfer(context,
                   read_handle,
                   file_size,
                   write_file_handle=write_handle)

    imported_vm_ref = write_handle.get_imported_vm()

    LOG.info(_LI("Downloaded image file data %(image_ref)s"),
             {'image_ref': instance.image_ref}, instance=instance)
    # Remove the shadow VM created by the import once the transfer completes.
    session._call_method(session.vim, "UnregisterVM", imported_vm_ref)
    LOG.info(_LI("The imported VM was unregistered"), instance=instance)
def get_vmdk_name_from_ovf(xmlstr):
    """Parse the OVF descriptor to extract the vmdk name.

    :param xmlstr: contents of the OVF descriptor (XML)
    :returns: the href of the File element referenced by the first Disk
    """
    ovf = etree.fromstring(xmlstr)
    nsovf = "{%s}" % ovf.nsmap["ovf"]
    disk = ovf.find("./%sDiskSection/%sDisk" % (nsovf, nsovf))
    file_id = disk.get("%sfileRef" % nsovf)
    # Renamed local from 'file' to avoid shadowing the builtin.
    file_elem = ovf.find('./%sReferences/%sFile[@%sid="%s"]' % (nsovf, nsovf,
                                                                nsovf,
                                                                file_id))
    vmdk_name = file_elem.get("%shref" % nsovf)
    return vmdk_name
def fetch_image_ova(context, instance, session, vm_name, ds_name,
                    vm_folder_ref, res_pool_ref):
    """Download the OVA image from the glance image server to the
    Nova compute node.

    The OVA archive is spooled to a temporary file, its OVF descriptor is
    parsed to find the vmdk member, and that member is streamed into the
    datastore via ImportVApp. The imported shadow VM is unregistered so
    only its disk remains.

    :raises exception.ImageUnacceptable: if no vmdk can be extracted
    """
    image_ref = instance.image_ref
    LOG.debug("Downloading OVA image file %(image_ref)s to the ESX "
              "as VM named '%(vm_name)s'",
              {'image_ref': image_ref, 'vm_name': vm_name},
              instance=instance)
    metadata = IMAGE_API.get(context, image_ref)
    file_size = int(metadata['size'])
    vm_import_spec = _build_import_spec_for_import_vapp(
        session, vm_name, ds_name)
    read_iter = IMAGE_API.download(context, image_ref)
    ova_fd, ova_path = tempfile.mkstemp()
    try:
        # NOTE(arnaud): Look to eliminate first writing OVA to file system
        # The OVA payload is binary: open in 'wb' so the data is written
        # unmodified (text mode corrupts it on some platforms and fails
        # outright with bytes chunks on Python 3).
        with os.fdopen(ova_fd, 'wb') as fp:
            for chunk in read_iter:
                fp.write(chunk)
        with tarfile.open(ova_path, mode="r") as tar:
            vmdk_name = None
            for tar_info in tar:
                if tar_info and tar_info.name.endswith(".ovf"):
                    extracted = tar.extractfile(tar_info.name)
                    # Close the member handle once the descriptor is read.
                    try:
                        xmlstr = extracted.read()
                    finally:
                        extracted.close()
                    vmdk_name = get_vmdk_name_from_ovf(xmlstr)
                elif vmdk_name and tar_info.name.startswith(vmdk_name):
                    # Actual file name is <vmdk_name>.XXXXXXX
                    extracted = tar.extractfile(tar_info.name)
                    write_handle = rw_handles.VmdkWriteHandle(
                        session,
                        session._host,
                        session._port,
                        res_pool_ref,
                        vm_folder_ref,
                        vm_import_spec,
                        file_size)
                    start_transfer(context,
                                   extracted,
                                   file_size,
                                   write_file_handle=write_handle)
                    extracted.close()
                    LOG.info(_LI("Downloaded OVA image file %(image_ref)s"),
                             {'image_ref': instance.image_ref},
                             instance=instance)
                    imported_vm_ref = write_handle.get_imported_vm()
                    session._call_method(session.vim, "UnregisterVM",
                                         imported_vm_ref)
                    LOG.info(_LI("The imported VM was unregistered"),
                             instance=instance)
                    return
            raise exception.ImageUnacceptable(
                reason=_("Extracting vmdk from OVA failed."),
                image_id=image_ref)
    finally:
        os.unlink(ova_path)
def upload_image_stream_optimized(context, image_id, instance, session,
                                  vm, vmdk_size):
    """Upload the snapshotted vm disk file to Glance image server.

    Exports the VM's disk as a stream-optimized VMDK over HTTP and feeds
    it to the image service.
    """
    LOG.debug("Uploading image %s", image_id, instance=instance)
    metadata = IMAGE_API.get(context, image_id)

    # Read handle that exports the VM's disk over the vCenter HTTP API.
    read_handle = rw_handles.VmdkReadHandle(session,
                                            session._host,
                                            session._port,
                                            vm,
                                            None,
                                            vmdk_size)

    # Set the image properties. It is important to set the 'size' to 0.
    # Otherwise, the image service client will use the VM's disk capacity
    # which will not be the image size after upload, since it is converted
    # to a stream-optimized sparse disk.
    image_metadata = {'disk_format': 'vmdk',
                      'is_public': metadata['is_public'],
                      'name': metadata['name'],
                      'status': 'active',
                      'container_format': 'bare',
                      'size': 0,
                      'properties': {'vmware_image_version': 1,
                                     'vmware_disktype': 'streamOptimized',
                                     'owner_id': instance.project_id}}

    # Passing 0 as the file size since data size to be transferred cannot be
    # predetermined.
    start_transfer(context,
                   read_handle,
                   0,
                   image_id=image_id,
                   image_meta=image_metadata)
    LOG.debug("Uploaded image %s to the Glance image server", image_id,
              instance=instance)
| apache-2.0 |
Esri/community-addresses-python | source/arcrest/ags/featureservice.py | 2 | 11788 | from base import BaseAGSServer
import layer
from filters import LayerDefinitionFilter, GeometryFilter, TimeFilter
########################################################################
class FeatureService(BaseAGSServer):
""" contains information about a feature service """
_url = None
_currentVersion = None
_serviceDescription = None
_hasVersionedData = None
_supportsDisconnectedEditing = None
_hasStaticData = None
_maxRecordCount = None
_supportedQueryFormats = None
_capabilities = None
_description = None
_copyrightText = None
_spatialReference = None
_initialExtent = None
_fullExtent = None
_allowGeometryUpdates = None
_units = None
_syncEnabled = None
_syncCapabilities = None
_editorTrackingInfo = None
_documentInfo = None
_layers = None
_tables = None
_enableZDefaults = None
_zDefault = None
_proxy_url = None
_proxy_port = None
#----------------------------------------------------------------------
def __init__(self, url, token_url=None, username=None, password=None,
initialize=False, proxy_url=None, proxy_port=None):
"""Constructor"""
self._proxy_url = proxy_url
self._proxy_port = proxy_port
self._url = url
self._token_url = token_url
if not username is None and \
not password is None and \
not token_url is None:
self._username = username
self._password = password
self._token_url = token_url
if not username is None and \
not password is None and \
not username is "" and \
not password is "":
if not token_url is None:
res = self.generate_token(tokenURL=token_url,
proxy_port=proxy_port,
proxy_url=proxy_url)
else:
res = self.generate_token(proxy_port=self._proxy_port,
proxy_url=self._proxy_url)
if res is None:
print "Token was not generated"
elif 'error' in res:
print res
else:
self._token = res[0]
if initialize:
self.__init()
#----------------------------------------------------------------------
def __init(self):
""" loads the data into the class """
if self._token is None:
param_dict = {"f": "json"}
else:
param_dict = {"f": "json",
"token" : self._token
}
json_dict = self._do_get(self._url, param_dict)
attributes = [attr for attr in dir(self)
if not attr.startswith('__') and \
not attr.startswith('_')]
for k,v in json_dict.iteritems():
if k in attributes:
setattr(self, "_"+ k, v)
else:
print k, " - attribute not implmented for Feature Service."
#----------------------------------------------------------------------
@property
def maxRecordCount(self):
"""returns the max record count"""
if self._maxRecordCount is None:
self.__init()
return self._maxRecordCount
#----------------------------------------------------------------------
@property
def supportedQueryFormats(self):
""""""
if self._supportedQueryFormats is None:
self.__init()
return self._supportedQueryFormats
#----------------------------------------------------------------------
@property
def capabilities(self):
""" returns a list of capabilities """
if self._capabilities is None:
self.__init()
return self._capabilities
#----------------------------------------------------------------------
@property
def description(self):
""" returns the service description """
if self._description is None:
self.__init()
return self._description
#----------------------------------------------------------------------
@property
def copyrightText(self):
""" returns the copyright text """
if self._copyrightText is None:
self.__init()
return self._copyrightText
#----------------------------------------------------------------------
@property
def spatialReference(self):
""" returns the spatial reference """
if self._spatialReference is None:
self.__init()
return self._spatialReference
#----------------------------------------------------------------------
@property
def initialExtent(self):
""" returns the initial extent of the feature service """
if self._initialExtent is None:
self.__init()
return self._initialExtent
#----------------------------------------------------------------------
@property
def fullExtent(self):
""" returns the full extent of the feature service """
if self._fullExtent is None:
self.__init()
return self._fullExtent
#----------------------------------------------------------------------
@property
def allowGeometryUpdates(self):
""" informs the user if the data allows geometry updates """
if self._allowGeometryUpdates is None:
self.__init()
return self._allowGeometryUpdates
#----------------------------------------------------------------------
@property
def units(self):
""" returns the measurement unit """
if self._units is None:
self.__init()
return self._units
#----------------------------------------------------------------------
@property
def syncEnabled(self):
""" informs the user if sync of data can be performed """
if self._syncEnabled is None:
self.__init()
return self._syncEnabled
#----------------------------------------------------------------------
@property
def syncCapabilities(self):
""" type of sync that can be performed """
if self._syncCapabilities is None:
self.__init()
return self._syncCapabilities
#----------------------------------------------------------------------
@property
def editorTrackingInfo(self):
""""""
if self._editorTrackingInfo is None:
self.__init()
return self._editorTrackingInfo
#----------------------------------------------------------------------
@property
def documentInfo(self):
""""""
if self._documentInfo is None:
self.__init()
return self._documentInfo
#----------------------------------------------------------------------
@property
def layers(self):
""" gets the layers for the feature service """
if self._layers is None:
self.__init()
self._getLayers()
return self._layers
def _getLayers(self):
""" gets layers for the featuer service """
if self._token is None:
param_dict = {"f": "json"}
else:
param_dict = {"f": "json",
"token" : self._token
}
json_dict = self._do_get(self._url, param_dict)
self._layers = []
if json_dict.has_key("layers"):
for l in json_dict["layers"]:
self._layers.append(
layer.FeatureLayer(url=self._url + "/%s" % l['id'],
username=self._username,
password=self._password,
token_url=self._token_url)
)
#----------------------------------------------------------------------
@property
def tables(self):
""""""
if self._tables is None:
self.__init()
return self._tables
#----------------------------------------------------------------------
@property
def enableZDefaults(self):
""""""
if self._enableZDefaults is None:
self.__init()
return self._enableZDefaults
#----------------------------------------------------------------------
@property
def zDefault(self):
""""""
if self._zDefault is None:
self.__init()
return self._zDefault
#----------------------------------------------------------------------
@property
def hasStaticData(self):
""""""
if self._hasStaticData is None:
self.__init()
return self._hasStaticData
#----------------------------------------------------------------------
@property
def currentVersion(self):
""" returns the map service current version """
if self._currentVersion is None:
self.__init()
return self._currentVersion
#----------------------------------------------------------------------
@property
def serviceDescription(self):
""" returns the serviceDescription of the map service """
if self._serviceDescription is None:
self.__init()
return self._serviceDescription
#----------------------------------------------------------------------
@property
def hasVersionedData(self):
""" returns boolean for versioned data """
if self._hasVersionedData is None:
self.__init()
return self._hasVersionedData
#----------------------------------------------------------------------
@property
def supportsDisconnectedEditing(self):
""" returns boolean is disconnecting editted supported """
if self._supportsDisconnectedEditing is None:
self.__init()
return self._supportsDisconnectedEditing
#----------------------------------------------------------------------
def query(self,
layerDefsFilter=None,
geometryFilter=None,
timeFilter=None,
returnGeometry=True,
returnIdsOnly=False,
returnCountOnly=False,
returnZ=False,
returnM=False,
outSR=None
):
"""
The Query operation is performed on a feature service resource
"""
qurl = self._url + "/query"
params = {"f": "json",
"returnGeometry": returnGeometry,
"returnIdsOnly": returnIdsOnly,
"returnCountOnly": returnCountOnly,
"returnZ": returnZ,
"returnM" : returnM}
if not self._token is None:
params["token"] = self._token
if not layerDefsFilter is None and \
isinstance(layerDefsFilter, LayerDefinitionFilter):
params['layerDefs'] = layerDefsFilter.filter
if not geometryFilter is None and \
isinstance(geometryFilter, GeometryFilter):
gf = geometryFilter.filter
params['geometryType'] = gf['geometryType']
params['spatialRel'] = gf['spatialRel']
params['geometry'] = gf['geometry']
params['inSR'] = gf['inSR']
if not outSR is None and \
isinstance(outSR, common.SpatialReference):
params['outSR'] = outSR.asDictionary
if not timeFilter is None and \
isinstance(timeFilter, TimeFilter):
params['time'] = timeFilter.filter
return self._do_get(url=qurl, param_dict=params) | apache-2.0 |
sebrandon1/neutron | neutron/agent/linux/daemon.py | 10 | 8106 | # Copyright 2012 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import atexit
import fcntl
import grp
import logging as std_logging
from logging import handlers
import os
import pwd
import signal
import sys
from oslo_log import log as logging
from neutron._i18n import _, _LE, _LI
from neutron.common import exceptions
LOG = logging.getLogger(__name__)
DEVNULL = object()
# Note: We can't use sys.std*.fileno() here. sys.std* objects may be
# random file-like objects that may not match the true system std* fds
# - and indeed may not even have a file descriptor at all (eg: test
# fixtures that monkey patch fixtures.StringStream onto sys.stdout).
# Below we always want the _real_ well-known 0,1,2 Unix fds during
# os.dup2 manipulation.
STDIN_FILENO = 0
STDOUT_FILENO = 1
STDERR_FILENO = 2
def setuid(user_id_or_name):
    """Switch the process to the given user (numeric uid or user name).

    A uid of 0 is left untouched; a failed switch aborts the process via
    FailToDropPrivilegesExit.
    """
    try:
        uid = int(user_id_or_name)
    except (TypeError, ValueError):
        # Not numeric: resolve the user name through the passwd database.
        uid = pwd.getpwnam(user_id_or_name).pw_uid
    if uid == 0:
        return
    try:
        os.setuid(uid)
    except OSError:
        msg = _('Failed to set uid %s') % uid
        LOG.critical(msg)
        raise exceptions.FailToDropPrivilegesExit(msg)
def setgid(group_id_or_name):
    """Switch the process to the given group (numeric gid or group name).

    A gid of 0 is left untouched; a failed switch aborts the process via
    FailToDropPrivilegesExit.
    """
    try:
        gid = int(group_id_or_name)
    except (TypeError, ValueError):
        # Not numeric: resolve the group name through the group database.
        gid = grp.getgrnam(group_id_or_name).gr_gid
    if gid == 0:
        return
    try:
        os.setgid(gid)
    except OSError:
        msg = _('Failed to set gid %s') % gid
        LOG.critical(msg)
        raise exceptions.FailToDropPrivilegesExit(msg)
def unwatch_log():
    """Replace WatchedFileHandler handlers by FileHandler ones.

    Neutron logging uses WatchedFileHandler handlers but they do not
    support privileges drop, this method replaces them by FileHandler
    handlers supporting privileges drop.
    """
    # 'logging' here is oslo_log; its '.logger' attribute unwraps the
    # underlying stdlib logger so the handler list can be edited directly.
    log_root = logging.getLogger(None).logger
    to_replace = [h for h in log_root.handlers
                  if isinstance(h, handlers.WatchedFileHandler)]
    for handler in to_replace:
        # NOTE(cbrandily): we use default delay(=False) to ensure the log file
        # is opened before privileges drop.
        new_handler = std_logging.FileHandler(handler.baseFilename,
                                              mode=handler.mode,
                                              encoding=handler.encoding)
        log_root.removeHandler(handler)
        log_root.addHandler(new_handler)
def drop_privileges(user=None, group=None):
    """Drop privileges to user/group privileges.

    No-op when neither user nor group is given. Must be called as root;
    otherwise the process exits via FailToDropPrivilegesExit.
    """
    if user is None and group is None:
        return

    # Only root may change uid/gid; anything else is a configuration error.
    if os.geteuid() != 0:
        msg = _('Root permissions are required to drop privileges.')
        LOG.critical(msg)
        raise exceptions.FailToDropPrivilegesExit(msg)

    # The group must be changed first: once the uid is dropped the process
    # no longer has permission to call setgid/setgroups.
    if group is not None:
        try:
            os.setgroups([])
        except OSError:
            msg = _('Failed to remove supplemental groups')
            LOG.critical(msg)
            raise exceptions.FailToDropPrivilegesExit(msg)
        setgid(group)

    if user is not None:
        setuid(user)

    LOG.info(_LI("Process runs with uid/gid: %(uid)s/%(gid)s"),
             {'uid': os.getuid(), 'gid': os.getgid()})
class Pidfile(object):
    """A pidfile guarded by an exclusive, non-blocking flock.

    The file descriptor (and its lock) is held for the lifetime of this
    object, so a second daemon using the same pidfile fails fast in
    __init__ instead of clobbering the running one.
    """

    def __init__(self, pidfile, procname, uuid=None):
        self.pidfile = pidfile
        self.procname = procname
        # Optional uuid disambiguating several daemons sharing the same
        # procname (see is_running()).
        self.uuid = uuid
        try:
            self.fd = os.open(pidfile, os.O_CREAT | os.O_RDWR)
            # LOCK_NB: fail immediately rather than block if another
            # process already holds the lock.
            fcntl.flock(self.fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
        except IOError:
            LOG.exception(_LE("Error while handling pidfile: %s"), pidfile)
            sys.exit(1)

    def __str__(self):
        # str(pidfile_obj) yields the path (used by Daemon.delete_pid).
        return self.pidfile

    def unlock(self):
        fcntl.flock(self.fd, fcntl.LOCK_UN)

    def write(self, pid):
        # Truncate first so a previously written longer pid leaves no
        # stale trailing digits behind.
        os.ftruncate(self.fd, 0)
        # NOTE(review): os.write() with a str works on Python 2 only; on
        # Python 3 this would need bytes -- confirm the target runtime.
        os.write(self.fd, "%d" % pid)
        os.fsync(self.fd)

    def read(self):
        try:
            pid = int(os.read(self.fd, 128))
            # Rewind so a later read starts from the beginning again.
            os.lseek(self.fd, 0, os.SEEK_SET)
            return pid
        except ValueError:
            # File empty or not an integer: no pid recorded yet.
            return

    def is_running(self):
        """Return True if the recorded pid matches our process image."""
        pid = self.read()
        if not pid:
            return False

        cmdline = '/proc/%s/cmdline' % pid
        try:
            with open(cmdline, "r") as f:
                exec_out = f.readline()
            return self.procname in exec_out and (not self.uuid or
                                                  self.uuid in exec_out)
        except IOError:
            # No /proc entry: the process no longer exists.
            return False
class Daemon(object):
    """A generic daemon class.

    Usage: subclass the Daemon class and override the run() method
    """

    def __init__(self, pidfile, stdin=DEVNULL, stdout=DEVNULL,
                 stderr=DEVNULL, procname='python', uuid=None,
                 user=None, group=None, watch_log=True):
        """Note: pidfile may be None."""
        self.stdin = stdin
        self.stdout = stdout
        self.stderr = stderr
        self.procname = procname
        self.pidfile = (Pidfile(pidfile, procname, uuid)
                        if pidfile is not None else None)
        # user/group to drop privileges to once daemonized (see run()).
        self.user = user
        self.group = group
        self.watch_log = watch_log

    def _fork(self):
        # Parent exits with os._exit() so the child alone runs atexit
        # handlers and flushes stdio.
        try:
            pid = os.fork()
            if pid > 0:
                os._exit(0)
        except OSError:
            LOG.exception(_LE('Fork failed'))
            sys.exit(1)

    def daemonize(self):
        """Daemonize process by doing Stevens double fork."""

        # flush any buffered data before fork/dup2.
        if self.stdout is not DEVNULL:
            self.stdout.flush()
        if self.stderr is not DEVNULL:
            self.stderr.flush()
        # sys.std* may not match STD{OUT,ERR}_FILENO.  Tough.
        for f in (sys.stdout, sys.stderr):
            f.flush()

        # fork first time
        self._fork()

        # decouple from parent environment
        os.chdir("/")
        os.setsid()
        os.umask(0)

        # fork second time
        self._fork()

        # redirect standard file descriptors
        with open(os.devnull, 'w+') as devnull:
            stdin = devnull if self.stdin is DEVNULL else self.stdin
            stdout = devnull if self.stdout is DEVNULL else self.stdout
            stderr = devnull if self.stderr is DEVNULL else self.stderr
            # dup2 onto the real 0/1/2 fds (see STDIN_FILENO note above).
            os.dup2(stdin.fileno(), STDIN_FILENO)
            os.dup2(stdout.fileno(), STDOUT_FILENO)
            os.dup2(stderr.fileno(), STDERR_FILENO)

        if self.pidfile is not None:
            # write pidfile
            atexit.register(self.delete_pid)
            signal.signal(signal.SIGTERM, self.handle_sigterm)
            self.pidfile.write(os.getpid())

    def delete_pid(self):
        if self.pidfile is not None:
            os.remove(str(self.pidfile))

    def handle_sigterm(self, signum, frame):
        # Exit through sys.exit() so atexit handlers (delete_pid) run.
        sys.exit(0)

    def start(self):
        """Start the daemon."""

        if self.pidfile is not None and self.pidfile.is_running():
            self.pidfile.unlock()
            LOG.error(_LE('Pidfile %s already exist. Daemon already '
                          'running?'), self.pidfile)
            sys.exit(1)

        # Start the daemon
        self.daemonize()
        self.run()

    def run(self):
        """Override this method and call super().run when subclassing Daemon.

        start() will call this method after the process has daemonized.
        """
        if not self.watch_log:
            unwatch_log()
        drop_privileges(self.user, self.group)
| apache-2.0 |
MihaiMoldovanu/ansible | lib/ansible/modules/network/avi/avi_backupconfiguration.py | 27 | 4642 | #!/usr/bin/python
#
# Created on Aug 25, 2016
# @author: Gaurav Rastogi (grastogi@avinetworks.com)
# Eric Anderson (eanderson@avinetworks.com)
# module_check: supported
#
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: avi_backupconfiguration
author: Gaurav Rastogi (grastogi@avinetworks.com)
short_description: Module for setup of BackupConfiguration Avi RESTful Object
description:
- This module is used to configure BackupConfiguration object
- more examples at U(https://github.com/avinetworks/devops)
requirements: [ avisdk ]
version_added: "2.4"
options:
state:
description:
- The state that should be applied on the entity.
default: present
choices: ["absent","present"]
backup_file_prefix:
description:
- Prefix of the exported configuration file.
- Field introduced in 17.1.1.
backup_passphrase:
description:
- Passphrase of backup configuration.
maximum_backups_stored:
description:
- Rotate the backup files based on this count.
- Allowed values are 1-20.
- Default value when not specified in API or module is interpreted by Avi Controller as 4.
name:
description:
- Name of backup configuration.
required: true
remote_directory:
description:
- Directory at remote destination with write permission for ssh user.
remote_hostname:
description:
- Remote destination.
save_local:
description:
- Local backup.
ssh_user_ref:
description:
- Access credentials for remote destination.
- It is a reference to an object of type cloudconnectoruser.
tenant_ref:
description:
- It is a reference to an object of type tenant.
upload_to_remote_host:
description:
- Remote backup.
url:
description:
- Avi controller URL of the object.
uuid:
description:
- Unique object identifier of the object.
extends_documentation_fragment:
- avi
'''
EXAMPLES = """
- name: Example to create BackupConfiguration object
avi_backupconfiguration:
controller: 10.10.25.42
username: admin
password: something
state: present
name: sample_backupconfiguration
"""
RETURN = '''
obj:
description: BackupConfiguration (api/backupconfiguration) object
returned: success, changed
type: dict
'''
from ansible.module_utils.basic import AnsibleModule
try:
from ansible.module_utils.avi import (
avi_common_argument_spec, HAS_AVI, avi_ansible_api)
except ImportError:
HAS_AVI = False
def main():
    """Entry point: declare the module arguments and drive the Avi API."""
    # Module options as a plain dict literal; merged with the common
    # Avi connection options below.
    specs = {
        'state': {'default': 'present',
                  'choices': ['absent', 'present']},
        'backup_file_prefix': {'type': 'str'},
        'backup_passphrase': {'type': 'str', 'no_log': True},
        'maximum_backups_stored': {'type': 'int'},
        'name': {'type': 'str', 'required': True},
        'remote_directory': {'type': 'str'},
        'remote_hostname': {'type': 'str'},
        'save_local': {'type': 'bool'},
        'ssh_user_ref': {'type': 'str'},
        'tenant_ref': {'type': 'str'},
        'upload_to_remote_host': {'type': 'bool'},
        'url': {'type': 'str'},
        'uuid': {'type': 'str'},
    }
    specs.update(avi_common_argument_spec())
    module = AnsibleModule(argument_spec=specs, supports_check_mode=True)
    if not HAS_AVI:
        return module.fail_json(msg=(
            'Avi python API SDK (avisdk>=17.1) is not installed. '
            'For more details visit https://github.com/avinetworks/sdk.'))
    return avi_ansible_api(module, 'backupconfiguration',
                           set(['backup_passphrase']))
main()
| gpl-3.0 |
Nowheresly/odoo | addons/hr_gamification/wizard/grant_badge.py | 195 | 2525 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013 OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
class hr_grant_badge_wizard(osv.TransientModel):
    """Wizard used to grant a gamification badge to an employee."""
    _name = 'gamification.badge.user.wizard'
    _inherit = ['gamification.badge.user.wizard']

    _columns = {
        'employee_id': fields.many2one("hr.employee", string='Employee', required=True),
        'user_id': fields.related("employee_id", "user_id",
                                  type="many2one", relation="res.users",
                                  store=True, string='User')
    }

    def action_grant_badge(self, cr, uid, ids, context=None):
        """Wizard action for sending a badge to a chosen employee"""
        context = context if context is not None else {}
        badge_user_model = self.pool.get('gamification.badge.user')
        for wizard in self.browse(cr, uid, ids, context=context):
            recipient = wizard.user_id
            # A badge can only go to an employee with a linked user,
            # and never to the sender himself.
            if not recipient:
                raise osv.except_osv(_('Warning!'), _('You can send badges only to employees linked to a user.'))
            if uid == recipient.id:
                raise osv.except_osv(_('Warning!'), _('You can not send a badge to yourself'))

            badge_user = badge_user_model.create(cr, uid, {
                'user_id': recipient.id,
                'sender_id': uid,
                'badge_id': wizard.badge_id.id,
                'employee_id': wizard.employee_id.id,
                'comment': wizard.comment,
            }, context=context)
            result = badge_user_model._send_badge(cr, uid, [badge_user],
                                                  context=context)

        return result
| agpl-3.0 |
richardliaw/ray | python/ray/tests/test_runtime_context.py | 1 | 2279 | import ray
import os
import signal
import time
import sys
def test_was_current_actor_reconstructed(shutdown_only):
    """was_current_actor_reconstructed flips to True after the actor
    process is killed and the actor restarts (max_restarts > 0)."""
    ray.init()

    @ray.remote(max_restarts=10)
    class A(object):
        def __init__(self):
            # Captured at construction time, so it reflects whether THIS
            # incarnation of the actor is a restart.
            self._was_reconstructed = ray.get_runtime_context(
            ).was_current_actor_reconstructed

        def get_was_reconstructed(self):
            return self._was_reconstructed

        def update_was_reconstructed(self):
            # Queried live, inside a method call.
            return ray.get_runtime_context().was_current_actor_reconstructed

        def get_pid(self):
            return os.getpid()

    a = A.remote()
    # `was_reconstructed` should be False when it's called in actor.
    assert ray.get(a.get_was_reconstructed.remote()) is False
    # `was_reconstructed` should be False when it's called in a remote method
    # and the actor never fails.
    assert ray.get(a.update_was_reconstructed.remote()) is False

    pid = ray.get(a.get_pid.remote())
    os.kill(pid, signal.SIGKILL)
    # Give Ray time to notice the death and restart the actor.
    time.sleep(2)
    # These 2 methods should be return True because
    # this actor failed and restored.
    assert ray.get(a.get_was_reconstructed.remote()) is True
    assert ray.get(a.update_was_reconstructed.remote()) is True
@ray.remote(max_restarts=10)
class A(object):
    """Minimal actor exposing job/actor ids from the runtime context."""

    def current_job_id(self):
        return ray.get_runtime_context().job_id

    def current_actor_id(self):
        return ray.get_runtime_context().actor_id
@ray.remote
def f():
    """Inside a plain task: task/node/job ids are set, actor_id is not,
    and get() omits the absent actor_id key."""
    assert ray.get_runtime_context().actor_id is None
    assert ray.get_runtime_context().task_id is not None
    assert ray.get_runtime_context().node_id is not None
    assert ray.get_runtime_context().job_id is not None
    context = ray.get_runtime_context().get()
    assert "actor_id" not in context
    assert context["task_id"] == ray.get_runtime_context().task_id
    assert context["node_id"] == ray.get_runtime_context().node_id
    assert context["job_id"] == ray.get_runtime_context().job_id
# Module-level smoke checks exercising the runtime context from an actor
# and from a task. NOTE(review): these run at import time, before any
# explicit ray.init() in this module -- presumably they rely on implicit
# initialization; confirm intent.
a = A.remote()
assert ray.get(a.current_job_id.remote()) is not None
assert ray.get(a.current_actor_id.remote()) is not None
ray.get(f.remote())

if __name__ == "__main__":
    import pytest
    sys.exit(pytest.main(["-v", __file__]))
| apache-2.0 |
awahlig/skype4py | Skype4Py/lang/lt.py | 69 | 6414 | apiAttachAvailable = u'API Available'
apiAttachNotAvailable = u'Not Available'
apiAttachPendingAuthorization = u'Pending Authorization'
apiAttachRefused = u'Refused'
apiAttachSuccess = u'Success'
apiAttachUnknown = u'Unknown'
budDeletedFriend = u'Deleted From Friendlist'
budFriend = u'Friend'
budNeverBeenFriend = u'Never Been In Friendlist'
budPendingAuthorization = u'Pending Authorization'
budUnknown = u'Unknown'
cfrBlockedByRecipient = u'Call blocked by recipient'
cfrMiscError = u'Misc error'
cfrNoCommonCodec = u'No common codec found'
cfrNoProxyFound = u'No proxy found'
cfrNotAuthorizedByRecipient = u'Current user not authorized by recipient'
cfrRecipientNotFriend = u'Recipient not a friend'
cfrRemoteDeviceError = u'Problem with remote sound device'
cfrSessionTerminated = u'Session terminated'
cfrSoundIOError = u'Sound I/O error'
cfrSoundRecordingError = u'Sound recording error'
cfrUnknown = u'Unknown'
cfrUserDoesNotExist = u'User/phone number does not exist'
cfrUserIsOffline = u'User is offline'
chsAllCalls = u'Legacy Dialog'
chsDialog = u'Dialog'
chsIncomingCalls = u'Multi Need Accept'
chsLegacyDialog = u'Legacy Dialog'
chsMissedCalls = u'Dialog'
chsMultiNeedAccept = u'Multi Need Accept'
chsMultiSubscribed = u'Multi Subscribed'
chsOutgoingCalls = u'Multi Subscribed'
chsUnknown = u'Unknown'
chsUnsubscribed = u'Unsubscribed'
clsBusy = u'Busy'
clsCancelled = u'Cancelled'
clsEarlyMedia = u'Playing Early Media'
clsFailed = u'Sorry, call failed!'
clsFinished = u'Finished'
clsInProgress = u'Call in Progress'
clsLocalHold = u'On Local Hold'
clsMissed = u'Missed'
clsOnHold = u'On Hold'
clsRefused = u'Refused'
clsRemoteHold = u'On Remote Hold'
clsRinging = u'Calling'
clsRouting = u'Routing'
clsTransferred = u'Unknown'
clsTransferring = u'Unknown'
clsUnknown = u'Unknown'
clsUnplaced = u'Never placed'
clsVoicemailBufferingGreeting = u'Buffering Greeting'
clsVoicemailCancelled = u'Voicemail Has Been Cancelled'
clsVoicemailFailed = u'Voicemail Failed'
clsVoicemailPlayingGreeting = u'Playing Greeting'
clsVoicemailRecording = u'Recording'
clsVoicemailSent = u'Voicemail Has Been Sent'
clsVoicemailUploading = u'Uploading Voicemail'
cltIncomingP2P = u'Incoming Peer-to-Peer Call'
cltIncomingPSTN = u'Incoming Telephone Call'
cltOutgoingP2P = u'Outgoing Peer-to-Peer Call'
cltOutgoingPSTN = u'Outgoing Telephone Call'
cltUnknown = u'Unknown'
cmeAddedMembers = u'Added Members'
cmeCreatedChatWith = u'Created Chat With'
cmeEmoted = u'Unknown'
cmeLeft = u'Left'
cmeSaid = u'Said'
cmeSawMembers = u'Saw Members'
cmeSetTopic = u'Set Topic'
cmeUnknown = u'Unknown'
cmsRead = u'Read'
cmsReceived = u'Received'
cmsSending = u'Sending'
cmsSent = u'Sent'
cmsUnknown = u'Unknown'
conConnecting = u'Connecting'
conOffline = u'Offline'
conOnline = u'Online'
conPausing = u'Pausing'
conUnknown = u'Unknown'
cusAway = u'Away'
cusDoNotDisturb = u'Do Not Disturb'
cusInvisible = u'Invisible'
cusLoggedOut = u'Logged Out'
cusNotAvailable = u'Not Available'
cusOffline = u'Offline'
cusOnline = u'Online'
cusSkypeMe = u'Skype Me'
cusUnknown = u'Unknown'
cvsBothEnabled = u'Video Send and Receive'
cvsNone = u'No Video'
cvsReceiveEnabled = u'Video Receive'
cvsSendEnabled = u'Video Send'
cvsUnknown = u''
grpAllFriends = u'All Friends'
grpAllUsers = u'All Users'
grpCustomGroup = u'Custom'
grpOnlineFriends = u'Online Friends'
grpPendingAuthorizationFriends = u'Pending Authorization'
grpProposedSharedGroup = u'Proposed Shared Group'
grpRecentlyContactedUsers = u'Recently Contacted Users'
grpSharedGroup = u'Shared Group'
grpSkypeFriends = u'Skype Friends'
grpSkypeOutFriends = u'SkypeOut Friends'
grpUngroupedFriends = u'Ungrouped Friends'
grpUnknown = u'Unknown'
grpUsersAuthorizedByMe = u'Authorized By Me'
grpUsersBlockedByMe = u'Blocked By Me'
grpUsersWaitingMyAuthorization = u'Waiting My Authorization'
leaAddDeclined = u'Add Declined'
leaAddedNotAuthorized = u'Added Must Be Authorized'
leaAdderNotFriend = u'Adder Must Be Friend'
leaUnknown = u'Unknown'
leaUnsubscribe = u'Unsubscribed'
leaUserIncapable = u'User Incapable'
leaUserNotFound = u'User Not Found'
olsAway = u'Away'
olsDoNotDisturb = u'Do Not Disturb'
olsNotAvailable = u'Not Available'
olsOffline = u'Offline'
olsOnline = u'Online'
olsSkypeMe = u'SkypeMe'
olsSkypeOut = u'SkypeOut'
olsUnknown = u'Unknown'
smsMessageStatusComposing = u'Composing'
smsMessageStatusDelivered = u'Delivered'
smsMessageStatusFailed = u'Failed'
smsMessageStatusRead = u'Read'
smsMessageStatusReceived = u'Received'
smsMessageStatusSendingToServer = u'Sending to Server'
smsMessageStatusSentToServer = u'Sent to Server'
smsMessageStatusSomeTargetsFailed = u'Some Targets Failed'
smsMessageStatusUnknown = u'Unknown'
smsMessageTypeCCRequest = u'Confirmation Code Request'
smsMessageTypeCCSubmit = u'Confirmation Code Submit'
smsMessageTypeIncoming = u'Incoming'
smsMessageTypeOutgoing = u'Outgoing'
smsMessageTypeUnknown = u'Unknown'
smsTargetStatusAcceptable = u'Acceptable'
smsTargetStatusAnalyzing = u'Analyzing'
smsTargetStatusDeliveryFailed = u'Delivery Failed'
smsTargetStatusDeliveryPending = u'Delivery Pending'
smsTargetStatusDeliverySuccessful = u'Delivery Successful'
smsTargetStatusNotRoutable = u'Not Routable'
smsTargetStatusUndefined = u'Undefined'
smsTargetStatusUnknown = u'Unknown'
usexFemale = u'Female'
usexMale = u'Male'
usexUnknown = u'Unknown'
vmrConnectError = u'Connect Error'
vmrFileReadError = u'File Read Error'
vmrFileWriteError = u'File Write Error'
vmrMiscError = u'Misc Error'
vmrNoError = u'No Error'
vmrNoPrivilege = u'No Voicemail Privilege'
vmrNoVoicemail = u'No Such Voicemail'
vmrPlaybackError = u'Playback Error'
vmrRecordingError = u'Recording Error'
vmrUnknown = u'Unknown'
vmsBlank = u'Blank'
vmsBuffering = u'Buffering'
vmsDeleting = u'Deleting'
vmsDownloading = u'Downloading'
vmsFailed = u'Failed'
vmsNotDownloaded = u'Not Downloaded'
vmsPlayed = u'Played'
vmsPlaying = u'Playing'
vmsRecorded = u'Recorded'
vmsRecording = u'Recording Voicemail'
vmsUnknown = u'Unknown'
vmsUnplayed = u'Unplayed'
vmsUploaded = u'Uploaded'
vmsUploading = u'Uploading'
vmtCustomGreeting = u'Custom Greeting'
vmtDefaultGreeting = u'Default Greeting'
vmtIncoming = u'Incoming'
vmtOutgoing = u'Outgoing'
vmtUnknown = u'Unknown'
vssAvailable = u'Available'
vssNotAvailable = u'Not Available'
vssPaused = u'Paused'
vssRejected = u'Rejected'
vssRunning = u'Running'
vssStarting = u'Starting'
vssStopping = u'Stopping'
vssUnknown = u'Unknown'
| bsd-3-clause |
deepakselvaraj/federated-horizon | openstack_dashboard/dashboards/admin/info/tabs.py | 1 | 4164 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _ # noqa
from horizon import exceptions
from horizon import tabs
from openstack_dashboard.api import base
from openstack_dashboard.api import keystone
from openstack_dashboard.api import nova
from openstack_dashboard.usage import quotas
from openstack_dashboard.dashboards.admin.info import tables
class DefaultQuotasTab(tabs.TableTab):
    """Tab rendering the system-wide default quota values."""

    table_classes = (tables.QuotasTable,)
    name = _("Default Quotas")
    slug = "quotas"
    template_name = "horizon/common/_detail_table.html"

    def get_quotas_data(self):
        req = self.tab_group.request
        try:
            defaults = quotas.get_default_quota_data(req)
            rows = defaults.items
            # There is no API to get the default system quotas in
            # Neutron (cf. LP#1204956), so hide the network-related
            # entries to avoid showing misleading values.
            if base.is_service_enabled(self.request, 'network'):
                hidden = ('floating_ips', 'fixed_ips')
                rows = [q for q in rows if q.name not in hidden]
        except Exception:
            rows = []
            exceptions.handle(self.request, _('Unable to get quota info.'))
        return rows
class ServicesTab(tabs.TableTab):
    """Tab listing the Keystone service catalog for the current user."""

    table_classes = (tables.ServicesTable,)
    name = _("Services")
    slug = "services"
    template_name = "horizon/common/_detail_table.html"

    def get_services_data(self):
        req = self.tab_group.request
        region = req.user.services_region
        catalog = []
        # The raw catalog entries carry no id, so assign their position.
        for idx, entry in enumerate(req.user.service_catalog):
            entry['id'] = idx
            catalog.append(keystone.Service(entry, region))
        return catalog
class ZonesTab(tabs.TableTab):
    """Tab listing nova availability zones (with per-host detail)."""

    table_classes = (tables.ZonesTable,)
    name = _("Availability Zones")
    slug = "zones"
    template_name = "horizon/common/_detail_table.html"

    def get_zones_data(self):
        req = self.tab_group.request
        try:
            return nova.availability_zone_list(req, detailed=True)
        except Exception:
            exceptions.handle(
                req, _('Unable to retrieve availability zone data.'))
            return []
class HostAggregatesTab(tabs.TableTab):
    """Tab listing nova host aggregates."""

    table_classes = (tables.AggregatesTable,)
    name = _("Host Aggregates")
    slug = "aggregates"
    template_name = "horizon/common/_detail_table.html"

    def get_aggregates_data(self):
        try:
            return nova.aggregate_list(self.tab_group.request)
        except Exception:
            exceptions.handle(
                self.request, _('Unable to retrieve host aggregates list.'))
            return []
class NovaServicesTab(tabs.TableTab):
    """Tab listing nova (compute) services and their state."""

    table_classes = (tables.NovaServicesTable,)
    name = _("Compute Services")
    slug = "nova_services"
    template_name = ("horizon/common/_detail_table.html")

    def get_nova_services_data(self):
        try:
            services = nova.service_list(self.tab_group.request)
        except Exception:
            services = []  # NOTE(review): dead store -- `raise` below always re-raises
            msg = _('Unable to get nova services list.')
            # check_message substitutes a friendlier message when the error
            # text matches; the original exception is then re-raised.
            exceptions.check_message(["Connection", "refused"], msg)
            raise
        return services
class SystemInfoTabs(tabs.TabGroup):
    """Tab group aggregating the admin "System Info" panels."""

    slug = "system_info"
    tabs = (ServicesTab, NovaServicesTab, ZonesTab, HostAggregatesTab,
            DefaultQuotasTab)
    # Remember the last-selected tab across page loads.
    sticky = True
| apache-2.0 |
khalibartan/pgmpy | pgmpy/extern/six.py | 172 | 30888 | # Copyright (c) 2010-2017 Benjamin Peterson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""Utilities for writing code that runs on Python 2 and 3"""
from __future__ import absolute_import
import functools
import itertools
import operator
import sys
import types
__author__ = "Benjamin Peterson <benjamin@python.org>"
__version__ = "1.11.0"


# Useful for very coarse version differentiation.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
# Python 3.4+; used below to pick importlib over imp for reload_module.
PY34 = sys.version_info[0:2] >= (3, 4)
# Portable aliases for the built-in types whose names differ between
# Python 2 and 3, plus the platform's maximum container size.
if PY3:
    string_types = str,
    integer_types = int,
    class_types = type,
    text_type = str
    binary_type = bytes

    MAXSIZE = sys.maxsize
else:
    string_types = basestring,
    integer_types = (int, long)
    class_types = (type, types.ClassType)
    text_type = unicode
    binary_type = str

    if sys.platform.startswith("java"):
        # Jython always uses 32 bits.
        MAXSIZE = int((1 << 31) - 1)
    else:
        # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
        # Probe it: len() must return a Py_ssize_t, so an OverflowError
        # from a 2**31 length means ssize_t is 32 bits wide here.
        class X(object):

            def __len__(self):
                return 1 << 31
        try:
            len(X())
        except OverflowError:
            # 32-bit
            MAXSIZE = int((1 << 31) - 1)
        else:
            # 64-bit
            MAXSIZE = int((1 << 63) - 1)
        del X
def _add_doc(func, doc):
"""Add documentation to a function."""
func.__doc__ = doc
def _import_module(name):
"""Import module, returning the module after the last dot."""
__import__(name)
return sys.modules[name]
class _LazyDescr(object):
    """Descriptor that resolves its value on first access.

    Subclasses implement ``_resolve()``; the result is cached on the
    accessed instance and the descriptor then deletes itself from the
    owning class so subsequent lookups hit the cached attribute directly.
    """

    def __init__(self, name):
        self.name = name

    def __get__(self, obj, tp):
        result = self._resolve()
        setattr(obj, self.name, result)  # Invokes __set__.
        try:
            # This is a bit ugly, but it avoids running this again by
            # removing this descriptor.
            delattr(obj.__class__, self.name)
        except AttributeError:
            pass
        return result
class MovedModule(_LazyDescr):
    """Lazy reference to a module that was renamed between Python 2 and 3.

    On Python 3 the *new* name is imported (defaulting to *name*); on
    Python 2 the *old* name is imported.
    """

    def __init__(self, name, old, new=None):
        super(MovedModule, self).__init__(name)
        if PY3:
            if new is None:
                new = name
            self.mod = new
        else:
            self.mod = old

    def _resolve(self):
        return _import_module(self.mod)

    def __getattr__(self, attr):
        # Import the target module on first attribute access and cache
        # the fetched value on self so __getattr__ is not hit again.
        _module = self._resolve()
        value = getattr(_module, attr)
        setattr(self, attr, value)
        return value
class _LazyModule(types.ModuleType):
    """Module type whose attributes are lazy descriptors.

    Subclasses list their _LazyDescr attributes in ``_moved_attributes``
    so that dir() reflects them before they are resolved.
    """

    def __init__(self, name):
        super(_LazyModule, self).__init__(name)
        self.__doc__ = self.__class__.__doc__

    def __dir__(self):
        attrs = ["__doc__", "__name__"]
        attrs += [attr.name for attr in self._moved_attributes]
        return attrs

    # Subclasses should override this
    _moved_attributes = []
class MovedAttribute(_LazyDescr):
    """Lazy reference to an attribute that moved between Python 2 and 3.

    Stores the (module, attribute) pair appropriate for the running major
    version; resolution imports that module and fetches the attribute.
    Both the new module and new attribute names default to *name*.
    """

    def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
        super(MovedAttribute, self).__init__(name)
        if PY3:
            if new_mod is None:
                new_mod = name
            self.mod = new_mod
            if new_attr is None:
                if old_attr is None:
                    new_attr = name
                else:
                    new_attr = old_attr
            self.attr = new_attr
        else:
            self.mod = old_mod
            if old_attr is None:
                old_attr = name
            self.attr = old_attr

    def _resolve(self):
        module = _import_module(self.mod)
        return getattr(module, self.attr)
class _SixMetaPathImporter(object):

    """
    A meta path importer to import six.moves and its submodules.

    This class implements a PEP302 finder and loader. It should be compatible
    with Python 2.5 and all existing versions of Python3
    """

    def __init__(self, six_module_name):
        self.name = six_module_name
        # Maps "<six>.<dotted name>" -> module object or MovedModule stub.
        self.known_modules = {}

    def _add_module(self, mod, *fullnames):
        # Register *mod* under one or more names relative to the six package.
        for fullname in fullnames:
            self.known_modules[self.name + "." + fullname] = mod

    def _get_module(self, fullname):
        return self.known_modules[self.name + "." + fullname]

    def find_module(self, fullname, path=None):
        # PEP 302 finder: claim only the modules registered above.
        if fullname in self.known_modules:
            return self
        return None

    def __get_module(self, fullname):
        try:
            return self.known_modules[fullname]
        except KeyError:
            raise ImportError("This loader does not know module " + fullname)

    def load_module(self, fullname):
        # PEP 302 loader: resolve MovedModule stubs to the real module,
        # otherwise hand back the registered module object as-is.
        try:
            # in case of a reload
            return sys.modules[fullname]
        except KeyError:
            pass
        mod = self.__get_module(fullname)
        if isinstance(mod, MovedModule):
            mod = mod._resolve()
        else:
            mod.__loader__ = self
        sys.modules[fullname] = mod
        return mod

    def is_package(self, fullname):
        """
        Return true, if the named module is a package.

        We need this method to get correct spec objects with
        Python 3.4 (see PEP451)
        """
        return hasattr(self.__get_module(fullname), "__path__")

    def get_code(self, fullname):
        """Return None

        Required, if is_package is implemented"""
        self.__get_module(fullname)  # eventually raises ImportError
        return None
    get_source = get_code  # same as get_code


# Singleton importer for this six instance; appended to sys.meta_path at
# the bottom of the file.
_importer = _SixMetaPathImporter(__name__)
class _MovedItems(_LazyModule):
"""Lazy loading of moved objects"""
__path__ = [] # mark as package
_moved_attributes = [
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
MovedAttribute("intern", "__builtin__", "sys"),
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
MovedAttribute("getoutput", "commands", "subprocess"),
MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
MovedAttribute("reduce", "__builtin__", "functools"),
MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
MovedAttribute("StringIO", "StringIO", "io"),
MovedAttribute("UserDict", "UserDict", "collections"),
MovedAttribute("UserList", "UserList", "collections"),
MovedAttribute("UserString", "UserString", "collections"),
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
MovedModule("builtins", "__builtin__"),
MovedModule("configparser", "ConfigParser"),
MovedModule("copyreg", "copy_reg"),
MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
MovedModule("http_cookies", "Cookie", "http.cookies"),
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
MovedModule("html_parser", "HTMLParser", "html.parser"),
MovedModule("http_client", "httplib", "http.client"),
MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"),
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
MovedModule("cPickle", "cPickle", "pickle"),
MovedModule("queue", "Queue"),
MovedModule("reprlib", "repr"),
MovedModule("socketserver", "SocketServer"),
MovedModule("_thread", "thread", "_thread"),
MovedModule("tkinter", "Tkinter"),
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
MovedModule("tkinter_colorchooser", "tkColorChooser",
"tkinter.colorchooser"),
MovedModule("tkinter_commondialog", "tkCommonDialog",
"tkinter.commondialog"),
MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
MovedModule("tkinter_font", "tkFont", "tkinter.font"),
MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
"tkinter.simpledialog"),
MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
]
# Add windows specific modules.
if sys.platform == "win32":
_moved_attributes += [
MovedModule("winreg", "_winreg"),
]
for attr in _moved_attributes:
setattr(_MovedItems, attr.name, attr)
if isinstance(attr, MovedModule):
_importer._add_module(attr, "moves." + attr.name)
del attr
_MovedItems._moved_attributes = _moved_attributes
moves = _MovedItems(__name__ + ".moves")
_importer._add_module(moves, "moves")
class Module_six_moves_urllib_parse(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_parse"""
_urllib_parse_moved_attributes = [
MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
MovedAttribute("urljoin", "urlparse", "urllib.parse"),
MovedAttribute("urlparse", "urlparse", "urllib.parse"),
MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
MovedAttribute("quote", "urllib", "urllib.parse"),
MovedAttribute("quote_plus", "urllib", "urllib.parse"),
MovedAttribute("unquote", "urllib", "urllib.parse"),
MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"),
MovedAttribute("urlencode", "urllib", "urllib.parse"),
MovedAttribute("splitquery", "urllib", "urllib.parse"),
MovedAttribute("splittag", "urllib", "urllib.parse"),
MovedAttribute("splituser", "urllib", "urllib.parse"),
MovedAttribute("splitvalue", "urllib", "urllib.parse"),
MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
MovedAttribute("uses_params", "urlparse", "urllib.parse"),
MovedAttribute("uses_query", "urlparse", "urllib.parse"),
MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
]
for attr in _urllib_parse_moved_attributes:
setattr(Module_six_moves_urllib_parse, attr.name, attr)
del attr
Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
"moves.urllib_parse", "moves.urllib.parse")
class Module_six_moves_urllib_error(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_error"""
_urllib_error_moved_attributes = [
MovedAttribute("URLError", "urllib2", "urllib.error"),
MovedAttribute("HTTPError", "urllib2", "urllib.error"),
MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
]
for attr in _urllib_error_moved_attributes:
setattr(Module_six_moves_urllib_error, attr.name, attr)
del attr
Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
"moves.urllib_error", "moves.urllib.error")
class Module_six_moves_urllib_request(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_request"""
_urllib_request_moved_attributes = [
MovedAttribute("urlopen", "urllib2", "urllib.request"),
MovedAttribute("install_opener", "urllib2", "urllib.request"),
MovedAttribute("build_opener", "urllib2", "urllib.request"),
MovedAttribute("pathname2url", "urllib", "urllib.request"),
MovedAttribute("url2pathname", "urllib", "urllib.request"),
MovedAttribute("getproxies", "urllib", "urllib.request"),
MovedAttribute("Request", "urllib2", "urllib.request"),
MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
MovedAttribute("FileHandler", "urllib2", "urllib.request"),
MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
MovedAttribute("urlretrieve", "urllib", "urllib.request"),
MovedAttribute("urlcleanup", "urllib", "urllib.request"),
MovedAttribute("URLopener", "urllib", "urllib.request"),
MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
MovedAttribute("parse_http_list", "urllib2", "urllib.request"),
MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"),
]
for attr in _urllib_request_moved_attributes:
setattr(Module_six_moves_urllib_request, attr.name, attr)
del attr
Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
"moves.urllib_request", "moves.urllib.request")
class Module_six_moves_urllib_response(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_response"""
_urllib_response_moved_attributes = [
MovedAttribute("addbase", "urllib", "urllib.response"),
MovedAttribute("addclosehook", "urllib", "urllib.response"),
MovedAttribute("addinfo", "urllib", "urllib.response"),
MovedAttribute("addinfourl", "urllib", "urllib.response"),
]
for attr in _urllib_response_moved_attributes:
setattr(Module_six_moves_urllib_response, attr.name, attr)
del attr
Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
"moves.urllib_response", "moves.urllib.response")
class Module_six_moves_urllib_robotparser(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_robotparser"""
_urllib_robotparser_moved_attributes = [
MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
]
for attr in _urllib_robotparser_moved_attributes:
setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
del attr
Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
"moves.urllib_robotparser", "moves.urllib.robotparser")
class Module_six_moves_urllib(types.ModuleType):
"""Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
__path__ = [] # mark as package
parse = _importer._get_module("moves.urllib_parse")
error = _importer._get_module("moves.urllib_error")
request = _importer._get_module("moves.urllib_request")
response = _importer._get_module("moves.urllib_response")
robotparser = _importer._get_module("moves.urllib_robotparser")
def __dir__(self):
return ['parse', 'error', 'request', 'response', 'robotparser']
_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
"moves.urllib")
def add_move(move):
    """Add an item to six.moves.

    *move* is a MovedAttribute or MovedModule; it is installed on the
    _MovedItems class under its own name.
    """
    setattr(_MovedItems, move.name, move)
def remove_move(name):
    """Remove item from six.moves.

    Tries the _MovedItems class first, then any value already resolved
    into the moves module's namespace; raises AttributeError if *name*
    is found in neither.
    """
    try:
        delattr(_MovedItems, name)
    except AttributeError:
        try:
            del moves.__dict__[name]
        except KeyError:
            raise AttributeError("no such move, %r" % (name,))
if PY3:
_meth_func = "__func__"
_meth_self = "__self__"
_func_closure = "__closure__"
_func_code = "__code__"
_func_defaults = "__defaults__"
_func_globals = "__globals__"
else:
_meth_func = "im_func"
_meth_self = "im_self"
_func_closure = "func_closure"
_func_code = "func_code"
_func_defaults = "func_defaults"
_func_globals = "func_globals"
try:
advance_iterator = next
except NameError:
def advance_iterator(it):
return it.next()
next = advance_iterator
try:
callable = callable
except NameError:
def callable(obj):
return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
if PY3:
def get_unbound_function(unbound):
return unbound
create_bound_method = types.MethodType
def create_unbound_method(func, cls):
return func
Iterator = object
else:
def get_unbound_function(unbound):
return unbound.im_func
def create_bound_method(func, obj):
return types.MethodType(func, obj, obj.__class__)
def create_unbound_method(func, cls):
return types.MethodType(func, None, cls)
class Iterator(object):
def next(self):
return type(self).__next__(self)
callable = callable
_add_doc(get_unbound_function,
"""Get the function out of a possibly unbound function""")
get_method_function = operator.attrgetter(_meth_func)
get_method_self = operator.attrgetter(_meth_self)
get_function_closure = operator.attrgetter(_func_closure)
get_function_code = operator.attrgetter(_func_code)
get_function_defaults = operator.attrgetter(_func_defaults)
get_function_globals = operator.attrgetter(_func_globals)
if PY3:
def iterkeys(d, **kw):
return iter(d.keys(**kw))
def itervalues(d, **kw):
return iter(d.values(**kw))
def iteritems(d, **kw):
return iter(d.items(**kw))
def iterlists(d, **kw):
return iter(d.lists(**kw))
viewkeys = operator.methodcaller("keys")
viewvalues = operator.methodcaller("values")
viewitems = operator.methodcaller("items")
else:
def iterkeys(d, **kw):
return d.iterkeys(**kw)
def itervalues(d, **kw):
return d.itervalues(**kw)
def iteritems(d, **kw):
return d.iteritems(**kw)
def iterlists(d, **kw):
return d.iterlists(**kw)
viewkeys = operator.methodcaller("viewkeys")
viewvalues = operator.methodcaller("viewvalues")
viewitems = operator.methodcaller("viewitems")
_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
_add_doc(iteritems,
"Return an iterator over the (key, value) pairs of a dictionary.")
_add_doc(iterlists,
"Return an iterator over the (key, [values]) pairs of a dictionary.")
if PY3:
def b(s):
return s.encode("latin-1")
def u(s):
return s
unichr = chr
import struct
int2byte = struct.Struct(">B").pack
del struct
byte2int = operator.itemgetter(0)
indexbytes = operator.getitem
iterbytes = iter
import io
StringIO = io.StringIO
BytesIO = io.BytesIO
_assertCountEqual = "assertCountEqual"
if sys.version_info[1] <= 1:
_assertRaisesRegex = "assertRaisesRegexp"
_assertRegex = "assertRegexpMatches"
else:
_assertRaisesRegex = "assertRaisesRegex"
_assertRegex = "assertRegex"
else:
def b(s):
return s
# Workaround for standalone backslash
def u(s):
return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
unichr = unichr
int2byte = chr
def byte2int(bs):
return ord(bs[0])
def indexbytes(buf, i):
return ord(buf[i])
iterbytes = functools.partial(itertools.imap, ord)
import StringIO
StringIO = BytesIO = StringIO.StringIO
_assertCountEqual = "assertItemsEqual"
_assertRaisesRegex = "assertRaisesRegexp"
_assertRegex = "assertRegexpMatches"
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")
def assertCountEqual(self, *args, **kwargs):
    """Dispatch to the TestCase's count-equality assert under its
    per-version name (assertCountEqual / assertItemsEqual)."""
    return getattr(self, _assertCountEqual)(*args, **kwargs)


def assertRaisesRegex(self, *args, **kwargs):
    """Dispatch to the TestCase's regex-raises assert under its
    per-version name (assertRaisesRegex / assertRaisesRegexp)."""
    return getattr(self, _assertRaisesRegex)(*args, **kwargs)


def assertRegex(self, *args, **kwargs):
    """Dispatch to the TestCase's regex-match assert under its
    per-version name (assertRegex / assertRegexpMatches)."""
    return getattr(self, _assertRegex)(*args, **kwargs)
if PY3:
exec_ = getattr(moves.builtins, "exec")
def reraise(tp, value, tb=None):
try:
if value is None:
value = tp()
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
finally:
value = None
tb = None
else:
def exec_(_code_, _globs_=None, _locs_=None):
"""Execute code in a namespace."""
if _globs_ is None:
frame = sys._getframe(1)
_globs_ = frame.f_globals
if _locs_ is None:
_locs_ = frame.f_locals
del frame
elif _locs_ is None:
_locs_ = _globs_
exec("""exec _code_ in _globs_, _locs_""")
exec_("""def reraise(tp, value, tb=None):
try:
raise tp, value, tb
finally:
tb = None
""")
if sys.version_info[:2] == (3, 2):
exec_("""def raise_from(value, from_value):
try:
if from_value is None:
raise value
raise value from from_value
finally:
value = None
""")
elif sys.version_info[:2] > (3, 2):
exec_("""def raise_from(value, from_value):
try:
raise value from from_value
finally:
value = None
""")
else:
def raise_from(value, from_value):
raise value
print_ = getattr(moves.builtins, "print", None)
if print_ is None:
def print_(*args, **kwargs):
"""The new-style print function for Python 2.4 and 2.5."""
fp = kwargs.pop("file", sys.stdout)
if fp is None:
return
def write(data):
if not isinstance(data, basestring):
data = str(data)
# If the file has an encoding, encode unicode with it.
if (isinstance(fp, file) and
isinstance(data, unicode) and
fp.encoding is not None):
errors = getattr(fp, "errors", None)
if errors is None:
errors = "strict"
data = data.encode(fp.encoding, errors)
fp.write(data)
want_unicode = False
sep = kwargs.pop("sep", None)
if sep is not None:
if isinstance(sep, unicode):
want_unicode = True
elif not isinstance(sep, str):
raise TypeError("sep must be None or a string")
end = kwargs.pop("end", None)
if end is not None:
if isinstance(end, unicode):
want_unicode = True
elif not isinstance(end, str):
raise TypeError("end must be None or a string")
if kwargs:
raise TypeError("invalid keyword arguments to print()")
if not want_unicode:
for arg in args:
if isinstance(arg, unicode):
want_unicode = True
break
if want_unicode:
newline = unicode("\n")
space = unicode(" ")
else:
newline = "\n"
space = " "
if sep is None:
sep = space
if end is None:
end = newline
for i, arg in enumerate(args):
if i:
write(sep)
write(arg)
write(end)
if sys.version_info[:2] < (3, 3):
_print = print_
def print_(*args, **kwargs):
fp = kwargs.get("file", sys.stdout)
flush = kwargs.pop("flush", False)
_print(*args, **kwargs)
if flush and fp is not None:
fp.flush()
_add_doc(reraise, """Reraise an exception.""")
# functools.wraps on Python < 3.4 does not set __wrapped__, so wrap it in
# a shim that adds the attribute; otherwise use the stdlib implementation.
if sys.version_info[0:2] < (3, 4):
    def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
              updated=functools.WRAPPER_UPDATES):
        def wrapper(f):
            f = functools.wraps(wrapped, assigned, updated)(f)
            f.__wrapped__ = wrapped
            return f
        return wrapper
else:
    wraps = functools.wraps
def with_metaclass(meta, *bases):
    """Create a base class with a metaclass."""
    # This requires a bit of explanation: the basic idea is to make a dummy
    # metaclass for one level of class instantiation that replaces itself with
    # the actual metaclass.
    class metaclass(type):

        def __new__(cls, name, this_bases, d):
            # Ignore the temporary bases; build the real class with *meta*
            # and the bases originally passed to with_metaclass().
            return meta(name, bases, d)

        @classmethod
        def __prepare__(cls, name, this_bases):
            return meta.__prepare__(name, bases)
    return type.__new__(metaclass, 'temporary_class', (), {})
def add_metaclass(metaclass):
    """Class decorator for creating a class with a metaclass.

    Rebuilds the decorated class via ``metaclass(name, bases, body)``,
    stripping slot descriptors and the implicit __dict__/__weakref__
    entries so they are recreated cleanly by the metaclass.
    """
    def wrapper(cls):
        body = cls.__dict__.copy()
        slots = body.get('__slots__')
        if slots is not None:
            slot_names = [slots] if isinstance(slots, str) else slots
            for slot_name in slot_names:
                body.pop(slot_name)
        body.pop('__dict__', None)
        body.pop('__weakref__', None)
        return metaclass(cls.__name__, cls.__bases__, body)
    return wrapper
def python_2_unicode_compatible(klass):
    """
    A decorator that defines __unicode__ and __str__ methods under Python 2.
    Under Python 3 it does nothing.

    To support Python 2 and 3 with a single code base, define a __str__ method
    returning text and apply this decorator to the class.
    """
    if PY2:
        if '__str__' not in klass.__dict__:
            raise ValueError("@python_2_unicode_compatible cannot be applied "
                             "to %s because it doesn't define __str__()." %
                             klass.__name__)
        # Promote the text-returning __str__ to __unicode__ and replace
        # __str__ with a UTF-8-encoding wrapper, per Python 2 convention.
        klass.__unicode__ = klass.__str__
        klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
    # On Python 3 the class is returned unchanged.
    return klass
# Complete the moves implementation.
# This code is at the end of this module to speed up module loading.
# Turn this module into a package.
__path__ = [] # required for PEP 302 and PEP 451
__package__ = __name__ # see PEP 366 @ReservedAssignment
if globals().get("__spec__") is not None:
__spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
# Remove other six meta path importers, since they cause problems. This can
# happen if six is removed from sys.modules and then reloaded. (Setuptools does
# this for some reason.)
if sys.meta_path:
for i, importer in enumerate(sys.meta_path):
# Here's some real nastiness: Another "instance" of the six module might
# be floating around. Therefore, we can't use isinstance() to check for
# the six meta path importer, since the other six instance will have
# inserted an importer with different class.
if (type(importer).__name__ == "_SixMetaPathImporter" and
importer.name == __name__):
del sys.meta_path[i]
break
del i, importer
# Finally, add the importer to the meta path import hook.
sys.meta_path.append(_importer)
| mit |
weilu/Hadoop-Resource-Aware-Scheduler | common/build/contrib/hod/hodlib/Common/hodsvc.py | 182 | 8438 | #Licensed to the Apache Software Foundation (ASF) under one
#or more contributor license agreements. See the NOTICE file
#distributed with this work for additional information
#regarding copyright ownership. The ASF licenses this file
#to you under the Apache License, Version 2.0 (the
#"License"); you may not use this file except in compliance
#with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
# $Id:setup.py 5158 2007-04-09 00:14:35Z zim $
#
#------------------------------------------------------------------------------
import os, time, shutil, xmlrpclib, socket, pprint
from signal import *
from hodlib.Common.logger import hodLog, hodDummyLogger
from hodlib.Common.socketServers import hodXMLRPCServer
from hodlib.Common.util import local_fqdn
from hodlib.Common.xmlrpc import hodXRClient
class hodBaseService:
"""hodBaseService class - This class provides service registration, logging,
and configuration access methods. It also provides an XML-RPC server.
This class should be extended to create hod services. Methods beginning
with _xr_method will automatically be added to instances of this class.
"""
    def __init__(self, name, config, xrtype='threaded'):
        """ Initialization requires a name string and a config object of type
            hodlib.Common.setup.options or hodlib.Common.setup.config."""

        # Service identity: logical name and this host's FQDN.
        self.name = name
        self.hostname = local_fqdn()
        # Configuration object; XML-RPC server handle is created later by
        # _init_xrc_server().
        self._cfg = config
        self._xrc = None
        # Named loggers keyed by logger name; populated by _init_logging().
        self.logs = {}
        self._baseLogger = None
        # PBS job id (None when not running under PBS); used to build a
        # per-job log directory in _init_logging().
        self._serviceID = os.getenv('PBS_JOBID')

        self.__logDir = None
        self.__svcrgy = None
        self.__stop = False
        # 'threaded' selects the XML-RPC server flavor -- TODO confirm
        # against _init_xrc_server(), which is outside this chunk.
        self.__xrtype = xrtype

        self._init_logging()

        # The service registry itself must not install signal handlers.
        if name != 'serviceRegistry': self._init_signals()
        self._init_xrc_server()
    def __set_logging_level(self, level):
        # Apply *level* to every known logger.
        # NOTE(review): self.loggers is never assigned in the code visible
        # here (only self.logs is); presumably a subclass or later code sets
        # it -- verify, otherwise this raises AttributeError.
        self.logs['main'].info("Setting log level to %s." % level)
        for loggerName in self.loggers.keys():
            self.logs['main'].set_logger_level(loggerName, level)
def __get_logging_level(self):
if self._cfg.has_key('stream'):
return self.loggers['main'].get_level('stream', 'main')
elif self._cfg.has_key('log-dir'):
return self.loggers['main'].get_level('file', 'main')
else:
return 0
def _xr_method_stop(self, *args):
"""XML-RPC method, calls stop() on ourselves."""
return self.stop()
def _xr_method_status(self, *args):
"""XML-RPC method, calls status() on ourselves."""
return self.status()
def _init_logging(self):
if self._cfg.has_key('debug'):
if self._cfg['debug'] > 0:
self._baseLogger = hodLog(self.name)
self.logs['main'] = self._baseLogger.add_logger('main')
if self._cfg.has_key('stream'):
if self._cfg['stream']:
self._baseLogger.add_stream(level=self._cfg['debug'],
addToLoggerNames=('main',))
if self._cfg.has_key('log-dir'):
if self._serviceID:
self.__logDir = os.path.join(self._cfg['log-dir'], "%s.%s" % (
self._cfg['userid'], self._serviceID))
else:
self.__logDir = os.path.join(self._cfg['log-dir'],
self._cfg['userid'])
if not os.path.exists(self.__logDir):
os.mkdir(self.__logDir)
self._baseLogger.add_file(logDirectory=self.__logDir,
level=self._cfg['debug'], addToLoggerNames=('main',))
if self._cfg.has_key('syslog-address'):
self._baseLogger.add_syslog(self._cfg['syslog-address'],
level=self._cfg['debug'], addToLoggerNames=('main',))
if not self.logs.has_key('main'):
self.logs['main'] = hodDummyLogger()
else:
self.logs['main'] = hodDummyLogger()
else:
self.logs['main'] = hodDummyLogger()
def _init_signals(self):
def sigStop(sigNum, handler):
self.sig_wrapper(sigNum, self.stop)
def toggleLevel():
currentLevel = self.__get_logging_level()
if currentLevel == 4:
self.__set_logging_level(1)
else:
self.__set_logging_level(currentLevel + 1)
def sigStop(sigNum, handler):
self._sig_wrapper(sigNum, self.stop)
def sigDebug(sigNum, handler):
self.sig_wrapper(sigNum, toggleLevel)
signal(SIGTERM, sigStop)
signal(SIGQUIT, sigStop)
signal(SIGINT, sigStop)
signal(SIGUSR2, sigDebug)
def _sig_wrapper(self, sigNum, handler, *args):
self.logs['main'].info("Caught signal %s." % sigNum)
if args:
handler(args)
else:
handler()
def _init_xrc_server(self):
host = None
ports = None
if self._cfg.has_key('xrs-address'):
(host, port) = (self._cfg['xrs-address'][0], self._cfg['xrs-address'][1])
ports = (port,)
elif self._cfg.has_key('xrs-port-range'):
host = ''
ports = self._cfg['xrs-port-range']
if host != None:
if self.__xrtype == 'threaded':
self._xrc = hodXMLRPCServer(host, ports)
elif self.__xrtype == 'twisted':
try:
from socketServers import twistedXMLRPCServer
self._xrc = twistedXMLRPCServer(host, ports, self.logs['main'])
except ImportError:
self.logs['main'].error("Twisted XML-RPC server not available, "
+ "falling back on threaded server.")
self._xrc = hodXMLRPCServer(host, ports)
for attr in dir(self):
if attr.startswith('_xr_method_'):
self._xrc.register_function(getattr(self, attr),
attr[11:])
self._xrc.register_introspection_functions()
def _register_service(self, port=None, installSignalHandlers=1):
if self.__svcrgy:
self.logs['main'].info(
"Registering service with service registery %s... " % self.__svcrgy)
svcrgy = hodXRClient(self.__svcrgy, None, None, 0, 0, installSignalHandlers)
if self._xrc and self._http:
svcrgy.registerService(self._cfg['userid'], self._serviceID,
self.hostname, self.name, 'hod', {
'xrs' : "http://%s:%s" % (
self._xrc.server_address[0],
self._xrc.server_address[1]),'http' :
"http://%s:%s" % (self._http.server_address[0],
self._http.server_address[1])})
elif self._xrc:
svcrgy.registerService(self._cfg['userid'], self._serviceID,
self.hostname, self.name, 'hod', {
'xrs' : "http://%s:%s" % (
self._xrc.server_address[0],
self._xrc.server_address[1]),})
elif self._http:
svcrgy.registerService(self._cfg['userid'], self._serviceID,
self.hostname, self.name, 'hod', {'http' :
"http://%s:%s" % (self._http.server_address[0],
self._http.server_address[1]),})
else:
svcrgy.registerService(self._cfg['userid'], self._serviceID,
self.hostname, name, 'hod', {} )
def start(self):
""" Start XML-RPC server and register service."""
self.logs['main'].info("Starting HOD service: %s ..." % self.name)
if self._xrc: self._xrc.serve_forever()
if self._cfg.has_key('register') and self._cfg['register']:
self._register_service()
def stop(self):
""" Stop XML-RPC server, unregister service and set stop flag. """
self.logs['main'].info("Stopping service...")
if self._xrc: self._xrc.stop()
self.__stop = True
return True
def status(self):
"""Returns true, should be overriden."""
return True
def wait(self):
"""Wait until stop method is called."""
while not self.__stop:
time.sleep(.1)
| apache-2.0 |
FlaPer87/qpid-proton | examples/python/reactor/unhandled.py | 4 | 1227 | #!/usr/bin/python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import time
from proton.reactor import Reactor
class Program:
    """Reactor handler whose only method is the catch-all on_unhandled."""

    # If an event occurs and its handler doesn't have an on_<event>
    # method, the reactor will attempt to call the on_unhandled method
    # if it exists. This can be useful not only for debugging, but for
    # logging and for delegating/inheritance.
    def on_unhandled(self, name, event):
        # Python 2 print statement: emits the event name then the event.
        print name, event
# Run a reactor whose sole handler defines no on_<event> methods, so every
# dispatched event (reactor_init, reactor_final, ...) is routed through
# Program.on_unhandled; run() blocks until the reactor finishes.
r = Reactor(Program())
r.run()
| apache-2.0 |
cloudsidekick/vcloudpy | vcloudpy/vcloudpy.py | 1 | 10080 | #########################################################################
# Copyright 2013 Cloud Sidekick
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#########################################################################
import urllib2
import httplib
import base64
import re
import time
try:
import xml.etree.cElementTree as ET
except (AttributeError, ImportError):
import xml.etree.ElementTree as ET
try:
ET.ElementTree.iterfind
except AttributeError as ex:
del(ET)
import etree.ElementTree as ET
def _xml_del_ns(xml):
"""A helper function that strips vcloud namespaces from xml text.
This is used because sometimes the vcloud namespace is used, other
times it is not. Befuddling."""
try:
p = re.compile("xmlns=*[\"\"][^\"\"]*[\"\"]")
allmatches = p.finditer(xml)
for match in allmatches:
xml = xml.replace(match.group(), "")
except Exception as e:
raise Exception(e)
if "xmlns:vcloud" in xml:
xml = xml.replace("vcloud:", "")
xml = xml.replace(" xmlns:vcloud=\"http://www.vmware.com/vcloud/v1.5\"", "")
return xml
def get_node_values(xml, path, attribs=(), elems=(), other=""):
    """Given an xml string and path, returns a list of dictionary objects.

    Arguments:
    xml -- a string of properly formatted xml
    path -- an Xpath path.
        See http://docs.python.org/2/library/xml.etree.elementtree.html#supported-xpath-syntax
    attribs -- an optional sequence of xml node attribute names to retrieve
        the values for. "*" will retrieve all attributes per node found.
    elems -- an optional sequence of xml child elements to retrieve the text
        value for. "*" will retrieve all child elements per node found.
    other -- if the attribute or element is not found, return this value
        (e.g. "" or None)

    Fix: the defaults for attribs/elems were mutable lists; they are now
    tuples (the standard fix for the mutable-default-argument pitfall).
    Callers passing lists are unaffected.

    Example:

    print get_node_values(z, "./NetworkConnection", elems=["MACAddress", "IpAddress"],
        attribs=["network", "needsCustomization", "aaaaaaaa"], other=None)

    Might return a list of two interfaces with the following dictionary values:

    [{'needsCustomization': 'false', 'aaaaaaaa': None, 'IpAddress': '212.54.150.58', 'network': 'Direct Internet connection', 'MACAddress': '00:50:56:01:02:eb'}, {'needsCustomization': 'false', 'aaaaaaaa': None, 'IpAddress': '212.54.150.82', 'network': 'Direct Internet connection', 'MACAddress': '00:50:56:01:02:e7'}]
    """
    result = []
    # Namespaces are stripped first so callers can use prefix-free paths.
    root = ET.fromstring(_xml_del_ns(xml))
    if not path.startswith("./"):
        path = "./" + path
    nodes = root.findall(path)
    for n in nodes:
        node_result = {}
        if "*" in attribs:
            node_result = n.attrib
            # we don't care about the rest of the list, move on to elems
        else:
            for a in attribs:
                if a in n.attrib.keys():
                    node_result[a] = n.attrib.get(a)
                else:
                    node_result[a] = other
        if "*" in elems:
            for e in n:
                node_result[e.tag] = e.text
            # we don't care about the rest of the list, move on to the next node
        else:
            for e in elems:
                node_result[e] = n.findtext(e, other)
        result.append(node_result)
    # don't forget: result will be a list, empty if path is not found
    return result
class VCloudConn():
    """A thin client for the VMware vCloud REST API (Python 2: uses urllib2,
    httplib and ``except X, e`` syntax).

    Example:
    conn = vcloudpy.VCloudConn(user, password, endpoint, debug=True)
    """

    def __init__(self, user, password, endpoint, protocol="https", api_version="1.5",
                 path="/api", timeout=30, debug=False):
        """Initiallizes the VCloudConn class.

        Will automatically use parameters and establish connection to vCloud endpoint.

        Arguments:
        user -- vCloud userid in the form of user@orgid
        password -- vCloud user's password
        endpoint -- vCloud server endpoint, e.g. iad.vcloudservice.vmware.com
        protocol -- optional, http or https (default), most likely https
        api_version -- optional, usually either 5.1 (default) or 1.5
        path -- optional, api uri path, most likely don't change
        timeout -- optional, timeout in seconds for all http connections with vCloud, default 30
        debug -- optional, prints html responses from vCloud, True or False (default)
        """
        self.api_version = api_version
        self.timeout = timeout
        self.debug = debug
        self.base_url = "%s://%s%s" % (protocol.lower(), endpoint, path)
        # ok, we should be able to login now
        self._login(user, password)

    def _login(self, user, password):
        """Handles login duties, retrieves authorization token"""
        # NOTE(review): api_version defaults to "1.5" in __init__, so this
        # auto-detection branch only runs when the caller explicitly passes
        # api_version=None.
        if self.api_version is None:
            self.api_version = self._determine_version(self.base_url + "/versions")

        auth_url = self.base_url + "/sessions"
        req = urllib2.Request(auth_url)
        # HTTP Basic auth with the vCloud "user@org:password" credential pair.
        auth = "Basic " + base64.urlsafe_b64encode("%s:%s" % (user, password))
        req.add_header("Authorization", auth)
        req.get_method = lambda: "POST"

        # this should be in a try / except with more specific error messages
        result = self._send_request(req)
        # The session token is echoed back in this response header and is
        # attached to every subsequent request by _make_request().
        self.auth_token = result.info().getheader("x-vcloud-authorization")

    def _determine_version(self, url):
        # Query the /versions endpoint and return the highest advertised
        # API version (lexicographic sort, descending).
        req = urllib2.Request(url)
        response = self._send_request(req)
        xml = _xml_del_ns(response.read())
        versions = get_node_values(xml, "VersionInfo", elems=["Version"])
        lv = []
        for v in versions:
            lv.append(v["Version"])
        lv.sort()
        lv.reverse()
        return lv[0]

    def _make_request(self, url, verb, data=None, type=None, timeout=None):
        """Constructs the vCloud api request to send"""
        # vCloud hrefs may contain spaces; encode them minimally.
        url = url.replace(" ", "+")
        req = urllib2.Request(url)
        req.add_header("x-vcloud-authorization", self.auth_token)
        if type:
            # |type| is a Content-Type string (shadows the builtin 'type').
            req.add_header("Content-Type", type)
        req.get_method = lambda: verb
        if data:
            req.add_data(data)

        response = self._send_request(req, timeout=timeout)
        return response.read()

    def _send_request(self, req, timeout=None):
        """Sends the request and handles errors

        Retries up to 10 times (1 second apart) on HTTP 401 responses;
        every other failure is raised immediately.
        """
        req.add_header("Accept", "application/*+xml;version=%s" % self.api_version)
        if self.debug:
            print("vCloud api version being used: %s" % (self.api_version))

        reattempt = True
        attempt = 1
        reattempt_http_codes = [401]
        delay = 1
        attempts_allowed = 10
        if not timeout:
            timeout = self.timeout
        while reattempt is True and attempt <= attempts_allowed:
            try:
                response = urllib2.urlopen(req, timeout=timeout)
            except urllib2.HTTPError, e:
                if e.code in reattempt_http_codes and attempt < attempts_allowed:
                    print("HTTPError, will reattempt = %s, %s, %s\n%s" % (str(e.code), e.msg, e.read(), req.get_full_url()))
                    attempt += 1
                    time.sleep(delay)
                    continue
                else:
                    raise Exception("HTTPError = %s, %s, %s\n%s" % (str(e.code), e.msg, e.read(), req.get_full_url()))
            except urllib2.URLError, e:
                raise Exception("URLError = %s\n%s" % (str(e.reason), req.get_full_url()))
            except httplib.NotConnected, e:
                raise e
            except httplib.InvalidURL, e:
                raise e
            except httplib.UnknownProtocol, e:
                raise e
            except httplib.UnknownTransferEncoding, e:
                raise e
            except httplib.UnimplementedFileMode, e:
                raise e
            except httplib.IncompleteRead, e:
                raise e
            except httplib.ImproperConnectionState, e:
                raise e
            except httplib.BadStatusLine, e:
                print req.get_full_url()
                raise Exception("BadStatusLine: The server responded with an unknown status or an empty response. Possible causes: request was 'https' but the server is 'http', or vice versa.\n%s" % (str(e)))
            except httplib.CannotSendRequest, e:
                raise e
            except httplib.CannotSendHeader, e:
                raise e
            except httplib.ResponseNotReady, e:
                raise e
            except httplib.HTTPException, e:
                raise e
            except Exception:
                import traceback
                raise Exception("generic exception: " + traceback.format_exc())
            else:
                # got here, request was successful, break out
                reattempt = False

        if self.debug:
            print(response.info())
            r = response.read()
            print(r)

        return response

    def logout(self):
        """Handles the logout duties"""
        self.make_method_request("session", "DELETE")
        self.auth_token = None

    def make_href_request_path(self, href, verb="GET", data=None, type=None, timeout=None):
        """Used to retrieve an object using a full path"""
        return self._make_request(href, verb, data, type, timeout=timeout)

    def make_method_request(self, method, verb="GET", timeout=None):
        """Used to make a method request."""
        full_url = self.base_url + "/" + method
        return self._make_request(full_url, verb, timeout=timeout)
| apache-2.0 |
arkusuma/mediapad_kernel_ics | scripts/rt-tester/rt-tester.py | 11005 | 5307 | #!/usr/bin/python
#
# rt-mutex tester
#
# (C) 2006 Thomas Gleixner <tglx@linutronix.de>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
import os
import sys
import getopt
import shutil
import string
# Globals
quiet = 0      # -q: suppress progress output
test = 0       # -t: syntax-check only, do not touch sysfs
comments = 0   # -c: echo comments after the first command

# sysfs interface exposed by the kernel rt-mutex tester module.
sysfsprefix = "/sys/devices/system/rttest/rttest"
statusfile = "/status"
commandfile = "/command"

# Command opcodes: script mnemonic -> numeric opcode written to the
# per-thread command file.
cmd_opcodes = {
    "schedother" : "1",
    "schedfifo" : "2",
    "lock" : "3",
    "locknowait" : "4",
    "lockint" : "5",
    "lockintnowait" : "6",
    "lockcont" : "7",
    "unlock" : "8",
    "signal" : "11",
    "resetevent" : "98",
    "reset" : "99",
}

# Test opcodes: mnemonic -> [status-field letter, comparison, fixed value].
# A None value means the expected value comes from the test line itself.
test_opcodes = {
    "prioeq" : ["P" , "eq" , None],
    "priolt" : ["P" , "lt" , None],
    "priogt" : ["P" , "gt" , None],
    "nprioeq" : ["N" , "eq" , None],
    "npriolt" : ["N" , "lt" , None],
    "npriogt" : ["N" , "gt" , None],
    "unlocked" : ["M" , "eq" , 0],
    "trylock" : ["M" , "eq" , 1],
    "blocked" : ["M" , "eq" , 2],
    "blockedwake" : ["M" , "eq" , 3],
    "locked" : ["M" , "eq" , 4],
    "opcodeeq" : ["O" , "eq" , None],
    "opcodelt" : ["O" , "lt" , None],
    "opcodegt" : ["O" , "gt" , None],
    "eventeq" : ["E" , "eq" , None],
    "eventlt" : ["E" , "lt" , None],
    "eventgt" : ["E" , "gt" , None],
}
# Print usage information
def usage():
    """Print command-line help (Python 2 print statements)."""
    print "rt-tester.py <-c -h -q -t> <testfile>"
    print " -c display comments after first command"
    print " -h help"
    print " -q quiet mode"
    print " -t test mode (syntax check)"
    print " testfile: read test specification from testfile"
    print " otherwise from stdin"
    return
# Print progress when not in quiet mode
def progress(str):
    """Print |str| unless the global quiet flag (-q) is set."""
    if not quiet:
        print str
# Analyse a status value
def analyse(val, top, arg):
    """Compare one status value against a test opcode.

    Arguments:
    val -- status value string read from the sysfs status file
    top -- a test_opcodes entry: [field letter, comparison, fixed value]
    arg -- the argument column from the test line; for "M" it selects the
           digit position, for "O" it is a command mnemonic (or number)

    Returns 1 when the comparison holds, otherwise 0.
    """
    intval = int(val)

    if top[0] == "M":
        # Mutex state: extract the decimal digit at position int(arg).
        # Fix: use floor division (//) so the result stays an integer on
        # Python 3 as well; on Python 2 '/' on ints was already floor
        # division, so behavior is unchanged there.
        intval = intval // (10 ** int(arg))
        intval = intval % 10
        argval = top[2]
    elif top[0] == "O":
        # Opcode: accept either a mnemonic from cmd_opcodes or a raw number.
        argval = int(cmd_opcodes.get(arg, arg))
    else:
        argval = int(arg)

    # progress("%d %s %d" %(intval, top[1], argval))

    if top[1] == "eq" and intval == argval:
        return 1
    if top[1] == "lt" and intval < argval:
        return 1
    if top[1] == "gt" and intval > argval:
        return 1
    return 0
# Parse the commandline
try:
    (options, arguments) = getopt.getopt(sys.argv[1:],'chqt')
except getopt.GetoptError, ex:
    usage()
    sys.exit(1)

# Parse commandline options
for option, value in options:
    if option == "-c":
        comments = 1
    elif option == "-q":
        quiet = 1
    elif option == "-t":
        test = 1
    elif option == '-h':
        usage()
        sys.exit(0)

# Select the input source
if arguments:
    try:
        fd = open(arguments[0])
    except Exception,ex:
        sys.stderr.write("File not found %s\n" %(arguments[0]))
        sys.exit(1)
else:
    fd = sys.stdin

linenr = 0

# Read the test patterns
# Each line has the form "cmd:opcode:thread-id:data"; '#' lines are comments.
while 1:
    linenr = linenr + 1
    line = fd.readline()
    if not len(line):
        break
    line = line.strip()
    parts = line.split(":")
    if not parts or len(parts) < 1:
        continue
    if len(parts[0]) == 0:
        continue
    if parts[0].startswith("#"):
        if comments > 1:
            progress(line)
        continue
    if comments == 1:
        comments = 2
        progress(line)
    cmd = parts[0].strip().lower()
    opc = parts[1].strip().lower()
    tid = parts[2].strip()
    dat = parts[3].strip()
    try:
        # Test or wait for a status value
        # "t" checks once; "w" polls until the condition holds.
        if cmd == "t" or cmd == "w":
            testop = test_opcodes[opc]
            fname = "%s%s%s" %(sysfsprefix, tid, statusfile)
            if test:
                print fname
                continue
            while 1:
                query = 1
                fsta = open(fname, 'r')
                status = fsta.readline().strip()
                fsta.close()
                stat = status.split(",")
                for s in stat:
                    s = s.strip()
                    if s.startswith(testop[0]):
                        # Separate status value
                        val = s[2:].strip()
                        query = analyse(val, testop, dat)
                        break
                if query or cmd == "t":
                    break
                progress(" " + status)
            if not query:
                sys.stderr.write("Test failed in line %d\n" %(linenr))
                sys.exit(1)
        # Issue a command to the tester
        elif cmd == "c":
            cmdnr = cmd_opcodes[opc]
            # Build command string and sys filename
            cmdstr = "%s:%s" %(cmdnr, dat)
            fname = "%s%s%s" %(sysfsprefix, tid, commandfile)
            if test:
                print fname
                continue
            fcmd = open(fname, 'w')
            fcmd.write(cmdstr)
            fcmd.close()
    except Exception,ex:
        sys.stderr.write(str(ex))
        sys.stderr.write("\nSyntax error in line %d\n" %(linenr))
        if not test:
            fd.close()
        sys.exit(1)

# Normal exit pass
print "Pass"
sys.exit(0)
| gpl-2.0 |
google/ci_edit | app/parser.py | 1 | 44726 | # -*- coding: utf-8 -*-
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Python 2/3 compatibility: on Python 3 the py2 names do not exist, so alias
# them to their py3 equivalents.
try:
    unicode
except NameError:
    unicode = str
    unichr = chr
import curses.ascii
import os
import re
import sys
import threading
import time
import traceback
import third_party.pyperclip as clipboard
import app.config
import app.log
import app.selectable
# Keys to tuples within |parserNodes|. Internally the parser stores nodes as
# plain 4-tuples (for speed); these constants index the tuple fields.
# Reference to a prefs grammar dictionary.
kGrammar = 0
# The current grammar begins at byte offset |kBegin| in the source data.
kBegin = 1
# An index into the parserNodes list to the prior (or parent) grammar.
kPrior = 2
# Some characters display wider (or narrower) than others. Visual is a running
# display offset. E.g. if the first character in some utf-8 data is a double
# width and 3 bytes long the kBegin = 0, and kVisual = 0; the second character
# will start at kBegin = 3, kVisual = 2.
kVisual = 3
class ParserNode:
    """A parser node represents a span of grammar. i.e. from this point to that
    point is HTML. Another parser node would represent the next segment, of
    grammar (maybe JavaScript, CSS, comment, or quoted string for example."""

    def __init__(self, grammar, begin, prior, visual):
        # The prefs grammar dictionary covering this span.
        self.grammar = grammar
        # Offset of the span from the start of the file.
        self.begin = begin
        # Index of the prior grammar node (grammars nest like a stack).
        self.prior = prior
        # Running visible width on screen (double wide chars, and tabs).
        self.visual = visual

    def debug_log(self, out, indent, data):
        """Emit a one-line description of this node through the |out| callable."""
        grammar_name = self.grammar.get("name", "None")
        snippet = repr(data[self.begin : self.begin + 15])[1:-1]
        message = "%sParserNode %26s prior %4s, b%4d, v%4d %s" % (
            indent,
            grammar_name,
            self.prior,
            self.begin,
            self.visual,
            snippet,
        )
        out(message)
class Parser:
"""A parser generates a set of grammar segments (ParserNode objects)."""
def __init__(self, appPrefs):
    """Create an empty parser bound to the application preferences.

    |appPrefs| supplies the grammar definitions; "none" is the fallback
    grammar used for unstyled text.
    """
    self.appPrefs = appPrefs
    self._defaultGrammar = appPrefs.grammars["none"]
    self.data = u""
    self.emptyNode = ParserNode({}, None, None, 0)
    # Sentinel node used to terminate binary searches over grammar spans.
    self.endNode = ({}, sys.maxsize, sys.maxsize, sys.maxsize)
    self.resumeAtRow = 0
    self.pauseAtRow = 0
    # A row on screen will consist of one or more ParserNodes. When a
    # ParserNode is returned from the parser it will be an instance of
    # ParserNode, but internally tuples are used in place of ParserNodes.
    # This makes for some ugly code, but the performance difference (~5%) is
    # worth it.
    self.parserNodes = [({}, 0, None, 0)]
    # Each entry in |self.rows| is an index into the |self.parserNodes|
    # array to the parserNode that begins that row.
    self.rows = [0]  # Row parserNodes index.
    app.log.parser("__init__")
def backspace(self, row, col):
    """Delete the character prior to |row, col|.
    Return the new (row, col) position."""
    self._fully_parse_to(row)
    offset = self.data_offset(row, col)
    if offset == 0:
        # Top of file, nothing to do.
        return row, col
    if offset is None:
        # Bottom of file (or past end of line, but assuming end of file).
        offset = len(self.data)
    ch = self.data[offset - 1]
    # Adjust the cursor according to what is being removed: a newline joins
    # onto the prior row; tabs and double-wide chars move more than one
    # visual column.
    if ch == u"\n":
        row -= 1
        col = self.row_width(row)
    elif ch == u"\t":
        # prior_char_row_col returns (row delta, col delta); the col delta is
        # negative here.
        col += self.prior_char_row_col(row, col)[1]
    elif app.curses_util.is_double_width(ch):
        col -= 2
    else:
        col -= 1
    self.data = self.data[: offset - 1] + self.data[offset:]
    self._begin_parsing_at(row)
    if app.config.strict_debug:
        assert isinstance(self.data, unicode)
        assert row >= 0
        assert col >= 0
    return row, col
def data_offset(self, row, col):
    """Return the offset within self.data (as unicode, not utf-8) for the
    start of the character at (row, col).

    Normally this will be the character the cursor is 'on' when
    using a block cursor; or to the 'right' of the when using a vertical
    cursor. I.e. it would be the character deleted by the 'del' key.

    Returns: offset (int) into self.data buffer; or None if (row, col) is
        outside the document.
    """
    self._fully_parse_to(row)
    if row >= len(self.rows):
        return None
    rowIndex = self.rows[row]
    node = self.parserNodes[rowIndex]
    if row + 1 < len(self.rows):
        nextLineNode = self.parserNodes[self.rows[row + 1]]
        if col >= nextLineNode[kVisual] - node[kVisual]:
            # The requested column is past the end of the line.
            return None
    elif row + 1 == len(self.rows):
        # On the last row.
        if col >= self.parserNodes[-1][kVisual] - node[kVisual]:
            # The requested column is past the end of the line.
            return None
    else:
        # The requested column is past the end of the document.
        return None
    # Locate the grammar span containing |col|, then convert the visual
    # column delta within the span back into a character offset.
    subnode = self.parserNodes[rowIndex + self.grammar_index_from_row_col(row, col)]
    subnodeCol = subnode[kVisual] - node[kVisual]
    subnodeColDelta = col - subnodeCol
    offset = subnode[kBegin]
    if self.data[offset] == u"\t":
        # Tab runs: each tab occupies up to |tabWidth| visual columns.
        # NOTE(review): tabWidth is hard-coded to 8 here, while other methods
        # read appPrefs.editor["tabSize"] — confirm intended.
        tabWidth = 8
        flooredTabGrammarCol = subnodeCol // tabWidth * tabWidth
        offset += (col - flooredTabGrammarCol) // tabWidth
    elif app.curses_util.is_double_width(self.data[offset]):
        # A span of double-wide characters: two visual columns per char.
        char_width = 2
        offset += subnodeColDelta // char_width
    else:
        offset += subnodeColDelta
    return offset
def data_offset_row_col(self, offset):
    """Get the (row, col) for the given data |offset| or None if the offset
    is beyond the file."""
    if app.config.strict_debug:
        assert isinstance(offset, int)
        assert offset >= 0
    # Binary search to find the row, then the col.
    nodes = self.parserNodes
    if offset >= nodes[-1][kBegin]:
        return None
    # Determine the row.
    rows = self.rows
    low = 0
    high = len(rows) - 1
    while True:
        row = (high + low) // 2
        if offset >= nodes[rows[row + 1]][kBegin]:
            low = row
        elif offset < nodes[rows[row]][kBegin]:
            high = row
        else:
            break
    # Determine the col (binary search over the row's grammar spans).
    low = rows[row]
    high = rows[row + 1]
    while True:
        index = (high + low) // 2
        if offset >= nodes[index + 1][kBegin]:
            low = index
        elif offset < nodes[index][kBegin]:
            high = index
        else:
            break
    # Visual column where the containing grammar span begins.
    col = nodes[index][kVisual] - nodes[rows[row]][kVisual]
    remainingOffset = offset - nodes[index][kBegin]
    if remainingOffset > 0:
        # Spans of tabs or double-wide chars: convert the character count
        # within the span into visual columns.
        ch = self.data[nodes[index][kBegin]]
        if ch == u"\t":
            tabWidth = self.appPrefs.editor.get(u"tabSize", 8)
            # Add the (potentially) fractional tab.
            col += app.curses_util.char_width(ch, col, tabWidth)
            # Add the remaining tabs.
            col += tabWidth * (remainingOffset - 1)
        else:
            col += app.curses_util.char_width(ch, col) * remainingOffset
    return row, col
def default_grammar(self):
    """Return the fallback ("none") grammar set at construction/parse time."""
    return self._defaultGrammar
def delete_block(self, upperRow, upperCol, lowerRow, lowerCol):
    """Delete the rectangular block of columns [upperCol, lowerCol) on each
    row in [upperRow, lowerRow]; iterates bottom-up so earlier offsets stay
    valid while deleting.
    """
    for row in range(lowerRow, upperRow - 1, -1):
        begin = self.data_offset(row, upperCol)
        end = self.data_offset(row, lowerCol)
        if end is None:
            if begin is not None:
                # NOTE(review): this truncates to the end of the whole buffer,
                # not just the end of this row — confirm intended for the
                # last-row case.
                self.data = self.data[:begin]
        else:
            self.data = self.data[:begin] + self.data[end:]
    self._begin_parsing_at(upperRow)
def delete_char(self, row, col):
    """Delete the character after (or "at") |row, col|."""
    self._fully_parse_to(row)
    offset = self.data_offset(row, col)
    if offset is None:
        # Bottom of file, nothing to do.
        return
    self.data = self.data[:offset] + self.data[offset + 1 :]
    # Reparse from the edited row onward.
    self._begin_parsing_at(row)
def delete_range(self, upperRow, upperCol, lowerRow, lowerCol):
    """Delete the contiguous text from (upperRow, upperCol) up to (but not
    including) (lowerRow, lowerCol).
    """
    begin = self.data_offset(upperRow, upperCol)
    end = self.data_offset(lowerRow, lowerCol)
    if end is None:
        # The end position is past the document; truncate from |begin|.
        if begin is not None:
            self.data = self.data[:begin]
    else:
        self.data = self.data[:begin] + self.data[end:]
    self._begin_parsing_at(upperRow)
def text_range(self, upperRow, upperCol, lowerRow, lowerCol):
    """Return the text from (upperRow, upperCol) up to (lowerRow, lowerCol)."""
    begin = self.data_offset(upperRow, upperCol)
    end = self.data_offset(lowerRow, lowerCol)
    if end is None:
        if begin is not None:
            return self.data[begin:]
    # NOTE(review): when both offsets are None this returns
    # self.data[None:None], i.e. the entire buffer — confirm intended.
    return self.data[begin:end]
def grammar_index_from_row_col(self, row, col):
    """Find which grammar span within row |row| covers visual column |col|.

    tip: as an optimization, check if |col == 0| prior to calling. The
    result will always be zero (so the call can be avoided).

    Returns:
        index. |index| may then be passed to grammar_at_index().
    """
    if app.config.strict_debug:
        assert isinstance(row, int)
        assert isinstance(col, int)
        assert row >= 0
        assert col >= 0
    self._fully_parse_to(row)
    if app.config.strict_debug:
        assert row < len(self.rows), (row, len(self.rows), repr(self.data))
    # Build the list of this row's grammar spans plus a sentinel end node.
    if row == len(self.rows) - 1:
        # The last line.
        assert row + 1 >= len(self.rows)
        gl = self.parserNodes[self.rows[row] :] + [self.endNode]
    else:
        gl = self.parserNodes[self.rows[row] : self.rows[row + 1]] + [self.endNode]
    # Absolute visual offset of the requested column.
    offset = gl[0][kVisual] + col
    # Binary search to find the node for the column.
    low = 0
    high = len(gl) - 1
    while True:
        index = (high + low) // 2
        if offset >= gl[index + 1][kVisual]:
            low = index
        elif offset < gl[index][kVisual]:
            high = index
        else:
            # assert index < len(gl)  # Never return index to self.endNode.
            return index
def grammar_at(self, row, col):
    """Get the grammar at row, col.

    It's more efficient to use grammar_index_from_row_col() and
    grammar_at_index() individually if grammars are requested contiguously.
    This function is just for one-off needs.
    """
    self._fully_parse_to(row)
    grammarIndex = self.grammar_index_from_row_col(row, col)
    node, _, _, _ = self.grammar_at_index(row, col, grammarIndex)
    return node.grammar
def grammar_at_index(self, row, col, index):
    """Call grammar_index_from_row_col() to get the index parameter.

    Returns:
        (node, preceding, remaining, eol). |preceding| and |remaining| are
        relative to the |col| parameter; |eol| is True when (row, col, index)
        falls past the end of the parsed nodes.
    """
    if app.config.strict_debug:
        assert isinstance(row, int)
        assert isinstance(col, int)
        assert isinstance(index, int)
        assert row < len(self.rows), row
    self._fully_parse_to(row)
    eol = True
    # Returned when the request is out of range: an empty node at EOL.
    finalResult = (self.emptyNode, 0, 0, eol)
    rowIndex = self.rows[row]
    if rowIndex + index + 1 >= len(self.parserNodes):
        return finalResult
    nextOffset = self.parserNodes[rowIndex + index + 1][kVisual]
    offset = self.parserNodes[rowIndex][kVisual] + col
    remaining = nextOffset - offset
    if remaining < 0:
        return finalResult
    node = self.parserNodes[rowIndex + index]
    eol = False
    # Wrap the internal tuple in a ParserNode instance for callers.
    return ParserNode(*node), offset - node[kVisual], remaining, eol
def grammar_text_at(self, row, col):
    """Get the run of text for the given position.

    Returns a (text, link_type) pair, where link_type comes from the span's
    grammar dictionary (may be None).
    """
    if app.config.strict_debug:
        assert isinstance(row, int)
        assert isinstance(col, int)
        assert row < len(self.rows), row
    self._fully_parse_to(row)
    rowIndex = self.rows[row]
    grammarIndex = self.grammar_index_from_row_col(row, col)
    node = self.parserNodes[rowIndex + grammarIndex]
    nextNode = self.parserNodes[rowIndex + grammarIndex + 1]
    return (
        self.data[node[kBegin] : nextNode[kBegin]],
        node[kGrammar].get(u"link_type"),
    )
def in_document(self, row, col):
    """Return True when (row, col) falls within the parsed document."""
    if app.config.strict_debug:
        assert isinstance(row, int)
        assert isinstance(col, int)
        assert row >= 0
        assert col >= 0
    self._fully_parse_to(row)
    return row < len(self.rows) and col < self.parserNodes[self.rows[row]][kVisual]
def insert(self, row, col, text):
    """Insert |text| at (row, col); appends when the position is past the
    end of the document. Triggers a reparse from |row|.
    """
    if app.config.strict_debug:
        assert isinstance(row, int)
        assert isinstance(col, int)
        assert isinstance(text, unicode)
        assert row >= 0
        assert col >= 0
        assert len(text) > 0
    offset = self.data_offset(row, col)
    if offset is None:
        # Past the end of the document: append, and reparse from the last row.
        row = len(self.rows) - 1
        self.data += text
    else:
        self.data = self.data[:offset] + text + self.data[offset:]
    self._begin_parsing_at(row)
def insert_block(self, row, col, lines):
    """Insert each entry of |lines| at column |col| on successive rows
    starting at |row| (a rectangular paste). Iterates bottom-up so earlier
    offsets stay valid while inserting.
    """
    for i in range(len(lines) - 1, -1, -1):
        offset = self.data_offset(row + i, col)
        if offset is None:
            # Position past the document: append the line's text.
            self.data += lines[i]
        else:
            self.data = self.data[:offset] + lines[i] + self.data[offset:]
    self._begin_parsing_at(row)
def insert_lines(self, row, col, lines):
    """Insert |lines| joined with newlines at (row, col)."""
    if app.config.strict_debug:
        assert isinstance(row, int)
        assert isinstance(col, int)
        # assert isinstance(lines, tuple)
        assert row >= 0
        assert col >= 0
        assert len(lines) > 0
    text = u"\n".join(lines)
    self.insert(row, col, text)
def next_char_row_col(self, row, col):
    """Get the next column value for the character to the right.

    Returns: None if there is no remaining characters.
    or (row, col) deltas of the next character in the document.
    """
    if app.config.strict_debug:
        assert isinstance(row, int)
        assert isinstance(col, int)
        assert row >= 0
        assert col >= 0
        assert len(self.rows) > 0
    self._fully_parse_to(row)
    ch = self.char_at(row, col)
    if ch is None:
        # End of line: move to the start of the next row if one exists.
        return (1, -col) if self.in_document(row + 1, 0) else None
    # Same row; step by the character's visual width (tabs, double-wide).
    return 0, app.curses_util.char_width(ch, col)
def prior_char_row_col(self, row, col):
    """Get the prior column value for the character to the left.

    Returns: None if there is no remaining characters.
    or (row, col) deltas of the next character in the document.
    """
    if app.config.strict_debug:
        assert isinstance(row, int)
        assert isinstance(col, int)
        assert row >= 0
        assert col >= 0
        assert len(self.rows) > 0
    self._fully_parse_to(row)
    if col == 0:
        if row == 0:
            # Top of document; no prior character.
            return None
        # Wrap to the end of the prior row.
        return (-1, self.row_width(row - 1))
    # Same row; delta back to the prior character's column.
    return 0, app.curses_util.prior_char_col(col, self.row_text(row)) - col
def parse(self, bgThread, data, grammar, beginRow, endRow):
    """
    Args:
      bgThread (threading.Thread or None): background thread used to
          cooperatively pause long parses (passed to _fully_parse_to).
      data (string): The file contents. The document.
      grammar (object): The initial grammar (often determined by the file
          extension). If |beginRow| is not zero then grammar is ignored.
      beginRow (int): is the first row (which is line number - 1) in data
          that is has changed since the previous parse of this data. Pass
          zero to parse the entire document. If beginRow >= len(data) then
          no parse is done.
      endRow (int): The row to stop parsing. This stops the parser from
          going over the entire file if, for example, only 100 rows out of
          a million rows are needed (which can save a lot of cpu time).
    """
    if app.config.strict_debug:
        assert bgThread is None or isinstance(bgThread, threading.Thread)
        assert isinstance(data, unicode), type(data)
        assert isinstance(grammar, dict)
        assert isinstance(beginRow, int)
        assert isinstance(endRow, int)
        assert beginRow >= 0
        assert endRow >= 0
        assert isinstance(self.appPrefs, app.prefs.Prefs)
    self._defaultGrammar = grammar
    self.emptyNode = ParserNode(grammar, None, None, 0)
    self.data = data
    self._begin_parsing_at(beginRow)
    self._fully_parse_to(endRow, bgThread)
    # self.debug_check_lines(app.log.parser, data)
    # startTime = time.time()
    if app.log.enabledChannels.get("parser", False):
        self.debug_log(app.log.parser, data)
    # app.log.startup('parsing took', time.time() - startTime)
    def _begin_parsing_at(self, beginRow):
        """Rewind internal parser state so that parsing resumes at |beginRow|.

        Trims |parserNodes| and |rows| back to |beginRow| (or resets them
        completely when |beginRow| is zero) and updates |resumeAtRow|.
        """
        if app.config.strict_debug:
            assert isinstance(beginRow, int)
            assert beginRow >= 0, beginRow
            assert isinstance(self.resumeAtRow, int)
            assert self.resumeAtRow >= 0, self.resumeAtRow
        if beginRow > self.resumeAtRow:
            # Already beginning at an earlier row.
            return
        if beginRow > 0:
            # Trim partially parsed data.
            if beginRow < len(self.rows):
                self.parserNodes = self.parserNodes[: self.rows[beginRow]]
                self.rows = self.rows[:beginRow]
            self.resumeAtRow = len(self.rows)
        else:
            # Parse the whole file.
            self.parserNodes = [(self.default_grammar(), 0, None, 0)]
            self.rows = [0]
            self.resumeAtRow = 0
    def _fast_line_parse(self, grammar):
        """If there's not enough time to thoroughly parse the file, identify the
        lines so that the document can still be edited.

        Appends plain line nodes (all using |grammar|) for the still-unparsed
        tail of |self.data|, tracking visual column widths so double-wide
        characters are measured correctly.
        """
        data = self.data
        offset = self.parserNodes[-1][kBegin]
        limit = len(data)
        if offset == limit:
            # Already parsed to end of data.
            return
        visual = self.parserNodes[-1][kVisual]
        # Track the |visual| value for the start of the line. The difference
        # between |visual| and |visualStartCol| is the column index of the line.
        visualStartCol = 0
        while True:
            while offset < limit and data[offset] != "\n":
                if data[offset] < u"ᄀ":
                    # The char is less than the first double width character.
                    # (An optimization to avoid calling char_width().)
                    visual += 1
                else:
                    # From here on, the width of the character is messy to
                    # determine, ask an authority.
                    visual += app.curses_util.char_width(
                        data[offset], visual - visualStartCol
                    )
                offset += 1
            if offset >= limit:
                # The document is missing the last new-line.
                if self.parserNodes[-1][kBegin] != limit:
                    # Add a terminating (end) node.
                    self.parserNodes.append((grammar, limit, None, visual))
                break
            # Step past the new-line (it occupies one visual column) and start
            # a new row/node at the following character.
            visualStartCol = visual
            offset += 1
            visual += 1
            self.rows.append(len(self.parserNodes))
            self.parserNodes.append((grammar, offset, None, visual))
    def _fully_parse_to(self, endRow, bgThread=None):
        """Parse up to and including |endRow|.

        Args:
            endRow (int): last row index that must be parsed.
            bgThread (threading.Thread or None): may interrupt the heavy
                grammar pass when user events arrive.
        """
        if app.config.strict_debug:
            assert isinstance(endRow, int)
            assert endRow >= 0
            assert bgThread is None or isinstance(bgThread, threading.Thread)
        # To parse |endRow| go one past because of the exclusive end of range.
        self.pauseAtRow = endRow + 1
        if self.pauseAtRow <= self.resumeAtRow:
            # Already parsed to that row.
            return
        self._begin_parsing_at(self.resumeAtRow)
        if len(self.rows) <= self.pauseAtRow:
            # Heavy grammar pass first, then a cheap line scan for whatever
            # remains past the grammar parse.
            self._build_grammar_list(bgThread)
            self._fast_line_parse(self.default_grammar())
        if app.config.strict_debug:
            assert self.resumeAtRow >= 0
            assert self.resumeAtRow <= len(self.rows)
            # NOTE(review): this asserts full progress when a bgThread is
            # present, even though bgThread can interrupt the parse — confirm
            # the intended condition.
            if bgThread is not None and endRow <= len(self.rows):
                assert self.resumeAtRow >= endRow + 1, (self.resumeAtRow, endRow)
    def row_count(self):
        """Return the number of rows (lines) in the document.

        Runs the fast line scan first so that any still-unparsed trailing
        lines are counted too.
        """
        self._fast_line_parse(self.default_grammar())
        return len(self.rows)
    def row_text(self, row, beginCol=None, endCol=None):
        """Get the text for |row|.

        Args:
            row (int): row is zero based.
            beginCol (int): subindex within the row (similar to a slice).
                Negative values count back from the row's visual width.
            endCol (int): subindex within the row (similar to a slice).
        Returns:
            document text (unicode)
        """
        if app.config.strict_debug:
            assert isinstance(row, int)
            assert beginCol is None or isinstance(beginCol, int)
            assert endCol is None or isinstance(endCol, int)
            assert row >= 0
            assert isinstance(self.data, unicode)
        self._fully_parse_to(row)
        if beginCol is endCol is None:
            # Whole-row fast path: slice between this row's first node and the
            # next row's first node, trimming any trailing new-line.
            begin = self.parserNodes[self.rows[row]][kBegin]
            if row + 1 >= len(self.rows):
                return self.data[begin:]
            end = self.parserNodes[self.rows[row + 1]][kBegin]
            if len(self.data) and self.data[end - 1] == u"\n":
                end -= 1
            return self.data[begin:end]
        # NOTE(review): a None |beginCol| combined with a non-None |endCol|
        # would fail below; callers appear to always pass both — confirm.
        if beginCol >= 0:
            begin = self.data_offset(row, beginCol)
        else:
            # Negative |beginCol| counts back from the row's visual width.
            width = self.row_width(row)
            begin = self.data_offset(row, width + beginCol)
        if begin is None:
            return u""
        if endCol is None:
            end = self.data_offset(row + 1, 0)
        elif endCol < 0:
            width = self.row_width(row)
            end = self.data_offset(row, width + endCol)
        else:
            # Clamp |endCol| to the row width before converting to an offset.
            width = self.row_width(row)
            if endCol >= width:
                endCol = width
            end = self.data_offset(row, endCol)
        if end is None:
            end = len(self.data)
        if end > 0 and self.data[end - 1] == u"\n":
            end -= 1
        return self.data[begin:end]
def char_at(self, row, col):
"""Get the character at |row|, |col|.
Args:
row (int): zero based index into list of rows.
col (int): zero based visual offset from start of line.
Returns:
character (unicode) or None if row, col is outside of the document.
"""
if app.config.strict_debug:
assert isinstance(row, int)
assert isinstance(col, int)
assert isinstance(self.data, unicode)
assert row >= 0
assert col >= 0
self._fully_parse_to(row)
if row > len(self.rows):
return None
string, width = self.row_text_and_width(row)
if col > width:
return None
return app.curses_util.char_at_column(col, string)
    def row_text_and_width(self, row):
        """Get the character data and the visual/display column width of those
        characters.

        If the text is all ASCII then len(text) will equal the column width. If
        there are double wide characters (e.g. Chinese or some emoji) the column
        width may be larger than len(text).

        Args:
            row (int): the row index is zero based (so it's line number - 1).
        Returns:
            (text, column_width) (tuple)
        """
        if app.config.strict_debug:
            assert isinstance(row, int)
        self._fully_parse_to(row)
        # Byte offset and visual column where the row starts.
        begin = self.parserNodes[self.rows[row]][kBegin]
        visual = self.parserNodes[self.rows[row]][kVisual]
        if row + 1 < len(self.rows):
            # End of this row is the start of the next, minus the new-line.
            end = self.parserNodes[self.rows[row + 1]][kBegin]
            visualEnd = self.parserNodes[self.rows[row + 1]][kVisual]
            if len(self.data) and self.data[end - 1] == "\n":
                end -= 1
                visualEnd -= 1
        else:
            # There is a sentinel node at the end that records the end of
            # document.
            lastNode = self.parserNodes[-1]
            end = lastNode[kBegin]
            visualEnd = lastNode[kVisual]
        return self.data[begin:end], visualEnd - visual
    def row_width(self, row):
        """Get the visual/display column width of a row.

        If the text is all ASCII then len(text) will equal the column width. If
        there are double wide characters (e.g. Chinese or some emoji) the column
        width may be larger than len(text).

        Args:
            row (int): the row index is zero based (so it's `line_number - 1`).
                Negative values count back from the last row.
        Returns:
            column_width (int)
        """
        if app.config.strict_debug:
            assert isinstance(row, int)
        if row < 0:
            # Support Python-style negative indexing from the end.
            row = len(self.rows) + row
        self._fully_parse_to(row)
        visual = self.parserNodes[self.rows[row]][kVisual]
        if row + 1 < len(self.rows):
            end = self.parserNodes[self.rows[row + 1]][kBegin]
            visualEnd = self.parserNodes[self.rows[row + 1]][kVisual]
            # Do not count a trailing new-line in the width.
            if len(self.data) and self.data[end - 1] == "\n":
                visualEnd -= 1
        else:
            # There is a sentinel node at the end that records the end of
            # document.
            lastNode = self.parserNodes[-1]
            visualEnd = lastNode[kVisual]
        return visualEnd - visual
    def _build_grammar_list(self, bgThread):
        """The guts of the parser. This is where the heavy lifting is done.

        This code can be interrupted (by |bgThread|) and resumed (by calling it
        again).

        Walks |self.data| from the last parsed node, repeatedly matching the
        current grammar's combined regex and appending (grammar, begin, prior,
        visual) tuples to |self.parserNodes|; row starts are recorded in
        |self.rows|. Stops at |self.pauseAtRow|, when the |leash| safety
        counter is exhausted, or when |bgThread| has pending user events.
        """
        appPrefs = self.appPrefs
        # An arbitrary limit to avoid run-away looping.
        leash = 50000
        topNode = self.parserNodes[-1]
        cursor = topNode[kBegin]
        visual = topNode[kVisual]
        # If we are at the start of a grammar, skip the 'begin' part of the
        # grammar.
        # NOTE(review): this `if 0:` block is intentionally disabled dead code
        # (it contains an `assert False`); kept as-is.
        if 0:
            if (
                len(self.parserNodes) == 1
                or (topNode[kGrammar] is not self.parserNodes[-2][kGrammar])
                and topNode[kGrammar].get("end") is not None
            ):
                beginRegex = topNode[kGrammar].get("begin")
                if beginRegex is not None:
                    sre = re.match(beginRegex, self.data[cursor:])
                    if sre is not None:
                        assert False
                        cursor += sre.regs[0][1]
                        # Assumes single-wide characters.
                        visual += sre.regs[0][1]
        while len(self.rows) <= self.pauseAtRow:
            if not leash:
                # app.log.error('grammar likely caught in a loop')
                break
            leash -= 1
            if bgThread and bgThread.has_user_event():
                # Yield to pending user input; parsing resumes on a later call.
                break
            subdata = self.data[cursor:]
            found = self.parserNodes[-1][kGrammar].get("matchRe").search(subdata)
            if not found:
                # app.log.info('parser exit, match not found')
                # todo(dschuyler): mark parent grammars as unterminated (if they
                # expect be terminated). e.g. unmatched string quote or xml tag.
                if cursor != len(self.data):
                    # The last bit of the last line.
                    self.parserNodes.append(
                        (topNode[kGrammar], cursor, topNode[kPrior], visual)
                    )
                break
            # Find which alternation group matched; |index| selects the branch.
            index = -1
            foundGroups = found.groups()
            for k in foundGroups:
                index += 1
                if k is not None:
                    break
            reg = found.regs[index + 1]
            if index == 0:
                # Found escaped value.
                cursor += reg[1]
                visual += reg[1]
                continue
            if index == len(foundGroups) - 1:
                # Found new line.
                child = (
                    self.parserNodes[-1][kGrammar],
                    cursor + reg[1],
                    self.parserNodes[-1][kPrior],
                    visual + reg[1],
                )
                cursor += reg[1]
                visual += reg[1]
                self.rows.append(len(self.parserNodes))
            elif index == len(foundGroups) - 2:
                # Found potentially double wide characters.
                topNode = self.parserNodes[-1]
                regBegin, regEnd = reg
                width = app.curses_util.char_width
                if regBegin > 0:
                    # Add single wide characters.
                    self.parserNodes.append(
                        (topNode[kGrammar], cursor, topNode[kPrior], visual)
                    )
                    cursor += regBegin
                    visual += regBegin
                    regEnd -= regBegin
                    regBegin = 0
                # Partition the matched span into runs of zero-, single-, and
                # double-width characters, emitting one node per run.
                while regBegin < regEnd:
                    # Check for zero width characters.
                    while (
                        regBegin < regEnd
                        and width(self.data[cursor + regBegin], 0) == 0
                    ):
                        regBegin += 1
                    if regBegin > 0:
                        # Add zero width characters.
                        self.parserNodes.append(
                            (topNode[kGrammar], cursor, topNode[kPrior], visual)
                        )
                        cursor += regBegin
                        regEnd -= regBegin
                        regBegin = 0
                    # Check for single wide characters.
                    while (
                        regBegin < regEnd
                        and width(self.data[cursor + regBegin], 0) == 1
                    ):
                        regBegin += 1
                    if regBegin > 0:
                        # Add single wide characters.
                        self.parserNodes.append(
                            (topNode[kGrammar], cursor, topNode[kPrior], visual)
                        )
                        cursor += regBegin
                        visual += regBegin
                        regEnd -= regBegin
                        regBegin = 0
                    # Check for double wide characters.
                    while (
                        regBegin < regEnd
                        and width(self.data[cursor + regBegin], 0) == 2
                    ):
                        regBegin += 1
                    if regBegin > 0:
                        # Add double wide characters.
                        self.parserNodes.append(
                            (topNode[kGrammar], cursor, topNode[kPrior], visual)
                        )
                        cursor += regBegin
                        visual += regBegin * 2
                        regEnd -= regBegin
                        regBegin = 0
                # No |child| node for this branch; skip the append below.
                continue
            elif index == len(foundGroups) - 3:
                # Found variable width (tab) character.
                topNode = self.parserNodes[-1]
                regBegin, regEnd = reg
                # First, add any preceding single wide characters.
                if regBegin > 0:
                    self.parserNodes.append(
                        (topNode[kGrammar], cursor, topNode[kPrior], visual)
                    )
                    cursor += regBegin
                    visual += regBegin
                    # Remove the regular text from reg values.
                    regEnd -= regBegin
                    regBegin = 0
                # Add tabs grammar; store the variable width characters.
                rowStart = self.parserNodes[self.rows[-1]][kVisual]
                col = visual - rowStart
                # Advance to the next tab stop.
                self.parserNodes.append(
                    (appPrefs.grammars["tabs"], cursor, topNode[kPrior], visual)
                )
                cursor += regEnd
                visual = rowStart + ((col + 8) // 8 * 8)
                visual += (regEnd - 1) * 8
                # Resume current grammar; store the variable width characters.
                child = (topNode[kGrammar], cursor, topNode[kPrior], visual)
            elif index == 1:
                # Found end of current grammar section (an 'end').
                child = (
                    self.parserNodes[self.parserNodes[-1][kPrior]][kGrammar],
                    cursor + reg[1],
                    self.parserNodes[self.parserNodes[-1][kPrior]][kPrior],
                    visual + reg[1],
                )
                cursor = child[kBegin]
                visual += reg[1]
                if subdata[reg[1] - 1] == "\n":
                    # This 'end' ends with a new line.
                    self.rows.append(len(self.parserNodes))
            else:
                # Decode the index into one of the per-grammar category ranges.
                [
                    containsGrammarIndexLimit,
                    nextGrammarIndexLimit,
                    errorIndexLimit,
                    keywordIndexLimit,
                    typeIndexLimit,
                    specialIndexLimit,
                ] = self.parserNodes[-1][kGrammar]["indexLimits"]
                if index < containsGrammarIndexLimit:
                    # A new grammar within this grammar (a 'contains').
                    if subdata[reg[0]] == "\n":
                        # This 'begin' begins with a new line.
                        self.rows.append(len(self.parserNodes))
                    priorGrammar = self.parserNodes[-1][kGrammar].get(
                        "matchGrammars", []
                    )[index]
                    if priorGrammar["end"] is None:
                        # Found single regex match (a leaf grammar).
                        self.parserNodes.append(
                            (
                                priorGrammar,
                                cursor + reg[0],
                                len(self.parserNodes) - 1,
                                visual + reg[0],
                            )
                        )
                        # Resume the current grammar.
                        child = (
                            self.parserNodes[self.parserNodes[-1][kPrior]][kGrammar],
                            cursor + reg[1],
                            self.parserNodes[self.parserNodes[-1][kPrior]][kPrior],
                            visual + reg[1],
                        )
                    else:
                        if priorGrammar.get("end_key"):
                            # A dynamic end tag.
                            hereKey = re.search(
                                priorGrammar["end_key"], subdata[reg[0] :]
                            ).groups()[0]
                            markers = priorGrammar["markers"]
                            markers[1] = priorGrammar["end"].replace(
                                r"\0", re.escape(hereKey)
                            )
                            priorGrammar["matchRe"] = re.compile(
                                app.regex.join_re_list(markers)
                            )
                        child = (
                            priorGrammar,
                            cursor + reg[0],
                            len(self.parserNodes) - 1,
                            visual + reg[0],
                        )
                    cursor += reg[1]
                    visual += reg[1]
                elif index < nextGrammarIndexLimit:
                    # A new grammar follows this grammar (a 'begin').
                    if subdata[reg[0]] == "\n":
                        # This 'begin' begins with a new line.
                        self.rows.append(len(self.parserNodes))
                    priorGrammar = self.parserNodes[-1][kGrammar].get(
                        "matchGrammars", []
                    )[index]
                    if priorGrammar.get("end_key"):
                        # A dynamic end tag.
                        hereKey = re.search(
                            priorGrammar["end_key"], subdata[reg[0] :]
                        ).groups()[0]
                        markers = priorGrammar["markers"]
                        markers[1] = priorGrammar["end"].replace(
                            r"\0", re.escape(hereKey)
                        )
                        priorGrammar["matchRe"] = re.compile(
                            app.regex.join_re_list(markers)
                        )
                    child = (
                        priorGrammar,
                        cursor + reg[0],
                        len(self.parserNodes) - 2,
                        visual + reg[0],
                    )
                    cursor += reg[1]
                    visual += reg[1]
                elif index < errorIndexLimit:
                    # A special doesn't change the nodeIndex.
                    self.parserNodes.append(
                        (
                            appPrefs.grammars["error"],
                            cursor + reg[0],
                            len(self.parserNodes) - 1,
                            visual + reg[0],
                        )
                    )
                    # Resume the current grammar.
                    child = (
                        self.parserNodes[self.parserNodes[-1][kPrior]][kGrammar],
                        cursor + reg[1],
                        self.parserNodes[self.parserNodes[-1][kPrior]][kPrior],
                        visual + reg[1],
                    )
                    cursor += reg[1]
                    visual += reg[1]
                elif index < keywordIndexLimit:
                    # A keyword doesn't change the nodeIndex.
                    self.parserNodes.append(
                        (
                            appPrefs.grammars["keyword"],
                            cursor + reg[0],
                            len(self.parserNodes) - 1,
                            visual + reg[0],
                        )
                    )
                    # Resume the current grammar.
                    child = (
                        self.parserNodes[self.parserNodes[-1][kPrior]][kGrammar],
                        cursor + reg[1],
                        self.parserNodes[self.parserNodes[-1][kPrior]][kPrior],
                        visual + reg[1],
                    )
                    cursor += reg[1]
                    visual += reg[1]
                elif index < typeIndexLimit:
                    # A type doesn't change the nodeIndex.
                    self.parserNodes.append(
                        (
                            appPrefs.grammars["type"],
                            cursor + reg[0],
                            len(self.parserNodes) - 1,
                            visual + reg[0],
                        )
                    )
                    # Resume the current grammar.
                    child = (
                        self.parserNodes[self.parserNodes[-1][kPrior]][kGrammar],
                        cursor + reg[1],
                        self.parserNodes[self.parserNodes[-1][kPrior]][kPrior],
                        visual + reg[1],
                    )
                    cursor += reg[1]
                    visual += reg[1]
                elif index < specialIndexLimit:
                    # A special doesn't change the nodeIndex.
                    self.parserNodes.append(
                        (
                            appPrefs.grammars["special"],
                            cursor + reg[0],
                            len(self.parserNodes) - 1,
                            visual + reg[0],
                        )
                    )
                    # Resume the current grammar.
                    child = (
                        self.parserNodes[self.parserNodes[-1][kPrior]][kGrammar],
                        cursor + reg[1],
                        self.parserNodes[self.parserNodes[-1][kPrior]][kPrior],
                        visual + reg[1],
                    )
                    cursor += reg[1]
                    visual += reg[1]
                else:
                    # NOTE(review): presumed unreachable; if hit, the stale
                    # |child| from a prior iteration is re-appended below.
                    app.log.error("invalid grammar index")
            # Append the resume/child node chosen by the branch above.
            self.parserNodes.append(child)
        self.resumeAtRow = len(self.rows)
def _print_last_node(self, msg):
node = self.parserNodes[-1]
print(
"_print_node",
node[0]["name"],
node[1],
node[2],
node[3],
msg,
repr(self.data),
)
def _print_node(self, node, msg):
print("_print_node", node[0]["name"], node[1], node[2], node[3], msg)
def debug_log(self, out, data):
out("parser debug:")
out("RowList ----------------", len(self.rows))
for i, start in enumerate(self.rows):
if i + 1 < len(self.rows):
end = self.rows[i + 1]
else:
end = len(self.parserNodes)
out("row", i, "(line", str(i + 1) + ") index", start, "to", end)
for node in self.parserNodes[start:end]:
if node is None:
out("a None")
continue
nodeBegin = node[kBegin]
out(
" ParserNode %26s prior %4s, b%4d, v%4d, %s"
% (
node[kGrammar].get("name", "None"),
node[kPrior],
nodeBegin,
node[kVisual],
repr(data[nodeBegin : nodeBegin + 15])[1:-1],
)
)
    def debug_check_lines(self, out, data):
        """Debug test that all the lines were recognized by the parser. This is
        very slow, so it's normally disabled.

        Args:
            out (callable or None): optional logging sink.
            data (unicode): the document text to cross-check against.
        """
        # Check that all the lines got identified.
        lines = data.split(u"\n")
        if out is not None:
            out(lines)
        assert len(lines) == self.row_count()
        for i, line in enumerate(lines):
            # The parsed text must round-trip through both accessors.
            parsedLine, column_width = self.row_text_and_width(i)
            assert line == parsedLine, "\nexpected:{}\n actual:{}".format(
                repr(line), repr(parsedLine)
            )
            parsedLine = self.row_text(i)
            assert line == parsedLine, "\nexpected:{}\n actual:{}".format(
                line, parsedLine
            )
            if out is not None:
                out("----------- ", line)
            # Rebuild the line from consecutive grammar segments and verify
            # that the pieces cover the line exactly.
            piecedLine = u""
            k = 0
            grammarIndex = 0
            while True:
                node, preceding, remaining, eol = self.grammar_at_index(
                    i, k, grammarIndex
                )
                grammarIndex += 1
                piecedLine += line[k - preceding : k + remaining]
                if out is not None:
                    out(i, preceding, remaining, i, k, piecedLine)
                if eol:
                    assert piecedLine == line, "\nexpected:{}\n actual:{}".format(
                        repr(line), repr(piecedLine)
                    )
                    break
                k += remaining
| apache-2.0 |
niteoweb/libcloud | libcloud/test/storage/test_s3.py | 10 | 39819 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import hmac
import os
import sys
import unittest
from hashlib import sha1
try:
from lxml import etree as ET
except ImportError:
from xml.etree import ElementTree as ET
from libcloud.utils.py3 import httplib
from libcloud.utils.py3 import urlparse
from libcloud.utils.py3 import parse_qs
from libcloud.common.types import InvalidCredsError
from libcloud.common.types import LibcloudError, MalformedResponseError
from libcloud.storage.base import Container, Object
from libcloud.storage.types import ContainerDoesNotExistError
from libcloud.storage.types import ContainerError
from libcloud.storage.types import ContainerIsNotEmptyError
from libcloud.storage.types import InvalidContainerNameError
from libcloud.storage.types import ObjectDoesNotExistError
from libcloud.storage.types import ObjectHashMismatchError
from libcloud.storage.drivers.s3 import BaseS3Connection
from libcloud.storage.drivers.s3 import S3StorageDriver, S3USWestStorageDriver
from libcloud.storage.drivers.s3 import S3EUWestStorageDriver
from libcloud.storage.drivers.s3 import S3APSEStorageDriver
from libcloud.storage.drivers.s3 import S3APNEStorageDriver
from libcloud.storage.drivers.s3 import CHUNK_SIZE
from libcloud.storage.drivers.dummy import DummyIterator
from libcloud.utils.py3 import b
from libcloud.test import StorageMockHttp, MockRawResponse # pylint: disable-msg=E0611
from libcloud.test import MockHttpTestCase # pylint: disable-msg=E0611
from libcloud.test.file_fixtures import StorageFileFixtures # pylint: disable-msg=E0611
from libcloud.test.secrets import STORAGE_S3_PARAMS
class S3MockHttp(StorageMockHttp, MockHttpTestCase):
    """Mock HTTP handlers for the S3 driver tests.

    Each method name maps to a request path (plus an optional `.type` suffix
    selected per test); each returns a (status, body, headers, reason) tuple
    that the mock connection hands back to the driver.
    """

    fixtures = StorageFileFixtures('s3')
    base_headers = {}

    def _UNAUTHORIZED(self, method, url, body, headers):
        return (httplib.UNAUTHORIZED,
                '',
                self.base_headers,
                httplib.responses[httplib.OK])

    def _DIFFERENT_REGION(self, method, url, body, headers):
        # 301 indicates the bucket lives in another region.
        return (httplib.MOVED_PERMANENTLY,
                '',
                self.base_headers,
                httplib.responses[httplib.OK])

    def _list_containers_EMPTY(self, method, url, body, headers):
        body = self.fixtures.load('list_containers_empty.xml')
        return (httplib.OK,
                body,
                self.base_headers,
                httplib.responses[httplib.OK])

    def _list_containers_TOKEN(self, method, url, body, headers):
        # Verify the session token was forwarded as a header.
        self.assertEqual(headers['x-amz-security-token'], 'asdf')
        body = self.fixtures.load('list_containers_empty.xml')
        return (httplib.OK,
                body,
                self.base_headers,
                httplib.responses[httplib.OK])

    def _list_containers(self, method, url, body, headers):
        body = self.fixtures.load('list_containers.xml')
        return (httplib.OK,
                body,
                self.base_headers,
                httplib.responses[httplib.OK])

    def _test_container_EMPTY(self, method, url, body, headers):
        body = self.fixtures.load('list_container_objects_empty.xml')
        return (httplib.OK,
                body,
                self.base_headers,
                httplib.responses[httplib.OK])

    def _test_container(self, method, url, body, headers):
        body = self.fixtures.load('list_container_objects.xml')
        return (httplib.OK,
                body,
                self.base_headers,
                httplib.responses[httplib.OK])

    def _test_container_ITERATOR(self, method, url, body, headers):
        # Simulate paginated listing: a second request carries the marker for
        # object '3.zip' and gets the second fixture.
        if url.find('3.zip') == -1:
            # First part of the response (first 3 objects)
            file_name = 'list_container_objects_not_exhausted1.xml'
        else:
            file_name = 'list_container_objects_not_exhausted2.xml'
        body = self.fixtures.load(file_name)
        return (httplib.OK,
                body,
                self.base_headers,
                httplib.responses[httplib.OK])

    def _test2_get_object(self, method, url, body, headers):
        body = self.fixtures.load('list_container_objects.xml')
        return (httplib.OK,
                body,
                self.base_headers,
                httplib.responses[httplib.OK])

    def _test2_test_get_object(self, method, url, body, headers):
        # test_get_object
        body = self.fixtures.load('list_containers.xml')
        headers = {'content-type': 'application/zip',
                   'etag': '"e31208wqsdoj329jd"',
                   'x-amz-meta-rabbits': 'monkeys',
                   'content-length': 12345,
                   'last-modified': 'Thu, 13 Sep 2012 07:13:22 GMT'
                   }
        return (httplib.OK,
                body,
                headers,
                httplib.responses[httplib.OK])

    def _new_container_INVALID_NAME(self, method, url, body, headers):
        # test_create_container
        return (httplib.BAD_REQUEST,
                body,
                headers,
                httplib.responses[httplib.OK])

    def _new_container_ALREADY_EXISTS(self, method, url, body, headers):
        # test_create_container
        return (httplib.CONFLICT,
                body,
                headers,
                httplib.responses[httplib.OK])

    def _new_container(self, method, url, body, headers):
        # test_create_container, test_delete_container
        # NOTE(review): |status| is unbound for any method other than PUT or
        # DELETE; the tests only issue those two verbs.
        if method == 'PUT':
            status = httplib.OK
        elif method == 'DELETE':
            status = httplib.NO_CONTENT
        return (status,
                body,
                headers,
                httplib.responses[httplib.OK])

    def _new_container_DOESNT_EXIST(self, method, url, body, headers):
        # test_delete_container
        return (httplib.NOT_FOUND,
                body,
                headers,
                httplib.responses[httplib.OK])

    def _new_container_NOT_EMPTY(self, method, url, body, headers):
        # test_delete_container
        return (httplib.CONFLICT,
                body,
                headers,
                httplib.responses[httplib.OK])

    def _test1_get_container(self, method, url, body, headers):
        body = self.fixtures.load('list_container_objects.xml')
        return (httplib.OK,
                body,
                self.base_headers,
                httplib.responses[httplib.OK])

    def _container1_get_container(self, method, url, body, headers):
        return (httplib.NOT_FOUND,
                '',
                self.base_headers,
                httplib.responses[httplib.NOT_FOUND])

    def _test_inexistent_get_object(self, method, url, body, headers):
        return (httplib.NOT_FOUND,
                '',
                self.base_headers,
                httplib.responses[httplib.NOT_FOUND])

    def _foo_bar_container(self, method, url, body, headers):
        # test_delete_container
        return (httplib.NO_CONTENT,
                body,
                headers,
                httplib.responses[httplib.OK])

    def _foo_bar_container_NOT_FOUND(self, method, url, body, headers):
        # test_delete_container_not_found
        return (httplib.NOT_FOUND,
                body,
                headers,
                httplib.responses[httplib.OK])

    def _foo_bar_container_foo_bar_object_NOT_FOUND(self, method, url, body,
                                                    headers):
        # test_delete_object_not_found
        return (httplib.NOT_FOUND,
                body,
                headers,
                httplib.responses[httplib.OK])

    def _foo_bar_container_foo_bar_object(self, method, url, body, headers):
        # test_delete_object
        return (httplib.NO_CONTENT,
                body,
                headers,
                httplib.responses[httplib.OK])

    def _foo_bar_container_foo_test_stream_data(self, method, url, body,
                                                headers):
        # test_upload_object_via_stream
        body = ''
        headers = {'etag': '"0cc175b9c0f1b6a831c399e269772661"'}
        return (httplib.OK,
                body,
                headers,
                httplib.responses[httplib.OK])

    def _foo_bar_container_foo_test_stream_data_MULTIPART(self, method, url,
                                                          body, headers):
        headers = {'etag': '"0cc175b9c0f1b6a831c399e269772661"'}
        TEST_ID = 'VXBsb2FkIElEIGZvciA2aWWpbmcncyBteS1tb3ZpZS5tMnRzIHVwbG9hZA'
        query_string = urlparse.urlsplit(url).query
        query = parse_qs(query_string)
        if not query.get('uploadId', False):
            self.fail('Request doesnt contain uploadId query parameter')
        upload_id = query['uploadId'][0]
        if upload_id != TEST_ID:
            self.fail('first uploadId doesnt match TEST_ID')
        if method == 'PUT':
            # PUT is used for uploading the part. part number is mandatory
            if not query.get('partNumber', False):
                self.fail('Request is missing partNumber query parameter')
            body = ''
            return (httplib.OK,
                    body,
                    headers,
                    httplib.responses[httplib.OK])
        elif method == 'DELETE':
            # DELETE is done for aborting the upload
            body = ''
            return (httplib.NO_CONTENT,
                    body,
                    headers,
                    httplib.responses[httplib.NO_CONTENT])
        else:
            # POST is done for committing the upload. Parse the XML and
            # check if the commit is proper (TODO: XML Schema based check?)
            commit = ET.fromstring(body)
            count = 0
            for part in commit.findall('Part'):
                count += 1
                part_no = part.find('PartNumber').text
                etag = part.find('ETag').text
                self.assertEqual(part_no, str(count))
                self.assertEqual(etag, headers['etag'])
            # Make sure that manifest contains at least one part
            self.assertTrue(count >= 1)
            body = self.fixtures.load('complete_multipart.xml')
            return (httplib.OK,
                    body,
                    headers,
                    httplib.responses[httplib.OK])

    def _foo_bar_container_LIST_MULTIPART(self, method, url, body, headers):
        # Paginated multipart-upload listing: the second page is requested
        # with a key-marker query parameter.
        query_string = urlparse.urlsplit(url).query
        query = parse_qs(query_string)
        if 'key-marker' not in query:
            body = self.fixtures.load('list_multipart_1.xml')
        else:
            body = self.fixtures.load('list_multipart_2.xml')
        return (httplib.OK,
                body,
                headers,
                httplib.responses[httplib.OK])

    def _foo_bar_container_my_divisor_LIST_MULTIPART(self, method, url,
                                                     body, headers):
        body = ''
        return (httplib.NO_CONTENT,
                body,
                headers,
                httplib.responses[httplib.NO_CONTENT])

    def _foo_bar_container_my_movie_m2ts_LIST_MULTIPART(self, method, url,
                                                        body, headers):
        body = ''
        return (httplib.NO_CONTENT,
                body,
                headers,
                httplib.responses[httplib.NO_CONTENT])
class S3MockRawResponse(MockRawResponse):
    """Mock raw (streaming) HTTP responses for the S3 driver tests."""

    fixtures = StorageFileFixtures('s3')

    def parse_body(self):
        """Parse the stored response body as XML.

        Returns:
            The raw body when it is empty (and zero-length bodies are not
            force-parsed); otherwise the parsed XML root element.

        Raises:
            MalformedResponseError: if the body cannot be parsed as XML.
        """
        if len(self.body) == 0 and not self.parse_zero_length_body:
            return self.body
        try:
            try:
                body = ET.XML(self.body)
            except ValueError:
                # lxml wants a bytes and tests are basically hard-coded to str
                body = ET.XML(self.body.encode('utf-8'))
        except Exception:
            # Was a bare `except:`, which would also swallow SystemExit and
            # KeyboardInterrupt; narrowed to ordinary exceptions.
            raise MalformedResponseError("Failed to parse XML",
                                         body=self.body,
                                         driver=self.connection.driver)
        return body

    def _foo_bar_container_foo_bar_object(self, method, url, body, headers):
        # test_download_object_success
        body = self._generate_random_data(1000)
        return (httplib.OK,
                body,
                headers,
                httplib.responses[httplib.OK])

    def _foo_bar_container_foo_test_upload_INVALID_HASH1(self, method, url,
                                                         body, headers):
        # test_upload_object_invalid_hash1
        body = ''
        headers = {}
        headers['etag'] = '"foobar"'
        return (httplib.OK,
                body,
                headers,
                httplib.responses[httplib.OK])

    def _foo_bar_container_foo_test_upload_INVALID_HASH2(self, method, url,
                                                         body, headers):
        # test_upload_object_invalid_hash2
        body = ''
        headers = {'etag': '"hash343hhash89h932439jsaa89"'}
        return (httplib.OK,
                body,
                headers,
                httplib.responses[httplib.OK])

    def _foo_bar_container_foo_test_upload(self, method, url, body, headers):
        # test_upload_object_success
        body = ''
        headers = {'etag': '"0cc175b9c0f1b6a831c399e269772661"'}
        return (httplib.OK,
                body,
                headers,
                httplib.responses[httplib.OK])

    def _foo_bar_container_foo_bar_object_INVALID_SIZE(self, method, url,
                                                       body, headers):
        # test_upload_object_invalid_file_size
        body = ''
        return (httplib.OK,
                body,
                headers,
                httplib.responses[httplib.OK])

    def _foo_bar_container_foo_test_stream_data(self, method, url, body,
                                                headers):
        # test_upload_object_via_stream
        body = ''
        headers = {'etag': '"0cc175b9c0f1b6a831c399e269772661"'}
        return (httplib.OK,
                body,
                headers,
                httplib.responses[httplib.OK])

    def _foo_bar_container_foo_test_stream_data_MULTIPART(self, method, url,
                                                          body, headers):
        headers = {}
        # POST is done for initiating multipart upload
        if method == 'POST':
            body = self.fixtures.load('initiate_multipart.xml')
            return (httplib.OK,
                    body,
                    headers,
                    httplib.responses[httplib.OK])
        else:
            body = ''
            return (httplib.BAD_REQUEST,
                    body,
                    headers,
                    httplib.responses[httplib.BAD_REQUEST])
class S3Tests(unittest.TestCase):
    """Tests for the base S3 storage driver against mocked HTTP responses."""

    # Subclasses covering regional drivers override these class attributes.
    driver_type = S3StorageDriver
    driver_args = STORAGE_S3_PARAMS
    mock_response_klass = S3MockHttp
    mock_raw_response_klass = S3MockRawResponse
@classmethod
def create_driver(self):
return self.driver_type(*self.driver_args)
    def setUp(self):
        """Wire the mock HTTP classes into the driver and reset their state."""
        self.driver_type.connectionCls.conn_classes = (None,
                                                       self.mock_response_klass)
        self.driver_type.connectionCls.rawResponseCls = \
            self.mock_raw_response_klass
        # Reset the per-test fixture selector on both mock classes.
        self.mock_response_klass.type = None
        self.mock_raw_response_klass.type = None
        self.driver = self.create_driver()
    def tearDown(self):
        """Remove any temp file left behind by download/upload tests."""
        self._remove_test_file()
def _remove_test_file(self):
file_path = os.path.abspath(__file__) + '.temp'
try:
os.unlink(file_path)
except OSError:
pass
def test_invalid_credentials(self):
self.mock_response_klass.type = 'UNAUTHORIZED'
try:
self.driver.list_containers()
except InvalidCredsError:
e = sys.exc_info()[1]
self.assertEqual(True, isinstance(e, InvalidCredsError))
else:
self.fail('Exception was not thrown')
    def test_token(self):
        """A driver created with a session token sends x-amz-security-token
        (verified inside the mock handler)."""
        self.mock_response_klass.type = 'list_containers_TOKEN'
        self.driver = self.driver_type(*self.driver_args, token='asdf')
        self.driver.list_containers()
    def test_signature(self):
        """get_auth_signature must produce the AWS v2 HMAC-SHA1 signature."""
        secret_key = 'ssssh!'
        sig = BaseS3Connection.get_auth_signature(
            method='GET',
            headers={'foo': 'bar',
                     'content-type': 'TYPE!',
                     'x-aws-test': 'test_value'},
            params={'hello': 'world'},
            expires=None,
            secret_key=secret_key,
            path='/',
            vendor_prefix='x-aws'
        )
        # Recompute the expected value by hand: canonical string-to-sign,
        # HMAC-SHA1 with the secret key, then base64.
        string_to_sign = 'GET\n\nTYPE!\n\nx-aws-test:test_value\n/'
        b64_hmac = base64.b64encode(
            hmac.new(b(secret_key), b(string_to_sign), digestmod=sha1).digest()
        )
        expected_sig = b64_hmac.decode('utf-8')
        self.assertEqual(sig, expected_sig)
    def test_bucket_is_located_in_different_region(self):
        """A 301 redirect from S3 is surfaced as a LibcloudError."""
        self.mock_response_klass.type = 'DIFFERENT_REGION'
        try:
            self.driver.list_containers()
        except LibcloudError:
            pass
        else:
            self.fail('Exception was not thrown')
    def test_list_containers_empty(self):
        """An empty bucket listing yields zero containers."""
        self.mock_response_klass.type = 'list_containers_EMPTY'
        containers = self.driver.list_containers()
        self.assertEqual(len(containers), 0)
    def test_list_containers_success(self):
        """Containers are parsed from the listing, including creation_date."""
        self.mock_response_klass.type = 'list_containers'
        containers = self.driver.list_containers()
        self.assertEqual(len(containers), 2)
        self.assertTrue('creation_date' in containers[1].extra)
    def test_list_container_objects_empty(self):
        """An empty container listing yields zero objects."""
        self.mock_response_klass.type = 'EMPTY'
        container = Container(name='test_container', extra={},
                              driver=self.driver)
        objects = self.driver.list_container_objects(container=container)
        self.assertEqual(len(objects), 0)
    def test_list_container_objects_success(self):
        """Object attributes (hash, size, extra, meta_data) are parsed."""
        self.mock_response_klass.type = None
        container = Container(name='test_container', extra={},
                              driver=self.driver)
        objects = self.driver.list_container_objects(container=container)
        self.assertEqual(len(objects), 1)
        obj = [o for o in objects if o.name == '1.zip'][0]
        self.assertEqual(obj.hash, '4397da7a7649e8085de9916c240e8166')
        self.assertEqual(obj.size, 1234567)
        self.assertEqual(obj.container.name, 'test_container')
        self.assertEqual(
            obj.extra['last_modified'], '2011-04-09T19:05:18.000Z')
        self.assertTrue('owner' in obj.meta_data)
    def test_list_container_objects_iterator_has_more(self):
        """Paginated (truncated) listings are exhausted across requests."""
        self.mock_response_klass.type = 'ITERATOR'
        container = Container(name='test_container', extra={},
                              driver=self.driver)
        objects = self.driver.list_container_objects(container=container)
        obj = [o for o in objects if o.name == '1.zip'][0]
        self.assertEqual(obj.hash, '4397da7a7649e8085de9916c240e8166')
        self.assertEqual(obj.size, 1234567)
        self.assertEqual(obj.container.name, 'test_container')
        self.assertTrue(obj in objects)
        # 3 objects from the first page + 2 from the second.
        self.assertEqual(len(objects), 5)
    def test_list_container_objects_with_prefix(self):
        """ex_prefix is forwarded to the listing request."""
        self.mock_response_klass.type = None
        container = Container(name='test_container', extra={},
                              driver=self.driver)
        objects = self.driver.list_container_objects(container=container,
                                                     ex_prefix='test_prefix')
        self.assertEqual(len(objects), 1)
        obj = [o for o in objects if o.name == '1.zip'][0]
        self.assertEqual(obj.hash, '4397da7a7649e8085de9916c240e8166')
        self.assertEqual(obj.size, 1234567)
        self.assertEqual(obj.container.name, 'test_container')
        self.assertTrue('owner' in obj.meta_data)
    def test_get_container_doesnt_exist(self):
        """Requesting a container missing from the listing must raise."""
        self.mock_response_klass.type = 'get_container'
        try:
            self.driver.get_container(container_name='container1')
        except ContainerDoesNotExistError:
            pass
        else:
            self.fail('Exception was not thrown')
def test_get_container_success(self):
self.mock_response_klass.type = 'get_container'
container = self.driver.get_container(container_name='test1')
self.assertTrue(container.name, 'test1')
    def test_get_object_container_doesnt_exist(self):
        """get_object on a missing container raises ContainerDoesNotExistError."""
        # This method makes two requests which makes mocking the response a bit
        # trickier
        self.mock_response_klass.type = 'get_object'
        try:
            self.driver.get_object(container_name='test-inexistent',
                                   object_name='test')
        except ContainerDoesNotExistError:
            pass
        else:
            self.fail('Exception was not thrown')
    def test_get_object_success(self):
        """Object attributes are populated from the HEAD response headers."""
        # This method makes two requests which makes mocking the response a bit
        # trickier
        self.mock_response_klass.type = 'get_object'
        obj = self.driver.get_object(container_name='test2',
                                     object_name='test')
        self.assertEqual(obj.name, 'test')
        self.assertEqual(obj.container.name, 'test2')
        self.assertEqual(obj.size, 12345)
        self.assertEqual(obj.hash, 'e31208wqsdoj329jd')
        self.assertEqual(obj.extra['last_modified'],
                         'Thu, 13 Sep 2012 07:13:22 GMT')
        self.assertEqual(obj.extra['content_type'], 'application/zip')
        self.assertEqual(obj.meta_data['rabbits'], 'monkeys')
    def test_create_container_bad_request(self):
        """A 400 response for an invalid container name raises ContainerError."""
        # invalid container name, returns a 400 bad request
        self.mock_response_klass.type = 'INVALID_NAME'
        try:
            self.driver.create_container(container_name='new_container')
        except ContainerError:
            pass
        else:
            self.fail('Exception was not thrown')
    def test_create_container_already_exists(self):
        """Creating a container whose name is taken raises an error."""
        # container with this name already exists
        # NOTE(review): the 'ALREADY_EXISTS' mock response is mapped to
        # InvalidContainerNameError by the driver — confirm against the mock
        # HTTP class if this pairing looks surprising.
        self.mock_response_klass.type = 'ALREADY_EXISTS'
        try:
            self.driver.create_container(container_name='new-container')
        except InvalidContainerNameError:
            pass
        else:
            self.fail('Exception was not thrown')
    def test_create_container_success(self):
        """A successful creation returns a Container with the requested name."""
        # success
        self.mock_response_klass.type = None
        name = 'new_container'
        container = self.driver.create_container(container_name=name)
        self.assertEqual(container.name, name)
    def test_delete_container_doesnt_exist(self):
        """Deleting a non-existent container raises ContainerDoesNotExistError."""
        container = Container(name='new_container', extra=None,
                              driver=self.driver)
        self.mock_response_klass.type = 'DOESNT_EXIST'
        try:
            self.driver.delete_container(container=container)
        except ContainerDoesNotExistError:
            pass
        else:
            self.fail('Exception was not thrown')
    def test_delete_container_not_empty(self):
        """A non-empty container cannot be deleted; an empty one can."""
        container = Container(name='new_container', extra=None,
                              driver=self.driver)
        self.mock_response_klass.type = 'NOT_EMPTY'
        try:
            self.driver.delete_container(container=container)
        except ContainerIsNotEmptyError:
            pass
        else:
            self.fail('Exception was not thrown')
        # success
        self.mock_response_klass.type = None
        self.assertTrue(self.driver.delete_container(container=container))
def test_delete_container_not_found(self):
self.mock_response_klass.type = 'NOT_FOUND'
container = Container(name='foo_bar_container', extra={},
driver=self.driver)
try:
self.driver.delete_container(container=container)
except ContainerDoesNotExistError:
pass
else:
self.fail('Container does not exist but an exception was not' +
'thrown')
    def test_delete_container_success(self):
        """A successful delete returns True."""
        self.mock_response_klass.type = None
        container = Container(name='new_container', extra=None,
                              driver=self.driver)
        self.assertTrue(self.driver.delete_container(container=container))
    def test_download_object_success(self):
        """Downloading to a fresh path succeeds and returns True."""
        container = Container(name='foo_bar_container', extra={},
                              driver=self.driver)
        obj = Object(name='foo_bar_object', size=1000, hash=None, extra={},
                     container=container, meta_data=None,
                     driver=self.driver_type)
        # Use a path next to this test file that does not exist yet.
        destination_path = os.path.abspath(__file__) + '.temp'
        result = self.driver.download_object(obj=obj,
                                             destination_path=destination_path,
                                             overwrite_existing=False,
                                             delete_on_failure=True)
        self.assertTrue(result)
    def test_download_object_invalid_file_size(self):
        """A size mismatch between response and object makes download fail."""
        self.mock_raw_response_klass.type = 'INVALID_SIZE'
        container = Container(name='foo_bar_container', extra={},
                              driver=self.driver)
        obj = Object(name='foo_bar_object', size=1000, hash=None, extra={},
                     container=container, meta_data=None,
                     driver=self.driver_type)
        destination_path = os.path.abspath(__file__) + '.temp'
        result = self.driver.download_object(obj=obj,
                                             destination_path=destination_path,
                                             overwrite_existing=False,
                                             delete_on_failure=True)
        self.assertFalse(result)
    def test_download_object_invalid_file_already_exists(self):
        """Refusing to overwrite an existing file raises LibcloudError."""
        self.mock_raw_response_klass.type = 'INVALID_SIZE'
        container = Container(name='foo_bar_container', extra={},
                              driver=self.driver)
        obj = Object(name='foo_bar_object', size=1000, hash=None, extra={},
                     container=container, meta_data=None,
                     driver=self.driver_type)
        # This test file itself is guaranteed to exist already.
        destination_path = os.path.abspath(__file__)
        try:
            self.driver.download_object(obj=obj,
                                        destination_path=destination_path,
                                        overwrite_existing=False,
                                        delete_on_failure=True)
        except LibcloudError:
            pass
        else:
            self.fail('Exception was not thrown')
    def test_download_object_as_stream_success(self):
        """Streaming download returns an iterable of chunks."""
        container = Container(name='foo_bar_container', extra={},
                              driver=self.driver)
        obj = Object(name='foo_bar_object', size=1000, hash=None, extra={},
                     container=container, meta_data=None,
                     driver=self.driver_type)
        stream = self.driver.download_object_as_stream(obj=obj,
                                                       chunk_size=None)
        self.assertTrue(hasattr(stream, '__iter__'))
    def test_upload_object_invalid_ex_storage_class(self):
        """An unknown ex_storage_class is rejected client-side with ValueError."""
        # (The previous comment here about "invalid hash" was a copy/paste
        # leftover from the hash tests below.)
        file_path = os.path.abspath(__file__)
        container = Container(name='foo_bar_container', extra={},
                              driver=self.driver)
        object_name = 'foo_test_upload'
        try:
            self.driver.upload_object(file_path=file_path, container=container,
                                      object_name=object_name,
                                      verify_hash=True,
                                      ex_storage_class='invalid-class')
        except ValueError:
            # sys.exc_info() form kept for very old Python compatibility.
            e = sys.exc_info()[1]
            self.assertTrue(str(e).lower().find('invalid storage class') != -1)
        else:
            self.fail('Exception was not thrown')
    def test_upload_object_invalid_hash1(self):
        """A hash mismatch detected server-side raises ObjectHashMismatchError."""
        # Invalid hash is detected on the amazon side and BAD_REQUEST is
        # returned
        def upload_file(self, response, file_path, chunked=False,
                        calculate_hash=True):
            # Stubbed transfer: report success with a fixed bogus hash.
            return True, 'hash343hhash89h932439jsaa89', 1000
        self.mock_raw_response_klass.type = 'INVALID_HASH1'
        old_func = self.driver_type._upload_file
        self.driver_type._upload_file = upload_file
        file_path = os.path.abspath(__file__)
        container = Container(name='foo_bar_container', extra={},
                              driver=self.driver)
        object_name = 'foo_test_upload'
        try:
            self.driver.upload_object(file_path=file_path, container=container,
                                      object_name=object_name,
                                      verify_hash=True)
        except ObjectHashMismatchError:
            pass
        else:
            self.fail(
                'Invalid hash was returned but an exception was not thrown')
        finally:
            # Always restore the monkey-patched method.
            self.driver_type._upload_file = old_func
    def test_upload_object_invalid_hash2(self):
        """A hash mismatch against the response ETag raises ObjectHashMismatchError."""
        # Invalid hash is detected when comparing hash provided in the response
        # ETag header
        def upload_file(self, response, file_path, chunked=False,
                        calculate_hash=True):
            # Stubbed transfer: valid-looking MD5 that won't match the ETag.
            return True, '0cc175b9c0f1b6a831c399e269772661', 1000
        self.mock_raw_response_klass.type = 'INVALID_HASH2'
        old_func = self.driver_type._upload_file
        self.driver_type._upload_file = upload_file
        file_path = os.path.abspath(__file__)
        container = Container(name='foo_bar_container', extra={},
                              driver=self.driver)
        object_name = 'foo_test_upload'
        try:
            self.driver.upload_object(file_path=file_path, container=container,
                                      object_name=object_name,
                                      verify_hash=True)
        except ObjectHashMismatchError:
            pass
        else:
            self.fail(
                'Invalid hash was returned but an exception was not thrown')
        finally:
            # Always restore the monkey-patched method.
            self.driver_type._upload_file = old_func
def test_upload_object_success(self):
def upload_file(self, response, file_path, chunked=False,
calculate_hash=True):
return True, '0cc175b9c0f1b6a831c399e269772661', 1000
old_func = self.driver_type._upload_file
self.driver_type._upload_file = upload_file
file_path = os.path.abspath(__file__)
container = Container(name='foo_bar_container', extra={},
driver=self.driver)
object_name = 'foo_test_upload'
extra = {'meta_data': {'some-value': 'foobar'}}
obj = self.driver.upload_object(file_path=file_path,
container=container,
object_name=object_name,
extra=extra,
verify_hash=True)
self.assertEqual(obj.name, 'foo_test_upload')
self.assertEqual(obj.size, 1000)
self.assertTrue('some-value' in obj.meta_data)
self.driver_type._upload_file = old_func
def test_upload_object_with_acl(self):
def upload_file(self, response, file_path, chunked=False,
calculate_hash=True):
return True, '0cc175b9c0f1b6a831c399e269772661', 1000
old_func = self.driver_type._upload_file
self.driver_type._upload_file = upload_file
file_path = os.path.abspath(__file__)
container = Container(name='foo_bar_container', extra={},
driver=self.driver)
object_name = 'foo_test_upload'
extra = {'acl': 'public-read'}
obj = self.driver.upload_object(file_path=file_path,
container=container,
object_name=object_name,
extra=extra,
verify_hash=True)
self.assertEqual(obj.name, 'foo_test_upload')
self.assertEqual(obj.size, 1000)
self.assertEqual(obj.extra['acl'], 'public-read')
self.driver_type._upload_file = old_func
    def test_upload_empty_object_via_stream(self):
        """Streaming an empty iterator creates a zero-byte object."""
        if self.driver.supports_s3_multipart_upload:
            self.mock_raw_response_klass.type = 'MULTIPART'
            self.mock_response_klass.type = 'MULTIPART'
        else:
            self.mock_raw_response_klass.type = None
            self.mock_response_klass.type = None
        container = Container(name='foo_bar_container', extra={},
                              driver=self.driver)
        object_name = 'foo_test_stream_data'
        iterator = DummyIterator(data=[''])
        extra = {'content_type': 'text/plain'}
        obj = self.driver.upload_object_via_stream(container=container,
                                                   object_name=object_name,
                                                   iterator=iterator,
                                                   extra=extra)
        self.assertEqual(obj.name, object_name)
        self.assertEqual(obj.size, 0)
    def test_upload_small_object_via_stream(self):
        """Streaming a few bytes reports the correct aggregate size."""
        if self.driver.supports_s3_multipart_upload:
            self.mock_raw_response_klass.type = 'MULTIPART'
            self.mock_response_klass.type = 'MULTIPART'
        else:
            self.mock_raw_response_klass.type = None
            self.mock_response_klass.type = None
        container = Container(name='foo_bar_container', extra={},
                              driver=self.driver)
        object_name = 'foo_test_stream_data'
        iterator = DummyIterator(data=['2', '3', '5'])
        extra = {'content_type': 'text/plain'}
        obj = self.driver.upload_object_via_stream(container=container,
                                                   object_name=object_name,
                                                   iterator=iterator,
                                                   extra=extra)
        self.assertEqual(obj.name, object_name)
        self.assertEqual(obj.size, 3)
    def test_upload_big_object_via_stream(self):
        """Streaming more than one chunk exercises the multipart path."""
        if self.driver.supports_s3_multipart_upload:
            self.mock_raw_response_klass.type = 'MULTIPART'
            self.mock_response_klass.type = 'MULTIPART'
        else:
            self.mock_raw_response_klass.type = None
            self.mock_response_klass.type = None
        container = Container(name='foo_bar_container', extra={},
                              driver=self.driver)
        object_name = 'foo_test_stream_data'
        # Two full chunks plus one extra byte.
        iterator = DummyIterator(
            data=['2' * CHUNK_SIZE, '3' * CHUNK_SIZE, '5'])
        extra = {'content_type': 'text/plain'}
        obj = self.driver.upload_object_via_stream(container=container,
                                                   object_name=object_name,
                                                   iterator=iterator,
                                                   extra=extra)
        self.assertEqual(obj.name, object_name)
        self.assertEqual(obj.size, CHUNK_SIZE * 2 + 1)
def test_upload_object_via_stream_abort(self):
if not self.driver.supports_s3_multipart_upload:
return
self.mock_raw_response_klass.type = 'MULTIPART'
self.mock_response_klass.type = 'MULTIPART'
def _faulty_iterator():
for i in range(0, 5):
yield str(i)
raise RuntimeError('Error in fetching data')
container = Container(name='foo_bar_container', extra={},
driver=self.driver)
object_name = 'foo_test_stream_data'
iterator = _faulty_iterator()
extra = {'content_type': 'text/plain'}
try:
self.driver.upload_object_via_stream(container=container,
object_name=object_name,
iterator=iterator,
extra=extra)
except Exception:
pass
return
    def test_s3_list_multipart_uploads(self):
        """ex_iterate_multipart_uploads yields fully-populated upload records."""
        if not self.driver.supports_s3_multipart_upload:
            return
        self.mock_response_klass.type = 'LIST_MULTIPART'
        # NOTE(review): this mutates a class attribute globally and never
        # restores it — confirm no later test depends on the default value.
        S3StorageDriver.RESPONSES_PER_REQUEST = 2
        container = Container(name='foo_bar_container', extra={},
                              driver=self.driver)
        for upload in self.driver.ex_iterate_multipart_uploads(container):
            self.assertNotEqual(upload.key, None)
            self.assertNotEqual(upload.id, None)
            self.assertNotEqual(upload.created_at, None)
            self.assertNotEqual(upload.owner, None)
            self.assertNotEqual(upload.initiator, None)
    def test_s3_abort_multipart_uploads(self):
        """ex_cleanup_all_multipart_uploads completes without raising."""
        if not self.driver.supports_s3_multipart_upload:
            return
        self.mock_response_klass.type = 'LIST_MULTIPART'
        S3StorageDriver.RESPONSES_PER_REQUEST = 2
        container = Container(name='foo_bar_container', extra={},
                              driver=self.driver)
        self.driver.ex_cleanup_all_multipart_uploads(container)
    def test_delete_object_not_found(self):
        """Deleting a missing object raises ObjectDoesNotExistError."""
        self.mock_response_klass.type = 'NOT_FOUND'
        container = Container(name='foo_bar_container', extra={},
                              driver=self.driver)
        obj = Object(name='foo_bar_object', size=1234, hash=None, extra=None,
                     meta_data=None, container=container, driver=self.driver)
        try:
            self.driver.delete_object(obj=obj)
        except ObjectDoesNotExistError:
            pass
        else:
            self.fail('Exception was not thrown')
    def test_delete_object_success(self):
        """A successful object delete returns True."""
        container = Container(name='foo_bar_container', extra={},
                              driver=self.driver)
        obj = Object(name='foo_bar_object', size=1234, hash=None, extra=None,
                     meta_data=None, container=container, driver=self.driver)
        result = self.driver.delete_object(obj=obj)
        self.assertTrue(result)
class S3USWestTests(S3Tests):
    # Re-run the full S3 suite against the us-west region driver.
    driver_type = S3USWestStorageDriver
class S3EUWestTests(S3Tests):
    # Re-run the full S3 suite against the eu-west region driver.
    driver_type = S3EUWestStorageDriver
class S3APSETests(S3Tests):
    # Re-run the full S3 suite against the ap-southeast region driver.
    driver_type = S3APSEStorageDriver
class S3APNETests(S3Tests):
    # Re-run the full S3 suite against the ap-northeast region driver.
    # Bug fix: the attribute was misspelled "driver_tyoe", so this subclass
    # silently kept the inherited driver_type and never actually tested the
    # AP-NE driver.
    driver_type = S3APNEStorageDriver
# Run the test suite and propagate the result as the process exit code.
if __name__ == '__main__':
    sys.exit(unittest.main())
| apache-2.0 |
# coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import ExtractorError
class Sport5IE(InfoExtractor):
    """Extractor for videos hosted on sport5.co.il (VOD and article pages)."""
    _VALID_URL = r'http://(?:www|vod)?\.sport5\.co\.il/.*\b(?:Vi|docID)=(?P<id>\d+)'
    _TESTS = [
        {
            'url': 'http://vod.sport5.co.il/?Vc=147&Vi=176331&Page=1',
            'info_dict': {
                'id': 's5-Y59xx1-GUh2',
                'ext': 'mp4',
                'title': 'ולנסיה-קורדובה 0:3',
                'description': 'אלקאסר, גאייה ופגולי סידרו לקבוצה של נונו ניצחון על קורדובה ואת המקום הראשון בליגה',
                'duration': 228,
                'categories': list,
            },
            'skip': 'Blocked outside of Israel',
        }, {
            'url': 'http://www.sport5.co.il/articles.aspx?FolderID=3075&docID=176372&lang=HE',
            'info_dict': {
                'id': 's5-SiXxx1-hKh2',
                'ext': 'mp4',
                'title': 'GOALS_CELTIC_270914.mp4',
                'description': '',
                'duration': 87,
                'categories': list,
            },
            'skip': 'Blocked outside of Israel',
        }
    ]

    def _real_extract(self, url):
        mobj = re.match(self._VALID_URL, url)
        media_id = mobj.group('id')
        webpage = self._download_webpage(url, media_id)
        # Fix: raw string — '\w' inside a plain literal is an invalid escape
        # sequence (DeprecationWarning on Python 3.6+, SyntaxWarning later).
        video_id = self._html_search_regex(r'clipId=([\w-]+)', webpage, 'video id')
        metadata = self._download_xml(
            'http://sport5-metadata-rr-d.nsacdn.com/vod/vod/%s/HDS/metadata.xml' % video_id,
            video_id)
        # The metadata service reports failures inside the XML body rather
        # than via HTTP status codes.
        error = metadata.find('./Error')
        if error is not None:
            raise ExtractorError(
                '%s returned error: %s - %s' % (
                    self.IE_NAME,
                    error.find('./Name').text,
                    error.find('./Description').text),
                expected=True)
        title = metadata.find('./Title').text
        description = metadata.find('./Description').text
        duration = int(metadata.find('./Duration').text)
        posters_el = metadata.find('./PosterLinks')
        thumbnails = [{
            'url': thumbnail.text,
            'width': int(thumbnail.get('width')),
            'height': int(thumbnail.get('height')),
        } for thumbnail in posters_el.findall('./PosterIMG')] if posters_el is not None else []
        categories_el = metadata.find('./Categories')
        categories = [
            cat.get('name') for cat in categories_el.findall('./Category')
        ] if categories_el is not None else []
        formats = [{
            'url': fmt.text,
            'ext': 'mp4',
            'vbr': int(fmt.get('bitrate')),
            'width': int(fmt.get('width')),
            'height': int(fmt.get('height')),
        } for fmt in metadata.findall('./PlaybackLinks/FileURL')]
        self._sort_formats(formats)
        return {
            'id': video_id,
            'title': title,
            'description': description,
            'thumbnails': thumbnails,
            'duration': duration,
            'categories': categories,
            'formats': formats,
        }
#! /usr/bin/env python
# encoding: UTF-8
# Thomas Nagy 2008-2010 (ita)
"""
Doxygen support
Variables passed to bld():
* doxyfile -- the Doxyfile to use
When using this tool, the wscript will look like:
def options(opt):
opt.load('doxygen')
def configure(conf):
conf.load('doxygen')
# check conf.env.DOXYGEN, if it is mandatory
def build(bld):
if bld.env.DOXYGEN:
bld(features="doxygen", doxyfile='Doxyfile', ...)
def doxygen(bld):
if bld.env.DOXYGEN:
bld(features="doxygen", doxyfile='Doxyfile', ...)
"""
from fnmatch import fnmatchcase
import os, os.path, re, stat
from waflib import Task, Utils, Node, Logs, Errors, Build
from waflib.TaskGen import feature
# "doxygen -" reads the configuration from stdin; run() pipes it in.
DOXY_STR = '"${DOXYGEN}" - '
# Output formats doxygen can produce.
DOXY_FMTS = 'html latex man rft xml'.split()
# Default FILE_PATTERNS when the Doxyfile does not specify any.
DOXY_FILE_PATTERNS = '*.' + ' *.'.join('''
c cc cxx cpp c++ java ii ixx ipp i++ inl h hh hxx hpp h++ idl odl cs php php3
inc m mm py f90c cc cxx cpp c++ java ii ixx ipp i++ inl h hh hxx
'''.split())
# Backslash line-continuations and platform-agnostic newlines.
re_rl = re.compile('\\\\\r*\n', re.MULTILINE)
re_nl = re.compile('\r*\n', re.M)
def parse_doxy(txt):
    """Parse Doxyfile text into a dict mapping setting name -> value.

    Backslash line-continuations are joined first; blank lines, comment
    lines and lines without an assignment are skipped.  ``KEY += VALUE``
    appends to an existing entry with a single-space separator.
    """
    # Join continuation lines, then work line by line.
    joined = re.sub('\\\\\r*\n', '', txt)
    settings = {}
    for raw_line in re.split('\r*\n', joined):
        line = raw_line.strip()
        if not line or line.startswith('#') or '=' not in line:
            continue
        if '+=' in line:
            key, _, value = line.partition('+=')
            key = key.strip()
            value = value.strip()
            if key in settings:
                settings[key] += ' ' + value
            else:
                settings[key] = value
        else:
            key, _, value = line.partition('=')
            settings[key.strip()] = value.strip()
    return settings
class doxygen(Task.Task):
    """Waf task that runs doxygen, feeding it the parsed Doxyfile on stdin."""
    vars = ['DOXYGEN', 'DOXYFLAGS']
    color = 'BLUE'

    def runnable_status(self):
        '''
        self.pars are populated in runnable_status - because this function is being
        run *before* both self.pars "consumers" - scan() and run()
        set output_dir (node) for the output
        '''
        for x in self.run_after:
            if not x.hasrun:
                return Task.ASK_LATER
        if not getattr(self, 'pars', None):
            txt = self.inputs[0].read()
            self.pars = parse_doxy(txt)
            if not self.pars.get('OUTPUT_DIRECTORY'):
                self.pars['OUTPUT_DIRECTORY'] = self.inputs[0].parent.get_bld().abspath()
            # Override with any parameters passed to the task generator.
            # Fix: items() instead of iteritems() — iteritems() does not exist
            # on Python 3, while the rest of this tool already caters for
            # Python 3 (see code.encode() in run()).
            if getattr(self.generator, 'pars', None):
                for k, v in self.generator.pars.items():
                    self.pars[k] = v
            self.doxy_inputs = getattr(self, 'doxy_inputs', [])
            if not self.pars.get('INPUT'):
                # No INPUT in the Doxyfile: document the Doxyfile's directory.
                self.doxy_inputs.append(self.inputs[0].parent)
            else:
                for i in self.pars.get('INPUT').split():
                    if os.path.isabs(i):
                        node = self.generator.bld.root.find_node(i)
                    else:
                        node = self.generator.path.find_node(i)
                    if not node:
                        self.generator.bld.fatal('Could not find the doxygen input %r' % i)
                    self.doxy_inputs.append(node)
        if not getattr(self, 'output_dir', None):
            bld = self.generator.bld
            # First try to find an absolute path, then find or declare a relative path
            self.output_dir = bld.root.find_dir(self.pars['OUTPUT_DIRECTORY'])
            if not self.output_dir:
                self.output_dir = bld.path.find_or_declare(self.pars['OUTPUT_DIRECTORY'])
        self.signature()
        return Task.Task.runnable_status(self)

    def scan(self):
        """Collect the source files matched by FILE_PATTERNS as dependencies."""
        exclude_patterns = self.pars.get('EXCLUDE_PATTERNS','').split()
        file_patterns = self.pars.get('FILE_PATTERNS','').split()
        if not file_patterns:
            file_patterns = DOXY_FILE_PATTERNS
        if self.pars.get('RECURSIVE') == 'YES':
            file_patterns = ["**/%s" % pattern for pattern in file_patterns]
        nodes = []
        names = []
        for node in self.doxy_inputs:
            if os.path.isdir(node.abspath()):
                for m in node.ant_glob(incl=file_patterns, excl=exclude_patterns):
                    nodes.append(m)
            else:
                nodes.append(node)
        return (nodes, names)

    def run(self):
        """Serialize the effective configuration and pipe it to ``doxygen -``."""
        dct = self.pars.copy()
        dct['INPUT'] = ' '.join(['"%s"' % x.abspath() for x in self.doxy_inputs])
        code = '\n'.join(['%s = %s' % (x, dct[x]) for x in self.pars])
        code = code.encode()  # for python 3
        #fmt = DOXY_STR % (self.inputs[0].parent.abspath())
        cmd = Utils.subst_vars(DOXY_STR, self.env)
        env = self.env.env or None
        proc = Utils.subprocess.Popen(cmd, shell=True, stdin=Utils.subprocess.PIPE, env=env, cwd=self.generator.bld.path.get_bld().abspath())
        proc.communicate(code)
        return proc.returncode

    def post_run(self):
        """Register every generated file as a task output and hash it."""
        nodes = self.output_dir.ant_glob('**/*', quiet=True)
        for x in nodes:
            x.sig = Utils.h_file(x.abspath())
        self.outputs += nodes
        return Task.Task.post_run(self)
class tar(Task.Task):
    "quick tar creation"
    run_str = '${TAR} ${TAROPTS} ${TGT} ${SRC}'
    color = 'RED'
    # Must run after the doxygen task so its outputs exist to be archived.
    after = ['doxygen']
    def runnable_status(self):
        """Wait for the input tasks, then adopt their outputs as our inputs."""
        for x in getattr(self, 'input_tasks', []):
            if not x.hasrun:
                return Task.ASK_LATER
        if not getattr(self, 'tar_done_adding', None):
            # execute this only once
            self.tar_done_adding = True
            for x in getattr(self, 'input_tasks', []):
                self.set_inputs(x.outputs)
            if not self.inputs:
                return Task.SKIP_ME
        return Task.Task.runnable_status(self)
    def __str__(self):
        # Human-readable progress line showing the archive being produced.
        tgt_str = ' '.join([a.nice_path(self.env) for a in self.outputs])
        return '%s: %s\n' % (self.__class__.__name__, tgt_str)
@feature('doxygen')
def process_doxy(self):
    """Create a doxygen task from a 'doxyfile' attribute and, optionally,
    a tar task archiving the generated documentation ('doxy_tar')."""
    if not getattr(self, 'doxyfile', None):
        self.generator.bld.fatal('no doxyfile??')
    node = self.doxyfile
    if not isinstance(node, Node.Node):
        node = self.path.find_resource(node)
    if not node:
        raise ValueError('doxygen file not found')
    # the task instance
    dsk = self.create_task('doxygen', node)
    if getattr(self, 'doxy_tar', None):
        tsk = self.create_task('tar')
        tsk.input_tasks = [dsk]
        tsk.set_outputs(self.path.find_or_declare(self.doxy_tar))
        # Pick the compression flag from the archive file extension.
        if self.doxy_tar.endswith('bz2'):
            tsk.env['TAROPTS'] = ['cjf']
        elif self.doxy_tar.endswith('gz'):
            tsk.env['TAROPTS'] = ['czf']
        else:
            tsk.env['TAROPTS'] = ['cf']
def configure(conf):
    '''
    Check if doxygen and tar commands are present in the system
    If the commands are present, then conf.env.DOXYGEN and conf.env.TAR
    variables will be set. Detection can be controlled by setting DOXYGEN and
    TAR environmental variables.
    '''
    # Both tools are optional: features degrade gracefully when missing.
    conf.find_program('doxygen', var='DOXYGEN', mandatory=False)
    conf.find_program('tar', var='TAR', mandatory=False)
# doxygen docs
from waflib.Build import BuildContext
class doxy(BuildContext):
    # Registers a "waf doxygen" command that invokes the wscript's
    # doxygen() function.
    cmd = "doxygen"
    fun = "doxygen"
| gpl-3.0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Membership Management',
'version': '1.0',
'category': 'Sales',
'description': """
This module allows you to manage all operations for managing memberships.
=========================================================================
It supports different kind of members:
--------------------------------------
* Free member
* Associated member (e.g.: a group subscribes to a membership for all subsidiaries)
* Paid members
* Special member prices
It is integrated with sales and accounting to allow you to automatically
invoice and send propositions for membership renewal.
""",
'depends': ['base', 'product', 'account'],
'data': [
'security/ir.model.access.csv',
'wizard/membership_invoice_views.xml',
'data/membership_data.xml',
'views/product_views.xml',
'views/partner_views.xml',
'report/report_membership_views.xml',
],
'demo': [
'data/membership_demo.xml',
],
'website': 'https://www.odoo.com/page/community-builder',
'test': [
'../account/test/account_minimal_test.xml',
],
}
| agpl-3.0 |
# -*- coding: utf-8 -*-
from django.utils.translation import get_language
from cms import constants
from cms.apphook_pool import apphook_pool
from cms.utils.permissions import load_view_restrictions, has_global_page_permission
from cms.utils import get_language_from_request
from cms.utils.conf import get_cms_setting
from cms.utils.helpers import current_site
from cms.utils.i18n import get_fallback_languages, hide_untranslated
from cms.utils.page_resolver import get_page_queryset
from cms.utils.moderator import get_title_queryset, use_draft
from menus.base import Menu, NavigationNode, Modifier
from menus.menu_pool import menu_pool
def get_visible_page_objects(request, pages, site=None):
    """
    Return the subset of ``pages`` that the requesting user may view.

    This code is basically a many-pages-at-once version of
    Page.has_view_permission: ``pages`` contains all published pages and we
    check whether ANY view restriction exists that requires a per-page
    permission/visibility calculation.
    """
    public_for = get_cms_setting('PUBLIC_FOR')
    can_see_unrestricted = public_for == 'all' or (
        public_for == 'staff' and request.user.is_staff)
    is_auth_user = request.user.is_authenticated()
    restricted_pages = load_view_restrictions(request, pages)
    if not restricted_pages:
        if can_see_unrestricted:
            return pages
        elif not is_auth_user:
            return [] # Unauth user can't acquire global or user perm to see pages
    if get_cms_setting('PERMISSION') and not site:
        site = current_site(request) # avoid one extra query when possible
        if has_global_page_permission(request, site, can_view=True):
            return pages
    def has_global_perm():
        # Memoised on the function object: -1 unknown, 0 no, 1 yes, so the
        # permission check runs at most once per call.
        if has_global_perm.cache < 0:
            if request.user.has_perm('cms.view_page'):
                has_global_perm.cache = 1
            else:
                has_global_perm.cache = 0
        return bool(has_global_perm.cache)
    has_global_perm.cache = -1
    def has_permission_membership(page_id):
        """
        PagePermission user group membership tests
        """
        user_pk = request.user.pk
        for perm in restricted_pages[page_id]:
            if perm.user_id == user_pk:
                return True
            if not perm.group_id:
                continue
            # Group ids are fetched lazily, once, on first need.
            if has_permission_membership.user_groups is None:
                has_permission_membership.user_groups = request.user.groups.all().values_list(
                    'pk', flat=True)
            if perm.group_id in has_permission_membership.user_groups:
                return True
        return False
    has_permission_membership.user_groups = None
    visible_pages = []
    for page in pages:
        to_add = False
        page_id = page.pk
        is_restricted = page_id in restricted_pages
        # restricted_pages contains as key any page.pk that is
        # affected by a permission grant_on
        if not is_restricted and can_see_unrestricted:
            to_add = True
        elif is_auth_user:
            # setting based handling of unrestricted pages
            # check group and user memberships to restricted pages
            if is_restricted and has_permission_membership(page_id):
                to_add = True
            elif has_global_perm():
                to_add = True
        if to_add:
            visible_pages.append(page)
    return visible_pages
def get_visible_pages(request, pages, site=None):
    """Return the primary keys of the pages visible to the current user."""
    return [visible.pk for visible in
            get_visible_page_objects(request, pages, site)]
def page_to_node(page, home, cut):
    """
    Transform a CMS page into a navigation node.

    :param page: the page you wish to transform
    :param home: a reference to the "home" page (the page with path="0001")
    :param cut: Should we cut page from its parent pages? This means the node
        will not have a parent anymore.
    """
    # Theses are simple to port over, since they are not calculated.
    # Other attributes will be added conditionnally later.
    attr = {'soft_root': page.soft_root,
            'auth_required': page.login_required,
            'reverse_id': page.reverse_id, }
    parent_id = page.parent_id
    # Should we cut the Node from its parents?
    if home and page.parent_id == home.pk and cut:
        parent_id = None
    # possible fix for a possible problem
    # if parent_id and not page.parent.get_calculated_status():
    #    parent_id = None # ????
    if page.limit_visibility_in_menu is constants.VISIBILITY_ALL:
        attr['visible_for_authenticated'] = True
        attr['visible_for_anonymous'] = True
    else:
        attr['visible_for_authenticated'] = page.limit_visibility_in_menu == constants.VISIBILITY_USERS
        attr['visible_for_anonymous'] = page.limit_visibility_in_menu == constants.VISIBILITY_ANONYMOUS
    attr['is_home'] = page.is_home
    # Extenders can be either navigation extenders or from apphooks.
    extenders = []
    if page.navigation_extenders:
        extenders.append(page.navigation_extenders)
    # Is this page an apphook? If so, we need to handle the apphooks's nodes
    lang = get_language()
    # Only run this if we have a translation in the requested language for this
    # object. The title cache should have been prepopulated in CMSMenu.get_nodes
    # but otherwise, just request the title normally
    if not hasattr(page, 'title_cache') or lang in page.title_cache:
        app_name = page.get_application_urls(fallback=False)
        if app_name: # it means it is an apphook
            app = apphook_pool.get_apphook(app_name)
            extenders += app.menus
    exts = []
    for ext in extenders:
        if hasattr(ext, "get_instances"):
            # CMSAttachMenus are treated a bit differently to allow them to be
            # able to be attached to multiple points in the navigation.
            exts.append("{0}:{1}".format(ext.__name__, page.pk))
        elif hasattr(ext, '__name__'):
            exts.append(ext.__name__)
        else:
            exts.append(ext)
    if exts:
        attr['navigation_extenders'] = exts
    # Do we have a redirectURL?
    attr['redirect_url'] = page.get_redirect()  # save redirect URL if any
    # Now finally, build the NavigationNode object and return it.
    ret_node = NavigationNode(
        page.get_menu_title(),
        page.get_absolute_url(),
        page.pk,
        parent_id,
        attr=attr,
        visible=page.in_navigation,
    )
    return ret_node
class CMSMenu(Menu):
    """Navigation menu built from the published CMS page tree."""
    def get_nodes(self, request):
        """Return NavigationNodes for every page visible to this request."""
        page_queryset = get_page_queryset(request)
        site = current_site(request)
        lang = get_language_from_request(request)
        filters = {
            'site': site,
        }
        if hide_untranslated(lang, site.pk):
            filters['title_set__language'] = lang
        if not use_draft(request):
            page_queryset = page_queryset.published()
        pages = page_queryset.filter(**filters).order_by("path")
        ids = {}
        nodes = []
        first = True
        home_cut = False
        home_children = []
        home = None
        actual_pages = []
        # cache view perms
        visible_pages = get_visible_pages(request, pages, site)
        for page in pages:
            # Pages are ordered by path, therefore the first page is the root
            # of the page tree (a.k.a "home")
            if page.pk not in visible_pages:
                # Don't include pages the user doesn't have access to
                continue
            if not home:
                home = page
            if first and page.pk != home.pk:
                home_cut = True
            if (page.parent_id == home.pk or page.parent_id in home_children) and home_cut:
                home_children.append(page.pk)
            if (page.pk == home.pk and home.in_navigation) or page.pk != home.pk:
                first = False
            ids[page.id] = page
            actual_pages.append(page)
            page.title_cache = {}
        # Prefetch titles (with fallbacks) for all collected pages in one query.
        langs = [lang]
        if not hide_untranslated(lang):
            langs.extend(get_fallback_languages(lang))
        titles = list(get_title_queryset(request).filter(page__in=ids, language__in=langs))
        for title in titles: # add the title and slugs and some meta data
            page = ids[title.page_id]
            page.title_cache[title.language] = title
        for page in actual_pages:
            if page.title_cache:
                nodes.append(page_to_node(page, home, home_cut))
        return nodes
menu_pool.register_menu(CMSMenu)
class NavExtender(Modifier):
    """Attach navigation-extender menus (apphook menus etc.) under the pages
    that declare them, and drop extender nodes that were never attached."""
    def modify(self, request, nodes, namespace, root_id, post_cut, breadcrumb):
        if post_cut:
            return nodes
        exts = []
        # rearrange the parent relations
        home = None
        for node in nodes:
            if node.attr.get("is_home", False):
                home = node
            extenders = node.attr.get("navigation_extenders", None)
            if extenders:
                for ext in extenders:
                    if ext not in exts:
                        exts.append(ext)
                    for extnode in nodes:
                        if extnode.namespace == ext and not extnode.parent_id:
                            # if home has nav extenders but home is not visible
                            if (node.attr.get("is_home", False)
                                    and not node.visible):
                                extnode.parent_id = None
                                extnode.parent_namespace = None
                                extnode.parent = None
                            else:
                                extnode.parent_id = node.id
                                extnode.parent_namespace = node.namespace
                                extnode.parent = node
                                node.children.append(extnode)
        removed = []
        # find all not assigned nodes
        for menu in menu_pool.menus.items():
            if (hasattr(menu[1], 'cms_enabled')
                    and menu[1].cms_enabled and not menu[0] in exts):
                for node in nodes:
                    if node.namespace == menu[0]:
                        removed.append(node)
        if breadcrumb:
            # if breadcrumb and home not in navigation add node
            if breadcrumb and home and not home.visible:
                home.visible = True
                if request.path_info == home.get_absolute_url():
                    home.selected = True
                else:
                    home.selected = False
        # remove all nodes that are nav_extenders and not assigned
        for node in removed:
            nodes.remove(node)
        return nodes
menu_pool.register_modifier(NavExtender)
class SoftRootCutter(Modifier):
    """Cut the menu tree down to the nearest "soft root".

    A soft root is a page that acts as the root for a menu navigation tree,
    typically the top of a significant section of the site.  With soft roots
    enabled, the navigation menu for any page starts at the nearest soft root
    rather than at the real root of the page hierarchy, which keeps menus
    manageable on sites with deep page trees (e.g. a "Department of Mediaeval
    Surgery" page becomes the visible menu root for everything beneath it,
    instead of showing the whole "School of Medicine" tree).
    """

    def modify(self, request, nodes, namespace, root_id, post_cut, breadcrumb):
        """Return `nodes` re-rooted at the soft root nearest the selection."""
        # only apply this modifier if we're pre-cut (since what we do is cut)
        # or if no id argument is provided, indicating {% show_menu_below_id %}
        if post_cut or root_id:
            return nodes
        selected = None
        # find the currently selected node
        # (the original code also collected root nodes here, but never used them)
        for node in nodes:
            if node.selected:
                selected = node
        # if we found a selected ...
        if selected:
            # and the selected is a softroot
            if selected.attr.get("soft_root", False):
                # get it's descendants
                nodes = selected.get_descendants()
                # remove the link to parent
                selected.parent = None
                # make the selected page the root in the menu
                nodes = [selected] + nodes
            else:
                # if it's not a soft root, walk ancestors (upwards!)
                nodes = self.find_ancestors_and_remove_children(selected, nodes)
        return nodes

    def find_and_remove_children(self, node, nodes):
        """Prune the subtrees of any soft-root children of `node` from `nodes`."""
        for child in node.children:
            if child.attr.get("soft_root", False):
                self.remove_children(child, nodes)
        return nodes

    def remove_children(self, node, nodes):
        """Recursively remove all descendants of `node` from `nodes` in place."""
        for child in node.children:
            nodes.remove(child)
            self.remove_children(child, nodes)
        node.children = []

    def find_ancestors_and_remove_children(self, node, nodes):
        """
        Check ancestors of node for soft roots
        """
        if node.parent:
            if node.parent.attr.get("soft_root", False):
                # nearest ancestor is a soft root: re-root the tree there
                nodes = node.parent.get_descendants()
                node.parent.parent = None
                nodes = [node.parent] + nodes
            else:
                # keep walking upwards until a soft root (or the real root)
                nodes = self.find_ancestors_and_remove_children(
                    node.parent, nodes)
        else:
            # reached the real root: prune soft-root subtrees of the siblings
            for newnode in nodes:
                if newnode != node and not newnode.parent:
                    self.find_and_remove_children(newnode, nodes)
        for child in node.children:
            if child != node:
                self.find_and_remove_children(child, nodes)
        return nodes
# Register so menus are cut at the nearest soft root on every render.
menu_pool.register_modifier(SoftRootCutter)
| bsd-3-clause |
lukeiwanski/tensorflow | tensorflow/python/kernel_tests/constant_op_test.py | 16 | 34602 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for ConstantOp."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from google.protobuf import text_format
from tensorflow.core.framework import graph_pb2
from tensorflow.core.framework import tensor_pb2
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes as dtypes_lib
from tensorflow.python.framework import errors_impl
from tensorflow.python.framework import importer
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gradient_checker
from tensorflow.python.ops import logging_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test
from tensorflow.python.util import compat
class ConstantTest(test.TestCase):
  """Tests creating constant tensors via constant_op / convert_to_tensor."""

  def _testCpu(self, x):
    """Converts `x` on CPU and checks the result matches np.array(x)."""
    np_ans = np.array(x)
    with self.test_session(use_gpu=False):
      tf_ans = ops.convert_to_tensor(x).eval()
    dtype = dtypes_lib.as_dtype(np_ans.dtype)
    if dtype.is_floating or dtype.is_complex:
      self.assertAllClose(np_ans, tf_ans)
    else:
      self.assertAllEqual(np_ans, tf_ans)

  def _testGpu(self, x):
    """Converts `x` with GPU enabled and checks the result matches numpy."""
    np_ans = np.array(x)
    with self.test_session(use_gpu=True):
      tf_ans = ops.convert_to_tensor(x).eval()
    dtype = dtypes_lib.as_dtype(np_ans.dtype)
    if dtype.is_floating or dtype.is_complex:
      self.assertAllClose(np_ans, tf_ans)
    else:
      self.assertAllEqual(np_ans, tf_ans)

  def _testAll(self, x):
    self._testCpu(x)
    self._testGpu(x)

  def testInvalidDType(self):
    # Test case for GitHub issue 18474
    with self.assertRaises(TypeError):
      constant_op.constant(dtypes_lib.string, "[,]")

  def testBFloat16(self):
    bfloat16 = dtypes_lib.bfloat16.as_numpy_dtype
    self._testAll(np.arange(-15, 15).reshape([2, 3, 5]).astype(bfloat16))
    self._testAll(
        np.random.normal(size=30).reshape([2, 3, 5]).astype(bfloat16))
    self._testAll(np.empty((2, 0, 5)).astype(bfloat16))

  def testHalf(self):
    self._testAll(np.arange(-15, 15).reshape([2, 3, 5]).astype(np.float16))
    self._testAll(
        np.random.normal(size=30).reshape([2, 3, 5]).astype(np.float16))
    self._testAll(np.empty((2, 0, 5)).astype(np.float16))

  def testFloat(self):
    self._testAll(np.arange(-15, 15).reshape([2, 3, 5]).astype(np.float32))
    self._testAll(
        np.random.normal(size=30).reshape([2, 3, 5]).astype(np.float32))
    self._testAll(np.empty((2, 0, 5)).astype(np.float32))

  def testDouble(self):
    self._testAll(np.arange(-15, 15).reshape([2, 3, 5]).astype(np.float64))
    self._testAll(
        np.random.normal(size=30).reshape([2, 3, 5]).astype(np.float64))
    self._testAll(np.empty((2, 0, 5)).astype(np.float64))

  def testInt32(self):
    self._testAll(np.arange(-15, 15).reshape([2, 3, 5]).astype(np.int32))
    self._testAll((100 * np.random.normal(size=30)).reshape([2, 3, 5]).astype(
        np.int32))
    self._testAll(np.empty((2, 0, 5)).astype(np.int32))

  def testInt64(self):
    self._testAll(np.arange(-15, 15).reshape([2, 3, 5]).astype(np.int64))
    self._testAll((100 * np.random.normal(size=30)).reshape([2, 3, 5]).astype(
        np.int64))
    self._testAll(np.empty((2, 0, 5)).astype(np.int64))

  def testComplex64(self):
    # NOTE: uses the builtin `complex`; `np.complex` was a deprecated alias
    # for it and has been removed from NumPy (1.20+).
    self._testAll(
        complex(1, 2) *
        np.arange(-15, 15).reshape([2, 3, 5]).astype(np.complex64))
    self._testAll(
        complex(1, 2) *
        np.random.normal(size=30).reshape([2, 3, 5]).astype(np.complex64))
    self._testAll(np.empty((2, 0, 5)).astype(np.complex64))

  def testComplex128(self):
    self._testAll(
        complex(1, 2) *
        np.arange(-15, 15).reshape([2, 3, 5]).astype(np.complex128))
    self._testAll(
        complex(1, 2) *
        np.random.normal(size=30).reshape([2, 3, 5]).astype(np.complex128))
    self._testAll(np.empty((2, 0, 5)).astype(np.complex128))

  def testString(self):
    # Strings are only supported on CPU.
    self._testCpu(
        np.array([compat.as_bytes(str(x)) for x in np.arange(-15, 15)]).reshape(
            [2, 3, 5]))
    self._testCpu(np.empty((2, 0, 5)).astype(np.str_))

  def testVariant(self):
    # TODO(ebrevdo): Re-enable use_gpu=True once non-DMA Variant
    # copying between CPU and GPU is supported.
    with self.test_session(use_gpu=False):
      variant_tensor = tensor_pb2.TensorProto(
          dtype=dtypes_lib.variant.as_datatype_enum,
          tensor_shape=tensor_shape.TensorShape([]).as_proto(),
          variant_val=[
              tensor_pb2.VariantTensorDataProto(
                  # Match registration in variant_op_registry.cc
                  type_name=b"int",
                  metadata=np.array(1, dtype=np.int32).tobytes())
          ])
      const = constant_op.constant(variant_tensor)
      const_value = const.op.get_attr("value")

      # Ensure we stored the tensor proto properly.
      self.assertProtoEquals(variant_tensor, const_value)

      # Smoke test -- ensure this executes without trouble.
      # Right now, non-numpy-compatible objects cannot be returned from a
      # session.run call; similarly, objects that can't be converted to
      # native numpy types cannot be passed to ops.convert_to_tensor.
      # TODO(ebrevdo): Add registration mechanism for
      # ops.convert_to_tensor and for session.run output.
      logging_const_op = logging_ops.Print(
          const, [const],
          message="Variant storing an int, decoded const value:").op
      logging_const_op.run()

  def testStringWithNulls(self):
    # Embedded NUL bytes must round-trip through a string tensor.
    with self.test_session():
      val = ops.convert_to_tensor(b"\0\0\0\0").eval()
    self.assertEqual(len(val), 4)
    self.assertEqual(val, b"\0\0\0\0")

    with self.test_session():
      val = ops.convert_to_tensor(b"xx\0xx").eval()
    self.assertEqual(len(val), 5)
    self.assertAllEqual(val, b"xx\0xx")

    nested = [[b"\0\0\0\0", b"xx\0xx"], [b"\0_\0_\0_\0", b"\0"]]
    with self.test_session():
      val = ops.convert_to_tensor(nested).eval()
    # NOTE(mrry): Do not use assertAllEqual, because it converts nested to a
    # numpy array, which loses the null terminators.
    self.assertEqual(val.tolist(), nested)

  def testExplicitShapeNumPy(self):
    with ops.Graph().as_default():
      c = constant_op.constant(
          np.arange(-15, 15).reshape([2, 3, 5]).astype(np.float32),
          shape=[2, 3, 5])
      self.assertEqual(c.get_shape(), [2, 3, 5])

  @test_util.assert_no_new_pyobjects_executing_eagerly
  def testEagerMemory(self):
    """Tests PyObject refs are managed correctly when executing eagerly."""
    constant_op.constant([[1.]])

  def testImplicitShapeNumPy(self):
    with ops.Graph().as_default():
      c = constant_op.constant(
          np.arange(-15, 15).reshape([2, 3, 5]).astype(np.float32))
      self.assertEqual(c.get_shape(), [2, 3, 5])

  def testExplicitShapeList(self):
    with ops.Graph().as_default():
      c = constant_op.constant([1, 2, 3, 4, 5, 6, 7], shape=[7])
      self.assertEqual(c.get_shape(), [7])

  def testImplicitShapeList(self):
    with ops.Graph().as_default():
      c = constant_op.constant([1, 2, 3, 4, 5, 6, 7])
      self.assertEqual(c.get_shape(), [7])

  def testExplicitShapeNumber(self):
    with ops.Graph().as_default():
      c = constant_op.constant(1, shape=[1])
      self.assertEqual(c.get_shape(), [1])

  def testImplicitShapeNumber(self):
    with ops.Graph().as_default():
      c = constant_op.constant(1)
      self.assertEqual(c.get_shape(), [])

  def testShapeInconsistent(self):
    # Fewer elements than the shape requires: the value is broadcast/padded
    # and the declared shape wins.
    with ops.Graph().as_default():
      c = constant_op.constant([1, 2, 3, 4, 5, 6, 7], shape=[10])
      self.assertEqual(c.get_shape(), [10])

  # pylint: disable=g-long-lambda
  def testShapeWrong(self):
    with ops.Graph().as_default():
      with self.assertRaisesWithPredicateMatch(
          ValueError,
          lambda e: ("Too many elements provided. Needed at most 5, "
                     "but received 7" == str(e))):
        constant_op.constant([1, 2, 3, 4, 5, 6, 7], shape=[5])

  # pylint: enable=g-long-lambda
  # TODO(b/35396543): Temporarily disable: suspicion that
  # this is causing test timeouts.
  def _testTooLargeConstant(self):
    with ops.Graph().as_default():
      large_array = np.zeros((512, 1024, 1024), dtype=np.float32)
      with self.assertRaisesRegexp(
          ValueError,
          "Cannot create a tensor proto whose content is larger than 2GB."):
        c = constant_op.constant(large_array)

  # TODO(b/35396543): Temporarily disable: suspicion that
  # this is causing test timeouts.
  def _testTooLargeGraph(self):
    with ops.Graph().as_default() as g:
      large_array = np.zeros((256, 1024, 1024), dtype=np.float32)
      c = constant_op.constant(large_array)
      d = constant_op.constant(large_array)
      with self.assertRaisesRegexp(ValueError,
                                   "GraphDef cannot be larger than 2GB."):
        g.as_graph_def()

  def testSparseValuesRaiseErrors(self):
    # Ragged (non-rectangular) nested lists must be rejected.
    with self.assertRaisesRegexp(ValueError,
                                 "setting an array element with a sequence"):
      c = constant_op.constant([[1, 2], [3]], dtype=dtypes_lib.int32)

    with self.assertRaisesRegexp(ValueError, "must be a dense"):
      c = constant_op.constant([[1, 2], [3]])

    with self.assertRaisesRegexp(ValueError, "must be a dense"):
      c = constant_op.constant([[1, 2], [3], [4, 5]])
class AsTensorTest(test.TestCase):
  """Tests ops.convert_to_tensor on Tensor, TensorShape and Dimension inputs."""

  def testAsTensorForTensorInput(self):
    with ops.Graph().as_default():
      t = constant_op.constant(10.0)
      x = ops.convert_to_tensor(t)
      # Converting an existing Tensor must return the very same object.
      self.assertIs(t, x)

  def testAsTensorForNonTensorInput(self):
    with ops.Graph().as_default():
      # A plain Python scalar is wrapped into a Tensor.
      x = ops.convert_to_tensor(10.0)
      self.assertTrue(isinstance(x, ops.Tensor))

  def testAsTensorForShapeInput(self):
    """Fully-known shapes convert to int32/int64 vectors; partial shapes fail."""
    with self.test_session():
      x = ops.convert_to_tensor(tensor_shape.TensorShape([]))
      self.assertEqual(dtypes_lib.int32, x.dtype)
      self.assertAllEqual([], x.eval())

      x = ops.convert_to_tensor(tensor_shape.TensorShape([1, 2, 3]))
      self.assertEqual(dtypes_lib.int32, x.dtype)
      self.assertAllEqual([1, 2, 3], x.eval())

      # 2**31 - 1 is the largest dimension that still fits in int32.
      x = ops.convert_to_tensor(tensor_shape.TensorShape([2**31-1, 2, 3]))
      self.assertEqual(dtypes_lib.int32, x.dtype)
      self.assertAllEqual([2**31-1, 2, 3], x.eval())

      x = ops.convert_to_tensor(tensor_shape.TensorShape([2**31-1, 2, 3]),
                                dtype=dtypes_lib.int32)
      self.assertEqual(dtypes_lib.int32, x.dtype)
      self.assertAllEqual([2**31-1, 2, 3], x.eval())

      # A dimension of 2**31 overflows int32, so int64 is inferred.
      x = ops.convert_to_tensor(tensor_shape.TensorShape([2**31, 2, 3]))
      self.assertEqual(dtypes_lib.int64, x.dtype)
      self.assertAllEqual([2**31, 2, 3], x.eval())

      x = ops.convert_to_tensor(tensor_shape.TensorShape([2**31, 2, 3]),
                                dtype=dtypes_lib.int64)
      self.assertEqual(dtypes_lib.int64, x.dtype)
      self.assertAllEqual([2**31, 2, 3], x.eval())

      # Explicitly requesting int32 for an overflowing dimension must fail.
      with self.assertRaisesRegexp(
          ValueError, "a dimension is too large .2147483648."):
        x = ops.convert_to_tensor(tensor_shape.TensorShape([2**31, 2, 3]),
                                  dtype=dtypes_lib.int32)

      x = ops.convert_to_tensor(
          tensor_shape.TensorShape([1, 2, 3]), dtype=dtypes_lib.int64)
      self.assertEqual(dtypes_lib.int64, x.dtype)
      self.assertAllEqual([1, 2, 3], x.eval())

      # A TensorShape is usable directly where a shape tensor is expected.
      x = array_ops.reshape(
          array_ops.zeros([6]), tensor_shape.TensorShape([2, 3]))
      self.assertAllEqual([[0.0, 0.0, 0.0], [0.0, 0.0, 0.0]], x.eval())

      # Partially-known shapes cannot be converted to a Tensor.
      with self.assertRaisesRegexp(ValueError, "partially known"):
        ops.convert_to_tensor(tensor_shape.TensorShape(None))

      with self.assertRaisesRegexp(ValueError, "partially known"):
        ops.convert_to_tensor(tensor_shape.TensorShape([1, None, 64]))

      # Shapes only convert to integer dtypes.
      with self.assertRaises(TypeError):
        ops.convert_to_tensor(
            tensor_shape.TensorShape([1, 2, 3]), dtype=dtypes_lib.float32)

  def testAsTensorForDimensionInput(self):
    """Known Dimensions convert to int scalars; unknown Dimensions fail."""
    with self.test_session():
      x = ops.convert_to_tensor(tensor_shape.TensorShape([1, 2, 3])[1])
      self.assertEqual(dtypes_lib.int32, x.dtype)
      self.assertAllEqual(2, x.eval())

      x = ops.convert_to_tensor(
          tensor_shape.TensorShape([1, 2, 3])[1], dtype=dtypes_lib.int64)
      self.assertEqual(dtypes_lib.int64, x.dtype)
      self.assertAllEqual(2, x.eval())

      with self.assertRaisesRegexp(ValueError, "unknown Dimension"):
        ops.convert_to_tensor(tensor_shape.TensorShape(None)[1])

      with self.assertRaisesRegexp(ValueError, "unknown Dimension"):
        ops.convert_to_tensor(tensor_shape.TensorShape([1, None, 64])[1])

      with self.assertRaises(TypeError):
        ops.convert_to_tensor(
            tensor_shape.TensorShape([1, 2, 3])[1], dtype=dtypes_lib.float32)
class IdentityOpTest(test.TestCase):
  """Checks that tf.identity forwards its input and builds the right node."""

  def testIdTensor(self):
    with ops.Graph().as_default():
      source = constant_op.constant(2.0, shape=[6], name="input")
      identity = array_ops.identity(source, name="id")
      # The identity op must take the constant tensor as its sole input.
      self.assertTrue(isinstance(identity.op.inputs[0], ops.Tensor))
      expected_proto = ("name: 'id' op: 'Identity' input: 'input' "
                        "attr { key: 'T' value { type: DT_FLOAT } }")
      self.assertProtoEquals(expected_proto, identity.op.node_def)
class ZerosTest(test.TestCase):
  """Tests for array_ops.zeros with static shapes, dynamic shapes and dtypes."""

  def _Zeros(self, shape):
    """Evaluates zeros(shape) after asserting its static shape."""
    with self.test_session():
      ret = array_ops.zeros(shape)
      self.assertEqual(shape, ret.get_shape())
      return ret.eval()

  def testConst(self):
    self.assertTrue(
        np.array_equal(self._Zeros([2, 3]), np.array([[0] * 3] * 2)))

  def testScalar(self):
    # Both [] and () describe a scalar shape.
    self.assertEqual(0, self._Zeros([]))
    self.assertEqual(0, self._Zeros(()))
    with self.test_session():
      # An empty int32 shape tensor also produces a scalar.
      scalar = array_ops.zeros(constant_op.constant([], dtype=dtypes_lib.int32))
      self.assertEqual(0, scalar.eval())

  def testDynamicSizes(self):
    np_ans = np.array([[0] * 3] * 2)
    with self.test_session():
      # Creates a tensor of 2 x 3.
      d = array_ops.fill([2, 3], 12., name="fill")
      # Constructs a tensor of zeros of the same dimensions as "d".
      z = array_ops.zeros(array_ops.shape(d))
      out = z.eval()
    self.assertAllEqual(np_ans, out)
    self.assertShapeEqual(np_ans, d)
    self.assertShapeEqual(np_ans, z)

  def testDtype(self):
    with self.test_session():
      d = array_ops.fill([2, 3], 12., name="fill")
      self.assertEqual(d.get_shape(), [2, 3])
      # Test default type for both constant size and dynamic size
      z = array_ops.zeros([2, 3])
      self.assertEqual(z.dtype, dtypes_lib.float32)
      self.assertEqual([2, 3], z.get_shape())
      self.assertAllEqual(z.eval(), np.zeros([2, 3]))
      z = array_ops.zeros(array_ops.shape(d))
      self.assertEqual(z.dtype, dtypes_lib.float32)
      self.assertEqual([2, 3], z.get_shape())
      self.assertAllEqual(z.eval(), np.zeros([2, 3]))
      # Test explicit type control
      for dtype in [
          dtypes_lib.float32, dtypes_lib.float64, dtypes_lib.int32,
          dtypes_lib.uint8, dtypes_lib.int16, dtypes_lib.int8,
          dtypes_lib.complex64, dtypes_lib.complex128, dtypes_lib.int64,
          dtypes_lib.bool, dtypes_lib.string
      ]:
        z = array_ops.zeros([2, 3], dtype=dtype)
        self.assertEqual(z.dtype, dtype)
        self.assertEqual([2, 3], z.get_shape())
        z_value = z.eval()
        # np.any is falsy for all-zero (or all-empty-string) contents.
        self.assertFalse(np.any(z_value))
        self.assertEqual((2, 3), z_value.shape)
        z = array_ops.zeros(array_ops.shape(d), dtype=dtype)
        self.assertEqual(z.dtype, dtype)
        self.assertEqual([2, 3], z.get_shape())
        z_value = z.eval()
        self.assertFalse(np.any(z_value))
        self.assertEqual((2, 3), z_value.shape)
class ZerosLikeTest(test.TestCase):
  """Tests for array_ops.zeros_like."""

  def _compareZeros(self, dtype, fully_defined_shape, use_gpu):
    """Checks zeros_like(d) has dtype `dtype`, shape (2, 3) and is all zero."""
    with self.test_session(use_gpu=use_gpu):
      # Creates a tensor of non-zero values with shape 2 x 3.
      # NOTE(kearnes): The default numpy dtype associated with tf.string is
      # object (and can't be changed without breaking a lot things), which
      # causes a TypeError in constant_op.constant below. Here we catch the
      # special case of tf.string and set the numpy dtype appropriately.
      if dtype == dtypes_lib.string:
        # np.bytes_ is the modern name for the np.string_ alias, which was
        # removed in NumPy 2.0.
        numpy_dtype = np.bytes_
      else:
        numpy_dtype = dtype.as_numpy_dtype
      if fully_defined_shape:
        d = constant_op.constant(
            np.ones((2, 3), dtype=numpy_dtype), dtype=dtype)
      else:
        d = array_ops.placeholder(dtype=dtype)
      # Constructs a tensor of zeros of the same dimensions and type as "d".
      z_var = array_ops.zeros_like(d)
      # Test that the type is correct
      self.assertEqual(z_var.dtype, dtype)
      # Test that the shape is correct
      if fully_defined_shape:
        self.assertEqual([2, 3], z_var.get_shape())

      # Test that the value is correct
      feed_dict = {}
      if not fully_defined_shape:
        feed_dict[d] = np.ones((2, 3), dtype=numpy_dtype)
      z_value = z_var.eval(feed_dict=feed_dict)
      self.assertFalse(np.any(z_value))
      self.assertEqual((2, 3), z_value.shape)

  def testZerosLikeCPU(self):
    for dtype in [
        dtypes_lib.half, dtypes_lib.float32, dtypes_lib.float64,
        dtypes_lib.int8, dtypes_lib.uint8, dtypes_lib.int16, dtypes_lib.uint16,
        dtypes_lib.int32, dtypes_lib.int64, dtypes_lib.bool,
        dtypes_lib.complex64, dtypes_lib.complex128, dtypes_lib.string
    ]:
      self._compareZeros(dtype, fully_defined_shape=False, use_gpu=False)
      self._compareZeros(dtype, fully_defined_shape=True, use_gpu=False)

  def testZerosLikeGPU(self):
    for dtype in [
        dtypes_lib.half, dtypes_lib.float32, dtypes_lib.float64,
        dtypes_lib.int32, dtypes_lib.int64, dtypes_lib.complex64,
        dtypes_lib.complex128, dtypes_lib.bool
    ]:
      self._compareZeros(dtype, fully_defined_shape=False, use_gpu=True)
      self._compareZeros(dtype, fully_defined_shape=True, use_gpu=True)

  def testZerosLikePartialShape(self):
    # Unknown dimensions must be propagated, not collapsed.
    d = array_ops.placeholder(dtypes_lib.float32, shape=[None, 4, None])
    z = array_ops.zeros_like(d)
    self.assertEqual(d.get_shape().as_list(), z.get_shape().as_list())

  def testZerosLikeDtype(self):
    # Make sure zeros_like works even for dtypes that cannot be cast between
    with self.test_session():
      shape = (3, 5)
      dtypes = np.float32, np.complex64
      for in_type in dtypes:
        x = np.arange(15).astype(in_type).reshape(*shape)
        for out_type in dtypes:
          y = array_ops.zeros_like(x, dtype=out_type).eval()
          self.assertEqual(y.dtype, out_type)
          self.assertEqual(y.shape, shape)
          self.assertAllEqual(y, np.zeros(shape, dtype=out_type))

  def testZerosLikeVariant(self):
    # TODO(ebrevdo): Re-enable use_gpu=True once non-DMA Variant
    # copying between CPU and GPU is supported AND we register a
    # ZerosLike callback for GPU for Variant storing primitive types
    # in variant_op_registry.cc.
    with self.test_session(use_gpu=False):
      variant_tensor = tensor_pb2.TensorProto(
          dtype=dtypes_lib.variant.as_datatype_enum,
          tensor_shape=tensor_shape.TensorShape([]).as_proto(),
          variant_val=[
              tensor_pb2.VariantTensorDataProto(
                  # Match registration in variant_op_registry.cc
                  type_name=b"int",
                  metadata=np.array(1, dtype=np.int32).tobytes())
          ])
      const_variant = constant_op.constant(variant_tensor)
      zeros_like = array_ops.zeros_like(const_variant)
      zeros_like_op = logging_ops.Print(
          zeros_like, [const_variant, zeros_like],
          message="Variant storing an int, input and output of zeros_like:").op

      # Smoke test -- ensure this executes without trouble.
      # Right now, non-numpy-compatible objects cannot be returned from a
      # session.run call; similarly, objects that can't be converted to
      # native numpy types cannot be passed to ops.convert_to_tensor.
      # TODO(ebrevdo): Add registration mechanism for
      # ops.convert_to_tensor and for session.run output.
      zeros_like_op.run()
class OnesTest(test.TestCase):
  """Tests for array_ops.ones with static shapes, dynamic shapes and dtypes."""

  def _Ones(self, shape):
    """Evaluates ones(shape) after asserting its static shape."""
    with self.test_session():
      ret = array_ops.ones(shape)
      self.assertEqual(shape, ret.get_shape())
      return ret.eval()

  def testConst(self):
    self.assertTrue(np.array_equal(self._Ones([2, 3]), np.array([[1] * 3] * 2)))

  def testScalar(self):
    # Both [] and () describe a scalar shape.
    self.assertEqual(1, self._Ones([]))
    self.assertEqual(1, self._Ones(()))
    with self.test_session():
      # An empty int32 shape tensor also produces a scalar.
      scalar = array_ops.ones(constant_op.constant([], dtype=dtypes_lib.int32))
      self.assertEqual(1, scalar.eval())

  def testDynamicSizes(self):
    np_ans = np.array([[1] * 3] * 2)
    with self.test_session():
      # Creates a tensor of 2 x 3.
      d = array_ops.fill([2, 3], 12., name="fill")
      # Constructs a tensor of ones of the same dimensions as "d".
      z = array_ops.ones(array_ops.shape(d))
      out = z.eval()
    self.assertAllEqual(np_ans, out)
    self.assertShapeEqual(np_ans, d)
    self.assertShapeEqual(np_ans, z)

  def testAutoPack(self):
    # A list of scalar tensors is auto-packed into a shape vector.
    with self.test_session():
      h = array_ops.placeholder(dtypes_lib.int32, shape=[])
      w = array_ops.placeholder(dtypes_lib.int32, shape=[])
      z = array_ops.ones([h, w])
      out = z.eval(feed_dict={h: 4, w: 16})
    self.assertAllEqual(out, np.array([[1] * 16] * 4))

  def testDtype(self):
    with self.test_session():
      d = array_ops.fill([2, 3], 12., name="fill")
      self.assertEqual(d.get_shape(), [2, 3])
      # Test default type for both constant size and dynamic size
      z = array_ops.ones([2, 3])
      self.assertEqual(z.dtype, dtypes_lib.float32)
      self.assertEqual([2, 3], z.get_shape())
      self.assertAllEqual(z.eval(), np.ones([2, 3]))
      z = array_ops.ones(array_ops.shape(d))
      self.assertEqual(z.dtype, dtypes_lib.float32)
      self.assertEqual([2, 3], z.get_shape())
      self.assertAllEqual(z.eval(), np.ones([2, 3]))
      # Test explicit type control
      for dtype in (dtypes_lib.float32, dtypes_lib.float64, dtypes_lib.int32,
                    dtypes_lib.uint8, dtypes_lib.int16, dtypes_lib.int8,
                    dtypes_lib.complex64, dtypes_lib.complex128,
                    dtypes_lib.int64, dtypes_lib.bool):
        z = array_ops.ones([2, 3], dtype=dtype)
        self.assertEqual(z.dtype, dtype)
        self.assertEqual([2, 3], z.get_shape())
        self.assertAllEqual(z.eval(), np.ones([2, 3]))
        z = array_ops.ones(array_ops.shape(d), dtype=dtype)
        self.assertEqual(z.dtype, dtype)
        self.assertEqual([2, 3], z.get_shape())
        self.assertAllEqual(z.eval(), np.ones([2, 3]))
class OnesLikeTest(test.TestCase):
  """Tests for array_ops.ones_like."""

  def testOnesLike(self):
    for dtype in [
        dtypes_lib.float32, dtypes_lib.float64, dtypes_lib.int8,
        dtypes_lib.uint8, dtypes_lib.int16, dtypes_lib.uint16, dtypes_lib.int32,
        dtypes_lib.int64, dtypes_lib.bool, dtypes_lib.complex64,
        dtypes_lib.complex128
    ]:
      np_dtype = dtype.as_numpy_dtype
      with self.test_session():
        # A source tensor of non-zero values with shape 2 x 3.
        source = constant_op.constant(
            np.ones(
                (2, 3), dtype=np_dtype), dtype=dtype)
        ones = array_ops.ones_like(source)
        # The dtype must carry over from the source tensor.
        self.assertEqual(ones.dtype, dtype)
        result = ones.eval()
        # Every element must be one, and the shape must match the source.
        self.assertTrue(np.array_equal(result, np.array([[1] * 3] * 2)))
        self.assertEqual([2, 3], ones.get_shape())

  def testOnesLikePartialShape(self):
    # Unknown dimensions must be propagated, not collapsed.
    source = array_ops.placeholder(dtypes_lib.float32, shape=[None, 4, None])
    ones = array_ops.ones_like(source)
    self.assertEqual(source.get_shape().as_list(), ones.get_shape().as_list())
class FillTest(test.TestCase):
  """Tests for array_ops.fill."""

  def _compare(self, dims, val, np_ans, use_gpu):
    """Checks fill(dims, val) evaluates to `np_ans` on the chosen device."""
    with self.test_session(use_gpu=use_gpu):
      tf_ans = array_ops.fill(dims, val, name="fill")
      out = tf_ans.eval()
    self.assertAllClose(np_ans, out)
    # Fill does not set the shape.
    # self.assertShapeEqual(np_ans, tf_ans)

  def _compareAll(self, dims, val, np_ans):
    self._compare(dims, val, np_ans, False)
    self._compare(dims, val, np_ans, True)

  def testFillFloat(self):
    np_ans = np.array([[3.1415] * 3] * 2).astype(np.float32)
    self._compareAll([2, 3], np_ans[0][0], np_ans)

  def testFillDouble(self):
    np_ans = np.array([[3.1415] * 3] * 2).astype(np.float64)
    self._compareAll([2, 3], np_ans[0][0], np_ans)

  def testFillInt32(self):
    np_ans = np.array([[42] * 3] * 2).astype(np.int32)
    self._compareAll([2, 3], np_ans[0][0], np_ans)

  def testFillInt64(self):
    np_ans = np.array([[-42] * 3] * 2).astype(np.int64)
    self._compareAll([2, 3], np_ans[0][0], np_ans)

  def testFillComplex64(self):
    np_ans = np.array([[0.15 + 0.3j] * 3] * 2).astype(np.complex64)
    self._compareAll([2, 3], np_ans[0][0], np_ans)

  def testFillComplex128(self):
    np_ans = np.array([[0.15 + 0.3j] * 3] * 2).astype(np.complex128)
    self._compareAll([2, 3], np_ans[0][0], np_ans)

  def testFillString(self):
    np_ans = np.array([[b"yolo"] * 3] * 2)
    with self.test_session(use_gpu=False):
      tf_ans = array_ops.fill([2, 3], np_ans[0][0], name="fill").eval()
    self.assertAllEqual(np_ans, tf_ans)

  def testFillNegative(self):
    with self.test_session():
      # NOTE: (-2,) and (-3,) were previously written as (-2) and (-3),
      # which are plain ints, not the single-element tuples intended here.
      for shape in (-1,), (2, -1), (-1, 2), (-2,), (-3,):
        with self.assertRaises(ValueError):
          array_ops.fill(shape, 7)

      # Using a placeholder so this won't be caught in static analysis.
      dims = array_ops.placeholder(dtypes_lib.int32)
      fill_t = array_ops.fill(dims, 3.0)
      for shape in (-1,), (2, -1), (-1, 2), (-2,), (-3,):
        with self.assertRaises(errors_impl.InvalidArgumentError):
          fill_t.eval({dims: shape})

  def testShapeFunctionEdgeCases(self):
    # Non-vector dimensions.
    with self.assertRaises(ValueError):
      array_ops.fill([[0, 1], [2, 3]], 1.0)

    # Non-scalar value.
    with self.assertRaises(ValueError):
      array_ops.fill([3, 2], [1.0, 2.0])

    # Partial dimension information.
    f = array_ops.fill(array_ops.placeholder(dtypes_lib.int32, shape=(4,)), 3.0)
    self.assertEqual([None, None, None, None], f.get_shape().as_list())

    f = array_ops.fill(
        [array_ops.placeholder(
            dtypes_lib.int32, shape=()), 17], 1.0)
    self.assertEqual([None, 17], f.get_shape().as_list())

  def testGradient(self):
    with self.test_session():
      in_v = constant_op.constant(5.0)
      out_shape = [3, 2]
      out_filled = array_ops.fill(out_shape, in_v)
      err = gradient_checker.compute_gradient_error(in_v, [], out_filled,
                                                    out_shape)
    self.assertLess(err, 1e-3)
class PlaceholderTest(test.TestCase):
  """Tests for array_ops.placeholder feeding, shape checking and repr."""

  def testDtype(self):
    with self.test_session():
      p = array_ops.placeholder(dtypes_lib.float32, shape=(10, 10), name="p")
      p_identity = array_ops.identity(p)
      feed_array = np.random.rand(10, 10)
      self.assertAllClose(
          p_identity.eval(feed_dict={p: feed_array}), feed_array)

      # Evaluating without feeding the placeholder must fail at run time.
      with self.assertRaisesOpError(
          "must feed a value for placeholder tensor 'p' with dtype float"):
        p_identity.eval()

  def testShape(self):
    with self.test_session():
      p = array_ops.placeholder(dtypes_lib.float32, shape=(10, 10), name="p")
      p_identity = array_ops.identity(p)
      feed_array = np.random.rand(10, 10)
      self.assertAllClose(
          p_identity.eval(feed_dict={p: feed_array}), feed_array)

      with self.assertRaisesOpError(
          "must feed a value for placeholder tensor 'p' with dtype float and "
          r"shape \[10,10\]"):
        p_identity.eval()

      # Feeding a value of the wrong shape is rejected client-side.
      with self.assertRaisesWithPredicateMatch(
          ValueError, lambda e: "Cannot feed value of shape" in str(e)):
        p_identity.eval(feed_dict={p: feed_array[:5, :5]})

  def testUnknownShape(self):
    with self.test_session():
      p = array_ops.placeholder(dtypes_lib.float32, shape=None, name="p")
      p_identity = array_ops.identity(p)

      # can feed anything
      feed_array = np.random.rand(10, 3)
      self.assertAllClose(
          p_identity.eval(feed_dict={p: feed_array}), feed_array)

      feed_array = np.random.rand(4, 2, 5)
      self.assertAllClose(
          p_identity.eval(feed_dict={p: feed_array}), feed_array)

  def testScalarShape(self):
    with self.test_session():
      p = array_ops.placeholder(dtypes_lib.float32, shape=[], name="p")
      p_identity = array_ops.identity(p)
      self.assertAllClose(p_identity.eval(feed_dict={p: 5}), 5)

  def testPartialShape(self):
    with self.test_session():
      p = array_ops.placeholder(dtypes_lib.float32, shape=[None, 3], name="p")
      p_identity = array_ops.identity(p)
      feed_array = np.random.rand(10, 3)
      self.assertAllClose(
          p_identity.eval(feed_dict={p: feed_array}), feed_array)

      # Known dimensions must still match.
      with self.assertRaisesWithPredicateMatch(
          ValueError, lambda e: "Cannot feed value of shape" in str(e)):
        p_identity.eval(feed_dict={p: feed_array[:5, :2]})

  def testPartialShapeWhenNotFed(self):
    with self.test_session():
      p = array_ops.placeholder(dtypes_lib.float32, shape=[None, 3], name="p")
      p_identity = array_ops.identity(p)

      # Should trigger an operator error, not a shape error.
      with self.assertRaisesOpError(
          "must feed a value for placeholder tensor 'p' with dtype float"):
        p_identity.eval()

  def testControlDependency(self):
    with self.test_session():
      p = array_ops.placeholder(dtypes_lib.int32, shape=[], name="p")
      with ops.control_dependencies([p]):
        c = constant_op.constant(5, dtypes_lib.int32)
      d = math_ops.multiply(p, c)
      # NOTE: uses the builtin `int`; `np.int` was a deprecated alias for it
      # and has been removed from NumPy.
      val = np.array(2).astype(int)
      self.assertEqual(10, d.eval(feed_dict={p: val}))

  def testBadShape(self):
    with self.assertRaises(ValueError):
      array_ops.placeholder(dtypes_lib.float32, shape=(-1, 10))

  def testTensorStr(self):
    a = array_ops.placeholder(dtypes_lib.float32, shape=None, name="a")
    self.assertEqual("<tf.Tensor 'a:0' shape=<unknown> dtype=float32>", repr(a))

    b = array_ops.placeholder(dtypes_lib.int32, shape=(32, 40), name="b")
    self.assertEqual("<tf.Tensor 'b:0' shape=(32, 40) dtype=int32>", repr(b))

    c = array_ops.placeholder(dtypes_lib.qint32, shape=(32, None, 2), name="c")
    self.assertEqual("<tf.Tensor 'c:0' shape=(32, ?, 2) dtype=qint32>", repr(c))

  def testOldGraph(self):
    # Load graph generated from earlier version of TF where
    # placeholder shape was not set.
    #
    # a = tf.placeholder(tf.float32)
    # b = a + 1.0
    #
    # Older graph's default shape is 'shape {}', not 'shape {
    # unknown_rank: true }'
    graph = """
node {
  name: "Placeholder"
  op: "Placeholder"
  attr {
    key: "dtype"
    value {
      type: DT_FLOAT
    }
  }
  attr {
    key: "shape"
    value {
      shape {
      }
    }
  }
}
node {
  name: "add/y"
  op: "Const"
  attr {
    key: "dtype"
    value {
      type: DT_FLOAT
    }
  }
  attr {
    key: "value"
    value {
      tensor {
        dtype: DT_FLOAT
        tensor_shape {
        }
        float_val: 1.0
      }
    }
  }
}
node {
  name: "add"
  op: "Add"
  input: "Placeholder"
  input: "add/y"
  attr {
    key: "T"
    value {
      type: DT_FLOAT
    }
  }
}
versions {
  producer: 21
}
    """
    gdef = graph_pb2.GraphDef()
    text_format.Merge(graph, gdef)
    with self.test_session():
      p, ret = importer.import_graph_def(
          gdef, return_elements=["Placeholder:0", "add:0"])

      # Feed in a vector of two elements. Since the producer version
      # of 21, a shape of {} is interpreted as "any shape". If
      # producer version were 22, then we'd get a shape mismatch
      # error.
      self.assertAllEqual([2.0, 3.0], ret.eval(feed_dict={p: [1.0, 2.0]}))
class PlaceholderWithDefaultTest(test.TestCase):
  """Tests placeholder_with_default: default is used when no feed is given."""

  def testFullShape(self):
    """Fully-specified shape: feeds must match [2, 2] exactly."""
    with self.test_session(force_gpu=test_util.is_gpu_available()):
      p = array_ops.placeholder_with_default([[2, 2], [2, 2]], shape=[2, 2])
      a = array_ops.identity(p)
      # Without a feed, the default value flows through.
      self.assertAllEqual([[2, 2], [2, 2]], a.eval())
      self.assertAllEqual(
          [[3, 3], [3, 3]], a.eval(feed_dict={p: [[3, 3], [3, 3]]}))
      # A feed of the wrong shape is rejected.
      with self.assertRaises(ValueError):
        a.eval(feed_dict={p: [[6, 6, 6], [6, 6, 6]]})

  def testPartialShape(self):
    """Partial shape [None]: any vector feeds, but a matrix is rejected."""
    with self.test_session(force_gpu=test_util.is_gpu_available()):
      p = array_ops.placeholder_with_default([1, 2, 3], shape=[None])
      a = array_ops.identity(p)
      self.assertAllEqual([1, 2, 3], a.eval())
      self.assertAllEqual([3, 37], a.eval(feed_dict={p: [3, 37]}))
      # Rank mismatch (matrix fed into a rank-1 placeholder) must fail.
      with self.assertRaises(ValueError):
        a.eval(feed_dict={p: [[2, 2], [2, 2]]})

  def testNoShape(self):
    """Unspecified shape: feeds of any rank/shape are accepted."""
    with self.test_session(force_gpu=test_util.is_gpu_available()):
      p = array_ops.placeholder_with_default([17], shape=None)
      a = array_ops.identity(p)
      self.assertAllEqual([17], a.eval())
      self.assertAllEqual([3, 37], a.eval(feed_dict={p: [3, 37]}))
      self.assertAllEqual(
          [[3, 3], [3, 3]], a.eval(feed_dict={p: [[3, 3], [3, 3]]}))

  def testGradient(self):
    """Gradients flow through placeholder_with_default to its input."""
    with self.test_session(force_gpu=test_util.is_gpu_available()):
      x = array_ops.placeholder(dtypes_lib.float32, [5, 7])
      y = array_ops.placeholder_with_default(x, None)
      err = gradient_checker.compute_gradient_error(x, [5, 7], y, [5, 7])
      self.assertLess(err, 1e-3)
if __name__ == "__main__":
test.main()
| apache-2.0 |
citrix-openstack-build/python-keystoneclient | keystoneclient/v2_0/roles.py | 4 | 3130 | # Copyright 2011 OpenStack LLC.
# Copyright 2011 Nebula, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystoneclient import base
class Role(base.Resource):
    """Represents a Keystone role resource."""

    def __repr__(self):
        # Expose the raw resource info for easier debugging/logging.
        return "<Role {0}>".format(self._info)

    def delete(self):
        # Deletion is delegated to the manager that owns this resource.
        return self.manager.delete(self)
class RoleManager(base.ManagerWithFind):
    """Manager class for manipulating Keystone roles."""
    resource_class = Role

    def get(self, role):
        """Fetch a single role by id (or Role object)."""
        return self._get("/OS-KSADM/roles/%s" % base.getid(role), "role")

    def create(self, name):
        """Create a role."""
        params = {"role": {"name": name}}
        return self._create('/OS-KSADM/roles', params, "role")

    def delete(self, role):
        """Delete a role."""
        return self._delete("/OS-KSADM/roles/%s" % base.getid(role))

    def list(self):
        """List all available roles."""
        return self._list("/OS-KSADM/roles", "roles")

    def roles_for_user(self, user, tenant=None):
        """List the roles a user has, optionally scoped to one tenant."""
        user_id = base.getid(user)
        if tenant:
            tenant_id = base.getid(tenant)
            route = "/tenants/%s/users/%s/roles"
            return self._list(route % (tenant_id, user_id), "roles")
        else:
            # No tenant given: list the user's global roles.
            return self._list("/users/%s/roles" % user_id, "roles")

    def add_user_role(self, user, role, tenant=None):
        """Adds a role to a user.
        If tenant is specified, the role is added just for that tenant,
        otherwise the role is added globally.
        """
        user_id = base.getid(user)
        role_id = base.getid(role)
        if tenant:
            route = "/tenants/%s/users/%s/roles/OS-KSADM/%s"
            params = (base.getid(tenant), user_id, role_id)
            return self._update(route % params, None, "role")
        else:
            # NOTE(review): the global branch parses response key "roles"
            # while the tenant branch parses "role" -- presumably matching
            # the OS-KSADM API response bodies; confirm against the spec.
            route = "/users/%s/roles/OS-KSADM/%s"
            return self._update(route % (user_id, role_id), None, "roles")

    def remove_user_role(self, user, role, tenant=None):
        """Removes a role from a user.
        If tenant is specified, the role is removed just for that tenant,
        otherwise the role is removed from the user's global roles.
        """
        user_id = base.getid(user)
        role_id = base.getid(role)
        if tenant:
            route = "/tenants/%s/users/%s/roles/OS-KSADM/%s"
            params = (base.getid(tenant), user_id, role_id)
            return self._delete(route % params)
        else:
            route = "/users/%s/roles/OS-KSADM/%s"
            return self._delete(route % (user_id, role_id))
| apache-2.0 |
Gus42/Conference-Organization-App | conference.py | 1 | 35146 | #!/usr/bin/env python
"""
conference.py -- Udacity conference server-side Python App Engine API;
uses Google Cloud Endpoints
$Id: conference.py,v 1.25 2014/05/24 23:42:19 wesc Exp wesc $
created by wesc on 2014 apr 21
"""
__author__ = 'wesc+api@google.com (Wesley Chun)'
from datetime import datetime
import endpoints
from protorpc import messages
from protorpc import message_types
from protorpc import remote
from google.appengine.api import memcache
from google.appengine.api import taskqueue
from google.appengine.ext import ndb
from models import ConflictException
from models import Profile
from models import ProfileMiniForm
from models import ProfileForm
from models import StringMessage
from models import BooleanMessage
from models import Conference
from models import ConferenceForm
from models import ConferenceForms
from models import ConferenceQueryForm
from models import ConferenceQueryForms
from models import TeeShirtSize
from models import Session
from models import SessionForm
from models import SessionForms
from settings import WEB_CLIENT_ID
from settings import ANDROID_CLIENT_ID
from settings import IOS_CLIENT_ID
from settings import ANDROID_AUDIENCE
from utils import getUserId
EMAIL_SCOPE = endpoints.EMAIL_SCOPE
API_EXPLORER_CLIENT_ID = endpoints.API_EXPLORER_CLIENT_ID
MEMCACHE_ANNOUNCEMENTS_KEY = "RECENT_ANNOUNCEMENTS"
ANNOUNCEMENT_TPL = ('Last chance to attend! The following conferences '
'are nearly sold out: %s')
MEMCACHE_FEATURED_SPEAKER_KEY = "FEATURED_SPEAKER"
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
DEFAULTS = {
"city": "Default City",
"maxAttendees": 0,
"seatsAvailable": 0,
"topics": [ "Default", "Topic" ],
}
OPERATORS = {
'EQ': '=',
'GT': '>',
'GTEQ': '>=',
'LT': '<',
'LTEQ': '<=',
'NE': '!='
}
FIELDS = {
'CITY': 'city',
'TOPIC': 'topics',
'MONTH': 'month',
'MAX_ATTENDEES': 'maxAttendees',
}
CONF_GET_REQUEST = endpoints.ResourceContainer(
message_types.VoidMessage,
websafeConferenceKey=messages.StringField(1),
)
CONF_POST_REQUEST = endpoints.ResourceContainer(
ConferenceForm,
websafeConferenceKey=messages.StringField(1),
)
DEFAULTS_SESSION = {
"highlights": ["Default", "Hightlight"],
"location": "Default Location",
"typeOfSession": ["Default"],
"date": "2016-01-01",
"startTime": "00:00",
"duration": "00:00"
}
SESSION_GET_REQUEST = endpoints.ResourceContainer(
message_types.VoidMessage,
websafeConferenceKey=messages.StringField(1),
)
SPEC_SESSION_GET_REQUEST = endpoints.ResourceContainer(
message_types.VoidMessage,
websafeSessionKey=messages.StringField(1),
)
SESSION_BY_TYPE_GET_REQUEST = endpoints.ResourceContainer(
typeOfSession=messages.StringField(1),
websafeConferenceKey=messages.StringField(2),
)
SESSION_BY_SPEAKER_GET_REQUEST = endpoints.ResourceContainer(
speaker=messages.StringField(1),
)
SESSION_OF_CONF_BY_SPEAKER_GET_REQUEST = endpoints.ResourceContainer(
websafeConferenceKey=messages.StringField(1),
speaker=messages.StringField(2),
)
SESSION_POST_REQUEST = endpoints.ResourceContainer(
SessionForm,
websafeConferenceKey=messages.StringField(1),
)
WISHLIST_POST_REQUEST = endpoints.ResourceContainer(
message_types.VoidMessage,
websafeSessionKey=messages.StringField(1)
)
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@endpoints.api(name='conference', version='v1', audiences=[ANDROID_AUDIENCE],
allowed_client_ids=[WEB_CLIENT_ID, API_EXPLORER_CLIENT_ID, ANDROID_CLIENT_ID, IOS_CLIENT_ID],
scopes=[EMAIL_SCOPE])
class ConferenceApi(remote.Service):
"""Conference API v0.1"""
# - - - Conference objects - - - - - - - - - - - - - - - - -
def _copyConferenceToForm(self, conf, displayName):
"""Copy relevant fields from Conference to ConferenceForm."""
cf = ConferenceForm()
for field in cf.all_fields():
if hasattr(conf, field.name):
# convert Date to date string; just copy others
if field.name.endswith('Date'):
setattr(cf, field.name, str(getattr(conf, field.name)))
else:
setattr(cf, field.name, getattr(conf, field.name))
elif field.name == "websafeKey":
setattr(cf, field.name, conf.key.urlsafe())
if displayName:
setattr(cf, 'organizerDisplayName', displayName)
cf.check_initialized()
return cf
    def _createConferenceObject(self, request):
        """Create a Conference entity from the request form.

        Fills in defaults, parses dates, allocates the entity key under the
        organizer's Profile, and enqueues a confirmation-email task.

        Raises:
            endpoints.UnauthorizedException: no authenticated user.
            endpoints.BadRequestException: missing conference name.
        """
        # preload necessary data items
        user = endpoints.get_current_user()
        if not user:
            raise endpoints.UnauthorizedException('Authorization required')
        user_id = getUserId(user)
        if not request.name:
            raise endpoints.BadRequestException("Conference 'name' field required")
        # copy ConferenceForm/ProtoRPC Message into dict
        data = {field.name: getattr(request, field.name) for field in request.all_fields()}
        # These are output-only fields; they are not stored on the entity.
        del data['websafeKey']
        del data['organizerDisplayName']
        # add default values for those missing (both data model & outbound Message)
        for df in DEFAULTS:
            if data[df] in (None, []):
                data[df] = DEFAULTS[df]
                setattr(request, df, DEFAULTS[df])
        # convert dates from strings to Date objects; set month based on start_date
        if data['startDate']:
            data['startDate'] = datetime.strptime(data['startDate'][:10], "%Y-%m-%d").date()
            data['month'] = data['startDate'].month
        else:
            # month == 0 marks "no start date set".
            data['month'] = 0
        if data['endDate']:
            data['endDate'] = datetime.strptime(data['endDate'][:10], "%Y-%m-%d").date()
        # set seatsAvailable to be same as maxAttendees on creation
        if data["maxAttendees"] > 0:
            data["seatsAvailable"] = data["maxAttendees"]
        # generate Profile Key based on user ID and Conference
        # ID based on Profile key get Conference key from ID
        p_key = ndb.Key(Profile, user_id)
        c_id = Conference.allocate_ids(size=1, parent=p_key)[0]
        c_key = ndb.Key(Conference, c_id, parent=p_key)
        data['key'] = c_key
        data['organizerUserId'] = request.organizerUserId = user_id
        # create Conference, send email to organizer confirming
        # creation of Conference & return (modified) ConferenceForm
        Conference(**data).put()
        taskqueue.add(params={'email': user.email(),
            'conferenceInfo': repr(request)},
            url='/tasks/send_confirmation_email'
        )
        return request
    @ndb.transactional()
    def _updateConferenceObject(self, request):
        """Apply the fields present in the request to an existing Conference.

        Runs inside a datastore transaction; only the organizer may update.

        Returns:
            ConferenceForm for the updated conference.
        Raises:
            endpoints.UnauthorizedException: no authenticated user.
            endpoints.NotFoundException: bad websafeConferenceKey.
            endpoints.ForbiddenException: caller is not the organizer.
        """
        user = endpoints.get_current_user()
        if not user:
            raise endpoints.UnauthorizedException('Authorization required')
        user_id = getUserId(user)
        # copy ConferenceForm/ProtoRPC Message into dict
        data = {field.name: getattr(request, field.name) for field in request.all_fields()}
        # update existing conference
        conf = ndb.Key(urlsafe=request.websafeConferenceKey).get()
        # check that conference exists
        if not conf:
            raise endpoints.NotFoundException(
                'No conference found with key: %s' % request.websafeConferenceKey)
        # check that user is owner
        if user_id != conf.organizerUserId:
            raise endpoints.ForbiddenException(
                'Only the owner can update the conference.')
        # Not getting all the fields, so don't create a new object; just
        # copy relevant fields from ConferenceForm to Conference object
        for field in request.all_fields():
            data = getattr(request, field.name)
            # only copy fields where we get data
            if data not in (None, []):
                # special handling for dates (convert string to Date)
                if field.name in ('startDate', 'endDate'):
                    data = datetime.strptime(data, "%Y-%m-%d").date()
                    if field.name == 'startDate':
                        # Keep the denormalized month in sync with startDate.
                        conf.month = data.month
                # write to Conference object
                setattr(conf, field.name, data)
        conf.put()
        prof = ndb.Key(Profile, user_id).get()
        return self._copyConferenceToForm(conf, getattr(prof, 'displayName'))
@endpoints.method(ConferenceForm, ConferenceForm, path='conference',
http_method='POST', name='createConference')
def createConference(self, request):
"""Create new conference."""
return self._createConferenceObject(request)
@endpoints.method(CONF_POST_REQUEST, ConferenceForm,
path='conference/{websafeConferenceKey}',
http_method='PUT', name='updateConference')
def updateConference(self, request):
"""Update conference w/provided fields & return w/updated info."""
return self._updateConferenceObject(request)
@endpoints.method(CONF_GET_REQUEST, ConferenceForm,
path='conference/{websafeConferenceKey}',
http_method='GET', name='getConference')
def getConference(self, request):
"""Return requested conference (by websafeConferenceKey)."""
# get Conference object from request; bail if not found
conf = ndb.Key(urlsafe=request.websafeConferenceKey).get()
if not conf:
raise endpoints.NotFoundException(
'No conference found with key: %s' % request.websafeConferenceKey)
prof = conf.key.parent().get()
# return ConferenceForm
return self._copyConferenceToForm(conf, getattr(prof, 'displayName'))
@endpoints.method(message_types.VoidMessage, ConferenceForms,
path='getConferencesCreated',
http_method='POST', name='getConferencesCreated')
def getConferencesCreated(self, request):
"""Return conferences created by user."""
# make sure user is authed
user = endpoints.get_current_user()
if not user:
raise endpoints.UnauthorizedException('Authorization required')
user_id = getUserId(user)
# create ancestor query for all key matches for this user
confs = Conference.query(ancestor=ndb.Key(Profile, user_id))
prof = ndb.Key(Profile, user_id).get()
# return set of ConferenceForm objects per Conference
return ConferenceForms(
items=[self._copyConferenceToForm(conf, getattr(prof, 'displayName')) for conf in confs]
)
    def _getQuery(self, request):
        """Return formatted query from the submitted filters.

        Builds a Conference query: orders by the inequality-filtered field
        first (a datastore requirement), then by name, then applies each
        normalized filter as a FilterNode.
        """
        q = Conference.query()
        inequality_filter, filters = self._formatFilters(request.filters)
        # If exists, sort on inequality filter first
        if not inequality_filter:
            q = q.order(Conference.name)
        else:
            # Datastore requires the first sort order to be on the same
            # property as the inequality filter.
            q = q.order(ndb.GenericProperty(inequality_filter))
            q = q.order(Conference.name)
        for filtr in filters:
            # Numeric fields arrive as strings from the form; cast first.
            if filtr["field"] in ["month", "maxAttendees"]:
                filtr["value"] = int(filtr["value"])
            formatted_query = ndb.query.FilterNode(filtr["field"], filtr["operator"], filtr["value"])
            q = q.filter(formatted_query)
        return q
    def _formatFilters(self, filters):
        """Parse, check validity and format user supplied filters.

        Returns:
            (inequality_field, formatted_filters): name of the single field
            (if any) using an inequality operator, and the normalized
            filter dicts.
        Raises:
            endpoints.BadRequestException: unknown field/operator, or
                inequality filters on more than one field.
        """
        formatted_filters = []
        inequality_field = None
        for f in filters:
            filtr = {field.name: getattr(f, field.name) for field in f.all_fields()}
            try:
                # Map the form's symbolic names onto datastore fields/operators.
                filtr["field"] = FIELDS[filtr["field"]]
                filtr["operator"] = OPERATORS[filtr["operator"]]
            except KeyError:
                raise endpoints.BadRequestException("Filter contains invalid field or operator.")
            # Every operation except "=" is an inequality
            if filtr["operator"] != "=":
                # check if inequality operation has been used in previous filters
                # disallow the filter if inequality was performed on a different field before
                # track the field on which the inequality operation is performed
                if inequality_field and inequality_field != filtr["field"]:
                    raise endpoints.BadRequestException("Inequality filter is allowed on only one field.")
                else:
                    inequality_field = filtr["field"]
            formatted_filters.append(filtr)
        return (inequality_field, formatted_filters)
@endpoints.method(ConferenceQueryForms, ConferenceForms,
path='queryConferences',
http_method='POST',
name='queryConferences')
def queryConferences(self, request):
"""Query for conferences."""
conferences = self._getQuery(request)
# need to fetch organiser displayName from profiles
# get all keys and use get_multi for speed
organisers = [(ndb.Key(Profile, conf.organizerUserId)) for conf in conferences]
profiles = ndb.get_multi(organisers)
# put display names in a dict for easier fetching
names = {}
for profile in profiles:
names[profile.key.id()] = profile.displayName
# return individual ConferenceForm object per Conference
return ConferenceForms(
items=[self._copyConferenceToForm(conf, names[conf.organizerUserId]) for conf in \
conferences]
)
# - - - Profile objects - - - - - - - - - - - - - - - - - - -
def _copyProfileToForm(self, prof):
"""Copy relevant fields from Profile to ProfileForm."""
# copy relevant fields from Profile to ProfileForm
pf = ProfileForm()
for field in pf.all_fields():
if hasattr(prof, field.name):
# convert t-shirt string to Enum; just copy others
if field.name == 'teeShirtSize':
setattr(pf, field.name, getattr(TeeShirtSize, getattr(prof, field.name)))
else:
setattr(pf, field.name, getattr(prof, field.name))
pf.check_initialized()
return pf
    def _getProfileFromUser(self):
        """Return user Profile from datastore, creating new one if non-existent.

        Raises:
            endpoints.UnauthorizedException: no authenticated user.
        """
        # make sure user is authed
        user = endpoints.get_current_user()
        if not user:
            raise endpoints.UnauthorizedException('Authorization required')
        # get Profile from datastore; the user id is the Profile key name
        user_id = getUserId(user)
        p_key = ndb.Key(Profile, user_id)
        profile = p_key.get()
        # create new Profile if not there
        if not profile:
            profile = Profile(
                key = p_key,
                displayName = user.nickname(),
                mainEmail= user.email(),
                teeShirtSize = str(TeeShirtSize.NOT_SPECIFIED),
            )
            profile.put()
        return profile # return Profile
def _doProfile(self, save_request=None):
"""Get user Profile and return to user, possibly updating it first."""
# get user Profile
prof = self._getProfileFromUser()
# if saveProfile(), process user-modifyable fields
if save_request:
for field in ('displayName', 'teeShirtSize'):
if hasattr(save_request, field):
val = getattr(save_request, field)
if val:
setattr(prof, field, str(val))
#if field == 'teeShirtSize':
# setattr(prof, field, str(val).upper())
#else:
# setattr(prof, field, val)
prof.put()
# return ProfileForm
return self._copyProfileToForm(prof)
@endpoints.method(message_types.VoidMessage, ProfileForm,
path='profile', http_method='GET', name='getProfile')
def getProfile(self, request):
"""Return user profile."""
return self._doProfile()
@endpoints.method(ProfileMiniForm, ProfileForm,
path='profile', http_method='POST', name='saveProfile')
def saveProfile(self, request):
"""Update & return user profile."""
return self._doProfile(request)
# - - - Announcements - - - - - - - - - - - - - - - - - - - -
@staticmethod
def _cacheAnnouncement():
"""Create Announcement & assign to memcache; used by
memcache cron job & putAnnouncement().
"""
confs = Conference.query(ndb.AND(
Conference.seatsAvailable <= 5,
Conference.seatsAvailable > 0)
).fetch(projection=[Conference.name])
if confs:
# If there are almost sold out conferences,
# format announcement and set it in memcache
announcement = ANNOUNCEMENT_TPL % (
', '.join(conf.name for conf in confs))
memcache.set(MEMCACHE_ANNOUNCEMENTS_KEY, announcement)
else:
# If there are no sold out conferences,
# delete the memcache announcements entry
announcement = ""
memcache.delete(MEMCACHE_ANNOUNCEMENTS_KEY)
return announcement
@endpoints.method(message_types.VoidMessage, StringMessage,
path='conference/announcement/get',
http_method='GET', name='getAnnouncement')
def getAnnouncement(self, request):
"""Return Announcement from memcache."""
return StringMessage(data=memcache.get(MEMCACHE_ANNOUNCEMENTS_KEY) or "")
# - - - Registration - - - - - - - - - - - - - - - - - - - -
    @ndb.transactional(xg=True)
    def _conferenceRegistration(self, request, reg=True):
        """Register or unregister user for selected conference.

        Cross-group transaction (xg=True): the Profile and Conference
        entities live in different entity groups and must update atomically.

        Args:
            request: carries websafeConferenceKey.
            reg: True to register, False to unregister.
        Returns:
            BooleanMessage(True) on success; False when unregistering from a
            conference the user was not registered for.
        Raises:
            endpoints.NotFoundException: bad websafeConferenceKey.
            ConflictException: already registered, or no seats left.
        """
        retval = None
        prof = self._getProfileFromUser() # get user Profile
        # check if conf exists given websafeConfKey
        # get conference; check that it exists
        wsck = request.websafeConferenceKey
        conf = ndb.Key(urlsafe=wsck).get()
        if not conf:
            raise endpoints.NotFoundException(
                'No conference found with key: %s' % wsck)
        # register
        if reg:
            # check if user already registered otherwise add
            if wsck in prof.conferenceKeysToAttend:
                raise ConflictException(
                    "You have already registered for this conference")
            # check if seats avail
            if conf.seatsAvailable <= 0:
                raise ConflictException(
                    "There are no seats available.")
            # register user, take away one seat
            prof.conferenceKeysToAttend.append(wsck)
            conf.seatsAvailable -= 1
            retval = True
        # unregister
        else:
            # check if user already registered
            if wsck in prof.conferenceKeysToAttend:
                # unregister user, add back one seat
                prof.conferenceKeysToAttend.remove(wsck)
                conf.seatsAvailable += 1
                retval = True
            else:
                retval = False
        # write things back to the datastore & return
        prof.put()
        conf.put()
        return BooleanMessage(data=retval)
@endpoints.method(message_types.VoidMessage, ConferenceForms,
path='conferences/attending',
http_method='GET', name='getConferencesToAttend')
def getConferencesToAttend(self, request):
"""Get list of conferences that user has registered for."""
prof = self._getProfileFromUser() # get user Profile
conf_keys = [ndb.Key(urlsafe=wsck) for wsck in prof.conferenceKeysToAttend]
conferences = ndb.get_multi(conf_keys)
# get organizers
organisers = [ndb.Key(Profile, conf.organizerUserId) for conf in conferences]
profiles = ndb.get_multi(organisers)
# put display names in a dict for easier fetching
names = {}
for profile in profiles:
names[profile.key.id()] = profile.displayName
# return set of ConferenceForm objects per Conference
return ConferenceForms(items=[self._copyConferenceToForm(conf, names[conf.organizerUserId])\
for conf in conferences]
)
@endpoints.method(CONF_GET_REQUEST, BooleanMessage,
path='conference/{websafeConferenceKey}',
http_method='POST', name='registerForConference')
def registerForConference(self, request):
"""Register user for selected conference."""
return self._conferenceRegistration(request)
@endpoints.method(CONF_GET_REQUEST, BooleanMessage,
path='conference/{websafeConferenceKey}',
http_method='DELETE', name='unregisterFromConference')
def unregisterFromConference(self, request):
"""Unregister user for selected conference."""
return self._conferenceRegistration(request, reg=False)
# - - - Play with queries - - - - - - - - - - - - - - - - - - - -
@endpoints.method(message_types.VoidMessage, ConferenceForms,
path='filterPlayground',
http_method='GET', name='filterPlayground')
def filterPlayground(self, request):
"""Filter Playground"""
q = Conference.query()
# field = "city"
# operator = "="
# value = "London"
# f = ndb.query.FilterNode(field, operator, value)
# q = q.filter(f)
q = q.filter(Conference.city=="London")
q = q.filter(Conference.topics=="Medical Innovations")
q = q.filter(Conference.month==6)
return ConferenceForms(
items=[self._copyConferenceToForm(conf, "") for conf in q]
)
# - - - Sessions - - - - - - - - - - - - - - - - - - - -
def _copySessionToForm(self, sess):
# copy relevant fields from Session to SessionForm
sf = SessionForm()
for field in sf.all_fields():
if hasattr(sess, field.name):
# Convert date, duration and startTime to string
if field.name in ('date', 'startTime', 'duration'):
setattr(sf, field.name, str(getattr(sess, field.name)))
# Just copy the other fields that are already strings
else:
setattr(sf, field.name, getattr(sess, field.name))
elif field.name == "websafeKey":
setattr(sf, field.name, sess.key.urlsafe())
elif field.name == "websafeConfKey":
setattr(sf, field.name, sess.key.parent().urlsafe())
sf.check_initialized()
return sf
def _getConferenceSessions(self, request):
"""
args: websafeConferenceKey of a Conference
returns: All sessions of that Conference
"""
conf = ndb.Key(urlsafe=request.websafeConferenceKey).get()
# check that conference exists
if not conf:
raise endpoints.NotFoundException(
'No conference found with key: %s' % request.websafeConferenceKey)
# get the conference key
c_key = conf.key
# create ancestor query for this user
sessions = Session.query(ancestor=c_key)
return sessions
    def _createSessionObject(self, request):
        """Create a Session under the given conference (organizer only).

        Args:
            request: SessionForm fields plus websafeConferenceKey.
        Returns:
            SessionForm for the stored session, including its websafeKey.
        Raises:
            endpoints.UnauthorizedException: no authenticated user.
            endpoints.BadRequestException: missing session name.
            endpoints.NotFoundException: bad websafeConferenceKey.
            endpoints.ForbiddenException: caller is not the organizer.
        """
        # Check if user is logged in
        user = endpoints.get_current_user()
        if not user:
            raise endpoints.UnauthorizedException('Authorization required')
        user_id = getUserId(user)
        # Check that session has a name
        if not request.name:
            raise endpoints.BadRequestException("Session 'name' field required")
        # Check that conference exists
        conf = ndb.Key(urlsafe=request.websafeConferenceKey).get()
        if not conf:
            raise endpoints.NotFoundException('No conference found with key: %s' % request.websafeConferenceKey)
        # Check that user is the owner of the conference
        if user_id != conf.organizerUserId:
            raise endpoints.ForbiddenException('Only the owner can update the conference.')
        # copy SessionForm/ProtoRPC Message into dict
        data = {field.name: getattr(request, field.name) for field in request.all_fields()}
        # Output-only / routing-only fields are not stored on the entity.
        del data['websafeKey']
        del data['websafeConfKey']
        del data['websafeConferenceKey']
        # add default values for those missing (both data model & outbound Message)
        for df in DEFAULTS_SESSION:
            if data[df] in (None, []):
                data[df] = DEFAULTS_SESSION[df]
                setattr(request, df, DEFAULTS_SESSION[df])
        # Convert date from string to Date objects
        if data['date']:
            data['date'] = datetime.strptime(data['date'][:10], "%Y-%m-%d").date()
        # Convert startTime and duration from string to Time objects
        if data['startTime']:
            data['startTime'] = datetime.strptime(data['startTime'][:5],"%H:%M").time()
        if data['duration']:
            data['duration'] = datetime.strptime(data['duration'][:5],"%H:%M").time()
        # Check if the speaker has more then one session and create a task queue
        if data['speaker']:
            sessions = self._getConferenceSessions(request)
            for s in sessions:
                for speak in s.speaker:
                    if speak in data['speaker']:
                        # if the speaker is in more than one session, we are here
                        # NOTE(review): this can enqueue one task per matching
                        # session/speaker pair; presumably the featured-speaker
                        # task is idempotent -- confirm.
                        taskqueue.add(
                            params = {'websafeConferenceKey': request.websafeConferenceKey,
                            'speaker': data['speaker']},
                            url = '/tasks/set_featured_speaker'
                        )
        # generate Conf Key based on conf ID and Session
        # ID based on Conf key get Session key from ID
        conf_key = conf.key
        sess_id = Session.allocate_ids(size=1, parent=conf_key)[0]
        sess_key = ndb.Key(Session, sess_id, parent=conf_key)
        data['key'] = sess_key
        # create Session & return (modified) SessionForm
        Session(**data).put()
        sess = sess_key.get()
        return self._copySessionToForm(sess)
@endpoints.method(SESSION_GET_REQUEST, SessionForms,
path='conference/{websafeConferenceKey}/sessions',
http_method='GET', name='getConferenceSessions')
def getConferenceSessions(self, request):
"""Gets all the sessions in the given conference"""
sessions = self._getConferenceSessions(request)
return SessionForms(
items=[self._copySessionToForm(sess) for sess in sessions]
)
@endpoints.method(SESSION_BY_TYPE_GET_REQUEST, SessionForms,
path='conference/{websafeConferenceKey}/sessions/byType',
http_method='GET', name='getConferenceSessionsByType')
def getConferenceSessionsByType(self, request):
""" Gets all the sessions of a specified type, in the given conference"""
sessions = self._getConferenceSessions(request)
# filter by requested typeOfSession
sessions = sessions.filter(Session.typeOfSession == request.typeOfSession)
return SessionForms(
items=[self._copySessionToForm(sess) for sess in sessions]
)
@endpoints.method(SESSION_BY_SPEAKER_GET_REQUEST, SessionForms,
path='sessions/bySpeaker',
http_method='GET', name='getSessionsBySpeaker')
def getSessionsBySpeaker(self, request):
""" Gets all the sessions of a specified speaker"""
allSess = Session.query()
allSess = allSess.filter(Session.speaker == request.speaker)
return SessionForms(
items=[self._copySessionToForm(sess) for sess in allSess]
)
@endpoints.method(SESSION_POST_REQUEST, SessionForm,
path='conference/{websafeConferenceKey}/sessions',
http_method='POST', name='createSession')
def createSession(self, request):
""" Creates a new session for a conference"""
return self._createSessionObject(request)
# TASK 2 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -- - - - -
@endpoints.method(WISHLIST_POST_REQUEST, BooleanMessage,
path='sessions/{websafeSessionKey}/wishlist',
http_method='POST', name='addSessionToWishlist')
def addSessionToWishlist(self, request):
""" Add a given session to the user's wishlist """
inserted = None
prof = self._getProfileFromUser() # get user Profile
# Take the session
wssk = request.websafeSessionKey
sess = ndb.Key(urlsafe=wssk).get()
# check if sess is a session object
if not type(sess) is (Session):
raise endpoints.NotFoundException('No session found with key: %s' % wssk)
# check if the session exists
if not sess:
raise endpoints.NotFoundException('No session found with key: %s' % wssk)
# check if session is already on the user wishlist
if wssk in prof.wishlist:
raise ConflictException("This session is already on your wishlist.")
# Add session
prof.wishlist.append(wssk)
inserted = True
prof.put()
return BooleanMessage(data=inserted)
@endpoints.method(message_types.VoidMessage, SessionForms,
path='wishlist',
http_method='GET', name='getSessionsInWishlist')
def getSessionsInWishlist(self, request):
"""Get list of sessions that user has put in his wishlist."""
prof = self._getProfileFromUser() # get user profile
# get the wishlist sessions from profile.
sessions_keys = [ndb.Key(urlsafe=wssk) for wssk in prof.wishlist]
sessions = ndb.get_multi(sessions_keys)
# return set of SessionForm objects per Session
return SessionForms(
items=[self._copySessionToForm(sess) for sess in sessions]
)
@endpoints.method(WISHLIST_POST_REQUEST, BooleanMessage,
path='sessions/{websafeSessionKey}/deletefromwishlist',
http_method='DELETE', name='deleteSessionInWishlist')
def deleteSessionInWishlist(self, request):
""" Delete a given session from the user's wishlist """
deleted = None
prof = self._getProfileFromUser() # get user Profile
# Take the session
wssk = request.websafeSessionKey
sess = ndb.Key(urlsafe=wssk).get()
# check if the session exists
if not sess:
raise endpoints.NotFoundException('No session found with key: %s' % wssk)
# check if session is already on the user wishlist
if wssk in prof.wishlist:
prof.wishlist.remove(wssk)
deleted = True
prof.put()
return BooleanMessage(data=deleted)
# TASK 3 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -- - - - -
@endpoints.method(SESSION_OF_CONF_BY_SPEAKER_GET_REQUEST, SessionForms,
path='conference/{websafeConferenceKey}/sessions/bySpeaker',
http_method='GET', name='getConferenceSessionsBySpeaker')
def getConferenceSessionsBySpeaker(self, request):
""" Gets all the sessions of a specified speaker, in the given conference"""
sessions = self._getConferenceSessions(request)
# filter by requested speaker
sessions = sessions.filter(Session.speaker == request.speaker)
return SessionForms(
items=[self._copySessionToForm(sess) for sess in sessions]
)
@endpoints.method(SPEC_SESSION_GET_REQUEST, BooleanMessage,
path='sessions/{websafeSessionKey}/seats',
http_method='GET', name='getSeatsAvailableInSession')
def getSeatsAvailableInSession(self, request):
""" Get the availability of a conference given the session """
free = None
prof = self._getProfileFromUser() # get user Profile
# Take the session
wssk = request.websafeSessionKey
sess = ndb.Key(urlsafe=wssk).get()
# check if the session exists
if not sess:
raise endpoints.NotFoundException('No session found with key: %s' % wssk)
# Take the conference
conf_key = sess.key.parent()
conf = conf_key.get()
# Check if the conference has seats available
if conf.seatsAvailable > 0:
free = True
else:
free = False
return BooleanMessage(data=free)
    @endpoints.method(message_types.VoidMessage, SessionForms,
            path='noWorkshopAndBeforeSeven', http_method='GET',
            name='noWorkshopAndBeforeSeven')
    def noWorkshopAndBeforeSeven(self, request):
        """ Get all sessions before 7pm and without workshop"""
        sessions = Session.query()
        # First filter: the time bound runs in the datastore query.
        # (Datastore allows only one inequality property per query, so the
        # session-type exclusion cannot be a second query filter.)
        sessions = sessions.filter(
            Session.startTime <= datetime.strptime("7:00 pm", "%I:%M %p").time()
        )
        # Second filter applied in Python: drop sessions whose repeated
        # typeOfSession list contains 'Workshop'.
        final_sessions = [session for session in sessions
                          if 'Workshop' not in session.typeOfSession]
        return SessionForms(
            items=[self._copySessionToForm(session) for session in final_sessions]
        )
# TASK 4 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -- - - - -
@staticmethod
def _cacheFeaturedSpeaker(websafeConferenceKey, speaker):
"""Create Featured Speaker & assign to memcache."""
# Get all sessions of the speaker
sessions = Session.query(Session.speaker == speaker)
# save in a string the list of the sessions names
sf = SessionForm()
string = ''
for sess in sessions:
for field in sf.all_fields():
if field.name == 'name':
string += str(getattr(sess, field.name))
string += ","
# Speaker and his sessions
featured_speaker = {'speaker': speaker, 'sessions': string }
featured = str(featured_speaker)
# store in memcache
memcache.set(MEMCACHE_FEATURED_SPEAKER_KEY, featured)
return featured
@endpoints.method(message_types.VoidMessage, StringMessage,
path='getFeaturedSpeaker',
http_method='GET', name='getFeaturedSpeaker')
def getFeaturedSpeaker(self, request):
"""Return Featured Speaker from memcache."""
# return an existing Featured Speaker from Memcache or an empty string.
featured = memcache.get(MEMCACHE_FEATURED_SPEAKER_KEY)
if not featured:
featured = ""
return StringMessage(data=featured)
# WSGI application served by App Engine; endpoints discovers ConferenceApi here.
api = endpoints.api_server([ConferenceApi])  # register API
| apache-2.0 |
TeslaProject/external_chromium_org | tools/telemetry/telemetry/core/platform/profiler/android_profiling_helper.py | 25 | 9850 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import glob
import hashlib
import logging
import os
import platform
import re
import shutil
import subprocess
from telemetry import decorators
from telemetry.core import platform as telemetry_platform
from telemetry.core import util
from telemetry.core.platform.profiler import android_prebuilt_profiler_helper
from telemetry.util import support_binaries
try:
import sqlite3
except ImportError:
sqlite3 = None
_TEXT_SECTION = '.text'  # ELF section that holds the executable code.
def _ElfMachineId(elf_file):
  # Parse the 'Machine:' field from `readelf -h` output (e.g. 'ARM', 'x86').
  headers = subprocess.check_output(['readelf', '-h', elf_file])
  return re.match(r'.*Machine:\s+(\w+)', headers, re.DOTALL).group(1)
def _ElfSectionAsString(elf_file, section):
  # Dump a named ELF section as printable strings via `readelf -p`.
  return subprocess.check_output(['readelf', '-p', section, elf_file])
def _ElfSectionMd5Sum(elf_file, section):
  # Hash the printable dump of a section; used to check that a stripped and
  # an unstripped copy of the same library share an identical .text section.
  # NOTE(review): the command is built by interpolation with plain quoting,
  # not shell escaping, so a path containing '"' would break the command.
  result = subprocess.check_output(
      'readelf -p%s "%s" | md5sum' % (section, elf_file), shell=True)
  return result.split(' ', 1)[0]
def _FindMatchingUnstrippedLibraryOnHost(device, lib):
  """Return the host path of an unstripped copy of the on-device library *lib*.

  Matches the device library against stripped host builds by MD5, then
  verifies that the unstripped sibling has an identical .text section.
  Returns None when no compatible library is found.
  """
  lib_base = os.path.basename(lib)
  # MD5 of the stripped library as installed on the device.
  device_md5 = device.RunShellCommand('md5 "%s"' % lib, as_root=True)[0]
  device_md5 = device_md5.split(' ', 1)[0]
  def FindMatchingStrippedLibrary(out_path):
    # First find a matching stripped library on the host. This avoids the need
    # to pull the stripped library from the device, which can take tens of
    # seconds.
    host_lib_pattern = os.path.join(out_path, '*_apk', 'libs', '*', lib_base)
    for stripped_host_lib in glob.glob(host_lib_pattern):
      with open(stripped_host_lib) as f:
        host_md5 = hashlib.md5(f.read()).hexdigest()
      if host_md5 == device_md5:
        return stripped_host_lib
  for build_dir, build_type in util.GetBuildDirectories():
    out_path = os.path.join(build_dir, build_type)
    stripped_host_lib = FindMatchingStrippedLibrary(out_path)
    if stripped_host_lib:
      break
  else:
    # No build directory produced a matching stripped library.
    return None
  # The corresponding unstripped library will be under out/Release/lib.
  # (out_path intentionally leaks out of the loop above; see pylint disable.)
  unstripped_host_lib = os.path.join(out_path, 'lib', lib_base)
  # Make sure the unstripped library matches the stripped one. We do this
  # by comparing the hashes of text sections in both libraries. This isn't an
  # exact guarantee, but should still give reasonable confidence that the
  # libraries are compatible.
  # TODO(skyostil): Check .note.gnu.build-id instead once we're using
  # --build-id=sha1.
  # pylint: disable=W0631
  if (_ElfSectionMd5Sum(unstripped_host_lib, _TEXT_SECTION) !=
      _ElfSectionMd5Sum(stripped_host_lib, _TEXT_SECTION)):
    return None
  return unstripped_host_lib
@decorators.Cache
def GetPerfhostName():
  # Host-side perf binary name, keyed by host OS version (cached).
  return 'perfhost_' + telemetry_platform.GetHostPlatform().GetOSVersionName()
# Ignored directories for libraries that aren't useful for symbolization.
# (Dalvik-cache odex files and temporaries carry no useful symbols.)
_IGNORED_LIB_PATHS = [
  '/data/dalvik-cache',
  '/tmp'
]
def GetRequiredLibrariesForPerfProfile(profile_file):
  """Returns the set of libraries necessary to symbolize a given perf profile.

  Args:
    profile_file: Path to perf profile to analyse.

  Returns:
    A set of required library file names.
  """
  with open(os.devnull, 'w') as dev_null:
    perfhost_path = support_binaries.FindPath(GetPerfhostName(), 'linux')
    # Run `perf script`; perf emits a warning on stderr for every library it
    # cannot open, and those warnings are parsed below.
    perf = subprocess.Popen([perfhost_path, 'script', '-i', profile_file],
                            stdout=dev_null, stderr=subprocess.PIPE)
    _, output = perf.communicate()
  missing_lib_re = re.compile(
      r'^Failed to open (.*), continuing without symbols')
  libs = set()
  for line in output.split('\n'):
    lib = missing_lib_re.match(line)
    if lib:
      lib = lib.group(1)
      path = os.path.dirname(lib)
      # Skip directories known to hold nothing useful for symbolization.
      if any(path.startswith(ignored_path)
             for ignored_path in _IGNORED_LIB_PATHS) or path == '/':
        continue
      libs.add(lib)
  return libs
def GetRequiredLibrariesForVTuneProfile(profile_file):
  """Returns the set of libraries necessary to symbolize a given VTune profile.

  Args:
    profile_file: Path to VTune profile to analyse.

  Returns:
    A set of required library file names.

  Raises:
    RuntimeError: if the optional sqlite3 module is not available.
  """
  # The module-level `import sqlite3` is wrapped in try/except and leaves
  # `sqlite3 = None` on failure; fail with a clear message here instead of
  # an obscure AttributeError on the None placeholder.
  if sqlite3 is None:
    raise RuntimeError('The sqlite3 module is required to parse VTune profiles.')
  db_file = os.path.join(profile_file, 'sqlite-db', 'dicer.db')
  conn = sqlite3.connect(db_file)
  try:
    # The 'dd_module_file' table lists all libraries on the device. Only the
    # ones with 'bin_located_path' are needed for the profile.
    query = 'SELECT bin_path, bin_located_path FROM dd_module_file'
    return set(row[0] for row in conn.cursor().execute(query) if row[1])
  finally:
    conn.close()
def CreateSymFs(device, symfs_dir, libraries, use_symlinks=True):
  """Creates a symfs directory to be used for symbolizing profiles.

  Prepares a set of files ("symfs") to be used with profilers such as perf for
  converting binary addresses into human readable function names.

  Args:
    device: DeviceUtils instance identifying the target device.
    symfs_dir: Path where the symfs should be created.
    libraries: Set of library file names that should be included in the symfs.
    use_symlinks: If True, link instead of copy unstripped libraries into the
      symfs. This will speed up the operation, but the resulting symfs will no
      longer be valid if the linked files are modified, e.g., by rebuilding.

  Returns:
    The absolute path to the kernel symbols within the created symfs.
  """
  logging.info('Building symfs into %s.' % symfs_dir)
  # Cache of {device_dir: [device files that differ from our local copies]}.
  mismatching_files = {}
  for lib in libraries:
    device_dir = os.path.dirname(lib)
    # Mirror the device's absolute path under symfs_dir (strip leading '/').
    output_dir = os.path.join(symfs_dir, device_dir[1:])
    if not os.path.exists(output_dir):
      os.makedirs(output_dir)
    output_lib = os.path.join(output_dir, os.path.basename(lib))
    if lib.startswith('/data/app/'):
      # If this is our own library instead of a system one, look for a matching
      # unstripped library under the out directory.
      unstripped_host_lib = _FindMatchingUnstrippedLibraryOnHost(device, lib)
      if not unstripped_host_lib:
        logging.warning('Could not find symbols for %s.' % lib)
        logging.warning('Is the correct output directory selected '
                        '(CHROMIUM_OUT_DIR)? Did you install the APK after '
                        'building?')
        continue
      if use_symlinks:
        if os.path.lexists(output_lib):
          os.remove(output_lib)
        os.symlink(os.path.abspath(unstripped_host_lib), output_lib)
      # Copy the unstripped library only if it has been changed to avoid the
      # delay. Add one second to the modification time to guard against file
      # systems with poor timestamp resolution.
      elif not os.path.exists(output_lib) or \
          (os.stat(unstripped_host_lib).st_mtime >
           os.stat(output_lib).st_mtime + 1):
        logging.info('Copying %s to %s' % (unstripped_host_lib, output_lib))
        shutil.copy2(unstripped_host_lib, output_lib)
    else:
      # Otherwise save a copy of the stripped system library under the symfs so
      # the profiler can at least use the public symbols of that library. To
      # speed things up, only pull files that don't match copies we already
      # have in the symfs.
      if not device_dir in mismatching_files:
        changed_files = device.old_interface.GetFilesChanged(output_dir,
                                                             device_dir)
        mismatching_files[device_dir] = [
            device_path for _, device_path in changed_files]
      if not os.path.exists(output_lib) or lib in mismatching_files[device_dir]:
        logging.info('Pulling %s to %s' % (lib, output_lib))
        device.PullFile(lib, output_lib)
  # Also pull a copy of the kernel symbols.
  output_kallsyms = os.path.join(symfs_dir, 'kallsyms')
  if not os.path.exists(output_kallsyms):
    device.PullFile('/proc/kallsyms', output_kallsyms)
  return output_kallsyms
def PrepareDeviceForPerf(device):
  """Set up a device for running perf.

  Args:
    device: DeviceUtils instance identifying the target device.

  Returns:
    The path to the installed perf binary on the device.
  """
  android_prebuilt_profiler_helper.InstallOnDevice(device, 'perf')
  # Make sure kernel pointers are not hidden, so perf can resolve them.
  device.WriteFile('/proc/sys/kernel/kptr_restrict', '0', as_root=True)
  return android_prebuilt_profiler_helper.GetDevicePath('perf')
def GetToolchainBinaryPath(library_file, binary_name):
  """Return the path to an Android toolchain binary on the host.

  Args:
    library_file: ELF library which is used to identify the used ABI,
        architecture and toolchain.
    binary_name: Binary to search for, e.g., 'objdump'

  Returns:
    Full path to binary or None if the binary was not found.
  """
  # Mapping from ELF machine identifiers to GNU toolchain names.
  toolchain_configs = {
      'x86': 'i686-linux-android',
      'MIPS': 'mipsel-linux-android',
      'ARM': 'arm-linux-androideabi',
      'x86-64': 'x86_64-linux-android',
      'AArch64': 'aarch64-linux-android',
  }
  # A KeyError here means the library targets an unsupported architecture.
  toolchain_config = toolchain_configs[_ElfMachineId(library_file)]
  host_os = platform.uname()[0].lower()
  host_machine = platform.uname()[4]
  # The GCC version embedded in the .comment section selects the matching
  # prebuilt NDK toolchain directory.
  elf_comment = _ElfSectionAsString(library_file, '.comment')
  toolchain_version = re.match(r'.*GCC: \(GNU\) ([\w.]+)',
                               elf_comment, re.DOTALL)
  if not toolchain_version:
    return None
  toolchain_version = toolchain_version.group(1)
  path = os.path.join(util.GetChromiumSrcDir(), 'third_party', 'android_tools',
                      'ndk', 'toolchains',
                      '%s-%s' % (toolchain_config, toolchain_version),
                      'prebuilt', '%s-%s' % (host_os, host_machine), 'bin',
                      '%s-%s' % (toolchain_config, binary_name))
  path = os.path.abspath(path)
  return path if os.path.exists(path) else None
| bsd-3-clause |
jackylee0424/dfr | tornado/platform/interface.py | 387 | 2244 | #!/usr/bin/env python
#
# Copyright 2011 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Interfaces for platform-specific functionality.
This module exists primarily for documentation purposes and as base classes
for other tornado.platform modules. Most code should import the appropriate
implementation from `tornado.platform.auto`.
"""
from __future__ import absolute_import, division, print_function, with_statement
def set_close_exec(fd):
    """Sets the close-on-exec bit (``FD_CLOEXEC``)for a file descriptor."""
    # Interface stub only; concrete behaviour lives in the platform modules.
    raise NotImplementedError()
class Waker(object):
    """A socket-like object that can wake another thread from ``select()``.

    The `~tornado.ioloop.IOLoop` adds the waker's `fileno()` to the set of
    descriptors passed to its ``select`` (or ``epoll`` or ``kqueue``) call.
    Another thread calls `wake` to interrupt the wait; once the loop has
    woken, it calls `consume` to perform any per-wake cleanup. When the
    ``IOLoop`` is closed, it closes its waker too.

    This base class only documents the interface; platform-specific modules
    provide concrete implementations.
    """
    def fileno(self):
        """Returns the read file descriptor the event loop should monitor.

        Must be suitable for use with ``select()`` or an equivalent
        mechanism on the local platform.
        """
        raise NotImplementedError()
    def write_fileno(self):
        """Returns the write file descriptor used by `wake`."""
        raise NotImplementedError()
    def consume(self):
        """Drains pending wake-up data after the loop has woken up."""
        raise NotImplementedError()
    def wake(self):
        """Makes the monitored file descriptor become readable."""
        raise NotImplementedError()
    def close(self):
        """Releases the waker's underlying file descriptor(s)."""
        raise NotImplementedError()
| mit |
flotre/sickbeard-vfvo | lib/hachoir_parser/common/msdos.py | 90 | 1463 | """
MS-DOS structures.
Documentation:
- File attributes:
http://www.cs.colorado.edu/~main/cs1300/include/ddk/winddk.h
"""
from lib.hachoir_core.field import StaticFieldSet
from lib.hachoir_core.field import Bit, NullBits
# Low bits of the MS-DOS/Win32 file-attribute bitmask, in bit order
# (shared between the 16-bit and 32-bit attribute field sets below).
_FIELDS = (
    (Bit, "read_only"),
    (Bit, "hidden"),
    (Bit, "system"),
    (NullBits, "reserved[]", 1),
    (Bit, "directory"),
    (Bit, "archive"),
    (Bit, "device"),
    (Bit, "normal"),
    (Bit, "temporary"),
    (Bit, "sparse_file"),
    (Bit, "reparse_file"),
    (Bit, "compressed"),
    (Bit, "offline"),
    (Bit, "dont_index_content"),
    (Bit, "encrypted"),
)
class MSDOSFileAttr16(StaticFieldSet):
    """
    MSDOS 16-bit file attributes
    """
    format = _FIELDS + ((NullBits, "reserved[]", 1),)
    # Attribute names in display-priority order.
    _text_keys = (
        # Sort attributes by importance
        "directory", "read_only", "compressed",
        "hidden", "system",
        "normal", "device",
        "temporary", "archive")
    def createValue(self):
        """Render the set attribute bits as a short comma-separated string."""
        active = []
        for key in self._text_keys:
            if not self[key].value:
                continue
            # After four names, summarise whatever else is set as "...".
            if len(active) >= 4:
                active.append("...")
                break
            active.append(key)
        return ", ".join(active) if active else "(none)"
class MSDOSFileAttr32(MSDOSFileAttr16):
    """
    MSDOS 32-bit file attributes
    """
    # Same attribute bits as the 16-bit variant, padded out to 32 bits.
    format = _FIELDS + ((NullBits, "reserved[]", 17),)
| gpl-3.0 |
gardner/youtube-dl | youtube_dl/extractor/hearthisat.py | 108 | 4326 | # coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import (
compat_urllib_request,
compat_urlparse,
)
from ..utils import (
HEADRequest,
str_to_int,
urlencode_postdata,
urlhandle_detect_ext,
)
class HearThisAtIE(InfoExtractor):
    """youtube-dl extractor for single tracks hosted on hearthis.at."""
    _VALID_URL = r'https?://(?:www\.)?hearthis\.at/(?P<artist>[^/]+)/(?P<title>[A-Za-z0-9\-]+)/?$'
    _PLAYLIST_URL = 'https://hearthis.at/playlist.php'
    _TEST = {
        'url': 'https://hearthis.at/moofi/dr-kreep',
        'md5': 'ab6ec33c8fed6556029337c7885eb4e0',
        'info_dict': {
            'id': '150939',
            'ext': 'wav',
            'title': 'Moofi - Dr. Kreep',
            'thumbnail': 're:^https?://.*\.jpg$',
            'timestamp': 1421564134,
            'description': 'Creepy Patch. Mutable Instruments Braids Vowel + Formant Mode.',
            'upload_date': '20150118',
            'comment_count': int,
            'view_count': int,
            'like_count': int,
            'duration': 71,
            'categories': ['Experimental'],
        }
    }
    def _real_extract(self, url):
        """Extract track metadata and audio formats from a hearthis.at page."""
        m = re.match(self._VALID_URL, url)
        display_id = '{artist:s} - {title:s}'.format(**m.groupdict())
        webpage = self._download_webpage(url, display_id)
        track_id = self._search_regex(
            r'intTrackId\s*=\s*(\d+)', webpage, 'track ID')
        # The site's playlist endpoint returns JSON metadata for the track.
        payload = urlencode_postdata({'tracks[]': track_id})
        req = compat_urllib_request.Request(self._PLAYLIST_URL, payload)
        req.add_header('Content-type', 'application/x-www-form-urlencoded')
        track = self._download_json(req, track_id, 'Downloading playlist')[0]
        title = '{artist:s} - {title:s}'.format(**track)
        categories = None
        if track.get('category'):
            categories = [track['category']]
        description = self._og_search_description(webpage)
        thumbnail = self._og_search_thumbnail(webpage)
        # Counters are rendered in <span> elements distinguished by class.
        meta_span = r'<span[^>]+class="%s".*?</i>([^<]+)</span>'
        view_count = str_to_int(self._search_regex(
            meta_span % 'plays_count', webpage, 'view count', fatal=False))
        like_count = str_to_int(self._search_regex(
            meta_span % 'likes_count', webpage, 'like count', fatal=False))
        comment_count = str_to_int(self._search_regex(
            meta_span % 'comment_count', webpage, 'comment count', fatal=False))
        duration = str_to_int(self._search_regex(
            r'data-length="(\d+)', webpage, 'duration', fatal=False))
        timestamp = str_to_int(self._search_regex(
            r'<span[^>]+class="calctime"[^>]+data-time="(\d+)', webpage, 'timestamp', fatal=False))
        formats = []
        # Streaming MP3 embedded in the player markup.
        mp3_url = self._search_regex(
            r'(?s)<a class="player-link"\s+(?:[a-zA-Z0-9_:-]+="[^"]+"\s+)*?data-mp3="([^"]+)"',
            webpage, 'mp3 URL', fatal=False)
        if mp3_url:
            formats.append({
                'format_id': 'mp3',
                'vcodec': 'none',
                'acodec': 'mp3',
                'url': mp3_url,
            })
        # Optional direct download; its container format is detected via a
        # HEAD request since the link carries no extension.
        download_path = self._search_regex(
            r'<a class="[^"]*download_fct[^"]*"\s+href="([^"]+)"',
            webpage, 'download URL', default=None)
        if download_path:
            download_url = compat_urlparse.urljoin(url, download_path)
            ext_req = HEADRequest(download_url)
            ext_handle = self._request_webpage(
                ext_req, display_id, note='Determining extension')
            ext = urlhandle_detect_ext(ext_handle)
            formats.append({
                'format_id': 'download',
                'vcodec': 'none',
                'ext': ext,
                'url': download_url,
                'preference': 2,  # Usually better quality
            })
        self._sort_formats(formats)
        return {
            'id': track_id,
            'display_id': display_id,
            'title': title,
            'formats': formats,
            'thumbnail': thumbnail,
            'description': description,
            'duration': duration,
            'timestamp': timestamp,
            'view_count': view_count,
            'comment_count': comment_count,
            'like_count': like_count,
            'categories': categories,
        }
| unlicense |
credativUK/OCB | addons/base_calendar/__openerp__.py | 58 | 2107 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Calendar',
'version': '1.0',
'depends': ['base', 'base_status', 'mail', 'base_action_rule'],
'summary': 'Personal & Shared Calendar',
'description': """
This is a full-featured calendar system.
========================================
It supports:
------------
- Calendar of events
- Recurring events
If you need to manage your meetings, you should install the CRM module.
""",
'author': 'OpenERP SA',
'category': 'Hidden/Dependency',
'website': 'http://www.openerp.com',
'demo': ['crm_meeting_demo.xml'],
'data': [
'security/calendar_security.xml',
'security/ir.model.access.csv',
'base_calendar_view.xml',
'crm_meeting_view.xml',
'base_calendar_data.xml',
'crm_meeting_data.xml',
],
'test' : ['test/base_calendar_test.yml'],
'installable': True,
'application': True,
'auto_install': False,
'images': ['images/base_calendar1.jpeg','images/base_calendar2.jpeg','images/base_calendar3.jpeg','images/base_calendar4.jpeg',],
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
shyamalschandra/scikit-learn | doc/sphinxext/numpy_ext/docscrape.py | 108 | 15435 | """Extract reference documentation from the NumPy source tree.
"""
import inspect
import textwrap
import re
import pydoc
from warnings import warn
# Try Python 2 first, otherwise load from Python 3
try:
from StringIO import StringIO
except:
from io import StringIO
class Reader(object):
    """A line-based string reader with a movable cursor."""
    def __init__(self, data):
        """
        Parameters
        ----------
        data : str or list of str
            Text with lines separated by '\n', or an already-split list
            of lines.
        """
        # Normalise the input to a list of lines.
        self._str = data if isinstance(data, list) else data.split('\n')
        self.reset()
    def __getitem__(self, n):
        return self._str[n]
    def reset(self):
        # Move the cursor back to the first line.
        self._l = 0
    def eof(self):
        return self._l >= len(self._str)
    def read(self):
        """Return the current line and advance; '' once past the end."""
        if self.eof():
            return ''
        line = self._str[self._l]
        self._l += 1
        return line
    def seek_next_non_empty_line(self):
        # Advance the cursor past any blank (whitespace-only) lines.
        while self._l < len(self._str) and not self._str[self._l].strip():
            self._l += 1
    def read_to_condition(self, condition_func):
        """Consume and return lines up to (excluding) the first match."""
        start = self._l
        while not self.eof():
            if condition_func(self._str[self._l]):
                return self[start:self._l]
            self._l += 1
        return self[start:self._l + 1]
    def read_to_next_empty_line(self):
        self.seek_next_non_empty_line()
        return self.read_to_condition(lambda line: not line.strip())
    def read_to_next_unindented_line(self):
        return self.read_to_condition(
            lambda line: line.strip() and len(line.lstrip()) == len(line))
    def peek(self, n=0):
        """Return the line *n* positions ahead without moving the cursor."""
        idx = self._l + n
        return self._str[idx] if idx < len(self._str) else ''
    def is_empty(self):
        return not any(line.strip() for line in self._str)
class NumpyDocString(object):
    """Parse a numpydoc-style docstring into named sections.

    Sections are read and written with item access, e.g.
    ``doc['Parameters']``; ``str(doc)`` renders the parsed document back
    to reStructuredText.
    """
    def __init__(self, docstring, config={}):
        docstring = textwrap.dedent(docstring).split('\n')
        self._doc = Reader(docstring)
        # Parsed sections. Parameter-style sections hold lists of
        # (name, type, description-lines) tuples.
        self._parsed_data = {
            'Signature': '',
            'Summary': [''],
            'Extended Summary': [],
            'Parameters': [],
            'Returns': [],
            'Raises': [],
            'Warns': [],
            'Other Parameters': [],
            'Attributes': [],
            'Methods': [],
            'See Also': [],
            'Notes': [],
            'Warnings': [],
            'References': '',
            'Examples': '',
            'index': {}
        }
        self._parse()
    def __getitem__(self, key):
        return self._parsed_data[key]
    def __setitem__(self, key, val):
        if key not in self._parsed_data:
            warn("Unknown section %s" % key)
        else:
            self._parsed_data[key] = val
    def _is_at_section(self):
        # A section starts with a title line underlined by '-' or '=',
        # or with an '.. index::' directive.
        self._doc.seek_next_non_empty_line()
        if self._doc.eof():
            return False
        l1 = self._doc.peek().strip()  # e.g. Parameters
        if l1.startswith('.. index::'):
            return True
        l2 = self._doc.peek(1).strip()  # ---------- or ==========
        return l2.startswith('-' * len(l1)) or l2.startswith('=' * len(l1))
    def _strip(self, doc):
        # Trim leading and trailing blank lines from a list of lines.
        i = 0
        j = 0
        for i, line in enumerate(doc):
            if line.strip():
                break
        for j, line in enumerate(doc[::-1]):
            if line.strip():
                break
        return doc[i:len(doc) - j]
    def _read_to_next_section(self):
        section = self._doc.read_to_next_empty_line()
        while not self._is_at_section() and not self._doc.eof():
            if not self._doc.peek(-1).strip():  # previous line was empty
                section += ['']
            section += self._doc.read_to_next_empty_line()
        return section
    def _read_sections(self):
        # Yield (section-name, content-lines) pairs.
        while not self._doc.eof():
            data = self._read_to_next_section()
            name = data[0].strip()
            if name.startswith('..'):  # index section
                yield name, data[1:]
            elif len(data) < 2:
                # NOTE(review): yields the StopIteration class itself as a
                # value (historic numpydoc quirk); preserved for
                # compatibility with existing consumers.
                yield StopIteration
            else:
                yield name, self._strip(data[2:])
    def _parse_param_list(self, content):
        # Parse 'name : type' headers followed by indented descriptions.
        r = Reader(content)
        params = []
        while not r.eof():
            header = r.read().strip()
            if ' : ' in header:
                arg_name, arg_type = header.split(' : ')[:2]
            else:
                arg_name, arg_type = header, ''
            desc = r.read_to_next_unindented_line()
            desc = dedent_lines(desc)
            params.append((arg_name, arg_type, desc))
        return params
    _name_rgx = re.compile(r"^\s*(:(?P<role>\w+):`(?P<name>[a-zA-Z0-9_.-]+)`|"
                           r" (?P<name2>[a-zA-Z0-9_.-]+))\s*", re.X)
    def _parse_see_also(self, content):
        """
        func_name : Descriptive text
            continued text
        another_func_name : Descriptive text
        func_name1, func_name2, :meth:`func_name`, func_name3
        """
        items = []
        def parse_item_name(text):
            """Match ':role:`name`' or 'name'"""
            m = self._name_rgx.match(text)
            if m:
                g = m.groups()
                if g[1] is None:
                    return g[3], None
                else:
                    return g[2], g[1]
            raise ValueError("%s is not a item name" % text)
        def push_item(name, rest):
            if not name:
                return
            name, role = parse_item_name(name)
            items.append((name, list(rest), role))
            del rest[:]
        current_func = None
        rest = []
        for line in content:
            if not line.strip():
                continue
            m = self._name_rgx.match(line)
            if m and line[m.end():].strip().startswith(':'):
                push_item(current_func, rest)
                current_func, line = line[:m.end()], line[m.end():]
                rest = [line.split(':', 1)[1].strip()]
                if not rest[0]:
                    rest = []
            elif not line.startswith(' '):
                push_item(current_func, rest)
                current_func = None
                if ',' in line:
                    for func in line.split(','):
                        push_item(func, [])
                elif line.strip():
                    current_func = line
            elif current_func is not None:
                rest.append(line.strip())
        push_item(current_func, rest)
        return items
    def _parse_index(self, section, content):
        """
        .. index: default
           :refguide: something, else, and more
        """
        def strip_each_in(lst):
            return [s.strip() for s in lst]
        out = {}
        section = section.split('::')
        if len(section) > 1:
            out['default'] = strip_each_in(section[1].split(','))[0]
        for line in content:
            line = line.split(':')
            if len(line) > 2:
                out[line[1]] = strip_each_in(line[2].split(','))
        return out
    def _parse_summary(self):
        """Grab signature (if given) and summary"""
        if self._is_at_section():
            return
        summary = self._doc.read_to_next_empty_line()
        summary_str = " ".join([s.strip() for s in summary]).strip()
        if re.compile('^([\w., ]+=)?\s*[\w\.]+\(.*\)$').match(summary_str):
            self['Signature'] = summary_str
            if not self._is_at_section():
                self['Summary'] = self._doc.read_to_next_empty_line()
        else:
            self['Summary'] = summary
        if not self._is_at_section():
            self['Extended Summary'] = self._read_to_next_section()
    def _parse(self):
        self._doc.reset()
        self._parse_summary()
        for (section, content) in self._read_sections():
            if not section.startswith('..'):
                section = ' '.join([s.capitalize()
                                    for s in section.split(' ')])
            if section in ('Parameters', 'Attributes', 'Methods',
                           'Returns', 'Raises', 'Warns'):
                self[section] = self._parse_param_list(content)
            elif section.startswith('.. index::'):
                self['index'] = self._parse_index(section, content)
            elif section == 'See Also':
                self['See Also'] = self._parse_see_also(content)
            else:
                self[section] = content
    # string conversion routines
    def _str_header(self, name, symbol='-'):
        return [name, len(name) * symbol]
    def _str_indent(self, doc, indent=4):
        out = []
        for line in doc:
            out += [' ' * indent + line]
        return out
    def _str_signature(self):
        if self['Signature']:
            return [self['Signature'].replace('*', '\*')] + ['']
        else:
            return ['']
    def _str_summary(self):
        if self['Summary']:
            return self['Summary'] + ['']
        else:
            return []
    def _str_extended_summary(self):
        if self['Extended Summary']:
            return self['Extended Summary'] + ['']
        else:
            return []
    def _str_param_list(self, name):
        out = []
        if self[name]:
            out += self._str_header(name)
            for param, param_type, desc in self[name]:
                out += ['%s : %s' % (param, param_type)]
                out += self._str_indent(desc)
            out += ['']
        return out
    def _str_section(self, name):
        out = []
        if self[name]:
            out += self._str_header(name)
            out += self[name]
            out += ['']
        return out
    def _str_see_also(self, func_role):
        if not self['See Also']:
            return []
        out = []
        out += self._str_header("See Also")
        last_had_desc = True
        for func, desc, role in self['See Also']:
            if role:
                link = ':%s:`%s`' % (role, func)
            elif func_role:
                link = ':%s:`%s`' % (func_role, func)
            else:
                link = "`%s`_" % func
            if desc or last_had_desc:
                out += ['']
                out += [link]
            else:
                out[-1] += ", %s" % link
            if desc:
                out += self._str_indent([' '.join(desc)])
                last_had_desc = True
            else:
                last_had_desc = False
        out += ['']
        return out
    def _str_index(self):
        idx = self['index']
        out = []
        out += ['.. index:: %s' % idx.get('default', '')]
        # BUG FIX: dict.iteritems() does not exist on Python 3; items()
        # behaves identically here on both Python 2 and 3, matching the
        # rest of this file's 2/3-compatible style.
        for section, references in idx.items():
            if section == 'default':
                continue
            out += ['   :%s: %s' % (section, ', '.join(references))]
        return out
    def __str__(self, func_role=''):
        out = []
        out += self._str_signature()
        out += self._str_summary()
        out += self._str_extended_summary()
        for param_list in ('Parameters', 'Returns', 'Raises'):
            out += self._str_param_list(param_list)
        out += self._str_section('Warnings')
        out += self._str_see_also(func_role)
        for s in ('Notes', 'References', 'Examples'):
            out += self._str_section(s)
        for param_list in ('Attributes', 'Methods'):
            out += self._str_param_list(param_list)
        out += self._str_index()
        return '\n'.join(out)
def indent(str, indent=4):
    """Prefix every line of *str* with *indent* spaces.

    ``None`` yields just the prefix. (The parameter names shadow the
    builtin and the function itself; kept for backward compatibility.)
    """
    prefix = ' ' * indent
    if str is None:
        return prefix
    return '\n'.join(prefix + line for line in str.split('\n'))
def dedent_lines(lines):
    """Deindent a list of lines maximally, preserving relative indents."""
    joined = "\n".join(lines)
    return textwrap.dedent(joined).split("\n")
def header(text, style='-'):
    """Render *text* followed by an underline of *style* characters."""
    return '%s\n%s\n' % (text, style * len(text))
class FunctionDoc(NumpyDocString):
    """NumpyDocString for a function/method, able to derive a signature."""
    def __init__(self, func, role='func', doc=None, config={}):
        self._f = func
        self._role = role  # e.g. "func" or "meth"
        if doc is None:
            if func is None:
                raise ValueError("No function or docstring given")
            doc = inspect.getdoc(func) or ''
        NumpyDocString.__init__(self, doc)
        if not self['Signature'] and func is not None:
            func, func_name = self.get_func()
            try:
                # try to read signature
                # NOTE(review): inspect.getargspec was removed in Python
                # 3.11; this path assumes an older interpreter.
                argspec = inspect.getargspec(func)
                argspec = inspect.formatargspec(*argspec)
                argspec = argspec.replace('*', '\*')
                signature = '%s%s' % (func_name, argspec)
            except TypeError as e:
                # Builtins / C functions expose no argspec.
                signature = '%s()' % func_name
            self['Signature'] = signature
    def get_func(self):
        # Resolve the underlying callable and its display name; classes are
        # documented through __call__ (or __init__ as a fallback).
        func_name = getattr(self._f, '__name__', self.__class__.__name__)
        if inspect.isclass(self._f):
            func = getattr(self._f, '__call__', self._f.__init__)
        else:
            func = self._f
        return func, func_name
    def __str__(self):
        out = ''
        func, func_name = self.get_func()
        signature = self['Signature'].replace('*', '\*')
        roles = {'func': 'function',
                 'meth': 'method'}
        if self._role:
            if self._role not in roles:
                print("Warning: invalid role %s" % self._role)
            out += '.. %s:: %s\n    \n\n' % (roles.get(self._role, ''),
                                             func_name)
        out += super(FunctionDoc, self).__str__(func_role=self._role)
        return out
class ClassDoc(NumpyDocString):
    """NumpyDocString for a class; can auto-list undocumented members."""
    def __init__(self, cls, doc=None, modulename='', func_doc=FunctionDoc,
                 config=None):
        if not inspect.isclass(cls) and cls is not None:
            raise ValueError("Expected a class or None, but got %r" % cls)
        self._cls = cls
        if modulename and not modulename.endswith('.'):
            modulename += '.'
        self._mod = modulename
        if doc is None:
            if cls is None:
                raise ValueError("No class or documentation string given")
            doc = pydoc.getdoc(cls)
        NumpyDocString.__init__(self, doc)
        # Optionally fill in public members the docstring did not document.
        if config is not None and config.get('show_class_members', True):
            if not self['Methods']:
                self['Methods'] = [(name, '', '')
                                   for name in sorted(self.methods)]
            if not self['Attributes']:
                self['Attributes'] = [(name, '', '')
                                      for name in sorted(self.properties)]
    @property
    def methods(self):
        # Public callable members of the class.
        if self._cls is None:
            return []
        return [name for name, func in inspect.getmembers(self._cls)
                if not name.startswith('_') and callable(func)]
    @property
    def properties(self):
        # Public data members (getmembers reports None for plain attributes).
        if self._cls is None:
            return []
        return [name for name, func in inspect.getmembers(self._cls)
                if not name.startswith('_') and func is None]
| bsd-3-clause |
haroldl/homeworklog | djangoappengine/mail.py | 50 | 3162 | from email.MIMEBase import MIMEBase
from django.core.mail.backends.base import BaseEmailBackend
from django.core.mail import EmailMultiAlternatives
from django.core.exceptions import ImproperlyConfigured
from google.appengine.api import mail as aeemail
from google.appengine.runtime import apiproxy_errors
def _send_deferred(message, fail_silently=False):
try:
message.send()
except (aeemail.Error, apiproxy_errors.Error):
if not fail_silently:
raise
class EmailBackend(BaseEmailBackend):
    """Django e-mail backend implemented on the App Engine mail API."""
    # Subclasses flip this to queue sends on a deferred task queue instead
    # of sending synchronously.
    can_defer = False
    def send_messages(self, email_messages):
        """Send the given Django messages; return the number accepted."""
        num_sent = 0
        for message in email_messages:
            if self._send(message):
                num_sent += 1
        return num_sent
    def _copy_message(self, message):
        """Create and return App Engine EmailMessage class from message."""
        gmsg = aeemail.EmailMessage(sender=message.from_email,
                                    to=message.to,
                                    subject=message.subject,
                                    body=message.body)
        if message.extra_headers.get('Reply-To', None):
            gmsg.reply_to = message.extra_headers['Reply-To']
        if message.cc:
            gmsg.cc = list(message.cc)
        if message.bcc:
            gmsg.bcc = list(message.bcc)
        if message.attachments:
            # Must be populated with (filename, filecontents) tuples
            attachments = []
            for attachment in message.attachments:
                if isinstance(attachment, MIMEBase):
                    attachments.append((attachment.get_filename(),
                                        attachment.get_payload(decode=True)))
                else:
                    attachments.append((attachment[0], attachment[1]))
            gmsg.attachments = attachments
        # Look for HTML alternative content
        if isinstance(message, EmailMultiAlternatives):
            for content, mimetype in message.alternatives:
                if mimetype == 'text/html':
                    gmsg.html = content
                    break
        return gmsg
    def _send(self, message):
        """Convert and deliver one message; return True on success."""
        try:
            message = self._copy_message(message)
        # BUG FIX: the previous `except (...), err:` comma syntax is
        # Python-2-only; `as err` is valid on Python 2.6+ and Python 3.
        except (ValueError, aeemail.InvalidEmailError) as err:
            import logging
            logging.warn(err)
            if not self.fail_silently:
                raise
            return False
        if self.can_defer:
            self._defer_message(message)
            return True
        try:
            message.send()
        except (aeemail.Error, apiproxy_errors.Error):
            if not self.fail_silently:
                raise
            return False
        return True
    def _defer_message(self, message):
        """Queue *message* for asynchronous delivery via the task queue."""
        from google.appengine.ext import deferred
        from django.conf import settings
        queue_name = getattr(settings, 'EMAIL_QUEUE_NAME', 'default')
        deferred.defer(_send_deferred,
                       message,
                       fail_silently=self.fail_silently,
                       _queue=queue_name)
class AsyncEmailBackend(EmailBackend):
    # Same as EmailBackend, but messages are queued on a deferred task
    # queue instead of being sent synchronously.
    can_defer = True
| bsd-3-clause |
kubkon/DM-Simulator | analyze.py | 1 | 4777 | #!/usr/bin/env python
# encoding: utf-8
"""
analyze.py
Created by Jakub Konka on 2012-10-23.
Copyright (c) 2012 University of Strathclyde. All rights reserved.
"""
import argparse
import csv
import numpy as np
import os
import os.path
import scipy.stats as stats
import sys
### Parse command line arguments
parser = argparse.ArgumentParser(description="DM simulation -- Statistical analysis script")
parser.add_argument('input_dir', help='directory with simulation results')
parser.add_argument('context', help='data context; e.g., price, or reputation')
parser.add_argument('mode', help='transient or steady-state')
parser.add_argument('--confidence', dest='confidence', default=0.99,
                    type=float, help='confidence value (default: 0.99)')
args = parser.parse_args()
input_dir = args.input_dir
context = args.context
mode = args.mode.lower()
confidence = args.confidence

### Common params
# Ask for warm-up period index (if mode is steady-state)
# steady-state: discard the first `warmup` observations of every run;
# transient: keep everything and ask for a smoothing window instead.
if mode == 'steady-state':
  warmup = int(input('Warm-up period index: '))
elif mode == 'transient':
  warmup = 0
  window_size = int(input('Window size: '))
else:
  sys.exit('Unknown mode specified.')
# File names and paths
extension = ".out"
# Unique experiment names: the part of each matching file name before the
# extension.  Files already inside a previous analysis output directory
# ('transient*' or 'steady-state*') are skipped so reruns don't re-read
# their own results.
file_names = set([
  f[:f.find(extension)] for root, _, files in os.walk(input_dir) for f in files
  if (f.endswith(extension)
      and context in f
      and 'transient' not in root
      and 'steady-state' not in root)
])
# Full paths of every matching result file (one per simulation repetition).
file_paths = [
  os.path.join(root, f) for root, _, files in os.walk(input_dir) for f in files
  if (f.endswith(extension)
      and context in f
      and 'transient' not in root
      and 'steady-state' not in root)
]
# Reference column: the sample/repetition index column of the CSV output.
ref_column = 'sr_number'
### Merge results from files
for name in file_names:
  # Read data from files: one dict per repetition, mapping column name ->
  # list of values (floats; the reference column stays int).
  data_in = []
  for fp in file_paths:
    if name in fp:
      with open(fp, 'rt') as f:
        reader = csv.DictReader(f)
        dct = {}
        for row in reader:
          # Exclude data with index lower than specified warm-up period
          if int(row[ref_column]) > warmup:
            for key in row:
              val = float(row[key]) if key != ref_column else int(row[key])
              dct.setdefault(key, []).append(val)
        data_in.append(dct)
  # Map and reduce...
  if mode == 'steady-state':
    # Compute steady-state mean average: one time-average per repetition
    # (and per data column), then the grand mean over repetitions.
    averages = [sum(dct[key]) / len(dct[key]) for dct in data_in for key in dct.keys() if key != ref_column]
    mean = sum(averages) / len(averages)
    # Compute standard deviation (sample sd over the per-repetition means)
    sd = np.sqrt(sum(map(lambda x: (x-mean)**2, averages)) / (len(averages)-1))
    # Compute standard error for the mean
    se = sd / np.sqrt(len(averages))
    # Compute confidence intervals for the mean (Student t half-width)
    ci = se * stats.t.ppf(0.5 + confidence/2, len(averages)-1)
    # Save to a file
    # Create save dir if doesn't exist already
    save_dir = input_dir + '/' + mode
    if not os.path.exists(save_dir):
      os.makedirs(save_dir)
    with open(save_dir + '/' + name + extension, 'w', newline='', encoding='utf-8') as f:
      writer = csv.writer(f, delimiter=',')
      writer.writerow(['mean', 'sd', 'se', 'ci'])
      writer.writerow([mean, sd, se, ci])
  else:
    # Compute mean: transpose so each tuple holds one time point across
    # all repetitions, then average across repetitions.
    zipped = zip(*[dct[key] for dct in data_in for key in dct.keys() if key != ref_column])
    init_means = list(map(lambda x: sum(x)/len(data_in), zipped))
    means = []
    if window_size == 0:
      means = init_means
    else:
      # Centered moving average; near the start the window is shrunk
      # symmetrically so it never reaches before index 0.
      for i in range(len(init_means) - window_size):
        if i < window_size:
          means += [sum([init_means[i+s] for s in range(-i, i+1)]) / (2*(i+1) - 1)]
        else:
          means += [sum([init_means[i+s] for s in range(-window_size, window_size+1)]) / (2*window_size + 1)]
    # Compute standard deviation (per time point, across repetitions)
    # NOTE(review): the divisor is len(means)-1 (number of time points),
    # not len(tup)-1 (number of repetitions) -- verify this is intended.
    zipped = zip(*[dct[key] for dct in data_in for key in dct.keys() if key != ref_column])
    sds = [np.sqrt(sum(map(lambda x: (x-mean)**2, tup)) / (len(means) - 1)) for (tup, mean) in zip(zipped, means)]
    # Compute standard error for the mean
    ses = list(map(lambda x: x/np.sqrt(len(means)), sds))
    # Compute confidence intervals for the mean
    cis = list(map(lambda x: x * stats.t.ppf(0.5 + confidence/2, len(means)-1), ses))
    # Save to a file
    # Create save dir if doesn't exist already
    save_dir = input_dir + '/' + mode + '_{}'.format(window_size)
    if not os.path.exists(save_dir):
      os.makedirs(save_dir)
    with open(save_dir + '/' + name + extension, 'w', newline='', encoding='utf-8') as f:
      writer = csv.writer(f, delimiter=',')
      zip_input = [data_in[0][ref_column], means, sds, ses, cis]
      out_headers = [ref_column, 'mean', 'sd', 'se', 'ci']
      writer.writerow(out_headers)
      for tup in zip(*zip_input):
        writer.writerow(tup)
| mit |
pkats15/hdt_analyzer | django_test/django_venv/Lib/encodings/mac_latin2.py | 647 | 8565 | """ Python Character Mapping Codec generated from 'LATIN2.TXT' with gencodec.py.
Written by Marc-Andre Lemburg (mal@lemburg.com).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
(c) Copyright 2000 Guido van Rossum.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Stateless mac-latin2 codec backed by the module-level charmap tables."""

    def encode(self,input,errors='strict'):
        # One-to-one mapping of Unicode characters to mac-latin2 bytes.
        return codecs.charmap_encode(input,errors,encoding_map)

    def decode(self,input,errors='strict'):
        # One-to-one mapping of mac-latin2 bytes to Unicode characters.
        return codecs.charmap_decode(input,errors,decoding_map)
class IncrementalEncoder(codecs.IncrementalEncoder):
    def encode(self, input, final=False):
        # charmap encoding is stateless, so `final` needs no special handling.
        return codecs.charmap_encode(input,self.errors,encoding_map)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    def decode(self, input, final=False):
        # charmap decoding is stateless, so `final` needs no special handling.
        return codecs.charmap_decode(input,self.errors,decoding_map)[0]
class StreamWriter(Codec,codecs.StreamWriter):
    # Inherits the stateless charmap encode from Codec.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # Inherits the stateless charmap decode from Codec.
    pass
### encodings module API
def getregentry():
    # Registration hook called by the `encodings` package machinery to
    # obtain this codec's entry points.
    return codecs.CodecInfo(
        name='mac-latin2',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Map
decoding_map = codecs.make_identity_dict(range(256))
decoding_map.update({
0x0080: 0x00c4, # LATIN CAPITAL LETTER A WITH DIAERESIS
0x0081: 0x0100, # LATIN CAPITAL LETTER A WITH MACRON
0x0082: 0x0101, # LATIN SMALL LETTER A WITH MACRON
0x0083: 0x00c9, # LATIN CAPITAL LETTER E WITH ACUTE
0x0084: 0x0104, # LATIN CAPITAL LETTER A WITH OGONEK
0x0085: 0x00d6, # LATIN CAPITAL LETTER O WITH DIAERESIS
0x0086: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS
0x0087: 0x00e1, # LATIN SMALL LETTER A WITH ACUTE
0x0088: 0x0105, # LATIN SMALL LETTER A WITH OGONEK
0x0089: 0x010c, # LATIN CAPITAL LETTER C WITH CARON
0x008a: 0x00e4, # LATIN SMALL LETTER A WITH DIAERESIS
0x008b: 0x010d, # LATIN SMALL LETTER C WITH CARON
0x008c: 0x0106, # LATIN CAPITAL LETTER C WITH ACUTE
0x008d: 0x0107, # LATIN SMALL LETTER C WITH ACUTE
0x008e: 0x00e9, # LATIN SMALL LETTER E WITH ACUTE
0x008f: 0x0179, # LATIN CAPITAL LETTER Z WITH ACUTE
0x0090: 0x017a, # LATIN SMALL LETTER Z WITH ACUTE
0x0091: 0x010e, # LATIN CAPITAL LETTER D WITH CARON
0x0092: 0x00ed, # LATIN SMALL LETTER I WITH ACUTE
0x0093: 0x010f, # LATIN SMALL LETTER D WITH CARON
0x0094: 0x0112, # LATIN CAPITAL LETTER E WITH MACRON
0x0095: 0x0113, # LATIN SMALL LETTER E WITH MACRON
0x0096: 0x0116, # LATIN CAPITAL LETTER E WITH DOT ABOVE
0x0097: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE
0x0098: 0x0117, # LATIN SMALL LETTER E WITH DOT ABOVE
0x0099: 0x00f4, # LATIN SMALL LETTER O WITH CIRCUMFLEX
0x009a: 0x00f6, # LATIN SMALL LETTER O WITH DIAERESIS
0x009b: 0x00f5, # LATIN SMALL LETTER O WITH TILDE
0x009c: 0x00fa, # LATIN SMALL LETTER U WITH ACUTE
0x009d: 0x011a, # LATIN CAPITAL LETTER E WITH CARON
0x009e: 0x011b, # LATIN SMALL LETTER E WITH CARON
0x009f: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS
0x00a0: 0x2020, # DAGGER
0x00a1: 0x00b0, # DEGREE SIGN
0x00a2: 0x0118, # LATIN CAPITAL LETTER E WITH OGONEK
0x00a4: 0x00a7, # SECTION SIGN
0x00a5: 0x2022, # BULLET
0x00a6: 0x00b6, # PILCROW SIGN
0x00a7: 0x00df, # LATIN SMALL LETTER SHARP S
0x00a8: 0x00ae, # REGISTERED SIGN
0x00aa: 0x2122, # TRADE MARK SIGN
0x00ab: 0x0119, # LATIN SMALL LETTER E WITH OGONEK
0x00ac: 0x00a8, # DIAERESIS
0x00ad: 0x2260, # NOT EQUAL TO
0x00ae: 0x0123, # LATIN SMALL LETTER G WITH CEDILLA
0x00af: 0x012e, # LATIN CAPITAL LETTER I WITH OGONEK
0x00b0: 0x012f, # LATIN SMALL LETTER I WITH OGONEK
0x00b1: 0x012a, # LATIN CAPITAL LETTER I WITH MACRON
0x00b2: 0x2264, # LESS-THAN OR EQUAL TO
0x00b3: 0x2265, # GREATER-THAN OR EQUAL TO
0x00b4: 0x012b, # LATIN SMALL LETTER I WITH MACRON
0x00b5: 0x0136, # LATIN CAPITAL LETTER K WITH CEDILLA
0x00b6: 0x2202, # PARTIAL DIFFERENTIAL
0x00b7: 0x2211, # N-ARY SUMMATION
0x00b8: 0x0142, # LATIN SMALL LETTER L WITH STROKE
0x00b9: 0x013b, # LATIN CAPITAL LETTER L WITH CEDILLA
0x00ba: 0x013c, # LATIN SMALL LETTER L WITH CEDILLA
0x00bb: 0x013d, # LATIN CAPITAL LETTER L WITH CARON
0x00bc: 0x013e, # LATIN SMALL LETTER L WITH CARON
0x00bd: 0x0139, # LATIN CAPITAL LETTER L WITH ACUTE
0x00be: 0x013a, # LATIN SMALL LETTER L WITH ACUTE
0x00bf: 0x0145, # LATIN CAPITAL LETTER N WITH CEDILLA
0x00c0: 0x0146, # LATIN SMALL LETTER N WITH CEDILLA
0x00c1: 0x0143, # LATIN CAPITAL LETTER N WITH ACUTE
0x00c2: 0x00ac, # NOT SIGN
0x00c3: 0x221a, # SQUARE ROOT
0x00c4: 0x0144, # LATIN SMALL LETTER N WITH ACUTE
0x00c5: 0x0147, # LATIN CAPITAL LETTER N WITH CARON
0x00c6: 0x2206, # INCREMENT
0x00c7: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00c8: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00c9: 0x2026, # HORIZONTAL ELLIPSIS
0x00ca: 0x00a0, # NO-BREAK SPACE
0x00cb: 0x0148, # LATIN SMALL LETTER N WITH CARON
0x00cc: 0x0150, # LATIN CAPITAL LETTER O WITH DOUBLE ACUTE
0x00cd: 0x00d5, # LATIN CAPITAL LETTER O WITH TILDE
0x00ce: 0x0151, # LATIN SMALL LETTER O WITH DOUBLE ACUTE
0x00cf: 0x014c, # LATIN CAPITAL LETTER O WITH MACRON
0x00d0: 0x2013, # EN DASH
0x00d1: 0x2014, # EM DASH
0x00d2: 0x201c, # LEFT DOUBLE QUOTATION MARK
0x00d3: 0x201d, # RIGHT DOUBLE QUOTATION MARK
0x00d4: 0x2018, # LEFT SINGLE QUOTATION MARK
0x00d5: 0x2019, # RIGHT SINGLE QUOTATION MARK
0x00d6: 0x00f7, # DIVISION SIGN
0x00d7: 0x25ca, # LOZENGE
0x00d8: 0x014d, # LATIN SMALL LETTER O WITH MACRON
0x00d9: 0x0154, # LATIN CAPITAL LETTER R WITH ACUTE
0x00da: 0x0155, # LATIN SMALL LETTER R WITH ACUTE
0x00db: 0x0158, # LATIN CAPITAL LETTER R WITH CARON
0x00dc: 0x2039, # SINGLE LEFT-POINTING ANGLE QUOTATION MARK
0x00dd: 0x203a, # SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
0x00de: 0x0159, # LATIN SMALL LETTER R WITH CARON
0x00df: 0x0156, # LATIN CAPITAL LETTER R WITH CEDILLA
0x00e0: 0x0157, # LATIN SMALL LETTER R WITH CEDILLA
0x00e1: 0x0160, # LATIN CAPITAL LETTER S WITH CARON
0x00e2: 0x201a, # SINGLE LOW-9 QUOTATION MARK
0x00e3: 0x201e, # DOUBLE LOW-9 QUOTATION MARK
0x00e4: 0x0161, # LATIN SMALL LETTER S WITH CARON
0x00e5: 0x015a, # LATIN CAPITAL LETTER S WITH ACUTE
0x00e6: 0x015b, # LATIN SMALL LETTER S WITH ACUTE
0x00e7: 0x00c1, # LATIN CAPITAL LETTER A WITH ACUTE
0x00e8: 0x0164, # LATIN CAPITAL LETTER T WITH CARON
0x00e9: 0x0165, # LATIN SMALL LETTER T WITH CARON
0x00ea: 0x00cd, # LATIN CAPITAL LETTER I WITH ACUTE
0x00eb: 0x017d, # LATIN CAPITAL LETTER Z WITH CARON
0x00ec: 0x017e, # LATIN SMALL LETTER Z WITH CARON
0x00ed: 0x016a, # LATIN CAPITAL LETTER U WITH MACRON
0x00ee: 0x00d3, # LATIN CAPITAL LETTER O WITH ACUTE
0x00ef: 0x00d4, # LATIN CAPITAL LETTER O WITH CIRCUMFLEX
0x00f0: 0x016b, # LATIN SMALL LETTER U WITH MACRON
0x00f1: 0x016e, # LATIN CAPITAL LETTER U WITH RING ABOVE
0x00f2: 0x00da, # LATIN CAPITAL LETTER U WITH ACUTE
0x00f3: 0x016f, # LATIN SMALL LETTER U WITH RING ABOVE
0x00f4: 0x0170, # LATIN CAPITAL LETTER U WITH DOUBLE ACUTE
0x00f5: 0x0171, # LATIN SMALL LETTER U WITH DOUBLE ACUTE
0x00f6: 0x0172, # LATIN CAPITAL LETTER U WITH OGONEK
0x00f7: 0x0173, # LATIN SMALL LETTER U WITH OGONEK
0x00f8: 0x00dd, # LATIN CAPITAL LETTER Y WITH ACUTE
0x00f9: 0x00fd, # LATIN SMALL LETTER Y WITH ACUTE
0x00fa: 0x0137, # LATIN SMALL LETTER K WITH CEDILLA
0x00fb: 0x017b, # LATIN CAPITAL LETTER Z WITH DOT ABOVE
0x00fc: 0x0141, # LATIN CAPITAL LETTER L WITH STROKE
0x00fd: 0x017c, # LATIN SMALL LETTER Z WITH DOT ABOVE
0x00fe: 0x0122, # LATIN CAPITAL LETTER G WITH CEDILLA
0x00ff: 0x02c7, # CARON
})
### Encoding Map

# Inverse of decoding_map: Unicode code point -> mac-latin2 byte.
encoding_map = codecs.make_encoding_map(decoding_map)
| mit |
sometallgit/AutoUploader | Python27/Lib/distutils/tests/test_build_ext.py | 5 | 20119 | import sys
import os
from StringIO import StringIO
import textwrap
from distutils.core import Extension, Distribution
from distutils.command.build_ext import build_ext
from distutils import sysconfig
from distutils.tests import support
from distutils.errors import (DistutilsSetupError, CompileError,
DistutilsPlatformError)
import unittest
from test import test_support
# http://bugs.python.org/issue4373
# Don't load the xx module more than once.
ALREADY_TESTED = False
class BuildExtTestCase(support.TempdirManager,
                       support.LoggingSilencer,
                       unittest.TestCase):
    """Tests for the distutils `build_ext` command.

    Each test runs in a temporary directory (via TempdirManager) that is
    also placed on sys.path so freshly built extension modules can be
    imported back.
    """

    def setUp(self):
        super(BuildExtTestCase, self).setUp()
        self.tmp_dir = self.mkdtemp()
        self.xx_created = False
        sys.path.append(self.tmp_dir)
        self.addCleanup(sys.path.remove, self.tmp_dir)
        if sys.version > "2.6":
            # Point the per-user install base at a scratch directory so
            # --user related tests don't touch the real user site.
            import site
            self.old_user_base = site.USER_BASE
            site.USER_BASE = self.mkdtemp()
            from distutils.command import build_ext
            build_ext.USER_BASE = site.USER_BASE

    def tearDown(self):
        if self.xx_created:
            test_support.unload('xx')
            # XXX on Windows the test leaves a directory
            # with xx module in TEMP
        super(BuildExtTestCase, self).tearDown()

    def test_build_ext(self):
        # End-to-end: compile the bundled xxmodule.c and import the result.
        global ALREADY_TESTED
        support.copy_xxmodule_c(self.tmp_dir)
        self.xx_created = True
        xx_c = os.path.join(self.tmp_dir, 'xxmodule.c')
        xx_ext = Extension('xx', [xx_c])
        dist = Distribution({'name': 'xx', 'ext_modules': [xx_ext]})
        dist.package_dir = self.tmp_dir
        cmd = build_ext(dist)
        support.fixup_build_ext(cmd)
        cmd.build_lib = self.tmp_dir
        cmd.build_temp = self.tmp_dir

        old_stdout = sys.stdout
        if not test_support.verbose:
            # silence compiler output
            sys.stdout = StringIO()
        try:
            cmd.ensure_finalized()
            cmd.run()
        finally:
            sys.stdout = old_stdout

        if ALREADY_TESTED:
            self.skipTest('Already tested in %s' % ALREADY_TESTED)
        else:
            ALREADY_TESTED = type(self).__name__

        import xx

        for attr in ('error', 'foo', 'new', 'roj'):
            self.assertTrue(hasattr(xx, attr))

        self.assertEqual(xx.foo(2, 5), 7)
        self.assertEqual(xx.foo(13,15), 28)
        self.assertEqual(xx.new().demo(), None)
        if test_support.HAVE_DOCSTRINGS:
            doc = 'This is a template module just for instruction.'
            self.assertEqual(xx.__doc__, doc)
        self.assertIsInstance(xx.Null(), xx.Null)
        self.assertIsInstance(xx.Str(), xx.Str)

    def test_solaris_enable_shared(self):
        dist = Distribution({'name': 'xx'})
        cmd = build_ext(dist)
        old = sys.platform

        sys.platform = 'sunos' # fooling finalize_options
        from distutils.sysconfig import  _config_vars
        old_var = _config_vars.get('Py_ENABLE_SHARED')
        _config_vars['Py_ENABLE_SHARED'] = 1
        try:
            cmd.ensure_finalized()
        finally:
            # Restore the real platform/config no matter what happened.
            sys.platform = old
            if old_var is None:
                del _config_vars['Py_ENABLE_SHARED']
            else:
                _config_vars['Py_ENABLE_SHARED'] = old_var

        # make sure we get some library dirs under solaris
        self.assertGreater(len(cmd.library_dirs), 0)

    @unittest.skipIf(sys.version < '2.6',
                     'site.USER_SITE was introduced in 2.6')
    def test_user_site(self):
        import site
        dist = Distribution({'name': 'xx'})
        cmd = build_ext(dist)

        # making sure the user option is there
        options = [name for name, short, label in
                   cmd.user_options]
        self.assertIn('user', options)

        # setting a value
        cmd.user = 1

        # setting user based lib and include
        lib = os.path.join(site.USER_BASE, 'lib')
        incl = os.path.join(site.USER_BASE, 'include')
        os.mkdir(lib)
        os.mkdir(incl)

        cmd.ensure_finalized()

        # see if include_dirs and library_dirs were set
        self.assertIn(lib, cmd.library_dirs)
        self.assertIn(lib, cmd.rpath)
        self.assertIn(incl, cmd.include_dirs)

    def test_finalize_options(self):
        # Make sure Python's include directories (for Python.h, pyconfig.h,
        # etc.) are in the include search path.
        modules = [Extension('foo', ['xxx'])]
        dist = Distribution({'name': 'xx', 'ext_modules': modules})
        cmd = build_ext(dist)
        cmd.finalize_options()

        py_include = sysconfig.get_python_inc()
        self.assertIn(py_include, cmd.include_dirs)

        plat_py_include = sysconfig.get_python_inc(plat_specific=1)
        self.assertIn(plat_py_include, cmd.include_dirs)

        # make sure cmd.libraries is turned into a list
        # if it's a string
        cmd = build_ext(dist)
        cmd.libraries = 'my_lib, other_lib lastlib'
        cmd.finalize_options()
        self.assertEqual(cmd.libraries, ['my_lib', 'other_lib', 'lastlib'])

        # make sure cmd.library_dirs is turned into a list
        # if it's a string
        cmd = build_ext(dist)
        cmd.library_dirs = 'my_lib_dir%sother_lib_dir' % os.pathsep
        cmd.finalize_options()
        self.assertIn('my_lib_dir', cmd.library_dirs)
        self.assertIn('other_lib_dir', cmd.library_dirs)

        # make sure rpath is turned into a list
        # if it's a string
        cmd = build_ext(dist)
        cmd.rpath = 'one%stwo' % os.pathsep
        cmd.finalize_options()
        self.assertEqual(cmd.rpath, ['one', 'two'])

        # make sure cmd.link_objects is turned into a list
        # if it's a string
        cmd = build_ext(dist)
        cmd.link_objects = 'one two,three'
        cmd.finalize_options()
        self.assertEqual(cmd.link_objects, ['one', 'two', 'three'])

        # XXX more tests to perform for win32

        # make sure define is turned into 2-tuples
        # strings if they are ','-separated strings
        cmd = build_ext(dist)
        cmd.define = 'one,two'
        cmd.finalize_options()
        self.assertEqual(cmd.define, [('one', '1'), ('two', '1')])

        # make sure undef is turned into a list of
        # strings if they are ','-separated strings
        cmd = build_ext(dist)
        cmd.undef = 'one,two'
        cmd.finalize_options()
        self.assertEqual(cmd.undef, ['one', 'two'])

        # make sure swig_opts is turned into a list
        cmd = build_ext(dist)
        cmd.swig_opts = None
        cmd.finalize_options()
        self.assertEqual(cmd.swig_opts, [])

        cmd = build_ext(dist)
        cmd.swig_opts = '1 2'
        cmd.finalize_options()
        self.assertEqual(cmd.swig_opts, ['1', '2'])

    def test_check_extensions_list(self):
        dist = Distribution()
        cmd = build_ext(dist)
        cmd.finalize_options()

        #'extensions' option must be a list of Extension instances
        self.assertRaises(DistutilsSetupError, cmd.check_extensions_list, 'foo')

        # each element of 'ext_modules' option must be an
        # Extension instance or 2-tuple
        exts = [('bar', 'foo', 'bar'), 'foo']
        self.assertRaises(DistutilsSetupError, cmd.check_extensions_list, exts)

        # first element of each tuple in 'ext_modules'
        # must be the extension name (a string) and match
        # a python dotted-separated name
        exts = [('foo-bar', '')]
        self.assertRaises(DistutilsSetupError, cmd.check_extensions_list, exts)

        # second element of each tuple in 'ext_modules'
        # must be a dictionary (build info)
        exts = [('foo.bar', '')]
        self.assertRaises(DistutilsSetupError, cmd.check_extensions_list, exts)

        # ok this one should pass
        exts = [('foo.bar', {'sources': [''], 'libraries': 'foo',
                             'some': 'bar'})]
        cmd.check_extensions_list(exts)
        ext = exts[0]
        self.assertIsInstance(ext, Extension)

        # check_extensions_list adds in ext the values passed
        # when they are in ('include_dirs', 'library_dirs', 'libraries'
        # 'extra_objects', 'extra_compile_args', 'extra_link_args')
        self.assertEqual(ext.libraries, 'foo')
        self.assertFalse(hasattr(ext, 'some'))

        # 'macros' element of build info dict must be 1- or 2-tuple
        exts = [('foo.bar', {'sources': [''], 'libraries': 'foo',
                'some': 'bar', 'macros': [('1', '2', '3'), 'foo']})]
        self.assertRaises(DistutilsSetupError, cmd.check_extensions_list, exts)

        exts[0][1]['macros'] = [('1', '2'), ('3',)]
        cmd.check_extensions_list(exts)
        self.assertEqual(exts[0].undef_macros, ['3'])
        self.assertEqual(exts[0].define_macros, [('1', '2')])

    def test_get_source_files(self):
        modules = [Extension('foo', ['xxx'])]
        dist = Distribution({'name': 'xx', 'ext_modules': modules})
        cmd = build_ext(dist)
        cmd.ensure_finalized()
        self.assertEqual(cmd.get_source_files(), ['xxx'])

    def test_compiler_option(self):
        # cmd.compiler is an option and
        # should not be overridden by a compiler instance
        # when the command is run
        dist = Distribution()
        cmd = build_ext(dist)
        cmd.compiler = 'unix'
        cmd.ensure_finalized()
        cmd.run()
        self.assertEqual(cmd.compiler, 'unix')

    def test_get_outputs(self):
        tmp_dir = self.mkdtemp()
        c_file = os.path.join(tmp_dir, 'foo.c')
        self.write_file(c_file, 'void initfoo(void) {};\n')
        ext = Extension('foo', [c_file])
        dist = Distribution({'name': 'xx',
                             'ext_modules': [ext]})
        cmd = build_ext(dist)
        support.fixup_build_ext(cmd)
        cmd.ensure_finalized()
        self.assertEqual(len(cmd.get_outputs()), 1)

        cmd.build_lib = os.path.join(self.tmp_dir, 'build')
        cmd.build_temp = os.path.join(self.tmp_dir, 'tempt')

        # issue #5977 : distutils build_ext.get_outputs
        # returns wrong result with --inplace
        other_tmp_dir = os.path.realpath(self.mkdtemp())
        old_wd = os.getcwd()
        os.chdir(other_tmp_dir)
        try:
            cmd.inplace = 1
            cmd.run()
            so_file = cmd.get_outputs()[0]
        finally:
            os.chdir(old_wd)
        self.assertTrue(os.path.exists(so_file))
        self.assertEqual(os.path.splitext(so_file)[-1],
                         sysconfig.get_config_var('SO'))
        so_dir = os.path.dirname(so_file)
        # inplace build: output lands in the current working directory
        self.assertEqual(so_dir, other_tmp_dir)
        cmd.compiler = None
        cmd.inplace = 0
        cmd.run()
        so_file = cmd.get_outputs()[0]
        self.assertTrue(os.path.exists(so_file))
        self.assertEqual(os.path.splitext(so_file)[-1],
                         sysconfig.get_config_var('SO'))
        so_dir = os.path.dirname(so_file)
        # normal build: output lands under build_lib
        self.assertEqual(so_dir, cmd.build_lib)

        # inplace = 0, cmd.package = 'bar'
        build_py = cmd.get_finalized_command('build_py')
        build_py.package_dir = {'': 'bar'}
        path = cmd.get_ext_fullpath('foo')
        # checking that the last directory is the build_dir
        path = os.path.split(path)[0]
        self.assertEqual(path, cmd.build_lib)

        # inplace = 1, cmd.package = 'bar'
        cmd.inplace = 1
        other_tmp_dir = os.path.realpath(self.mkdtemp())
        old_wd = os.getcwd()
        os.chdir(other_tmp_dir)
        try:
            path = cmd.get_ext_fullpath('foo')
        finally:
            os.chdir(old_wd)
        # checking that the last directory is bar
        path = os.path.split(path)[0]
        lastdir = os.path.split(path)[-1]
        self.assertEqual(lastdir, 'bar')

    def test_ext_fullpath(self):
        ext = sysconfig.get_config_vars()['SO']
        # building lxml.etree inplace
        dist = Distribution()
        cmd = build_ext(dist)
        cmd.inplace = 1
        cmd.distribution.package_dir = {'': 'src'}
        cmd.distribution.packages = ['lxml', 'lxml.html']
        curdir = os.getcwd()
        wanted = os.path.join(curdir, 'src', 'lxml', 'etree' + ext)
        path = cmd.get_ext_fullpath('lxml.etree')
        self.assertEqual(wanted, path)

        # building lxml.etree not inplace
        cmd.inplace = 0
        cmd.build_lib = os.path.join(curdir, 'tmpdir')
        wanted = os.path.join(curdir, 'tmpdir', 'lxml', 'etree' + ext)
        path = cmd.get_ext_fullpath('lxml.etree')
        self.assertEqual(wanted, path)

        # building twisted.runner.portmap not inplace
        build_py = cmd.get_finalized_command('build_py')
        build_py.package_dir = {}
        cmd.distribution.packages = ['twisted', 'twisted.runner.portmap']
        path = cmd.get_ext_fullpath('twisted.runner.portmap')
        wanted = os.path.join(curdir, 'tmpdir', 'twisted', 'runner',
                              'portmap' + ext)
        self.assertEqual(wanted, path)

        # building twisted.runner.portmap inplace
        cmd.inplace = 1
        path = cmd.get_ext_fullpath('twisted.runner.portmap')
        wanted = os.path.join(curdir, 'twisted', 'runner', 'portmap' + ext)
        self.assertEqual(wanted, path)

    def test_build_ext_inplace(self):
        etree_c = os.path.join(self.tmp_dir, 'lxml.etree.c')
        etree_ext = Extension('lxml.etree', [etree_c])
        dist = Distribution({'name': 'lxml', 'ext_modules': [etree_ext]})
        cmd = build_ext(dist)
        cmd.ensure_finalized()
        cmd.inplace = 1
        cmd.distribution.package_dir = {'': 'src'}
        cmd.distribution.packages = ['lxml', 'lxml.html']
        curdir = os.getcwd()
        ext = sysconfig.get_config_var("SO")
        wanted = os.path.join(curdir, 'src', 'lxml', 'etree' + ext)
        path = cmd.get_ext_fullpath('lxml.etree')
        self.assertEqual(wanted, path)

    def test_setuptools_compat(self):
        import distutils.core, distutils.extension, distutils.command.build_ext
        saved_ext = distutils.extension.Extension
        try:
            # on some platforms, it loads the deprecated "dl" module
            test_support.import_module('setuptools_build_ext', deprecated=True)

            # theses import patch Distutils' Extension class
            from setuptools_build_ext import build_ext as setuptools_build_ext
            from setuptools_extension import Extension

            etree_c = os.path.join(self.tmp_dir, 'lxml.etree.c')
            etree_ext = Extension('lxml.etree', [etree_c])
            dist = Distribution({'name': 'lxml', 'ext_modules': [etree_ext]})
            cmd = setuptools_build_ext(dist)
            cmd.ensure_finalized()
            cmd.inplace = 1
            cmd.distribution.package_dir = {'': 'src'}
            cmd.distribution.packages = ['lxml', 'lxml.html']
            curdir = os.getcwd()
            ext = sysconfig.get_config_var("SO")
            wanted = os.path.join(curdir, 'src', 'lxml', 'etree' + ext)
            path = cmd.get_ext_fullpath('lxml.etree')
            self.assertEqual(wanted, path)
        finally:
            # restoring Distutils' Extension class otherwise its broken
            distutils.extension.Extension = saved_ext
            distutils.core.Extension = saved_ext
            distutils.command.build_ext.Extension = saved_ext

    def test_build_ext_path_with_os_sep(self):
        dist = Distribution({'name': 'UpdateManager'})
        cmd = build_ext(dist)
        cmd.ensure_finalized()
        ext = sysconfig.get_config_var("SO")
        ext_name = os.path.join('UpdateManager', 'fdsend')
        ext_path = cmd.get_ext_fullpath(ext_name)
        wanted = os.path.join(cmd.build_lib, 'UpdateManager', 'fdsend' + ext)
        self.assertEqual(ext_path, wanted)

    @unittest.skipUnless(sys.platform == 'win32', 'these tests require Windows')
    def test_build_ext_path_cross_platform(self):
        dist = Distribution({'name': 'UpdateManager'})
        cmd = build_ext(dist)
        cmd.ensure_finalized()
        ext = sysconfig.get_config_var("SO")
        # this needs to work even under win32
        ext_name = 'UpdateManager/fdsend'
        ext_path = cmd.get_ext_fullpath(ext_name)
        wanted = os.path.join(cmd.build_lib, 'UpdateManager', 'fdsend' + ext)
        self.assertEqual(ext_path, wanted)

    @unittest.skipUnless(sys.platform == 'darwin', 'test only relevant for MacOSX')
    def test_deployment_target_default(self):
        # Issue 9516: Test that, in the absence of the environment variable,
        # an extension module is compiled with the same deployment target as
        #  the interpreter.
        self._try_compile_deployment_target('==', None)

    @unittest.skipUnless(sys.platform == 'darwin', 'test only relevant for MacOSX')
    def test_deployment_target_too_low(self):
        # Issue 9516: Test that an extension module is not allowed to be
        # compiled with a deployment target less than that of the interpreter.
        self.assertRaises(DistutilsPlatformError,
            self._try_compile_deployment_target, '>', '10.1')

    @unittest.skipUnless(sys.platform == 'darwin', 'test only relevant for MacOSX')
    def test_deployment_target_higher_ok(self):
        # Issue 9516: Test that an extension module can be compiled with a
        # deployment target higher than that of the interpreter: the ext
        # module may depend on some newer OS feature.
        deptarget = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
        if deptarget:
            # increment the minor version number (i.e. 10.6 -> 10.7)
            deptarget = [int(x) for x in deptarget.split('.')]
            deptarget[-1] += 1
            deptarget = '.'.join(str(i) for i in deptarget)
            self._try_compile_deployment_target('<', deptarget)

    def _try_compile_deployment_target(self, operator, target):
        # Helper: compile a tiny extension that #errors unless the effective
        # MAC_OS_X_VERSION_MIN_REQUIRED satisfies `operator` vs `target`.
        orig_environ = os.environ
        os.environ = orig_environ.copy()
        self.addCleanup(setattr, os, 'environ', orig_environ)

        if target is None:
            if os.environ.get('MACOSX_DEPLOYMENT_TARGET'):
                del os.environ['MACOSX_DEPLOYMENT_TARGET']
        else:
            os.environ['MACOSX_DEPLOYMENT_TARGET'] = target

        deptarget_c = os.path.join(self.tmp_dir, 'deptargetmodule.c')

        with open(deptarget_c, 'w') as fp:
            fp.write(textwrap.dedent('''\
                #include <AvailabilityMacros.h>

                int dummy;

                #if TARGET %s MAC_OS_X_VERSION_MIN_REQUIRED
                #else
                #error "Unexpected target"
                #endif

            ''' % operator))

        # get the deployment target that the interpreter was built with
        target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
        target = tuple(map(int, target.split('.')[0:2]))
        # format the target value as defined in the Apple
        # Availability Macros.  We can't use the macro names since
        # at least one value we test with will not exist yet.
        if target[1] < 10:
            # for 10.1 through 10.9.x -> "10n0"
            target = '%02d%01d0' % target
        else:
            # for 10.10 and beyond -> "10nn00"
            target = '%02d%02d00' % target
        deptarget_ext = Extension(
            'deptarget',
            [deptarget_c],
            extra_compile_args=['-DTARGET=%s'%(target,)],
        )
        dist = Distribution({
            'name': 'deptarget',
            'ext_modules': [deptarget_ext]
        })
        dist.package_dir = self.tmp_dir
        cmd = build_ext(dist)
        cmd.build_lib = self.tmp_dir
        cmd.build_temp = self.tmp_dir

        try:
            cmd.ensure_finalized()
            cmd.run()
        except CompileError:
            self.fail("Wrong deployment target during compilation")
def test_suite():
    # Collect all BuildExtTestCase tests into a suite (regrtest entry point).
    return unittest.makeSuite(BuildExtTestCase)
if __name__ == '__main__':
    # Allow running this test module directly.
    test_support.run_unittest(test_suite())
| mit |
yashu-seth/networkx | networkx/algorithms/tests/test_euler.py | 54 | 2973 | #!/usr/bin/env python
# run with nose: nosetests -v test_euler.py
from nose.tools import *
import networkx as nx
from networkx import is_eulerian,eulerian_circuit
class TestEuler:
    """Tests for networkx's is_eulerian predicate and eulerian_circuit."""

    def test_is_eulerian(self):
        # Complete graph K_n is Eulerian iff n is odd (all degrees even);
        # hypercube Q_n is Eulerian iff n is even.
        assert_true(is_eulerian(nx.complete_graph(5)))
        assert_true(is_eulerian(nx.complete_graph(7)))
        assert_true(is_eulerian(nx.hypercube_graph(4)))
        assert_true(is_eulerian(nx.hypercube_graph(6)))

        assert_false(is_eulerian(nx.complete_graph(4)))
        assert_false(is_eulerian(nx.complete_graph(6)))
        assert_false(is_eulerian(nx.hypercube_graph(3)))
        assert_false(is_eulerian(nx.hypercube_graph(5)))

        assert_false(is_eulerian(nx.petersen_graph()))
        assert_false(is_eulerian(nx.path_graph(4)))

    def test_is_eulerian2(self):
        # not connected
        G = nx.Graph()
        G.add_nodes_from([1,2,3])
        assert_false(is_eulerian(G))

        # not strongly connected
        G = nx.DiGraph()
        G.add_nodes_from([1,2,3])
        assert_false(is_eulerian(G))

        # strongly connected but in/out degrees unbalanced at nodes 2 and 3
        G = nx.MultiDiGraph()
        G.add_edge(1,2)
        G.add_edge(2,3)
        G.add_edge(2,3)
        G.add_edge(3,1)
        assert_false(is_eulerian(G))

    def test_eulerian_circuit_cycle(self):
        # On a cycle graph the circuit visits every node once, starting
        # (and implicitly ending) at the requested source.
        G=nx.cycle_graph(4)

        edges=list(eulerian_circuit(G,source=0))
        nodes=[u for u,v in edges]
        assert_equal(nodes,[0,3,2,1])
        assert_equal(edges,[(0,3),(3,2),(2,1),(1,0)])

        edges=list(eulerian_circuit(G,source=1))
        nodes=[u for u,v in edges]
        assert_equal(nodes,[1,2,3,0])
        assert_equal(edges,[(1,2),(2,3),(3,0),(0,1)])

        G=nx.complete_graph(3)

        edges=list(eulerian_circuit(G,source=0))
        nodes=[u for u,v in edges]
        assert_equal(nodes,[0,2,1])
        assert_equal(edges,[(0,2),(2,1),(1,0)])

        edges=list(eulerian_circuit(G,source=1))
        nodes=[u for u,v in edges]
        assert_equal(nodes,[1,2,0])
        assert_equal(edges,[(1,2),(2,0),(0,1)])

    def test_eulerian_circuit_digraph(self):
        # Directed cycle: the circuit must follow edge directions.
        G=nx.DiGraph()
        G.add_cycle([0,1,2,3])

        edges=list(eulerian_circuit(G,source=0))
        nodes=[u for u,v in edges]
        assert_equal(nodes,[0,1,2,3])
        assert_equal(edges,[(0,1),(1,2),(2,3),(3,0)])

        edges=list(eulerian_circuit(G,source=1))
        nodes=[u for u,v in edges]
        assert_equal(nodes,[1,2,3,0])
        assert_equal(edges,[(1,2),(2,3),(3,0),(0,1)])

    def test_eulerian_circuit_multigraph(self):
        # Parallel edges must each be traversed exactly once.
        G=nx.MultiGraph()
        G.add_cycle([0,1,2,3])
        G.add_edge(1,2)
        G.add_edge(1,2)
        edges=list(eulerian_circuit(G,source=0))
        nodes=[u for u,v in edges]
        assert_equal(nodes,[0,3,2,1,2,1])
        assert_equal(edges,[(0,3),(3,2),(2,1),(1,2),(2,1),(1,0)])

    @raises(nx.NetworkXError)
    def test_not_eulerian(self):
        # K_4 is not Eulerian, so eulerian_circuit must raise.
        f=list(eulerian_circuit(nx.complete_graph(4)))
| bsd-3-clause |
thedekel/flask-auth | examples/sqlalchemy_model.py | 1 | 2406 | from flask import Flask, request, redirect, url_for
from flask.ext.sqlalchemy import SQLAlchemy
from flaskext.auth import Auth, login_required, logout
from flaskext.auth.models.sa import get_user_class
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:////tmp/test.db'
db = SQLAlchemy(app)
auth = Auth(app, login_url_name='index')
User = get_user_class(db.Model)
@login_required()
def admin():
    # Simple protected page; login_required redirects anonymous users to
    # the 'index' view (configured on the Auth instance above).
    return 'Admin! Excellent!'
def index():
    """Login form: authenticate an existing user and redirect to /admin/.

    GET renders the form; POST checks the submitted credentials.
    """
    if request.method == 'POST':
        username = request.form['username']
        # Bug fix: Query.one() raises NoResultFound when no user matches,
        # so the `is not None` check below could never trigger.  first()
        # returns None for a missing user, which is what the check expects.
        user = User.query.filter(User.username==username).first()
        if user is not None:
            # Authenticate and log in!
            if user.authenticate(request.form['password']):
                return redirect(url_for('admin'))
        return 'Failure :('
    return '''
        <form method="POST">
            Username: <input type="text" name="username"/><br/>
            Password: <input type="password" name="password"/><br/>
            <input type="submit" value="Log in"/>
        </form>
        '''
def user_create():
    """Registration form: create a new user unless the name is taken.

    GET renders the form; POST inserts the user and redirects to the
    login page.
    """
    if request.method == 'POST':
        username = request.form['username']
        # NOTE(review): check-then-insert is racy under concurrent
        # requests; acceptable for an example app.
        if User.query.filter(User.username==username).first():
            return 'User already exists.'
        password = request.form['password']
        user = User(username=username, password=password)
        db.session.add(user)
        db.session.commit()
        return redirect(url_for('index'))
    return '''
        <form method="POST">
            Username: <input type="text" name="username"/><br/>
            Password: <input type="password" name="password"/><br/>
            <input type="submit" value="Create"/>
        </form>
        '''
def logout_view():
    """End the session; logout() returns the logged-out user's data or None."""
    user_data = logout()
    if user_data is None:
        return 'No user to log out.'
    return 'Logged out user {0}.'.format(user_data['username'])
# URLs
app.add_url_rule('/', 'index', index, methods=['GET', 'POST'])
app.add_url_rule('/admin/', 'admin', admin)
app.add_url_rule('/users/create/', 'user_create', user_create, methods=['GET', 'POST'])
app.add_url_rule('/logout/', 'logout', logout_view)
# Secret key needed to use sessions.
app.secret_key = 'N4BUdSXUzHxNoO8g'
if __name__ == '__main__':
    # Create the tables on first run.  Bug fix: the original probed
    # '/tmp/flask_auth_test.db', which is not the configured database file
    # ('/tmp/test.db' per SQLALCHEMY_DATABASE_URI), and it also leaked the
    # open file handle; use the right path and a context manager.
    try:
        with open('/tmp/test.db'):
            pass
    except IOError:
        db.create_all()
    app.run(debug=True)
| mit |
steinsag/hosteurope-letsencrypt | neu.py | 1 | 1584 | #!/usr/bin/env python3
# coding=utf-8
import json
import os
from shared import domain_list, config_file
# certbot tries to write to /var/log/letsencrypt by default; because of this, running as root is required.
# certbot Error Message:
# Either run as root, or set --config-dir, --work-dir, and --logs-dir to writeable paths.
is_root = os.geteuid() == 0
# When not running as root, keep all certbot state under the user's home.
home_dir = os.path.expanduser('~/.config/hosteurope-letsencrypt')
certbot_config_dir = home_dir
certbot_work_dir = home_dir
certbot_logs_dir = os.path.expanduser('~/.config/hosteurope-letsencrypt/logs')
if not is_root and not os.path.exists(certbot_logs_dir):
    os.makedirs(certbot_logs_dir)
# Read the settings file ("einstellungen" = settings).
with open(config_file('einstellungen.json')) as cfg_file:
    config = json.load(cfg_file)
email = config['email']
staging = config['staging']
challenge = config.get('preferred-challenge', 'http')
# Assemble the certbot command line.
cmd = 'certbot certonly --manual --agree-tos --manual-public-ip-logging-ok'
cmd += ' -m ' + email
cmd += ' --preferred-challenge=' + challenge
if 'http' == challenge:
    cmd += ' --manual-auth-hook "python3 validate.py"'
if staging:
    cmd += ' --staging'
if not is_root:
    # Redirect certbot's writable directories away from /var and /etc.
    cmd += ' --logs-dir ' + certbot_logs_dir
    cmd += ' --work-dir ' + certbot_work_dir
    cmd += ' --config-dir ' + certbot_config_dir
cmd += domain_list
# Safety prompt before requesting a certificate (user answers j/n).
print(cmd)
answer = input('Für diese Domains ein neues Zertifikat erstellen? (j/n): ')
if answer != 'j':
    print('Abbruch, es wurde kein Zertifikat erstellt.')
    exit(0)
# Create the new certificate.
os.system(cmd)
| apache-2.0 |
pkuyym/Paddle | python/paddle/fluid/tests/unittests/op_test.py | 2 | 21957 | # Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import numpy as np
import random
import itertools
import paddle.fluid.core as core
import collections
from paddle.fluid.backward import append_backward
from paddle.fluid.op import Operator
from paddle.fluid.executor import Executor
from paddle.fluid.framework import Program, OpProtoHolder
def randomize_probability(batch_size, class_num, dtype='float32'):
    """Return a (batch_size, class_num) array of random probability rows.

    Each row is drawn uniformly from [0.1, 1.0) and then normalized so it
    sums to 1, giving a valid categorical distribution per sample.
    """
    prob = np.random.uniform(
        0.1, 1.0, size=(batch_size, class_num)).astype(dtype)
    prob_sum = prob.sum(axis=1)
    # Normalize every row in one broadcasted in-place step; the original
    # Python-level loop over xrange was O(batch_size) interpreter overhead
    # and also breaks under Python 3 (no xrange).
    prob /= prob_sum[:, np.newaxis]
    return prob
def create_op(scope, op_type, inputs, outputs, attrs):
    """Build an Operator instance and create its variables in `scope`.

    inputs/outputs map slot names to numpy values; a duplicable slot holds
    a list of (name, value) pairs.  Only attrs the op declares are passed.
    """
    kwargs = dict()

    def __create_var__(name, var_name):
        # Create the backing tensor in the scope and record the var name
        # under the op's input/output slot.
        scope.var(var_name).get_tensor()
        kwargs[name].append(var_name)

    for in_name, in_dup in Operator.get_op_inputs(op_type):
        if in_name in inputs:
            kwargs[in_name] = []
            if in_dup:
                # Duplicable slot: one variable per (sub_name, value) pair.
                sub_in = inputs[in_name]
                for item in sub_in:
                    sub_in_name, _ = item[0], item[1]
                    __create_var__(in_name, sub_in_name)
            else:
                __create_var__(in_name, in_name)

    for out_name, out_dup in Operator.get_op_outputs(op_type):
        if out_name in outputs:
            kwargs[out_name] = []
            if out_dup:
                sub_out = outputs[out_name]
                for item in sub_out:
                    sub_out_name, _ = item[0], item[1]
                    __create_var__(out_name, sub_out_name)
            else:
                __create_var__(out_name, out_name)

    # Forward only the attributes this op type actually declares.
    for attr_name in Operator.get_op_attr_names(op_type):
        if attr_name in attrs:
            kwargs[attr_name] = attrs[attr_name]

    return Operator(op_type, **kwargs)
def set_input(scope, op, inputs, place):
    """Copy numpy/scalar input values into the op's scope variables.

    A tuple value is (ndarray, lod): the LoD is applied to the tensor
    before the data.  Plain floats/ints are stored directly on the var.
    """
    def __set_input__(var_name, var):
        if isinstance(var, tuple) or isinstance(var, np.ndarray):
            tensor = scope.find_var(var_name).get_tensor()
            if isinstance(var, tuple):
                tensor.set_lod(var[1])
                var = var[0]
            tensor.set_dims(var.shape)
            tensor.set(var, place)
        elif isinstance(var, float):
            scope.find_var(var_name).set_float(var)
        elif isinstance(var, int):
            scope.find_var(var_name).set_int(var)

    for in_name, in_dup in Operator.get_op_inputs(op.type()):
        if in_name in inputs:
            if in_dup:
                # Duplicable slot: list of (sub_name, value) pairs.
                sub_in = inputs[in_name]
                for item in sub_in:
                    sub_in_name, sub_in_val = item[0], item[1]
                    __set_input__(sub_in_name, sub_in_val)
            else:
                __set_input__(in_name, inputs[in_name])
def get_numeric_gradient(place,
                         scope,
                         op,
                         inputs,
                         input_to_check,
                         output_names,
                         delta=0.005,
                         in_place=False):
    """Estimate d(mean of outputs)/d(input_to_check) by central differences.

    Each element of the checked tensor is perturbed by +/-delta in turn,
    the op re-run, and the slope (y_pos - y_neg) / (2*delta) recorded.
    `in_place=True` restores the inputs before every run, for ops that
    overwrite them.  Returns an ndarray shaped like the checked tensor.
    """
    # FIXME: change this method by compile time concepts
    set_input(scope, op, inputs, place)

    def product(dim):
        return reduce(lambda a, b: a * b, dim, 1)

    def get_output():
        # Scalar objective: mean over all requested outputs' means.
        sum = []
        for output_name in output_names:
            op.run(scope, place)
            sum.append(
                np.array(scope.find_var(output_name).get_tensor()).mean())
        return np.array(sum).mean()

    tensor_to_check = scope.find_var(input_to_check).get_tensor()
    tensor_size = product(tensor_to_check.get_dims())
    tensor_to_check_dtype = tensor_to_check.dtype()
    if tensor_to_check_dtype == core.VarDesc.VarType.FP32:
        tensor_to_check_dtype = np.float32
    elif tensor_to_check_dtype == core.VarDesc.VarType.FP64:
        tensor_to_check_dtype = np.float64
    else:
        raise ValueError("Not supported data type " + str(
            tensor_to_check_dtype))

    gradient_flat = np.zeros(shape=(tensor_size, ), dtype=tensor_to_check_dtype)

    def __get_elem__(tensor, i):
        if tensor_to_check_dtype == np.float32:
            return tensor.get_float_element(i)
        else:
            return tensor.get_double_element(i)

    def __set_elem__(tensor, i, e):
        if tensor_to_check_dtype == np.float32:
            tensor.set_float_element(i, e)
        else:
            tensor.set_double_element(i, e)

    # we only compute gradient of one element each time.
    # we use a for loop to compute the gradient of every element.
    for i in xrange(tensor_size):
        if in_place:
            set_input(scope, op, inputs, place)

        # get one input element throw it's index i.
        origin = __get_elem__(tensor_to_check, i)
        # add delta to it, run op and then get the sum of the result tensor.
        x_pos = origin + delta
        __set_elem__(tensor_to_check, i, x_pos)
        y_pos = get_output()

        if in_place:
            set_input(scope, op, inputs, place)

        x_neg = origin - delta
        __set_elem__(tensor_to_check, i, x_neg)
        y_neg = get_output()

        # Restore the original value before moving to the next element.
        __set_elem__(tensor_to_check, i, origin)
        gradient_flat[i] = (y_pos - y_neg) / delta / 2

    return gradient_flat.reshape(tensor_to_check.get_dims())
def append_input_output(block, op_proto, np_list, is_input):
    '''Insert VarDesc and generate Python variable instance'''
    proto_list = op_proto.inputs if is_input else op_proto.outputs

    def create_var(block, name, np_list, var_proto):
        # Intermediate outputs may have no numpy value; leave shape/lod
        # unspecified so the framework infers them.
        if name not in np_list:
            assert var_proto.intermediate, "{} not found".format(name)
            shape = None
            lod_level = None
        else:
            np_value = np_list[name]
            if isinstance(np_value, tuple):
                # (ndarray, lod) pair.
                shape = list(np_value[0].shape)
                lod_level = len(np_value[1])
            else:
                shape = list(np_value.shape)
                lod_level = 0
        return block.create_var(
            dtype="float32", shape=shape, lod_level=lod_level, name=name)

    var_dict = {}
    for var_proto in proto_list:
        var_name = str(var_proto.name)
        if is_input:
            # Dispensable inputs may simply be absent.
            if (var_name not in np_list) and var_proto.dispensable:
                continue
            assert (var_name in np_list) or (var_proto.dispensable), \
                "Missing {} as input".format(var_name)
        if var_proto.duplicable:
            assert isinstance(np_list[var_name], list), \
                "Duplicable {} should be set as list".format(var_name)
            var_list = []
            for (name, np_value) in np_list[var_name]:
                var_list.append(
                    create_var(block, name, {name: np_value}, var_proto))
            var_dict[var_name] = var_list
        else:
            var_dict[var_name] = create_var(block, var_name, np_list, var_proto)

    return var_dict
class OpTest(unittest.TestCase):
    """Base class for Fluid operator unit tests.

    Subclasses set self.op_type, self.inputs, self.outputs and optionally
    self.attrs, then call check_output()/check_grad().  Input/output values
    are numpy arrays, or (ndarray, lod) tuples for LoD tensors; duplicable
    slots hold lists of (name, value) pairs.
    """

    @classmethod
    def setUpClass(cls):
        '''Fix random seeds to remove randomness from tests'''
        cls._np_rand_state = np.random.get_state()
        cls._py_rand_state = random.getstate()
        np.random.seed(123)
        random.seed(124)

    @classmethod
    def tearDownClass(cls):
        '''Restore random seeds'''
        np.random.set_state(cls._np_rand_state)
        random.setstate(cls._py_rand_state)

    def feed_var(self, input_vars, place):
        # Build the feed dict: every numpy input (optionally paired with a
        # LoD) becomes a LoDTensor placed on `place`.
        feed_map = {}
        for var_name in input_vars:
            if isinstance(input_vars[var_name], list):
                # Duplicable slot: list of (name, value) pairs.
                for name, np_value in self.inputs[var_name]:
                    tensor = core.LoDTensor()
                    if isinstance(np_value, tuple):
                        tensor.set(np_value[0], place)
                        tensor.set_lod(np_value[1])
                    else:
                        tensor.set(np_value, place)
                    feed_map[name] = tensor
            else:
                tensor = core.LoDTensor()
                if isinstance(self.inputs[var_name], tuple):
                    tensor.set(self.inputs[var_name][0], place)
                    tensor.set_lod(self.inputs[var_name][1])
                else:
                    tensor.set(self.inputs[var_name], place)
                feed_map[var_name] = tensor

        return feed_map

    def calc_output(self, place):
        """Run the op on `place` and return only the fetched outputs."""
        outs, _ = self._calc_output(place)
        return outs

    def _calc_output(self, place):
        # Build a single-op Program from self.op_type/inputs/outputs/attrs,
        # execute it and return (outputs, fetch_list).
        op_proto = OpProtoHolder.instance().get_op_proto(self.op_type)

        program = Program()
        block = program.global_block()

        inputs = append_input_output(block, op_proto, self.inputs, True)
        outputs = append_input_output(block, op_proto, self.outputs, False)
        op = block.append_op(
            type=self.op_type,
            inputs=inputs,
            outputs=outputs,
            attrs=self.attrs if hasattr(self, "attrs") else dict())
        # infer variable type and infer shape in compile-time
        op.desc.infer_var_type(block.desc)
        op.desc.infer_shape(block.desc)

        # Fetch every output variable the test declared expectations for.
        fetch_list = []
        for var_name, var in outputs.iteritems():
            if var_name in self.outputs:
                if isinstance(var, list):
                    for v in var:
                        fetch_list.append(v)
                else:
                    fetch_list.append(var)

        feed_map = self.feed_var(inputs, place)

        exe = Executor(place)
        outs = exe.run(program,
                       feed=feed_map,
                       fetch_list=fetch_list,
                       return_numpy=False)
        return outs, fetch_list

    def check_output_with_place(self, place, atol):
        """Run the op on `place` and compare each output (and its LoD)
        against self.outputs within absolute tolerance `atol`."""
        outs, fetch_list = self._calc_output(place)
        for out_name, out_dup in Operator.get_op_outputs(self.op_type):
            if out_name not in self.outputs:
                continue

            def find_actual(target_name, fetch_list):
                # Locate the fetched variable by name; must be unique.
                found = [
                    i for i, var in enumerate(fetch_list)
                    if var.name == target_name
                ]
                self.assertTrue(
                    len(found) == 1, "Found {} {}".format(
                        len(found), target_name))
                return found[0]

            if out_dup:
                sub_out = self.outputs[out_name]
                if not isinstance(sub_out, list):
                    # NOTE(review): the '%s' here is never interpolated --
                    # AssertionError receives a tuple; the message should
                    # use % formatting instead.
                    raise AssertionError("sub_out type %s is not list",
                                         type(sub_out))
                for item in sub_out:
                    sub_out_name, expect = item[0], item[1]
                    idx = find_actual(sub_out_name, fetch_list)
                    actual = outs[idx]
                    actual_t = np.array(actual)
                    expect_t = expect[0] \
                        if isinstance(expect, tuple) else expect
                    self.assertTrue(
                        np.allclose(
                            actual_t, expect_t, atol=atol),
                        "Output (" + sub_out_name + ") has diff at " +
                        str(place))
                    if isinstance(expect, tuple):
                        # Tuple expectation carries a LoD to verify too.
                        self.assertListEqual(
                            actual.lod(), expect[1], "Output (" + sub_out_name +
                            ") has different lod at " + str(place))
            else:
                idx = find_actual(out_name, fetch_list)
                actual = outs[idx]
                actual_t = np.array(actual)
                expect = self.outputs[out_name]
                expect_t = expect[0] if isinstance(expect, tuple) else expect
                self.assertTrue(
                    np.allclose(
                        actual_t, expect_t, atol=atol),
                    "Output (" + out_name + ") has diff at " + str(place) +
                    str(actual_t) + "\n" + str(expect_t))
                if isinstance(expect, tuple):
                    self.assertListEqual(actual.lod(), expect[1],
                                         "Output (" + out_name +
                                         ") has different lod at " + str(place))

    def check_output(self, atol=1e-5):
        """Check outputs on CPU and, when available for this op, on GPU."""
        places = [core.CPUPlace()]
        if core.is_compiled_with_cuda() and core.op_support_gpu(self.op_type):
            places.append(core.CUDAPlace(0))
        for place in places:
            self.check_output_with_place(place, atol)

    def check_output_customized(self, checker):
        """Run on all supported places and hand the numpy outputs to a
        caller-supplied checker callable."""
        places = [core.CPUPlace()]
        if core.is_compiled_with_cuda() and core.op_support_gpu(self.op_type):
            places.append(core.CUDAPlace(0))
        for place in places:
            outs = self.calc_output(place)
            outs = [np.array(out) for out in outs]
            checker(outs)

    def __assert_is_close(self, numeric_grads, analytic_grads, names,
                          max_relative_error, msg_prefix):
        # Relative comparison; values below 1e-3 are compared absolutely
        # (their abs is clamped to 1 in the denominator).
        for a, b, name in itertools.izip(numeric_grads, analytic_grads, names):
            abs_a = np.abs(a)
            abs_a[abs_a < 1e-3] = 1

            diff_mat = np.abs(a - b) / abs_a
            max_diff = np.max(diff_mat)

            def err_msg():
                offset = np.argmax(diff_mat > max_relative_error)
                return ("%s Variable %s max gradient diff %f over limit %f, "
                        "the first error element is %d, %f, %f") % (
                            msg_prefix, name, max_diff, max_relative_error,
                            offset, a.flatten()[offset], b.flatten()[offset])

            self.assertLessEqual(max_diff, max_relative_error, err_msg())

    def check_grad(self,
                   inputs_to_check,
                   output_names,
                   no_grad_set=None,
                   numeric_grad_delta=0.005,
                   in_place=False,
                   max_relative_error=0.005,
                   user_defined_grads=None):
        """Compare analytic gradients with numeric (finite-difference)
        gradients on every supported place."""
        places = [core.CPUPlace()]
        if core.is_compiled_with_cuda() and core.op_support_gpu(self.op_type):
            places.append(core.CUDAPlace(0))
        for place in places:
            self.check_grad_with_place(place, inputs_to_check, output_names,
                                       no_grad_set, numeric_grad_delta,
                                       in_place, max_relative_error,
                                       user_defined_grads)

    def check_grad_with_place(self,
                              place,
                              inputs_to_check,
                              output_names,
                              no_grad_set=None,
                              numeric_grad_delta=0.005,
                              in_place=False,
                              max_relative_error=0.005,
                              user_defined_grads=None):
        """Gradient check on one place; user_defined_grads, when given,
        replaces the numeric reference gradients."""
        self.scope = core.Scope()
        op_inputs = self.inputs if hasattr(self, "inputs") else dict()
        op_outputs = self.outputs if hasattr(self, "outputs") else dict()
        op_attrs = self.attrs if hasattr(self, "attrs") else dict()
        self.op = create_op(self.scope, self.op_type, op_inputs, op_outputs,
                            op_attrs)

        if no_grad_set is None:
            no_grad_set = set()

        if not type(output_names) is list:
            output_names = [output_names]

        numeric_grads = user_defined_grads or [
            get_numeric_gradient(
                place,
                self.scope,
                self.op,
                self.inputs,
                input_to_check,
                output_names,
                delta=numeric_grad_delta,
                in_place=in_place) for input_to_check in inputs_to_check
        ]
        analytic_grads = self._get_gradient(inputs_to_check, place,
                                            output_names, no_grad_set)

        self.__assert_is_close(numeric_grads, analytic_grads, inputs_to_check,
                               max_relative_error,
                               "Gradient Check On %s" % str(place))

    @staticmethod
    def _create_var_descs_(block, var_dict):
        # FIXME: Try unify with `append_input_output`
        # Normalizes each entry to a list of [name, ndarray, lod] triples,
        # creates the block vars, and yields (param_name, var_descs).
        for param_name in var_dict:
            var = var_dict[param_name]
            if not isinstance(var, list) and not isinstance(var, tuple):
                var = [(param_name, var, None)]
            if not isinstance(var[0], list) and not isinstance(var[0], tuple):
                var = [(param_name, var[0], var[1])]

            for i, item in enumerate(var):
                if not isinstance(item[0], basestring):
                    item = [[param_name] + list(item)]
                if len(item) == 2:
                    if isinstance(item[1], tuple):
                        var[i] = [item[0], item[1][0], item[1][1]]
                    else:
                        # only set var name and value, set lod to None
                        var[i] = list(item) + [None]
            var_descs = [(block.create_var(
                name=name, shape=each.shape, dtype=each.dtype), each, lod)
                         for name, each, lod in var]

            yield param_name, var_descs

    @staticmethod
    def _merge_list(iterable):
        # Flatten one level: [[a, b], [c]] -> [a, b, c].
        return reduce(lambda a, b: list(a) + list(b), iterable, [])

    @staticmethod
    def _numpy_to_lod_tensor(np_value, lod, place):
        # Wrap an ndarray (plus optional LoD) in a LoDTensor on `place`.
        tensor = core.LoDTensor()
        tensor.set(np_value, place)
        if lod is not None:
            tensor.set_lod(lod)
        return tensor

    @staticmethod
    def np_dtype_to_fluid_dtype(input):
        """Change the dtype of float16 numpy array

        numpy float16 is binded to paddle::platform::float16
        in tensor_py.h via the help of uint16 data type since
        the internal memory representation of float16 is
        uint16_t in paddle and np.uint16 in numpy, which are
        themselves binded together by pybind.

        Args:
            input: input numpy array

        Returns:
            input: The dtype of input will be changed to np.uint16 if
                it is originally np.float16, such that the internal memory
                of input will be reinterpreted as of dtype np.uint16.
        """
        if input.dtype == np.float16:
            input.dtype = np.uint16
        return input

    def _get_gradient(self, input_to_check, place, output_names, no_grad_set):
        """Build the forward program plus a scalar mean loss over
        `output_names`, append the backward pass, and return the gradients
        of `input_to_check` as numpy arrays."""
        prog = Program()
        block = prog.global_block()
        inputs_with_np = {
            key: value
            for (key, value) in OpTest._create_var_descs_(
                block, getattr(self, 'inputs', {}))
        }
        outputs_with_np = {
            key: val
            for (key, val) in OpTest._create_var_descs_(
                block, getattr(self, 'outputs', {}))
        }
        inputs = {
            k: [item[0] for item in inputs_with_np[k]]
            for k in inputs_with_np
        }
        outputs = {
            k: [item[0] for item in outputs_with_np[k]]
            for k in outputs_with_np
        }

        op = block.append_op(
            type=self.op_type,
            inputs=inputs,
            outputs=outputs,
            attrs=getattr(self, 'attrs', {}))

        # infer variable type and infer shape in compile-time
        op.desc.infer_var_type(block.desc)
        op.desc.infer_shape(block.desc)

        mean_inputs = map(block.var, output_names)

        if len(mean_inputs) == 1:
            # Single output: loss = mean(output).
            loss = block.create_var(dtype=mean_inputs[0].dtype, shape=[1])
            op = block.append_op(
                inputs={"X": mean_inputs}, outputs={"Out": loss}, type='mean')
            op.desc.infer_var_type(block.desc)
            op.desc.infer_shape(block.desc)
        else:
            # Multiple outputs: loss = mean of the per-output means,
            # i.e. sum(mean(out_i)) / n.
            avg_sum = []
            for cur_loss in mean_inputs:
                cur_avg_loss = block.create_var(dtype=cur_loss.dtype, shape=[1])
                op = block.append_op(
                    inputs={"X": [cur_loss]},
                    outputs={"Out": [cur_avg_loss]},
                    type="mean")
                op.desc.infer_var_type(block.desc)
                op.desc.infer_shape(block.desc)
                avg_sum.append(cur_avg_loss)

            loss_sum = block.create_var(dtype=avg_sum[0].dtype, shape=[1])
            op_sum = block.append_op(
                inputs={"X": avg_sum}, outputs={"Out": loss_sum}, type='sum')
            op_sum.desc.infer_var_type(block.desc)
            op_sum.desc.infer_shape(block.desc)

            loss = block.create_var(dtype=loss_sum.dtype, shape=[1])
            op_loss = block.append_op(
                inputs={"X": loss_sum},
                outputs={"Out": loss},
                type='scale',
                attrs={'scale': 1.0 / float(len(avg_sum))})
            op_loss.desc.infer_var_type(block.desc)
            op_loss.desc.infer_shape(block.desc)

        param_grad_list = append_backward(
            loss=loss, parameter_list=input_to_check, no_grad_set=no_grad_set)

        feed_dict = {
            item[0].name: OpTest._numpy_to_lod_tensor(item[1], item[2], place)
            for p_name in inputs_with_np for item in inputs_with_np[p_name]
        }

        fetch_list = [g for p, g in param_grad_list]
        executor = Executor(place)
        return map(np.array,
                   executor.run(prog, feed_dict, fetch_list,
                                return_numpy=False))
| apache-2.0 |
Kazade/NeHe-Website | google_appengine/lib/django-0.96/django/core/mail.py | 32 | 4253 | # Use this module for e-mailing.
from django.conf import settings
from email.MIMEText import MIMEText
from email.Header import Header
from email.Utils import formatdate
import smtplib
import socket
import time
import random
# Cache the hostname, but do it lazily: socket.getfqdn() can take a couple of
# seconds, which slows down the restart of the server.
class CachedDnsName(object):
    """Lazily resolve and cache the local fully-qualified domain name.

    socket.getfqdn() can take a couple of seconds, so the lookup is
    deferred until first use and then memoized on the instance.
    """

    def __str__(self):
        return self.get_fqdn()

    def get_fqdn(self):
        try:
            return self._fqdn
        except AttributeError:
            self._fqdn = socket.getfqdn()
            return self._fqdn
# Module-level singleton; the fqdn lookup happens on first use.
DNS_NAME = CachedDnsName()
class BadHeaderError(ValueError):
    """Raised when a header value contains a newline (header injection)."""
    pass
class SafeMIMEText(MIMEText):
    """MIMEText that rejects newline-containing header values and encodes
    the Subject header with the configured default charset."""

    def __setitem__(self, name, val):
        "Forbids multi-line headers, to prevent header injection."
        if '\n' in val or '\r' in val:
            # Use the call form of raise: equivalent to the legacy
            # 'raise Cls, msg' statement but also valid on Python 3.
            raise BadHeaderError("Header values can't contain newlines (got %r for header %r)" % (val, name))
        if name == "Subject":
            val = Header(val, settings.DEFAULT_CHARSET)
        MIMEText.__setitem__(self, name, val)
def send_mail(subject, message, from_email, recipient_list, fail_silently=False, auth_user=None, auth_password=None):
    """
    Easy wrapper for sending a single message to a recipient list. All members
    of the recipient list will see the other recipients in the 'To' field.

    If auth_user is None, the EMAIL_HOST_USER setting is used.
    If auth_password is None, the EMAIL_HOST_PASSWORD setting is used.
    """
    if auth_user is None:
        auth_user = settings.EMAIL_HOST_USER
    if auth_password is None:
        auth_password = settings.EMAIL_HOST_PASSWORD
    # Delegate to the batch API with a single-message datatuple.
    return send_mass_mail([[subject, message, from_email, recipient_list]], fail_silently, auth_user, auth_password)
def send_mass_mail(datatuple, fail_silently=False, auth_user=None, auth_password=None):
    """
    Given a datatuple of (subject, message, from_email, recipient_list), sends
    each message to each recipient list. Returns the number of e-mails sent.

    If from_email is None, the DEFAULT_FROM_EMAIL setting is used.
    If auth_user and auth_password are set, they're used to log in.
    If auth_user is None, the EMAIL_HOST_USER setting is used.
    If auth_password is None, the EMAIL_HOST_PASSWORD setting is used.
    """
    if auth_user is None:
        auth_user = settings.EMAIL_HOST_USER
    if auth_password is None:
        auth_password = settings.EMAIL_HOST_PASSWORD
    try:
        server = smtplib.SMTP(settings.EMAIL_HOST, settings.EMAIL_PORT)
        if auth_user and auth_password:
            server.login(auth_user, auth_password)
    except:
        # Deliberate catch-all: fail_silently must swallow any connect or
        # login error (socket, SMTP, ...); otherwise re-raise unchanged.
        if fail_silently:
            return
        raise
    num_sent = 0
    for subject, message, from_email, recipient_list in datatuple:
        if not recipient_list:
            continue
        from_email = from_email or settings.DEFAULT_FROM_EMAIL
        msg = SafeMIMEText(message, 'plain', settings.DEFAULT_CHARSET)
        msg['Subject'] = subject
        msg['From'] = from_email
        msg['To'] = ', '.join(recipient_list)
        msg['Date'] = formatdate()
        try:
            random_bits = str(random.getrandbits(64))
        except AttributeError: # Python 2.3 doesn't have random.getrandbits().
            random_bits = ''.join([random.choice('1234567890') for i in range(19)])
        # Unique Message-ID from timestamp + random bits + cached fqdn.
        msg['Message-ID'] = "<%d.%s@%s>" % (time.time(), random_bits, DNS_NAME)
        try:
            server.sendmail(from_email, recipient_list, msg.as_string())
            num_sent += 1
        except:
            # Per-message catch-all: one failed recipient list must not
            # abort the remaining messages when fail_silently is set.
            if not fail_silently:
                raise
    try:
        server.quit()
    except:
        if fail_silently:
            return
        raise
    return num_sent
def mail_admins(subject, message, fail_silently=False):
    "Sends a message to the admins, as defined by the ADMINS setting."
    # ADMINS is a list of (name, email) pairs; only the addresses are used.
    send_mail(settings.EMAIL_SUBJECT_PREFIX + subject, message, settings.SERVER_EMAIL, [a[1] for a in settings.ADMINS], fail_silently)
def mail_managers(subject, message, fail_silently=False):
    "Sends a message to the managers, as defined by the MANAGERS setting."
    # MANAGERS is a list of (name, email) pairs; only the addresses are used.
    send_mail(settings.EMAIL_SUBJECT_PREFIX + subject, message, settings.SERVER_EMAIL, [a[1] for a in settings.MANAGERS], fail_silently)
| bsd-3-clause |
iambibhas/django | django/contrib/gis/tests/geoapp/test_regress.py | 28 | 3858 | # -*- encoding: utf-8 -*-
from __future__ import unicode_literals
from datetime import datetime
from django.contrib.gis.geos import HAS_GEOS
from django.contrib.gis.shortcuts import render_to_kmz
from django.contrib.gis.tests.utils import no_oracle
from django.db.models import Count, Min
from django.test import TestCase, skipUnlessDBFeature
if HAS_GEOS:
from .models import City, PennsylvaniaCity, State, Truth
@skipUnlessDBFeature("gis_enabled")
class GeoRegressionTests(TestCase):
    """Regression tests for GeoDjango tickets; each method cites its ticket."""

    fixtures = ['initial']

    def test_update(self):
        "Testing GeoQuerySet.update(). See #10411."
        pnt = City.objects.get(name='Pueblo').point
        bak = pnt.clone()
        pnt.y += 0.005
        pnt.x += 0.005

        City.objects.filter(name='Pueblo').update(point=pnt)
        self.assertEqual(pnt, City.objects.get(name='Pueblo').point)
        # Restore the original point so other tests see unmodified data.
        City.objects.filter(name='Pueblo').update(point=bak)
        self.assertEqual(bak, City.objects.get(name='Pueblo').point)

    def test_kmz(self):
        "Testing `render_to_kmz` with non-ASCII data. See #11624."
        name = "Åland Islands"
        places = [{
            'name': name,
            'description': name,
            'kml': '<Point><coordinates>5.0,23.0</coordinates></Point>'
        }]
        render_to_kmz('gis/kml/placemarks.kml', {'places': places})

    @skipUnlessDBFeature("supports_extent_aggr")
    def test_extent(self):
        "Testing `extent` on a table with a single point. See #11827."
        pnt = City.objects.get(name='Pueblo').point
        ref_ext = (pnt.x, pnt.y, pnt.x, pnt.y)
        extent = City.objects.filter(name='Pueblo').extent()
        for ref_val, val in zip(ref_ext, extent):
            self.assertAlmostEqual(ref_val, val, 4)

    def test_unicode_date(self):
        "Testing dates are converted properly, even on SpatiaLite. See #16408."
        founded = datetime(1857, 5, 23)
        PennsylvaniaCity.objects.create(name='Mansfield', county='Tioga', point='POINT(-77.071445 41.823881)',
                                        founded=founded)
        self.assertEqual(founded, PennsylvaniaCity.objects.datetimes('founded', 'day')[0])
        self.assertEqual(founded, PennsylvaniaCity.objects.aggregate(Min('founded'))['founded__min'])

    def test_empty_count(self):
        "Testing that PostGISAdapter.__eq__ does check empty strings. See #13670."
        # contrived example, but need a geo lookup paired with an id__in lookup
        pueblo = City.objects.get(name='Pueblo')
        state = State.objects.filter(poly__contains=pueblo.point)
        cities_within_state = City.objects.filter(id__in=state)

        # .count() should not throw TypeError in __eq__
        self.assertEqual(cities_within_state.count(), 1)

    # TODO: fix on Oracle -- get the following error because the SQL is ordered
    # by a geometry object, which Oracle apparently doesn't like:
    #  ORA-22901: cannot compare nested table or VARRAY or LOB attributes of an object type
    @no_oracle
    def test_defer_or_only_with_annotate(self):
        "Regression for #16409. Make sure defer() and only() work with annotate()"
        self.assertIsInstance(list(City.objects.annotate(Count('point')).defer('name')), list)
        self.assertIsInstance(list(City.objects.annotate(Count('point')).only('name')), list)

    def test_boolean_conversion(self):
        "Testing Boolean value conversion with the spatial backend, see #15169."
        t1 = Truth.objects.create(val=True)
        t2 = Truth.objects.create(val=False)
        val1 = Truth.objects.get(pk=t1.pk).val
        val2 = Truth.objects.get(pk=t2.pk).val
        # verify types -- shouldn't be 0/1
        self.assertIsInstance(val1, bool)
        self.assertIsInstance(val2, bool)
        # verify values
        self.assertEqual(val1, True)
        self.assertEqual(val2, False)
| bsd-3-clause |
anisyonk/pilot | HPC/HPCManager.py | 3 | 20132 | import commands
import os
import shutil
import sys
import time
import traceback
import json
import pickle
from Logger import Logger
import logging
logging.basicConfig(level=logging.DEBUG)
class HPCManager:
def __init__(self, logFileName=None):
self.__globalWorkingDir = None
self.__localWorkingDir = None
self.__jobStateFile = 'HPCManagerState.json'
self.__logFileName = logFileName
self.__log= Logger(logFileName)
self.__isFinished = False
# HPC resource information
self.__queue = None
self.__backfill_queue = None
self.__nodes = None
self.__cpuPerNode = None
self.__ATHENA_PROC_NUMBER = None
self.__repo = None
self.__mppwidth = None
self.__mppnppn = None
self.__walltime = None
self.__walltime_m = 0
# Number of AthenaMP workers per rank
self.__ATHENA_PROC_NUMBER = 2
self.__eventsPerWorker = 3
self.__failedPollTimes = 0
self.__lastState = None
self.__lastTime = time.time()
self.__copyInputFiles = copyInputFiles
self.__mode = None
self.__jobs = {}
self.__jobsFile = None
self.__eventRanges = None
self.__eventRangesFile = None
self.__jobid = None
self.__stageout_threads = 1
self.__pandaJobStateFile = None
self.__pluginName = 'pbs'
self.__plugin = None
self.__localSetup = None
self.__firstJobWorkDir = None
    def __init__(self, globalWorkingDir=None, localWorkingDir=None, logFileName=None, copyInputFiles=False):
        """Initialize HPCManager state.

        NOTE(review): this is the second ``__init__`` in the class; Python
        keeps only this one, so the earlier definition is dead code.

        globalWorkingDir -- shared (Yoda) directory, created if missing.
        localWorkingDir  -- node-local directory; defaults to the Yoda dir.
        logFileName      -- file used by the Logger.
        copyInputFiles   -- whether input files should be copied locally.
        """
        self.__globalWorkingDir = globalWorkingDir
        # self.__globalYodaDir = os.path.join(globalWorkingDir, 'Yoda')
        self.__globalYodaDir = self.__globalWorkingDir
        if not os.path.exists(self.__globalYodaDir):
            os.makedirs(self.__globalYodaDir)
        self.__localWorkingDir = localWorkingDir
        if self.__localWorkingDir is None:
            self.__localWorkingDir = self.__globalYodaDir
        self.__jobStateFile = os.path.join(self.__globalYodaDir, 'HPCManagerState.json')
        self.__logFileName = logFileName
        self.__log = Logger(logFileName)
        self.__isFinished = False

        # HPC resource information
        self.__queue = None
        self.__backfill_queue = None
        self.__nodes = None
        self.__cpuPerNode = None
        self.__repo = None
        self.__mppwidth = None
        self.__mppnppn = None
        self.__walltime = None
        self.__walltime_m = 0
        # Number of AthenaMP workers per rank
        self.__ATHENA_PROC_NUMBER = 2
        self.__eventsPerWorker = 3
        self.__failedPollTimes = 0
        self.__lastState = None
        self.__lastTime = time.time()
        self.__copyInputFiles = copyInputFiles
        self.__mode = None

        self.__jobs = {}
        self.__jobsFile = None
        self.__eventRanges = {}
        self.__eventRangesFile = None
        self.__jobid = None
        self.__stageout_threads = 1
        self.__pandaJobStateFile = None
        # Output routing flags: object store / zip / per-event dump.
        self.__yodaToOS = False
        self.__yodaToZip = False
        self.__dumpEventOutputs = True

        self.__pluginName = 'pbs'
        self.__plugin = None
        self.__localSetup = None

        self.__firstJobWorkDir = None
    def setPandaJobStateFile(self, file):
        # Path of the panda job state file used for job recovery.
        self.__pandaJobStateFile = file

    def getPandaJobStateFile(self):
        return self.__pandaJobStateFile

    def setStageoutThreads(self, num):
        # Number of threads used to stage out output files.
        self.__stageout_threads = num

    def getStageoutThreads(self):
        return self.__stageout_threads

    def getHPCJobId(self):
        # Batch-system job id assigned at submission time (None before).
        return self.__jobid
    def prepare(self):
        """Copy the job scripts into the local working directory when it
        differs from the global (shared) working directory."""
        if self.__globalWorkingDir != self.__localWorkingDir:
            self.__log.info("Global Working directory is different with local working directory.")
            # copy job file to local working directory
            cmd = "cp -r "+self.__globalWorkingDir+"/HPCJob.py "+self.__globalWorkingDir+"/EventServer "+self.__globalWorkingDir+"/pandayoda/ "+self.__localWorkingDir
            self.__log.info("Copying script to local working directory: %s" % cmd)
            status, output = commands.getstatusoutput(cmd)
            self.__log.info("Executing command result: (status: %s, output: %s)" %(status, output))
    def getHPCResources(self, partition, max_nodes=None, min_nodes=2, min_walltime_m=30):
        # Delegate the resource query to the batch-system plugin (pbs, ...).
        return self.__plugin.getHPCResources(partition, max_nodes, min_nodes, min_walltime_m)
def getMode(self, defaultResources):
mode = defaultResources['mode']
self.__mode = mode
self.__log.info("Running mode: %s" % mode)
if defaultResources['partition'] is None:
self.__log.info("Partition is not defined, will use normal mode")
res = None
self.__mode = 'normal'
return self.__mode
    def setupPlugin(self, pluginName):
        """Dynamically import HPC.HPCManagerPlugins.<name>.<name> and
        instantiate it as the batch-system plugin."""
        # pluginName = defaultResources.get('plugin', 'pbs')
        self.__pluginName = pluginName
        plugin = 'HPC.HPCManagerPlugins.%s.%s' % (pluginName, pluginName)
        self.__log.info("HPCManager plugin: %s" % plugin)

        # __import__ returns the top package; walk down to the class object.
        components = plugin.split('.')
        mod = __import__('.'.join(components[:-1]))
        for comp in components[1:]:
            mod = getattr(mod, comp)
        self.__plugin = mod(self.__logFileName)
        self.__log.info("HPCManager plugin is setup: %s" % self.__plugin)
    def setLocalSetup(self, setup):
        # Shell setup string sourced before running payloads locally.
        self.__localSetup = setup
    def getFreeResources(self, defaultResources):
        """Determine nodes/walltime/queue for the submission.

        In backfill mode this polls the batch system until free resources
        appear, then picks the largest node count whose backfill window
        covers the configured minimum walltime; otherwise (or on failure)
        the configured defaults are used.  Also derives events-per-worker
        from the walltime budget and copies output-routing flags.
        """
        mode = self.getMode(defaultResources)

        res = None
        if mode == 'backfill':
            res = {}
            while not res:
                self.__log.info("Run in backfill mode, waiting to get resources.")
                time.sleep(60)
                res = self.getHPCResources(defaultResources['partition'], int(defaultResources['max_nodes']), int(defaultResources['min_nodes']), int(defaultResources['min_walltime_m']))
            self.__log.info("Get resources: %s" % res)

        nodes = int(defaultResources['min_nodes'])
        walltime = 0
        if res:
            # res maps node-count -> available backfill seconds; prefer the
            # biggest node count that still satisfies the minimum walltime.
            for n in sorted(res.keys(), reverse=True):
                if int(defaultResources['min_walltime_m']) * 60 <= res[n] and nodes <= n:
                    nodes = n
                    walltime = res[n] / 60
                    # keep a 2-minute safety margin below the window
                    walltime = walltime - 2
                    break

        if walltime <= 0:
            walltime = int(defaultResources['walltime_m'])
            nodes = int(defaultResources['nodes'])
            self.__log.info("Cannot get resource, using default one: walltime in minutes: %s, nodes: %s" % (walltime, nodes))

        self.__queue = defaultResources['queue']
        self.__backfill_queue = defaultResources['backfill_queue']
        self.__nodes = nodes
        if self.__nodes > int(defaultResources['max_nodes']):
            self.__nodes = int(defaultResources['max_nodes'])
        self.__cpuPerNode = int(defaultResources['cpu_per_node'])
        self.__mppwidth = int(self.__nodes) * int(self.__cpuPerNode)
        self.__mppnppn = defaultResources['mppnppn']

        initialtime_m = int(defaultResources['initialtime_m'])
        time_per_event_m = int(defaultResources['time_per_event_m'])

        if mode == 'backfill':
            self.__queue = self.__backfill_queue
            self.__log.info("Run in backfill mode, using queue: %s" % self.__queue)

        if walltime > int(defaultResources['max_walltime_m']):
            walltime = int(defaultResources['max_walltime_m'])
        self.__walltime_m = walltime
        h, m = divmod(walltime, 60)
        self.__walltime = "%d:%02d:%02d" % (h, m, 0)
        # Events a worker can process within the walltime budget.
        self.__eventsPerWorker = (int(walltime) - int(initialtime_m)) * 1.0 / time_per_event_m
        self.__log.info("Walltime(minutes): %s" % walltime)
        self.__log.info("InitialTime(configured minutes): %s" % initialtime_m)
        self.__log.info("Time per event(minutes): %s" % time_per_event_m)
        if self.__eventsPerWorker < 1:
            self.__eventsPerWorker = 1
        self.__log.info("Events per worker: (wallTime-initialTime)/timePerEvent=%s" % self.__eventsPerWorker)
        self.__ATHENA_PROC_NUMBER = defaultResources['ATHENA_PROC_NUMBER']
        self.__repo = defaultResources['repo']

        # Output routing: zip takes precedence over object store; both
        # disable per-event output dumping.
        self.__yodaToOS = defaultResources.get('yoda_to_os', False)
        self.__yodaToZip = defaultResources.get('yoda_to_zip', False)
        if self.__yodaToZip:
            self.__yodaToOS = False
        if self.__yodaToOS or self.__yodaToZip:
            self.__dumpEventOutputs = False
        self.__copyOutputToGlobal = defaultResources.get('copyOutputToGlobal', False)
        if defaultResources.get('yoda_to_zip', False) or defaultResources.get('es_to_zip', False):
            self.__copyOutputToGlobal = False
        self.__setup = defaultResources.get('setup', None)
        self.__esPath = defaultResources.get('esPath', None)
        self.__os_bucket_id = defaultResources.get('os_bucket_id', None)
def getCoreCount(self):
return int(self.__mppwidth)
def getEventsNumber(self):
# walltime is minutes
# 1 Yoda and (self.__nodes -1) Droid
# plus 1 cached event per node
#return int(self.__eventsPerWorker) * (int(self.__nodes) -1) * int(self.__ATHENA_PROC_NUMBER) + (int(self.__nodes) -1) * 1
# try to download 1.5 times of events than predicted.
self.__log.info("Events per worker: %s" % self.__eventsPerWorker)
self.__log.info("Nodes: %s" % self.__nodes)
self.__log.info("Cores per node: %s" % self.__ATHENA_PROC_NUMBER)
totalNeededEvents = int(self.__eventsPerWorker * int(self.__nodes) * int(self.__ATHENA_PROC_NUMBER) * 1.5)
self.__log.info("Total needed Events: eventsPerWorker * nodes * coresPerNode * 1.5 = %s" % totalNeededEvents)
return totalNeededEvents
def initJobs(self, jobs, eventRanges):
self.__log.info("initJobs: %s" % jobs)
ranks = [i for i in range(1, self.__nodes)]
totalNeededRanks = 1
for jobId in jobs:
# job = {"TokenExtractCmd": tokenExtractorCommand, "AthenaMPCmd": athenaMPCommand}
job = jobs[jobId]
job['JobId'] = jobId
job["AthenaMPCmd"] = "export TRF_ECHO=1; " + job["AthenaMPCmd"]
job["CopyInputFiles"] = self.__copyInputFiles
job["LocalWorkingDir"] = self.__localWorkingDir
job["ATHENA_PROC_NUMBER"] = self.__ATHENA_PROC_NUMBER
job['neededRanks'] = 0
job['ranks'] = []
job['yodaToOS'] = self.__yodaToOS
job['yodaToZip'] = self.__yodaToZip
# job['zipFileName'] = self.__zipFileName
job['copyOutputToGlobal'] = self.__copyOutputToGlobal
job['setup'] = self.__setup
job['esPath'] = self.__esPath
job['os_bucket_id'] = self.__os_bucket_id
eventsPerNode = int(self.__ATHENA_PROC_NUMBER) * (int(self.__eventsPerWorker))
if jobId in eventRanges:
#job['neededRanks'] = len(eventRanges[jobId]) / eventsPerNode + (len(eventRanges[jobId]) % eventsPerNode + eventsPerNode - 1)/eventsPerNode
job['neededRanks'] = round(len(eventRanges[jobId]) * 1.0 / eventsPerNode, 2)
if len(eventRanges[jobId]) >= eventsPerNode * 4:
job['neededRanks'] += 0
elif len(eventRanges[jobId]) > eventsPerNode:
job['neededRanks'] += 0
totalNeededRanks += job['neededRanks']
self.__jobs[jobId] = job
if self.__firstJobWorkDir is None:
self.__firstJobWorkDir = job['GlobalWorkingDir']
if totalNeededRanks < self.__nodes:
self.__nodes = int(totalNeededRanks)
#if self.__nodes < 2:
# self.__nodes = 2
self.__mppwidth = int(self.__nodes) * int(self.__cpuPerNode)
self.__jobsFile = os.path.join(self.__globalYodaDir, "HPCJobs.json")
with open(self.__jobsFile, 'w') as outputFile:
json.dump(self.__jobs, outputFile)
self.__eventRanges = eventRanges
self.__eventRangesFile = os.path.join(self.__globalYodaDir, "JobsEventRanges.json")
with open(self.__eventRangesFile, 'w') as outputFile:
json.dump(self.__eventRanges, outputFile)
def getJobsRanks(self):
jobRanks = {}
for jobId in self.__jobs:
jobRanks[jobId] = self.__jobs[jobId]['ranks']
# will remove
def initEventRanges(self, eventRanges):
self.__eventRanges = eventRanges
self.__eventRangesFile = os.path.join(self.__globalYodaDir, "EventRanges.json")
with open(self.__eventRangesFile, 'w') as outputFile:
json.dump(self.__eventRanges, outputFile)
# will remove
    def initJobRanks(self):
        """Shrink the node request to what the known event ranges require.

        NOTE(review): the '/' and '%' arithmetic below appears to assume
        Python 2 integer division -- confirm before running under Python 3.
        """
        numRanges = 0
        if self.__eventRanges:
            numRanges = len(self.__eventRanges)
        # Events one node can handle; the -1 leaves one event of headroom
        # per core (presumably for the cached event -- TODO confirm).
        eventsPerNode = int(self.__ATHENA_PROC_NUMBER) * (int(self.__eventsPerWorker) - 1)
        if eventsPerNode == 0:
            eventsPerNode = 1
        # Ceiling of numRanges/eventsPerNode, plus one extra node
        # (presumably reserved for the Yoda rank).
        nodes = numRanges/eventsPerNode + (numRanges%eventsPerNode + eventsPerNode - 1)/eventsPerNode + 1
        if nodes < int(self.__nodes):
            self.__nodes = nodes
            self.__mppwidth = int(self.__nodes) * int(self.__cpuPerNode)
        # For small non-backfill allocations, re-derive the H:MM:SS walltime
        # string from the (unchanged) minute count.
        if self.__nodes <= 5 and self.__mode != 'backfill':
            # self.__walltime_m = self.__walltime_m * 2
            h, m = divmod(self.__walltime_m, 60)
            self.__walltime = "%d:%02d:%02d" % (h, m, 0)
def isLocalProcess(self):
return self.__plugin.isLocalProcess()
    def submit(self):
        """Submit the prepared jobs to the HPC batch system via the plugin.

        Retries up to 5 times with a 60s pause between attempts; stores the
        batch job id in self.__jobid on success. Does nothing when no jobs
        have been prepared by initJobs().
        """
        if not self.__jobs:
            self.__log.info("No prepared jobs available. will not submit any jobs.")
            return
        for i in range(5):
            # The 'arc12233' plugin submits from the first job's work dir;
            # every other plugin uses the global Yoda/local working dirs.
            if self.__plugin.getName() == 'arc12233' and self.__firstJobWorkDir is not None:
                status, jobid = self.__plugin.submitJob(self.__globalWorkingDir, self.__firstJobWorkDir, self.__firstJobWorkDir, self.__queue, self.__repo, self.__mppwidth, self.__mppnppn, self.__walltime, self.__nodes, localSetup=self.__localSetup, cpuPerNode=self.__cpuPerNode, dumpEventOutputs=self.__dumpEventOutputs)
            else:
                status, jobid = self.__plugin.submitJob(self.__globalWorkingDir, self.__globalYodaDir, self.__localWorkingDir, self.__queue, self.__repo, self.__mppwidth, self.__mppnppn, self.__walltime, self.__nodes, localSetup=self.__localSetup, cpuPerNode=self.__cpuPerNode, dumpEventOutputs=self.__dumpEventOutputs)
            if status != 0:
                self.__log.info("Failed to submit this job to HPC. will sleep one minute and retry")
                time.sleep(60)
            else:
                self.__jobid = jobid
                break
        # status holds the result of the last attempt.
        if status != 0:
            self.__log.info("Failed to submit this job to HPC. All retries finished. will fail")
def saveState(self):
hpcState = {'GlobalWorkingDir': self.__globalWorkingDir, 'Plugin':self.__pluginName, 'JobID': self.__jobid, 'JobCommand': sys.argv, 'JobStateFile': self.__pandaJobStateFile, 'StageoutThreads': self.__stageout_threads}
with open(self.__jobStateFile, 'w') as outputFile:
json.dump(hpcState, outputFile)
def recoveryState(self):
if os.path.exists(self.__jobStateFile):
tmpFile = open(self.__jobStateFile)
hpcState = json.load(tmpFile)
tmpFile.close()
self.__globalWorkingDir = hpcState['GlobalWorkingDir']
self.__jobid = hpcState['JobID']
self.__pluginName = hpcState['Plugin']
self.__stageout_threads = hpcState['StageoutThreads']
self.setupPlugin(self.__pluginName)
    def poll(self):
        """Poll the batch system for the job state and return it.

        Local-process plugins are always 'Complete'; a missing job id is
        'Failed'. Otherwise polls up to 120 times, sleeping 60s between
        'Unknown' responses, and returns the first definite state. Sets
        self.__isFinished for 'Complete'/'Failed'.
        """
        if self.__plugin.isLocalProcess():
            return 'Complete'
        if self.__jobid is None:
            self.__log.info("HPC job id is None, will return failed.")
            self.__isFinished = True
            return 'Failed'
        counter = 120
        while counter > 0:
            counter = counter - 1
            state = self.__plugin.poll(self.__jobid)
            # Log on every state change, and at least once every 5 minutes.
            if self.__lastState is None or self.__lastState != state or time.time() > self.__lastTime + 60*5:
                self.__log.info("HPC job state is: %s" %(state))
                self.__lastState = state
                self.__lastTime = time.time()
            if state in ['Complete', 'Failed']:
                self.__isFinished = True
                break
            # Any other definite state ends the loop; 'Unknown' retries.
            if state != 'Unknown': break
            else: time.sleep(60)
        return state
def checkHPCJobLog(self):
logFile = os.path.join(self.__globalYodaDir, "athena_stdout.txt")
command = "grep 'HPCJob-Yoda failed' " + logFile
status, output = commands.getstatusoutput(command)
if status == 0:
return -1, "HPCJob-Yoda failed"
return 0, None
def getOutputs(self):
outputs = []
all_files = os.listdir(self.__globalYodaDir)
for file in all_files:
if file.endswith(".dump"):
filename = os.path.join(self.__globalYodaDir, file)
handle = open(filename)
for line in handle:
line = line.replace(" ", " ")
eventRange, status, output = line.split(" ")
if status == 'finished':
outputFileName = output.split(",")[0]
outputs.append((eventRange, status, outputFileName))
else:
outputs.append((eventRange, status, output))
handle.close()
os.rename(filename, filename + ".BAK")
return outputs
def isFinished(self):
if self.__jobid is None:
self.__log.info("HPC job id is None. Finished")
self.__isFinished = True
return self.__isFinished
def finishJob(self):
if self.__jobid and self.__plugin:
self.__plugin.delete(self.__jobid)
    def flushOutputs(self):
        """Force the Yoda database backend to dump any pending output updates.

        Best-effort: any failure is logged at debug level and swallowed so a
        flush problem never aborts the caller.
        """
        try:
            self.__log.debug("Flush Yoda outputs")
            from pandayoda.yodacore import Database
            db = Database.Backend(self.__globalYodaDir)
            db.dumpUpdates(True)
        except:
            self.__log.debug("Failed to flush outputs: %s" % traceback.format_exc())
    def old_postRun(self):
        """Copy every rank_* directory from the Yoda dir into each job's
        global working directory (legacy post-run step; superseded).

        Best-effort throughout: individual copy failures and any overall
        failure are logged as warnings and swallowed.
        """
        try:
            self.__log.debug("postRun")
            all_files = os.listdir(self.__globalYodaDir)
            for file in all_files:
                path = os.path.join(self.__globalYodaDir, file)
                if file.startswith("rank_") and os.path.isdir(path):
                    self.__log.debug("Found dir %s" % path)
                    # Fan the rank dir out to every job's working dir.
                    for jobId in self.__jobs:
                        dest_dir = os.path.join(self.__jobs[jobId]['GlobalWorkingDir'], file)
                        if not os.path.exists(dest_dir):
                            os.makedirs(dest_dir)
                        for localFile in os.listdir(path):
                            localPath = os.path.join(path, localFile)
                            self.__log.debug("Copying %s to %s" % (localPath, dest_dir))
                            if os.path.isdir(localPath):
                                try:
                                    shutil.copytree(localPath, dest_dir)
                                except:
                                    self.__log.warning("Failed to copy %s to %s: %s" % (localPath, dest_dir, traceback.format_exc()))
                            else:
                                try:
                                    shutil.copy(localPath, dest_dir)
                                except:
                                    self.__log.warning("Failed to copy %s to %s: %s" % (localPath, dest_dir, traceback.format_exc()))
        except:
            self.__log.warning("Failed to post run: %s" % traceback.format_exc())
def postRun(self):
return
| apache-2.0 |
recsm/SQP | sqp/migrations/0044_import_questions_jorge.py | 1 | 30530 | # encoding: utf-8
import os
import datetime
import re
import codecs
from south.db import db
from south.v2 import DataMigration
from django.db import models
from django.conf import settings
from sqp import models as sqp_models
class Migration(DataMigration):
    def forwards(self, orm):
        """Import SQP question texts from the data/questions_jorge/ dump files.

        Each file is named <round>_<country>_<language>.txt and contains
        question blocks keyed by an admin code (e.g. "A12"), with optional
        {INTRO}/{QUESTION}/{ANSWERS} section markers. For every key an Item
        and a Question are created (or fetched) and their text fields filled
        in, but only when the question has no text yet. A per-file summary
        is printed and the whole run is logged to /tmp/jorge_import.log.
        """
        # Drop the unique_name index so duplicate item names can be imported;
        # ignore the error if it was already removed by an earlier run.
        try:
            sql = 'ALTER TABLE sqp_item DROP INDEX unique_name;'
            db.execute_many(sql)
            print "unique_name index dropped"
        except:
            print "unique_name index not dropped (most likely already deleted)"
        log_text = ''
        Q_BASE_DIR = settings.PROJECT_DIR + '/data/questions_jorge/'
        files = []
        r,d,files = os.walk(Q_BASE_DIR).next()
        #looking for russian A and B chars
        item_regex = re.compile(ur'^(P\.)?[A-ZАВ]{1}[0-9]{1,3}([A-Za-z]{1,3})?(\.)?$')
        text_area_regex = re.compile(ur'\{[A-Z]+\}')
        q_regex = re.compile(ur'Q{1}[0-9]{1,4}')
        for file_name in files:
            # Per-file counters for the summary log.
            file_log_text = []
            CREATED_ITEMS = 0
            CREATED_QUESTIONS = 0
            EDITED_QUESTIONS = 0
            NOT_EDITED = 0
            SKIPPED_AREAS = 0
            IMPORTED_LINES = 0
            SKIPPED_LINES = []
            #utf-8-sig to get rid of the utf-8 BOM /ufeff
            #http://stackoverflow.com/questions/9228202/tokenizing-unicode-using-nltk
            file = codecs.open(Q_BASE_DIR + file_name, "r", "utf-8-sig")
            if not '.txt' in file_name:
                continue
            print "NOW CHECKING file %s" % file.name
            # File name encodes round, country and language, e.g. ESS1_GB_eng.txt
            round_name, country_iso, language_iso = file_name.replace('.txt', '').split('_')
            language = sqp_models.Language.objects.get(iso=language_iso)
            country = sqp_models.Country.objects.get(iso=country_iso)
            round_name = round_name.replace('ESS', 'ESS Round ')
            study = sqp_models.Study.objects.get(name=round_name)
            key = None
            questions = {}
            text_areas = ['INTRO',
                          'QUESTION',
                          'ANSWERS',
                          'TRASH']
            line_number = 0
            # Pass 1: parse the file into {key: {INTRO, QUESTION, ANSWERS}}.
            for line in file:
                line_number += 1
                #Get rid of any Q13 Q12 crap
                if q_regex.match(line):
                    line = re.sub(q_regex, '', line).strip()
                    key = None
                if item_regex.match(line.strip()):
                    # New question block: normalize the admin key.
                    key = item_regex.match(line.strip()).group(0)
                    #russian chars
                    key = key.replace(u'\u0410', 'A')
                    key = key.replace(u'\u0412', 'B')
                    #P.
                    key = key.replace('P.', '')
                    key = key.replace(' ', '')
                    #Trailing .
                    key = key.replace('.', '')
                    questions[key] = {'INTRO' : '',
                                      'QUESTION' : '',
                                      'ANSWERS' : '',
                                      'found_text_areas' : []
                                      }
                    current_text_area = 'QUESTION'
                    continue
                elif key and text_area_regex.match(line):
                    # Section marker like {INTRO}; duplicates go to TRASH.
                    match = text_area_regex.match(line).group(0)
                    current_text_area = match.replace('{', '').replace('}', '')
                    if current_text_area == 'ANSWERS 1':
                        current_text_area ='ANSWERS'
                    elif current_text_area == 'ANSWERS 2':
                        SKIPPED_AREAS += 1
                        continue
                    if current_text_area in questions[key]['found_text_areas']:
                        current_text_area = 'TRASH'
                    else:
                        questions[key]['found_text_areas'].append(current_text_area)
                    if current_text_area not in text_areas:
                        raise Exception('Unrecognized text area "%s"' % current_text_area)
                    continue
                #Only take the first occurence of QUESTION / INTRO / ANSWERS
                if key and current_text_area != 'TRASH':
                    questions[key][current_text_area] += line
                    IMPORTED_LINES += 1
                elif line.strip() != '':
                    SKIPPED_LINES.append({'line_number' : line_number,
                                          'content': line})
            n = 0
            # Pass 2: create/update the Item and Question rows.
            for key in questions:
                n +=1
                #if n > 10:break
                #print "NOW SAVING question %s" % key
                try:
                    item, i_was_created = sqp_models.Item.objects.get_or_create(admin=key, study=study)
                    if i_was_created:
                        CREATED_ITEMS += 1
                except:
                    print '!!!!!!!!!!BAD KEY!!!!!!!!!!!!!!!%s' % key
                    raise Exception()
                question, q_was_created = sqp_models.Question.objects.get_or_create(item=item, country=country, language=language)
                if q_was_created:
                    CREATED_QUESTIONS += 1
                # Never overwrite a question that already has any text.
                if question.rfa_text or question.introduction_text or question.answer_text:
                    NOT_EDITED += 1
                else:
                    question.introduction_text = questions[key]['INTRO'].strip()
                    question.rfa_text = questions[key]['QUESTION'].strip()
                    question.answer_text = questions[key]['ANSWERS'].strip()
                    if q_was_created:
                        question.imported_from = 'jorge-created'
                    else:
                        question.imported_from = 'jorge-existing'
                    question.save(create_suggestions = False)
                    EDITED_QUESTIONS += 1
            file_log_text.append('%s %s %s new items:%s, total qs:%s, created qs:%s, edited qs:%s, not edited qs:%s, skipped keys:%s' % \
                                 (country_iso, language_iso, round_name,
                                  CREATED_ITEMS, len(questions), CREATED_QUESTIONS, EDITED_QUESTIONS, NOT_EDITED, SKIPPED_AREAS))
            file_log_text.append('LINES SKIPPED %s / IMPORTED %s' % (len(SKIPPED_LINES), IMPORTED_LINES))
            if SKIPPED_LINES:
                file_log_text.append('SKIPPED_LINES')
                for l in SKIPPED_LINES:
                    file_log_text.append('    %s: %s' % (l['line_number'], l['content'].replace('\n', '')))
            file_log_text.append('IMPORTED ITEMS: %s' % ','.join(questions.keys()))
            file_log_text.append('------------------------------------------------------------------------')
            print '\n'.join(file_log_text)
            print
            log_text += '\n'.join(file_log_text) + '\n\n\n'
        log_file = codecs.open('/tmp/jorge_import.log', 'w', "utf-8-sig")
        log_file.write(log_text)
        log_file.close()
        print "LOG STORED AT '/tmp/jorge_import.log'"
def backwards(self, orm):
"Write your backwards methods here."
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'sqp.branch': {
'Meta': {'ordering': "('label__characteristic__name', 'label__id')", 'object_name': 'Branch'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sqp.Label']"}),
'to_characteristic': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sqp.Characteristic']"})
},
'sqp.characteristic': {
'Meta': {'ordering': "['name']", 'object_name': 'Characteristic'},
'auto_fill_suggestion': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'desc': ('django.db.models.fields.TextField', [], {'db_column': "'description'", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'short_name': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'suggestion': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'validation_rules': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['sqp.ValidationRule']", 'null': 'True', 'blank': 'True'}),
'widget': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sqp.Widget']"})
},
'sqp.characteristicset': {
'Meta': {'ordering': "['id']", 'object_name': 'CharacteristicSet'},
'branches': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sqp.Branch']", 'symmetrical': 'False'}),
'coders': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.User']", 'symmetrical': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'sqp.coding': {
'Meta': {'ordering': "['user', 'characteristic']", 'object_name': 'Coding'},
'characteristic': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sqp.Characteristic']"}),
'choice': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sqp.Question']"}),
'seconds_taken': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'updated_on': ('django.db.models.fields.DateTimeField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'sqp.codingchange': {
'Meta': {'object_name': 'CodingChange'},
'change_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'change_by_user_set'", 'null': 'True', 'to': "orm['auth.User']"}),
'change_type': ('django.db.models.fields.IntegerField', [], {}),
'characteristic': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sqp.Characteristic']"}),
'coding_change_group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sqp.CodingChangeGroup']"}),
'coding_user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'coding_user_set'", 'null': 'True', 'to': "orm['auth.User']"}),
'error_occured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'new_value': ('django.db.models.fields.CharField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
'new_value_by_related_country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sqp.Country']", 'null': 'True', 'blank': 'True'}),
'new_value_by_related_lang': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sqp.Language']", 'null': 'True', 'blank': 'True'}),
'processed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'processing_log': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'question_id': ('django.db.models.fields.IntegerField', [], {})
},
'sqp.codingchangegroup': {
'Meta': {'ordering': "['id']", 'object_name': 'CodingChangeGroup'},
'description': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'sqp.codingsuggestion': {
'Meta': {'object_name': 'CodingSuggestion'},
'characteristic': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sqp.Characteristic']"}),
'explanation': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sqp.Question']"}),
'value': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
'sqp.completion': {
'Meta': {'object_name': 'Completion'},
'authorized': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'characteristic_set': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sqp.CharacteristicSet']"}),
'complete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'out_of_date': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'potential_improvements': ('sqp.fields.PickledObjectField', [], {'null': 'True', 'blank': 'True'}),
'predictions': ('sqp.fields.PickledObjectField', [], {'null': 'True', 'blank': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sqp.Question']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'sqp.country': {
'Meta': {'ordering': "['name']", 'object_name': 'Country'},
'iso': ('django.db.models.fields.CharField', [], {'max_length': '2', 'primary_key': 'True'}),
'iso_three': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '80'})
},
'sqp.faq': {
'Meta': {'object_name': 'FAQ'},
'answer': ('django.db.models.fields.TextField', [], {}),
'asker': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'date_added': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.TextField', [], {})
},
'sqp.history': {
'Meta': {'object_name': 'History'},
'action_description': ('django.db.models.fields.TextField', [], {}),
'action_type': ('django.db.models.fields.IntegerField', [], {}),
'actor': ('django.db.models.fields.IntegerField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {}),
'object_model': ('django.db.models.fields.CharField', [], {'max_length': '90'}),
'object_name': ('django.db.models.fields.CharField', [], {'max_length': '170'}),
'previous_values': ('django.db.models.fields.TextField', [], {}),
'time': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'sqp.item': {
'Meta': {'ordering': "('study', 'admin_letter', 'admin_number', 'id')", 'object_name': 'Item'},
'admin': ('django.db.models.fields.CharField', [], {'max_length': '8'}),
'admin_letter': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True', 'blank': 'True'}),
'admin_number': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'created_item_set'", 'null': 'True', 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'long_name': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '8'}),
'study': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sqp.Study']"})
},
'sqp.itemgroup': {
'Meta': {'object_name': 'ItemGroup'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'items': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sqp.Item']", 'symmetrical': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '80'})
},
'sqp.label': {
'Meta': {'ordering': "('characteristic__name', 'id')", 'object_name': 'Label'},
'characteristic': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sqp.Characteristic']"}),
'code': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'compute': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': "'True'", 'max_length': '150'})
},
'sqp.language': {
'Meta': {'ordering': "('name',)", 'object_name': 'Language'},
'coders': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.User']", 'symmetrical': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'iso': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'iso2': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'sqp.parameter': {
'Meta': {'ordering': "['order']", 'object_name': 'Parameter'},
'description': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'views': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sqp.View']", 'through': "orm['sqp.Prediction']", 'symmetrical': 'False'})
},
'sqp.prediction': {
'Meta': {'object_name': 'Prediction'},
'function_name': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
'paramater': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sqp.Parameter']"}),
'view': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sqp.View']"})
},
'sqp.question': {
'Meta': {'ordering': "('item__study', 'country', 'language', 'item__admin_letter', 'item__admin_number', 'item__id')", 'object_name': 'Question'},
'answer_text': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sqp.Country']"}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'created_question_set'", 'null': 'True', 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'imported_from': ('django.db.models.fields.CharField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}),
'introduction_text': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'item': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sqp.Item']"}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sqp.Language']"}),
'rel': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'rel_hi': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'rel_lo': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'relz': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'relz_se': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'rfa_text': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'val': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'val_hi': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'val_lo': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'valz': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'valz_se': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'})
},
'sqp.questionbulkassignments': {
'Meta': {'object_name': 'QuestionBulkAssignments'},
'assignments': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sqp.UserQuestion']", 'symmetrical': 'False', 'blank': 'True'}),
'can_edit_details': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_edit_text': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sqp.Country']", 'null': 'True'}),
'has_been_run': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'item_group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sqp.ItemGroup']", 'null': 'True'}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sqp.Language']", 'null': 'True'}),
'last_run_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.User']", 'symmetrical': 'False'})
},
'sqp.questionbulkcreation': {
'Meta': {'object_name': 'QuestionBulkCreation'},
'copy_text_from_study': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sqp.Study']", 'null': 'True', 'blank': 'True'}),
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sqp.Country']"}),
'created_questions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sqp.Question']", 'symmetrical': 'False', 'blank': 'True'}),
'has_been_run': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'item_group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sqp.ItemGroup']"}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sqp.Language']"}),
'last_run_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'})
},
'sqp.study': {
'Meta': {'ordering': "('name',)", 'object_name': 'Study'},
'coders': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.User']", 'symmetrical': 'False', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'created_study_set'", 'null': 'True', 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '70'})
},
'sqp.usedcountry': {
'Meta': {'ordering': "['name']", 'object_name': 'UsedCountry', 'db_table': "'vw_country_question'"},
'iso': ('django.db.models.fields.CharField', [], {'max_length': '2', 'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '80'})
},
'sqp.userprofile': {
'Meta': {'object_name': 'UserProfile'},
'default_characteristic_set': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sqp.CharacteristicSet']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_trusted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'profile'", 'unique': 'True', 'to': "orm['auth.User']"})
},
'sqp.userquestion': {
'Meta': {'object_name': 'UserQuestion'},
'can_edit_details': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_edit_text': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sqp.Question']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'sqp.validationrule': {
'Meta': {'object_name': 'ValidationRule'},
'failure_message': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'rule': ('django.db.models.fields.TextField', [], {}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '7'})
},
'sqp.view': {
'Meta': {'ordering': "['order']", 'object_name': 'View'},
'expects': ('django.db.models.fields.CharField', [], {'default': "'tuple'", 'max_length': '20'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'template': ('django.db.models.fields.CharField', [], {'max_length': '140'})
},
'sqp.widget': {
'Meta': {'object_name': 'Widget'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['sqp']
| mit |
ArtUrim/elf2MockCreator | findUndefs.py | 1 | 2186 | # ########################################################################
# elf2MockCreator/findUndefs
#
# Find undefined symbols referred to in the ELF object
#
# Artur Lozinski (lozinski dot artur at gmail dot com)
# This code is in the public domain
# ########################################################################
import sys
from elftools.elf.elffile import ELFFile
from elftools.elf.sections import SymbolTableSection
class FindUndefs(object):
    """Find undefined symbols referenced in an ELF object file.

    An object file after compilation but before linking records every
    symbol that is referenced in the compiled code yet not defined in
    the compilation unit.  This class scans the file's symbol-table
    sections for such undefined symbols.

    Public methods:
        findUndef -- return a list of all undefined symbol names found
    """

    def __init__(self, fname):
        """Open *fname* and collect its symbol-table sections.

        fname: file name of the object file.
        Raises IOError/OSError when the file cannot be opened.
        """
        self.secSymbols = []
        # The handle must stay open for the object's lifetime: pyelftools
        # reads section contents lazily from the underlying stream.
        # Note: open() raises on failure, so no falsy-handle check is
        # needed (the original `else: raise IOError` branch was dead code).
        self.fh = open(fname, 'rb')
        self.elffile = ELFFile(self.fh)
        for section in self.elffile.iter_sections():
            if isinstance(section, SymbolTableSection):
                self.secSymbols.append(section)

    def __del__(self):
        # getattr guards against open() having raised in __init__, in
        # which case self.fh was never assigned and a plain attribute
        # access here would raise AttributeError during teardown.
        fh = getattr(self, 'fh', None)
        if fh:
            fh.close()

    def findUndef(self):
        """Return the undefined symbol names as a list of strings."""
        self.undefs = []
        for ss in self.secSymbols:
            self.undefs.extend(self.__findUndefInSec(ss))
        return self.undefs

    def __findUndefInSec(self, sec):
        # A symbol whose section index is SHN_UNDEF is referenced in this
        # unit but defined elsewhere; skip unnamed (e.g. section) symbols.
        return [sb.name for sb in sec.iter_symbols()
                if sb['st_shndx'] == 'SHN_UNDEF' and sb.name]
if __name__ == '__main__':
    # CLI entry point: print the undefined symbols of the file named on
    # the command line, one numbered symbol per line.
    if len(sys.argv) <= 1:
        print( "No input file" )
    else:
        fname = sys.argv[1]
        try:
            undef_names = FindUndefs(fname).findUndef()
            index = 0
            for name in undef_names:
                print( "{}: {}".format( index, name ) )
                index += 1
        except IOError:
            print( "Cannot open file {}".format( fname ) )
| gpl-3.0 |
SYSTRAN/geographic-api-python-client | systran_geographic_api/models/mail.py | 1 | 1743 | #!/usr/bin/env python
# coding: utf-8
"""
Copyright 2015 SYSTRAN Software, Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
class Mail(object):
    """Model for a mail address attached to a geographic entity.

    NOTE: This class is auto generated by the systran code generator
    program; keep manual edits minimal.
    """
    def __init__(self):
        """
        Systran model
        :param dict systran_types: The key is attribute name and the value is attribute type.
        :param dict attribute_map: The key is attribute name and the value is json key in definition.
        """
        self.systran_types = {
            'value': 'str',
            'type': 'str'
        }

        self.attribute_map = {
            'value': 'value',
            'type': 'type'
        }

        # Mail address
        self.value = None  # str

        # Professional, ...
        self.type = None  # str

    def __repr__(self):
        """Return a debug representation listing the payload attributes."""
        properties = []
        for p in self.__dict__:
            # Skip the two metadata dicts; only show real data fields.
            if p != 'systran_types' and p != 'attribute_map':
                properties.append('{prop}={val!r}'.format(prop=p, val=self.__dict__[p]))

        # Bug fix: the generated code interpolated __name__ (the module
        # name) where the class name was clearly intended.
        return '<{name} {props}>'.format(name=self.__class__.__name__,
                                         props=' '.join(properties))
| apache-2.0 |
Nuos/python-docx | tests/image/test_gif.py | 9 | 1224 | # encoding: utf-8
"""
Test suite for docx.image.gif module
"""
from __future__ import absolute_import, print_function
import pytest
from docx.compat import BytesIO
from docx.image.constants import MIME_TYPE
from docx.image.gif import Gif
from ..unitutil.mock import initializer_mock
class DescribeGif(object):
    """Unit tests for the docx.image.gif.Gif image-header parser."""

    def it_can_construct_from_a_gif_stream(self, from_stream_fixture):
        gif_stream, init_mock, width, height = from_stream_fixture
        image = Gif.from_stream(gif_stream)
        # The constructor must receive the parsed dimensions plus the
        # fixed 72x72 dpi that Gif assumes for GIF images.
        init_mock.assert_called_once_with(width, height, 72, 72)
        assert isinstance(image, Gif)

    def it_knows_its_content_type(self):
        assert Gif(None, None, None, None).content_type == MIME_TYPE.GIF

    def it_knows_its_default_ext(self):
        assert Gif(None, None, None, None).default_ext == 'gif'

    # fixtures -------------------------------------------------------

    @pytest.fixture
    def from_stream_fixture(self, Gif__init__):
        width, height = 42, 24
        # 'filler' padding followed by the little-endian 16-bit width
        # (0x002A == 42) and height (0x0018 == 24) header fields.
        stream = BytesIO(b'filler\x2A\x00\x18\x00')
        return stream, Gif__init__, width, height

    @pytest.fixture
    def Gif__init__(self, request):
        return initializer_mock(request, Gif)
| mit |
olasitarska/django | django/contrib/gis/forms/widgets.py | 39 | 3682 | from __future__ import unicode_literals
import logging
from django.conf import settings
from django.contrib.gis import gdal
from django.contrib.gis.geos import GEOSGeometry, GEOSException
from django.forms.widgets import Widget
from django.template import loader
from django.utils import six
from django.utils import translation
logger = logging.getLogger('django.contrib.gis')
class BaseGeometryWidget(Widget):
    """
    The base class for rich geometry widgets.
    Renders a map using the WKT of the geometry.
    """
    geom_type = 'GEOMETRY'
    map_srid = 4326
    map_width = 600
    map_height = 400
    display_raw = False
    supports_3d = False
    template_name = ''  # set on subclasses

    def __init__(self, attrs=None):
        # Seed widget attrs from the class-level defaults so subclasses
        # can override them declaratively; attrs passed by the caller
        # take precedence over the defaults.
        self.attrs = {}
        for key in ('geom_type', 'map_srid', 'map_width', 'map_height', 'display_raw'):
            self.attrs[key] = getattr(self, key)
        if attrs:
            self.attrs.update(attrs)

    def serialize(self, value):
        # Geometry -> WKT string for embedding in the rendered template.
        return value.wkt if value else ''

    def deserialize(self, value):
        # String -> GEOSGeometry in the widget's map SRID; logs and
        # returns None when the value cannot be parsed.
        try:
            return GEOSGeometry(value, self.map_srid)
        except (GEOSException, ValueError) as err:
            logger.error(
                "Error creating geometry from value '%s' (%s)" % (
                    value, err)
            )
            return None

    def render(self, name, value, attrs=None):
        # If a string reaches here (via a validation error on another
        # field) then just reconstruct the Geometry.
        if isinstance(value, six.string_types):
            value = self.deserialize(value)

        if value:
            # Check that srid of value and map match
            if value.srid != self.map_srid:
                try:
                    # Transform through GDAL/OGR into the map's SRID.
                    ogr = value.ogr
                    ogr.transform(self.map_srid)
                    value = ogr
                except gdal.OGRException as err:
                    # Transformation failed: log and fall through to
                    # render the untransformed value rather than raising.
                    logger.error(
                        "Error transforming geometry from srid '%s' to srid '%s' (%s)" % (
                            value.srid, self.map_srid, err)
                    )

        # Build the template context on top of the widget attrs.
        context = self.build_attrs(
            attrs,
            name=name,
            module='geodjango_%s' % name.replace('-', '_'),  # JS-safe
            serialized=self.serialize(value),
            geom_type=gdal.OGRGeomType(self.attrs['geom_type']),
            STATIC_URL=settings.STATIC_URL,
            LANGUAGE_BIDI=translation.get_language_bidi(),
        )
        return loader.render_to_string(self.template_name, context)
class OpenLayersWidget(BaseGeometryWidget):
    """Geometry widget rendered with the hosted OpenLayers 2.13 library."""
    template_name = 'gis/openlayers.html'

    class Media:
        # Remote OpenLayers build plus the local map-widget glue script.
        js = (
            'http://openlayers.org/api/2.13/OpenLayers.js',
            'gis/js/OLMapWidget.js',
        )
class OSMWidget(BaseGeometryWidget):
    """
    An OpenLayers/OpenStreetMap-based widget.
    """
    template_name = 'gis/openlayers-osm.html'
    default_lon = 5
    default_lat = 47

    class Media:
        js = (
            'http://openlayers.org/api/2.13/OpenLayers.js',
            'http://www.openstreetmap.org/openlayers/OpenStreetMap.js',
            'gis/js/OLMapWidget.js',
        )

    def __init__(self, attrs=None):
        # Let the base class populate self.attrs from its defaults first,
        # then layer the OSM-specific defaults and caller overrides on top.
        super(OSMWidget, self).__init__()
        self.attrs['default_lon'] = self.default_lon
        self.attrs['default_lat'] = self.default_lat
        if attrs:
            self.attrs.update(attrs)

    @property
    def map_srid(self):
        # Use the official spherical mercator projection SRID when GDAL is
        # available; otherwise, fallback to 900913.
        return 3857 if gdal.HAS_GDAL else 900913
| bsd-3-clause |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.