repo_name stringlengths 5 100 | path stringlengths 4 375 | copies stringclasses 991 values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15 values |
|---|---|---|---|---|---|
PythonNut/servo | python/mach/mach/mixin/process.py | 124 | 6762 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# This module provides mixins to perform process execution.
from __future__ import absolute_import, unicode_literals
import logging
import os
import subprocess
import sys
from mozprocess.processhandler import ProcessHandlerMixin
from .logging import LoggingMixin
# Perform detection of operating system environment. This is used by command
# execution. We only do this once to save redundancy. Yes, this can fail module
# loading. That is arguably OK.
# Pick the shell used for UNIX-style command execution, in priority order:
# a real login shell (SHELL), the MozillaBuild msys shell, then cmd.exe
# (COMSPEC) as a Windows last resort.
if 'SHELL' in os.environ:
    _current_shell = os.environ['SHELL']
elif 'MOZILLABUILD' in os.environ:
    _current_shell = os.environ['MOZILLABUILD'] + '/msys/bin/sh.exe'
elif 'COMSPEC' in os.environ:
    _current_shell = os.environ['COMSPEC']
else:
    raise Exception('Could not detect environment shell!')
# True when running under the MINGW32 msys environment; commands that
# require a UNIX shell get rewritten by _normalize_command in that case.
_in_msys = False
if os.environ.get('MSYSTEM', None) == 'MINGW32':
    _in_msys = True
    # msys may report SHELL without the .exe extension Windows needs.
    if not _current_shell.lower().endswith('.exe'):
        _current_shell += '.exe'
class ProcessExecutionMixin(LoggingMixin):
    """Mix-in that provides process execution functionality."""

    def run_process(self, args=None, cwd=None, append_env=None,
        explicit_env=None, log_name=None, log_level=logging.INFO,
        line_handler=None, require_unix_environment=False,
        ensure_exit_code=0, ignore_children=False, pass_thru=False):
        """Runs a single process to completion.

        Takes a list of arguments to run where the first item is the
        executable. Runs the command in the specified directory and
        with optional environment variables.

        append_env -- Dict of environment variables to append to the current
            set of environment variables.
        explicit_env -- Dict of environment variables to set for the new
            process. Any existing environment variables will be ignored.

        require_unix_environment if True will ensure the command is executed
        within a UNIX environment. Basically, if we are on Windows, it will
        execute the command via an appropriate UNIX-like shell.

        ignore_children is proxied to mozprocess's ignore_children.

        ensure_exit_code is used to ensure the exit code of a process matches
        what is expected. If it is an integer, we raise an Exception if the
        exit code does not match this value. If it is True, we ensure the exit
        code is 0. If it is False, we don't perform any exit code validation.

        pass_thru is a special execution mode where the child process inherits
        this process's standard file handles (stdin, stdout, stderr) as well
        as additional file descriptors. It should be used for interactive
        processes where buffering from mozprocess could be an issue. pass_thru
        does not use mozprocess. Therefore, arguments like log_name,
        line_handler, and ignore_children have no effect.
        """
        args = self._normalize_command(args, require_unix_environment)
        self.log(logging.INFO, 'new_process', {'args': args}, ' '.join(args))

        def handleLine(line):
            # Converts str to unicode on Python 2 and bytes to str on Python 3.
            if isinstance(line, bytes):
                line = line.decode(sys.stdout.encoding or 'utf-8', 'replace')
            if line_handler:
                line_handler(line)
            if not log_name:
                return
            self.log(log_level, log_name, {'line': line.rstrip()}, '{line}')

        use_env = {}
        if explicit_env:
            use_env = explicit_env
        else:
            use_env.update(os.environ)
            if append_env:
                use_env.update(append_env)
        self.log(logging.DEBUG, 'process', {'env': use_env}, 'Environment: {env}')

        # There is a bug in Python 2's subprocess where it doesn't like
        # unicode types in environment variables, so ensure all unicode
        # values are converted to UTF-8 encoded bytes. On Python 3 the
        # `unicode` builtin does not exist (referencing it unconditionally,
        # as this code previously did, raises NameError) and subprocess
        # expects str, so the normalization only applies on Python 2.
        if sys.version_info[0] == 2:
            normalized_env = {}
            for k, v in use_env.items():
                if isinstance(k, unicode):
                    k = k.encode('utf-8', 'strict')
                if isinstance(v, unicode):
                    v = v.encode('utf-8', 'strict')
                normalized_env[k] = v
            use_env = normalized_env

        if pass_thru:
            proc = subprocess.Popen(args, cwd=cwd, env=use_env)
            status = None
            # Leave it to the subprocess to handle Ctrl+C. If it terminates as
            # a result of Ctrl+C, proc.wait() will return a status code, and,
            # we get out of the loop. If it doesn't, like e.g. gdb, we continue
            # waiting.
            while status is None:
                try:
                    status = proc.wait()
                except KeyboardInterrupt:
                    pass
        else:
            p = ProcessHandlerMixin(args, cwd=cwd, env=use_env,
                processOutputLine=[handleLine], universal_newlines=True,
                ignore_children=ignore_children)
            p.run()
            p.processOutput()
            status = p.wait()

        if ensure_exit_code is False:
            return status

        if ensure_exit_code is True:
            ensure_exit_code = 0

        if status != ensure_exit_code:
            # Report both codes; the old message always claimed "non-0"
            # even when a non-zero exit code was the expected one.
            raise Exception('Process executed with exit code %s (expected %s): %s'
                            % (status, ensure_exit_code, args))

        return status

    def _normalize_command(self, args, require_unix_environment):
        """Adjust command arguments to run in the necessary environment.

        This exists mainly to facilitate execution of programs requiring a *NIX
        shell when running on Windows. The caller specifies whether a shell
        environment is required. If it is and we are running on Windows but
        aren't running in the UNIX-like msys environment, then we rewrite the
        command to execute via a shell.
        """
        assert isinstance(args, list) and len(args)

        if not require_unix_environment or not _in_msys:
            return args

        # Always munge Windows-style into Unix style for the command.
        prog = args[0].replace('\\', '/')

        # PyMake removes the C: prefix. But, things seem to work here
        # without it. Not sure what that's about.

        # We run everything through the msys shell. We need to use
        # '-c' and pass all the arguments as one argument because that is
        # how sh works.
        cline = subprocess.list2cmdline([prog] + args[1:])
        return [_current_shell, '-c', cline]
| mpl-2.0 |
hwine/build-relengapi | relengapi/blueprints/slaveloan/model.py | 2 | 5153 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import
import sqlalchemy as sa
from sqlalchemy.orm import relationship
from sqlalchemy.schema import Index
from relengapi.blueprints.slaveloan import rest
from relengapi.lib import db
from relengapi.util import tz
_tbl_prefix = 'slaveloan_'
class Machines(db.declarative_base('relengapi'), db.UniqueMixin):
    """Database model for a machine, unique by FQDN."""

    __tablename__ = _tbl_prefix + 'machines'
    id = sa.Column(sa.Integer, primary_key=True)
    fqdn = sa.Column(sa.String(255), nullable=False, unique=True)
    ipaddress = sa.Column(sa.String(18), unique=True)
    loan = relationship("Loans", backref="machine")

    @classmethod
    def unique_hash(cls, fqdn, *args, **kwargs):
        # The FQDN alone identifies a machine for UniqueMixin purposes.
        return fqdn

    @classmethod
    def unique_filter(cls, query, fqdn, *args, **kwargs):
        return query.filter(cls.fqdn == fqdn)

    def to_json(self):
        """Return this row as a plain dict."""
        return {'id': self.id, 'fqdn': self.fqdn, 'ipaddress': self.ipaddress}

    def to_wsme(self):
        """Return this row as its REST (WSME) type."""
        return rest.Machine(**self.to_json())
class Humans(db.declarative_base('relengapi'), db.UniqueMixin):
    """Database model for a person, unique by LDAP identifier."""

    __tablename__ = _tbl_prefix + 'humans'
    id = sa.Column(sa.Integer, primary_key=True)
    ldap = sa.Column(sa.String(255), nullable=False, unique=True)
    bugzilla = sa.Column(sa.String(255), nullable=False)
    loans = relationship("Loans", backref="human")

    @classmethod
    def unique_hash(cls, ldap, *args, **kwargs):
        # The LDAP identifier alone identifies a person.
        return ldap

    @classmethod
    def unique_filter(cls, query, ldap, *args, **kwargs):
        return query.filter(cls.ldap == ldap)

    def to_json(self):
        """Return this row as a plain dict (note the *_email key names)."""
        return {'id': self.id,
                'ldap_email': self.ldap,
                'bugzilla_email': self.bugzilla}

    def to_wsme(self):
        """Return this row as its REST (WSME) type."""
        return rest.Human(**self.to_json())
class Loans(db.declarative_base('relengapi')):
    """Database model linking a human to a loan, optionally with a machine."""

    __tablename__ = _tbl_prefix + 'loans'
    id = sa.Column(sa.Integer, primary_key=True)
    status = sa.Column(sa.String(50), nullable=False)
    bug_id = sa.Column(sa.Integer, nullable=True)
    human_id = sa.Column(sa.Integer,
                         sa.ForeignKey(_tbl_prefix + 'humans.id'),
                         nullable=False)
    machine_id = sa.Column(sa.Integer,
                           sa.ForeignKey(_tbl_prefix + 'machines.id'),
                           nullable=True)
    history = relationship("History", backref="for_loan")
    manual_actions = relationship("ManualActions", backref="for_loan")
    # Backrefs:
    #   human (Humans)
    #   machine (Machines)

    def to_json(self, sub_meth="to_json"):
        """Serialize this loan; nested rows are serialized via *sub_meth*.

        `machine` is None when no machine is attached to the loan.
        """
        data = {
            'id': self.id,
            'status': self.status,
            'bug_id': self.bug_id,
            'human': getattr(self.human, sub_meth)(),
            'machine': None,
        }
        if self.machine_id:
            data['machine'] = getattr(self.machine, sub_meth)()
        return data

    def to_wsme(self):
        """Return this row as its REST (WSME) type."""
        return rest.Loan(**self.to_json(sub_meth="to_wsme"))
class History(db.declarative_base('relengapi')):
    """A timestamped free-text message attached to a loan."""
    __tablename__ = _tbl_prefix + 'history'
    id = sa.Column(sa.Integer, primary_key=True)
    loan_id = sa.Column(sa.Integer,
                        sa.ForeignKey(_tbl_prefix + 'loans.id'),
                        nullable=False)
    # Stored as UTC; defaults to the row-insertion time.
    timestamp = sa.Column(db.UTCDateTime(timezone=True),
                          default=tz.utcnow,
                          nullable=False)
    msg = sa.Column(sa.Text, nullable=False)
    # Backrefs
    # # for_loan (Loans)
    def to_json(self):
        # NOTE: timestamp stays a datetime object here (not a string);
        # callers/serializers are expected to handle the conversion.
        return dict(id=self.id, loan_id=self.loan_id,
                    timestamp=self.timestamp,
                    msg=self.msg)
    def to_wsme(self):
        # Build the REST (WSME) type from the json-dict representation.
        return rest.HistoryEntry(**self.to_json())
class ManualActions(db.declarative_base('relengapi')):
    """A manual step required for a loan, with start/completion tracking."""
    __tablename__ = _tbl_prefix + 'manualactions'
    id = sa.Column(sa.Integer, primary_key=True)
    loan_id = sa.Column(sa.Integer,
                        sa.ForeignKey(_tbl_prefix + 'loans.id'),
                        nullable=False)
    # When the action was created/started; defaults to insertion time (UTC).
    timestamp_start = sa.Column(db.UTCDateTime(timezone=True),
                                default=tz.utcnow,
                                nullable=False)
    # NULL until the action is completed.
    timestamp_complete = sa.Column(db.UTCDateTime(timezone=True),
                                   default=None,
                                   nullable=True)
    # Identifier of whoever completed the action; NULL while pending.
    complete_by = sa.Column(sa.String(255), nullable=True)
    msg = sa.Column(sa.Text, nullable=False)
    # Backrefs
    # # for_loan (Loan this applies to)
    __table_args__ = (Index("loan_id_idx", "loan_id"), )
    def to_json(self):
        # Timestamps stay datetime objects (or None); callers serialize them.
        return dict(id=self.id, loan_id=self.loan_id,
                    timestamp_start=self.timestamp_start,
                    timestamp_complete=self.timestamp_complete,
                    complete_by=self.complete_by,
                    msg=self.msg)
    def to_wsme(self):
        # Build the REST (WSME) type from the json-dict representation.
        return rest.ManualAction(**self.to_json())
| mpl-2.0 |
0x90sled/catapult | third_party/gsutil/third_party/boto/tests/unit/manage/test_ssh.py | 114 | 2004 | #!/usr/bin/env python
# Copyright (c) 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
try:
import paramiko
from boto.manage.cmdshell import SSHClient
except ImportError:
paramiko = None
SSHClient = None
from tests.compat import mock, unittest
class TestSSHTimeout(unittest.TestCase):
    """Verify SSHClient forwards its ``timeout`` argument to paramiko's
    ``connect()`` (defaulting to None when not given)."""

    @unittest.skipIf(not paramiko, 'Paramiko missing')
    def test_timeout(self):
        # Patch paramiko so no real SSH connection is attempted. The
        # original code left paramiko.SSHClient and
        # paramiko.RSAKey.from_private_key_file permanently monkey-patched,
        # leaking the mocks into any test that runs afterwards; restore
        # them in a finally block.
        client_tmp = paramiko.SSHClient
        rsa_tmp = paramiko.RSAKey.from_private_key_file

        def client_mock():
            client = client_tmp()
            client.connect = mock.Mock(name='connect')
            return client

        paramiko.SSHClient = client_mock
        paramiko.RSAKey.from_private_key_file = mock.Mock()
        try:
            server = mock.Mock()

            # No timeout given -> connect() called with timeout=None.
            test = SSHClient(server)
            self.assertEqual(
                test._ssh_client.connect.call_args[1]['timeout'], None)

            # Explicit timeout is passed straight through.
            test2 = SSHClient(server, timeout=30)
            self.assertEqual(
                test2._ssh_client.connect.call_args[1]['timeout'], 30)
        finally:
            paramiko.SSHClient = client_tmp
            paramiko.RSAKey.from_private_key_file = rsa_tmp
| bsd-3-clause |
gpoesia/servo | tests/wpt/web-platform-tests/tools/lint/tests/test_path_lints.py | 93 | 1350 | from __future__ import unicode_literals
from ..lint import check_path
from .base import check_errors
import pytest
import six
def test_allowed_path_length():
basename = 29 * "test/"
for idx in range(5):
filename = basename + idx * "a"
errors = check_path("/foo/", filename, False)
check_errors(errors)
assert errors == []
def test_forbidden_path_length():
basename = 29 * "test/"
for idx in range(5, 10):
filename = basename + idx * "a"
message = "/%s longer than maximum path length (%s > 150)" % (filename, 146 + idx)
errors = check_path("/foo/", filename, False)
check_errors(errors)
assert errors == [("PATH LENGTH", message, filename, None)]
@pytest.mark.parametrize("path_ending,generated", [(".worker.html", ".worker.js"),
(".any.worker.html", ".any.js"),
(".any.html", ".any.js")])
def test_forbidden_path_endings(path_ending, generated):
path = "/test/test" + path_ending
message = ("path ends with %s which collides with generated tests from %s files" %
(path_ending, generated))
errors = check_path("/foo/", path, False)
check_errors(errors)
assert errors == [("WORKER COLLISION", message, path, None)]
| mpl-2.0 |
zhangtuoparis13/Vintageous | ex/plat/windows.py | 9 | 1838 | import subprocess
from subprocess import PIPE
import os
import tempfile
try:
import ctypes
except ImportError:
import plat
if plat.HOST_PLATFORM == plat.WINDOWS:
raise EnvironmentError("ctypes module missing for Windows.")
ctypes = None
def get_startup_info():
    """Return a STARTUPINFO configured to hide the child process window.

    Windows-only: subprocess.STARTUPINFO exists only on Windows.
    """
    info = subprocess.STARTUPINFO()
    info.dwFlags |= subprocess.STARTF_USESHOWWINDOW
    return info
def run_and_wait(view, cmd):
    """Run *cmd* via cmd.exe in a visible console and block until the
    user dismisses the trailing ``pause`` prompt."""
    shell_cmd = ['cmd.exe', '/c', cmd + '&& pause']
    proc = subprocess.Popen(shell_cmd)
    proc.wait()
def run_and_read(view, cmd):
    """Run *cmd* through a hidden cmd.exe and return its decoded output.

    Returns stdout if non-empty, otherwise stderr, decoded with the OEM
    code page and with CRLF normalized to LF; '' when decoding fails.
    """
    proc = subprocess.Popen(['cmd.exe', '/c', cmd],
                            stdout=PIPE,
                            stderr=PIPE,
                            shell=True,
                            startupinfo=get_startup_info())
    out, err = proc.communicate()
    data = out or err
    try:
        return data.decode(get_oem_cp()).replace('\r\n', '\n')
    except AttributeError:
        return ''
def filter_region(view, txt, command):
    """Pipe *txt* through the external *command* and return its output.

    Writes *txt* to a temporary file, generates a one-off .bat script
    that ``type``s that file into *command*, runs it with a hidden
    window, and returns the decoded output (stdout if any, else stderr)
    with CRLF normalized, the final character dropped, and surrounding
    whitespace stripped. The temporary files are always removed.
    """
    contents = None
    script = None
    try:
        contents = tempfile.NamedTemporaryFile(suffix='.txt', delete=False)
        contents.write(txt.encode('utf-8'))
        contents.close()

        script = tempfile.NamedTemporaryFile(suffix='.bat', delete=False)
        script.write(('@echo off\ntype %s | %s' % (contents.name, command)).encode('utf-8'))
        script.close()

        p = subprocess.Popen([script.name],
                             stdout=PIPE,
                             stderr=PIPE,
                             startupinfo=get_startup_info())
        # BUG FIX: this read `our, err = p.communicate()`, so the later
        # reference to `out` raised NameError instead of returning output.
        out, err = p.communicate()
        return (out or err).decode(get_oem_cp()).replace('\r\n', '\n')[:-1].strip()
    finally:
        # Only remove files that were actually created; the original
        # removed both unconditionally and raised NameError if an earlier
        # step had failed before creating them.
        if script is not None:
            os.remove(script.name)
        if contents is not None:
            os.remove(contents.name)
def get_oem_cp():
    """Return Windows' current OEM code page number as a string,
    suitable for use as a codec name for bytes.decode()."""
    return str(ctypes.windll.kernel32.GetOEMCP())
| mit |
aurelijusb/arangodb | 3rdParty/V8-4.3.61/third_party/python_26/Lib/user.py | 313 | 1627 | """Hook to allow user-specified customization code to run.
As a policy, Python doesn't run user-specified code on startup of
Python programs (interactive sessions execute the script specified in
the PYTHONSTARTUP environment variable if it exists).
However, some programs or sites may find it convenient to allow users
to have a standard customization file, which gets run when a program
requests it. This module implements such a mechanism. A program
that wishes to use the mechanism must execute the statement
import user
The user module looks for a file .pythonrc.py in the user's home
directory and if it can be opened, execfile()s it in its own global
namespace. Errors during this phase are not caught; that's up to the
program that imports the user module, if it wishes.
The user's .pythonrc.py could conceivably test for sys.version if it
wishes to do different things depending on the Python version.
"""
# Python 2 only: emit the py3k removal warning, then drop the helper name
# so it doesn't linger in this module's namespace.
from warnings import warnpy3k
warnpy3k("the user module has been removed in Python 3.0", stacklevel=2)
del warnpy3k
import os
# Locate the user's home directory: $HOME first, then platform fallbacks.
home = os.curdir # Default
if 'HOME' in os.environ:
    home = os.environ['HOME']
elif os.name == 'posix':
    home = os.path.expanduser("~/")
elif os.name == 'nt': # Contributed by Jeff Bauer
    if 'HOMEPATH' in os.environ:
        if 'HOMEDRIVE' in os.environ:
            home = os.environ['HOMEDRIVE'] + os.environ['HOMEPATH']
        else:
            home = os.environ['HOMEPATH']
pythonrc = os.path.join(home, ".pythonrc.py")
# Execute ~/.pythonrc.py in this module's globals if it can be opened.
# Only IOError from open() is swallowed; errors raised while running the
# file itself propagate to whoever imported this module.
try:
    f = open(pythonrc)
except IOError:
    pass
else:
    f.close()
    execfile(pythonrc)  # Python 2 builtin; this module is py2-only.
| apache-2.0 |
jamesls/boto | boto/mturk/qualification.py | 20 | 6762 | # Copyright (c) 2008 Chris Moyer http://coredumped.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
class Qualifications:
    """A collection of qualification requirements for a HIT.

    The Mechanical Turk API accepts at most ten requirements per HIT.
    """

    def __init__(self, requirements=None):
        # Use identity comparison with None (`is None`, not `== None`);
        # the None default also avoids the shared mutable-default pitfall.
        if requirements is None:
            requirements = []
        self.requirements = requirements

    def add(self, req):
        """Append a single requirement object to the collection."""
        self.requirements.append(req)

    def get_as_params(self):
        """Flatten every requirement into MTurk request parameters.

        Each requirement's parameters are namespaced as
        'QualificationRequirement.N.<name>', with N starting at 1.
        """
        params = {}
        assert(len(self.requirements) <= 10)
        for n, req in enumerate(self.requirements):
            reqparams = req.get_as_params()
            for rp in reqparams:
                params['QualificationRequirement.%s.%s' % ((n+1), rp)] = reqparams[rp]
        return params
class Requirement(object):
    """A single qualification requirement.

    Serializes itself to the QualificationTypeId / Comparator /
    IntegerValue / RequiredToPreview request parameters expected by the
    Mechanical Turk API.
    """

    def __init__(self, qualification_type_id, comparator, integer_value=None, required_to_preview=False):
        self.qualification_type_id = qualification_type_id
        self.comparator = comparator
        self.integer_value = integer_value
        self.required_to_preview = required_to_preview

    def get_as_params(self):
        """Return this requirement as a dict of request parameters."""
        params = {}
        params["QualificationTypeId"] = self.qualification_type_id
        params["Comparator"] = self.comparator
        # An 'Exists' comparison carries no integer operand.
        if self.integer_value is not None and self.comparator != 'Exists':
            params["IntegerValue"] = self.integer_value
        if self.required_to_preview:
            params["RequiredToPreview"] = "true"
        return params
class PercentAssignmentsSubmittedRequirement(Requirement):
    """Percentage of accepted assignments the Worker has submitted;
    an integer between 0 and 100."""

    def __init__(self, comparator, integer_value, required_to_preview=False):
        super(PercentAssignmentsSubmittedRequirement, self).__init__(
            qualification_type_id="00000000000000000000",
            comparator=comparator,
            integer_value=integer_value,
            required_to_preview=required_to_preview)
class PercentAssignmentsAbandonedRequirement(Requirement):
    """Percentage of accepted assignments the Worker has abandoned
    (allowed the deadline to elapse); an integer between 0 and 100."""

    def __init__(self, comparator, integer_value, required_to_preview=False):
        super(PercentAssignmentsAbandonedRequirement, self).__init__(
            qualification_type_id="00000000000000000070",
            comparator=comparator,
            integer_value=integer_value,
            required_to_preview=required_to_preview)
class PercentAssignmentsReturnedRequirement(Requirement):
    """Percentage of accepted assignments the Worker has returned;
    an integer between 0 and 100."""

    def __init__(self, comparator, integer_value, required_to_preview=False):
        super(PercentAssignmentsReturnedRequirement, self).__init__(
            qualification_type_id="000000000000000000E0",
            comparator=comparator,
            integer_value=integer_value,
            required_to_preview=required_to_preview)
class PercentAssignmentsApprovedRequirement(Requirement):
    """Percentage of the Worker's submitted assignments that were
    approved by the Requester; an integer between 0 and 100."""

    def __init__(self, comparator, integer_value, required_to_preview=False):
        super(PercentAssignmentsApprovedRequirement, self).__init__(
            qualification_type_id="000000000000000000L0",
            comparator=comparator,
            integer_value=integer_value,
            required_to_preview=required_to_preview)
class PercentAssignmentsRejectedRequirement(Requirement):
    """Percentage of the Worker's submitted assignments that were
    rejected by the Requester; an integer between 0 and 100."""

    def __init__(self, comparator, integer_value, required_to_preview=False):
        super(PercentAssignmentsRejectedRequirement, self).__init__(
            qualification_type_id="000000000000000000S0",
            comparator=comparator,
            integer_value=integer_value,
            required_to_preview=required_to_preview)
class NumberHitsApprovedRequirement(Requirement):
    """Total number of the Worker's submitted HITs that have been
    approved; an integer greater than or equal to 0."""

    def __init__(self, comparator, integer_value, required_to_preview=False):
        super(NumberHitsApprovedRequirement, self).__init__(
            qualification_type_id="00000000000000000040",
            comparator=comparator,
            integer_value=integer_value,
            required_to_preview=required_to_preview)
class LocaleRequirement(Requirement):
    """A qualification requirement based on the Worker's location, as
    specified by the Worker when creating their Mechanical Turk account."""

    def __init__(self, comparator, locale, required_to_preview=False):
        super(LocaleRequirement, self).__init__(
            qualification_type_id="00000000000000000071",
            comparator=comparator,
            integer_value=None,
            required_to_preview=required_to_preview)
        self.locale = locale

    def get_as_params(self):
        """Return request parameters; the locale is sent as a country
        code under 'LocaleValue.Country' instead of an IntegerValue."""
        params = {"QualificationTypeId": self.qualification_type_id,
                  "Comparator": self.comparator}
        params['LocaleValue.Country'] = self.locale
        if self.required_to_preview:
            params['RequiredToPreview'] = "true"
        return params
class AdultRequirement(Requirement):
    """Requires Workers to acknowledge they are over 18 and agree to work
    on potentially offensive content; boolean value, 1 (required) or
    0 (not required, the default)."""

    def __init__(self, comparator, integer_value, required_to_preview=False):
        super(AdultRequirement, self).__init__(
            qualification_type_id="00000000000000000060",
            comparator=comparator,
            integer_value=integer_value,
            required_to_preview=required_to_preview)
| mit |
40223249-1/-w16b_test | static/Brython3.1.1-20150328-091302/Lib/tempfile.py | 728 | 22357 | """Temporary files.
This module provides generic, low- and high-level interfaces for
creating temporary files and directories. The interfaces listed
as "safe" just below can be used without fear of race conditions.
Those listed as "unsafe" cannot, and are provided for backward
compatibility only.
This module also provides some data items to the user:
TMP_MAX - maximum number of names that will be tried before
giving up.
tempdir - If this is set to a string before the first use of
any routine from this module, it will be considered as
another candidate location to store temporary files.
"""
# Names exported by `from tempfile import *`; everything prefixed with an
# underscore below is an internal helper.
__all__ = [
    "NamedTemporaryFile", "TemporaryFile", # high level safe interfaces
    "SpooledTemporaryFile", "TemporaryDirectory",
    "mkstemp", "mkdtemp", # low level safe interfaces
    "mktemp", # deprecated unsafe interface
    "TMP_MAX", "gettempprefix", # constants
    "tempdir", "gettempdir"
    ]
# Imports.
import warnings as _warnings
import sys as _sys
import io as _io
import os as _os
import errno as _errno
from random import Random as _Random
# Define _set_cloexec: marks a file descriptor close-on-exec so it is not
# inherited by child processes. A no-op on platforms without fcntl.
try:
    import fcntl as _fcntl
except ImportError:
    def _set_cloexec(fd):
        pass
else:
    def _set_cloexec(fd):
        try:
            flags = _fcntl.fcntl(fd, _fcntl.F_GETFD, 0)
        except OSError:
            # Best effort: leave the descriptor as-is if we can't read flags.
            pass
        else:
            # flags read successfully, modify
            flags |= _fcntl.FD_CLOEXEC
            _fcntl.fcntl(fd, _fcntl.F_SETFD, flags)
# Use the real _thread module when available, else the single-threaded stub.
try:
    import _thread
except ImportError:
    import _dummy_thread as _thread
_allocate_lock = _thread.allocate_lock
# O_EXCL makes os.open fail if the file already exists, which is what
# guarantees race-free creation of temporary files.
_text_openflags = _os.O_RDWR | _os.O_CREAT | _os.O_EXCL
if hasattr(_os, 'O_NOINHERIT'):
    _text_openflags |= _os.O_NOINHERIT
if hasattr(_os, 'O_NOFOLLOW'):
    _text_openflags |= _os.O_NOFOLLOW
_bin_openflags = _text_openflags
if hasattr(_os, 'O_BINARY'):
    _bin_openflags |= _os.O_BINARY
# Maximum number of candidate names tried before giving up.
if hasattr(_os, 'TMP_MAX'):
    TMP_MAX = _os.TMP_MAX
else:
    TMP_MAX = 10000
# Although it does not have an underscore for historical reasons, this
# variable is an internal implementation detail (see issue 10354).
template = "tmp"
# Internal routines.
# Guards the one-time lazy initialization done in gettempdir() and
# _get_candidate_names().
_once_lock = _allocate_lock()
# Pick a stat-like callable; lstat is preferred so symlinks themselves are
# tested rather than their targets.
if hasattr(_os, "lstat"):
    _stat = _os.lstat
elif hasattr(_os, "stat"):
    _stat = _os.stat
else:
    # Fallback. All we need is something that raises OSError if the
    # file doesn't exist.
    def _stat(fn):
        f = open(fn)
        f.close()
def _exists(fn):
    """Return True if *fn* exists according to _stat, False on OSError."""
    try:
        _stat(fn)
    except OSError:
        return False
    else:
        return True
class _RandomNameSequence:
    """An endless iterator of unpredictable six-character strings that
    can safely be embedded in file names.

    A fresh random generator is created after a fork so parent and child
    do not produce the same names. Multiple threads may safely share one
    instance.
    """

    characters = "abcdefghijklmnopqrstuvwxyz0123456789_"

    @property
    def rng(self):
        # Recreate the generator whenever the pid changes (i.e. after fork).
        pid = _os.getpid()
        if getattr(self, '_rng_pid', None) != pid:
            self._rng = _Random()
            self._rng_pid = pid
        return self._rng

    def __iter__(self):
        return self

    def __next__(self):
        pick = self.rng.choice
        alphabet = self.characters
        return ''.join(pick(alphabet) for _ in range(6))
def _candidate_tempdir_list():
    """Return the list of directories _get_default_tempdir will try, in
    priority order: environment overrides, OS conventions, and finally
    the current directory."""
    candidates = []

    # Environment variables take precedence.
    for envname in ('TMPDIR', 'TEMP', 'TMP'):
        value = _os.getenv(envname)
        if value:
            candidates.append(value)

    # OS-specific conventional locations.
    if _os.name == 'nt':
        candidates += [r'c:\temp', r'c:\tmp', r'\temp', r'\tmp']
    else:
        candidates += ['/tmp', '/var/tmp', '/usr/tmp']

    # As a last resort, the current directory.
    try:
        candidates.append(_os.getcwd())
    except (AttributeError, OSError):
        candidates.append(_os.curdir)

    return candidates
def _get_default_tempdir():
    """Calculate the default directory to use for temporary files.
    This routine should be called exactly once.
    We determine whether or not a candidate temp dir is usable by
    trying to create and write to a file in that directory. If this
    is successful, the test file is deleted. To prevent denial of
    service, the name of the test file must be randomized."""
    namer = _RandomNameSequence()
    dirlist = _candidate_tempdir_list()
    for dir in dirlist:
        if dir != _os.curdir:
            dir = _os.path.normcase(_os.path.abspath(dir))
        # Try only a few names per directory.
        for seq in range(100):
            name = next(namer)
            filename = _os.path.join(dir, name)
            try:
                # _bin_openflags includes O_CREAT|O_EXCL, so this fails
                # rather than reusing an existing file.
                fd = _os.open(filename, _bin_openflags, 0o600)
                try:
                    try:
                        with _io.open(fd, 'wb', closefd=False) as fp:
                            fp.write(b'blat')
                    finally:
                        _os.close(fd)
                finally:
                    # Always remove the probe file before returning.
                    _os.unlink(filename)
                return dir
            except FileExistsError:
                # Name collision: try another random name in this directory.
                pass
            except OSError:
                break # no point trying more names in this directory
    raise FileNotFoundError(_errno.ENOENT,
                            "No usable temporary directory found in %s" %
                            dirlist)
# Lazily-created module-wide name generator, shared by all interfaces.
_name_sequence = None
def _get_candidate_names():
    """Common setup sequence for all user-callable interfaces."""
    global _name_sequence
    # Double-checked locking: re-test under _once_lock so only one thread
    # creates the shared sequence.
    if _name_sequence is None:
        _once_lock.acquire()
        try:
            if _name_sequence is None:
                _name_sequence = _RandomNameSequence()
        finally:
            _once_lock.release()
    return _name_sequence
def _mkstemp_inner(dir, pre, suf, flags):
    """Code common to mkstemp, TemporaryFile, and NamedTemporaryFile.

    Tries up to TMP_MAX random names in *dir*; returns (fd, abspath) of
    the created file, which is opened 0o600 and marked close-on-exec.
    """
    names = _get_candidate_names()
    for seq in range(TMP_MAX):
        name = next(names)
        file = _os.path.join(dir, pre + name + suf)
        try:
            # flags include O_CREAT|O_EXCL, so open fails instead of
            # silently reusing an existing file.
            fd = _os.open(file, flags, 0o600)
            _set_cloexec(fd)
            return (fd, _os.path.abspath(file))
        except FileExistsError:
            continue # try again
        except PermissionError:
            # This exception is thrown when a directory with the chosen name
            # already exists on windows.
            if _os.name == 'nt':
                continue
            else:
                raise
    raise FileExistsError(_errno.EEXIST,
                          "No usable temporary file name found")
# User visible interfaces.
def gettempprefix():
    """Return the default prefix for generated temporary file names
    (the module-level ``template``, currently "tmp")."""
    return template
# Computed lazily on first use; users may also assign it directly to
# override the default temporary directory.
tempdir = None
def gettempdir():
    """Accessor for tempfile.tempdir."""
    global tempdir
    # Double-checked locking: compute the default exactly once even with
    # concurrent callers.
    if tempdir is None:
        _once_lock.acquire()
        try:
            if tempdir is None:
                tempdir = _get_default_tempdir()
        finally:
            _once_lock.release()
    return tempdir
def mkstemp(suffix="", prefix=template, dir=None, text=False):
    """Create a unique temporary file and return ``(fd, name)``.

    ``fd`` is an OS-level descriptor from os.open and ``name`` is the
    file's path. ``suffix`` and ``prefix`` decorate the generated name;
    ``dir`` overrides the default temporary directory; ``text`` opens
    the file in text mode instead of the default binary mode (a
    distinction only some operating systems make).

    The file is readable and writable only by the creating user ID, is
    executable by no one where the OS uses permission bits for that, and
    its descriptor is not inherited by child processes.

    The caller is responsible for deleting the file when done with it.
    """
    if dir is None:
        dir = gettempdir()
    flags = _text_openflags if text else _bin_openflags
    return _mkstemp_inner(dir, prefix, suffix, flags)
def mkdtemp(suffix="", prefix=template, dir=None):
    """User-callable function to create and return a unique temporary
    directory. The return value is the pathname of the directory.
    Arguments are as for mkstemp, except that the 'text' argument is
    not accepted.
    The directory is readable, writable, and searchable only by the
    creating user.
    Caller is responsible for deleting the directory when done with it.
    """
    if dir is None:
        dir = gettempdir()
    names = _get_candidate_names()
    for seq in range(TMP_MAX):
        name = next(names)
        file = _os.path.join(dir, prefix + name + suffix)
        try:
            # 0o700: private to the creating user; mkdir fails if the
            # name already exists, making creation race-free.
            _os.mkdir(file, 0o700)
            return file
        except FileExistsError:
            continue # try again
    raise FileExistsError(_errno.EEXIST,
                          "No usable temporary directory name found")
def mktemp(suffix="", prefix=template, dir=None):
    """User-callable function to return a unique temporary file name. The
    file is not created.
    Arguments are as for mkstemp, except that the 'text' argument is
    not accepted.
    This function is unsafe and should not be used. The file name
    refers to a file that did not exist at some point, but by the time
    you get around to creating it, someone else may have beaten you to
    the punch.
    """
    ## from warnings import warn as _warn
    ## _warn("mktemp is a potential security risk to your program",
    ## RuntimeWarning, stacklevel=2)
    if dir is None:
        dir = gettempdir()
    names = _get_candidate_names()
    # Inherently racy (TOCTOU): the name is only known to be unused at the
    # moment _exists() returns False. Prefer mkstemp()/mkdtemp().
    for seq in range(TMP_MAX):
        name = next(names)
        file = _os.path.join(dir, prefix + name + suffix)
        if not _exists(file):
            return file
    raise FileExistsError(_errno.EEXIST,
                          "No usable temporary filename found")
class _TemporaryFileWrapper:
    """Temporary file wrapper
    This class provides a wrapper around files opened for
    temporary use. In particular, it seeks to automatically
    remove the file when it is no longer needed.
    """
    def __init__(self, file, name, delete=True):
        # file: the underlying open file object; name: its path on disk;
        # delete: whether close() should also unlink the file.
        self.file = file
        self.name = name
        self.close_called = False
        self.delete = delete
    def __getattr__(self, name):
        # Attribute lookups are delegated to the underlying file
        # and cached for non-numeric results
        # (i.e. methods are cached, closed and friends are not)
        file = self.__dict__['file']
        a = getattr(file, name)
        if not isinstance(a, int):
            setattr(self, name, a)
        return a
    # The underlying __enter__ method returns the wrong object
    # (self.file) so override it to return the wrapper
    def __enter__(self):
        self.file.__enter__()
        return self
    # iter() doesn't use __getattr__ to find the __iter__ method
    def __iter__(self):
        return iter(self.file)
    # NT provides delete-on-close as a primitive, so we don't need
    # the wrapper to do anything special. We still use it so that
    # file.name is useful (i.e. not "(fdopen)") with NamedTemporaryFile.
    if _os.name != 'nt':
        # Cache the unlinker so we don't get spurious errors at
        # shutdown when the module-level "os" is None'd out. Note
        # that this must be referenced as self.unlink, because the
        # name TemporaryFileWrapper may also get None'd out before
        # __del__ is called.
        unlink = _os.unlink
        def close(self):
            # Idempotent: close the file once and (optionally) unlink it.
            if not self.close_called:
                self.close_called = True
                self.file.close()
                if self.delete:
                    self.unlink(self.name)
        def __del__(self):
            self.close()
        # Need to trap __exit__ as well to ensure the file gets
        # deleted when used in a with statement
        def __exit__(self, exc, value, tb):
            result = self.file.__exit__(exc, value, tb)
            self.close()
            return result
    else:
        # On NT the OS deletes the file on close, so only delegate.
        def __exit__(self, exc, value, tb):
            self.file.__exit__(exc, value, tb)
def NamedTemporaryFile(mode='w+b', buffering=-1, encoding=None,
                       newline=None, suffix="", prefix=template,
                       dir=None, delete=True):
    """Create and return a temporary file.

    Arguments:
    'prefix', 'suffix', 'dir' -- as for mkstemp.
    'mode' -- the mode argument to io.open (default "w+b").
    'buffering' -- the buffer size argument to io.open (default -1).
    'encoding' -- the encoding argument to io.open (default None)
    'newline' -- the newline argument to io.open (default None)
    'delete' -- whether the file is deleted on close (default True).

    The file is created as mkstemp() would do it.  Returns an object with
    a file-like interface; the name of the file is accessible as
    file.name.  The file will be automatically deleted when it is closed
    unless the 'delete' argument is set to False.
    """
    if dir is None:
        dir = gettempdir()

    open_flags = _bin_openflags
    # Windows can delete the file itself when the handle is closed
    # (O_TEMPORARY); no other platform supports this flag.
    if delete and _os.name == 'nt':
        open_flags |= _os.O_TEMPORARY

    (fd, fname) = _mkstemp_inner(dir, prefix, suffix, open_flags)
    stream = _io.open(fd, mode, buffering=buffering,
                      newline=newline, encoding=encoding)
    return _TemporaryFileWrapper(stream, fname, delete)
# Platform-conditional binding: POSIX (except Cygwin) can unlink an open
# file, which gives a truly anonymous temporary file; elsewhere the named
# variant is the best we can do.
if _os.name != 'posix' or _os.sys.platform == 'cygwin':
    # On non-POSIX and Cygwin systems, assume that we cannot unlink a file
    # while it is open.
    TemporaryFile = NamedTemporaryFile
else:
    def TemporaryFile(mode='w+b', buffering=-1, encoding=None,
                      newline=None, suffix="", prefix=template,
                      dir=None):
        """Create and return a temporary file.

        Arguments:
        'prefix', 'suffix', 'dir' -- as for mkstemp.
        'mode' -- the mode argument to io.open (default "w+b").
        'buffering' -- the buffer size argument to io.open (default -1).
        'encoding' -- the encoding argument to io.open (default None)
        'newline' -- the newline argument to io.open (default None)

        The file is created as mkstemp() would do it.

        Returns an object with a file-like interface.  The file has no
        name, and will cease to exist when it is closed.
        """
        if dir is None:
            dir = gettempdir()

        flags = _bin_openflags

        (fd, name) = _mkstemp_inner(dir, prefix, suffix, flags)
        try:
            # Unlink immediately: the open fd keeps the data alive, but no
            # directory entry exists, so the file vanishes on close.
            _os.unlink(name)
            return _io.open(fd, mode, buffering=buffering,
                            newline=newline, encoding=encoding)
        except:
            # Deliberately bare: whatever failed, the raw fd must not leak.
            _os.close(fd)
            raise
class SpooledTemporaryFile:
    """Temporary file wrapper, specialized to switch from BytesIO
    or StringIO to a real file when it exceeds a certain size or
    when a fileno is needed.
    """
    # Class-level default; shadowed by an instance attribute in __init__.
    _rolled = False

    def __init__(self, max_size=0, mode='w+b', buffering=-1,
                 encoding=None, newline=None,
                 suffix="", prefix=template, dir=None):
        if 'b' in mode:
            self._file = _io.BytesIO()
        else:
            # Setting newline="\n" avoids newline translation;
            # this is important because otherwise on Windows we'd
            # get double newline translation upon rollover().
            self._file = _io.StringIO(newline="\n")
        self._max_size = max_size
        self._rolled = False
        # Kept around until rollover() so the real TemporaryFile can be
        # created with the caller's original arguments.
        self._TemporaryFileArgs = {'mode': mode, 'buffering': buffering,
                                   'suffix': suffix, 'prefix': prefix,
                                   'encoding': encoding, 'newline': newline,
                                   'dir': dir}

    def _check(self, file):
        """Roll over to a real file if the spool has grown past max_size."""
        if self._rolled: return
        max_size = self._max_size
        if max_size and file.tell() > max_size:
            self.rollover()

    def rollover(self):
        """Copy the in-memory contents into a real TemporaryFile (once)."""
        if self._rolled: return
        file = self._file
        newfile = self._file = TemporaryFile(**self._TemporaryFileArgs)
        del self._TemporaryFileArgs

        newfile.write(file.getvalue())
        # Preserve the caller's current position in the stream.
        newfile.seek(file.tell(), 0)

        self._rolled = True

    # The method caching trick from NamedTemporaryFile
    # won't work here, because _file may change from a
    # BytesIO/StringIO instance to a real file. So we list
    # all the methods directly.

    # Context management protocol
    def __enter__(self):
        if self._file.closed:
            raise ValueError("Cannot enter context with closed file")
        return self

    def __exit__(self, exc, value, tb):
        self._file.close()

    # file protocol
    def __iter__(self):
        return self._file.__iter__()

    def close(self):
        self._file.close()

    @property
    def closed(self):
        return self._file.closed

    @property
    def encoding(self):
        # Before rollover the in-memory buffer has no .encoding; fall back
        # to the caller's argument (binary mode has no encoding at all).
        try:
            return self._file.encoding
        except AttributeError:
            if 'b' in self._TemporaryFileArgs['mode']:
                raise
            return self._TemporaryFileArgs['encoding']

    def fileno(self):
        # A real OS-level descriptor requires a real file.
        self.rollover()
        return self._file.fileno()

    def flush(self):
        self._file.flush()

    def isatty(self):
        return self._file.isatty()

    @property
    def mode(self):
        try:
            return self._file.mode
        except AttributeError:
            return self._TemporaryFileArgs['mode']

    @property
    def name(self):
        # The in-memory spool has no name; None until rolled over.
        try:
            return self._file.name
        except AttributeError:
            return None

    @property
    def newlines(self):
        try:
            return self._file.newlines
        except AttributeError:
            if 'b' in self._TemporaryFileArgs['mode']:
                raise
            return self._TemporaryFileArgs['newline']

    def read(self, *args):
        return self._file.read(*args)

    def readline(self, *args):
        return self._file.readline(*args)

    def readlines(self, *args):
        return self._file.readlines(*args)

    def seek(self, *args):
        self._file.seek(*args)

    @property
    def softspace(self):
        return self._file.softspace

    def tell(self):
        return self._file.tell()

    def truncate(self, size=None):
        if size is None:
            self._file.truncate()
        else:
            # Truncating beyond max_size forces a real file first.
            if size > self._max_size:
                self.rollover()
            self._file.truncate(size)

    def write(self, s):
        file = self._file
        rv = file.write(s)
        self._check(file)
        return rv

    def writelines(self, iterable):
        file = self._file
        rv = file.writelines(iterable)
        self._check(file)
        return rv
class TemporaryDirectory(object):
    """Create and return a temporary directory.  This has the same
    behavior as mkdtemp but can be used as a context manager.  For
    example:

        with TemporaryDirectory() as tmpdir:
            ...

    Upon exiting the context, the directory and everything contained
    in it are removed.
    """

    def __init__(self, suffix="", prefix=template, dir=None):
        self._closed = False
        self.name = None # Handle mkdtemp raising an exception
        self.name = mkdtemp(suffix, prefix, dir)

    def __repr__(self):
        return "<{} {!r}>".format(self.__class__.__name__, self.name)

    def __enter__(self):
        # The context manager yields the path string, not the object.
        return self.name

    def cleanup(self, _warn=False):
        """Remove the directory tree; safe to call more than once.

        _warn: when True (used by __del__), emit a ResourceWarning about
        relying on implicit cleanup.
        """
        if self.name and not self._closed:
            try:
                self._rmtree(self.name)
            except (TypeError, AttributeError) as ex:
                # Issue #10188: Emit a warning on stderr
                # if the directory could not be cleaned
                # up due to missing globals
                if "None" not in str(ex):
                    raise
                print("ERROR: {!r} while cleaning up {!r}".format(ex, self,),
                      file=_sys.stderr)
                return
            self._closed = True
            if _warn:
                self._warn("Implicitly cleaning up {!r}".format(self),
                           ResourceWarning)

    def __exit__(self, exc, value, tb):
        self.cleanup()

    def __del__(self):
        # Issue a ResourceWarning if implicit cleanup needed
        self.cleanup(_warn=True)

    # XXX (ncoghlan): The following code attempts to make
    # this class tolerant of the module nulling out process
    # that happens during CPython interpreter shutdown
    # Alas, it doesn't actually manage it. See issue #10188
    _listdir = staticmethod(_os.listdir)
    _path_join = staticmethod(_os.path.join)
    _isdir = staticmethod(_os.path.isdir)
    _islink = staticmethod(_os.path.islink)
    _remove = staticmethod(_os.remove)
    _rmdir = staticmethod(_os.rmdir)
    _os_error = OSError
    _warn = _warnings.warn

    def _rmtree(self, path):
        # Essentially a stripped down version of shutil.rmtree. We can't
        # use globals because they may be None'ed out at shutdown.
        for name in self._listdir(path):
            fullname = self._path_join(path, name)
            try:
                # Never follow symlinks into directories we don't own.
                isdir = self._isdir(fullname) and not self._islink(fullname)
            except self._os_error:
                isdir = False
            if isdir:
                self._rmtree(fullname)
            else:
                try:
                    self._remove(fullname)
                except self._os_error:
                    pass
        try:
            self._rmdir(path)
        except self._os_error:
            pass
| agpl-3.0 |
red-hood/calendarserver | twistedcaldav/directory/calendaruserproxyloader.py | 1 | 5460 | ##
# Copyright (c) 2009-2015 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
"""
XML based calendar user proxy loader.
"""
__all__ = [
"XMLCalendarUserProxyLoader",
]
import types
from twisted.internet.defer import inlineCallbacks
from twext.python.log import Logger
from twistedcaldav.config import config, fullServerPath
from twistedcaldav.xmlutil import readXML
from txdav.who.delegates import Delegates
log = Logger()
# Element and attribute names recognized in the proxies XML document.
ELEMENT_PROXIES = "proxies"
ELEMENT_RECORD = "record"
ELEMENT_GUID = "guid"
ELEMENT_WRITE_PROXIES = "write-proxies"
ELEMENT_READ_PROXIES = "read-proxies"
ELEMENT_MEMBER = "member"
ATTRIBUTE_REPEAT = "repeat"
class XMLCalendarUserProxyLoader(object):
    """
    XML calendar user proxy configuration file parser and loader.

    Parses the proxies XML document into a list of
    (guid, write_proxies, read_proxies) tuples in C{self.items}, which can
    then be pushed into the proxy DB or the store.
    """

    def __repr__(self):
        return "<%s %r>" % (self.__class__.__name__, self.xmlFile)

    def __init__(self, xmlFile):
        """
        @param xmlFile: path of the proxies XML document, resolved
            relative to the configured data root.
        @raise RuntimeError: if the file cannot be parsed as XML, or if
            the document contains invalid elements or records.
        """
        self.items = []
        self.xmlFile = fullServerPath(config.DataRoot, xmlFile)

        # Read in XML
        try:
            _ignore_tree, proxies_node = readXML(self.xmlFile, ELEMENT_PROXIES)
        except ValueError:
            log.failure("XML parse error for proxy data file {xmlfile}", xmlfile=self.xmlFile)
            # Bug fix: previously control fell through to _parseXML() with
            # proxies_node undefined, turning a parse failure into a
            # confusing NameError.  Re-raise explicitly instead.
            raise RuntimeError("XML parse error for proxy data file: %s" % (self.xmlFile,))

        self._parseXML(proxies_node)

    def _parseXML(self, rootnode):
        """
        Parse the XML root node from the augments configuration document.

        @param rootnode: the L{Element} to parse.
        """
        for child in rootnode:

            if child.tag != ELEMENT_RECORD:
                raise RuntimeError("Unknown augment type: '%s' in augment file: '%s'" % (child.tag, self.xmlFile,))

            repeat = int(child.get(ATTRIBUTE_REPEAT, "1"))

            guid = None
            write_proxies = set()
            read_proxies = set()
            for node in child:
                if node.tag == ELEMENT_GUID:
                    guid = node.text
                elif node.tag in (
                    ELEMENT_WRITE_PROXIES,
                    ELEMENT_READ_PROXIES,
                ):
                    self._parseMembers(node, write_proxies if node.tag == ELEMENT_WRITE_PROXIES else read_proxies)
                else:
                    raise RuntimeError("Invalid element '%s' in proxies file: '%s'" % (node.tag, self.xmlFile,))

            # Must have at least a guid
            if not guid:
                raise RuntimeError("Invalid record '%s' without a guid in proxies file: '%s'" % (child, self.xmlFile,))

            # A record may be stamped out "repeat" times; "%" patterns in
            # the guid/member strings are expanded with the repeat index.
            if repeat > 1:
                for i in xrange(1, repeat + 1):
                    self._buildRecord(guid, write_proxies, read_proxies, i)
            else:
                self._buildRecord(guid, write_proxies, read_proxies)

    def _parseMembers(self, node, addto):
        """
        Add the text of every child <member> element to the C{addto} set.
        """
        for child in node:
            if child.tag == ELEMENT_MEMBER:
                addto.add(child.text)

    def _buildRecord(self, guid, write_proxies, read_proxies, count=None):
        """
        Expand any "%" repeat patterns and append a single
        (guid, write_proxies, read_proxies) tuple to C{self.items}.
        """

        def expandCount(value, count):
            # Only string values containing a "%" pattern are expanded.
            if type(value) in types.StringTypes:
                return value % (count,) if count and "%" in value else value
            else:
                return value

        guid = expandCount(guid, count)
        write_proxies = set([expandCount(member, count) for member in write_proxies])
        read_proxies = set([expandCount(member, count) for member in read_proxies])

        self.items.append((guid, write_proxies, read_proxies,))

    @inlineCallbacks
    def updateProxyDB(self, db):
        """
        Move the XML file proxies over to the proxyDB.
        """
        for item in self.items:
            guid, write_proxies, read_proxies = item
            yield db.setGroupMembers("%s#%s" % (guid, "calendar-proxy-write"), write_proxies)
            yield db.setGroupMembers("%s#%s" % (guid, "calendar-proxy-read"), read_proxies)

    @inlineCallbacks
    def updateProxyStore(self, store):
        """
        Move the XML file proxies over to the store database.  Records
        that cannot be found in the directory are silently skipped.
        """
        directory = store.directoryService()
        txn = store.newTransaction(label="xmlDelegatesToStore")
        for item in self.items:
            guid, write_proxies, read_proxies = item

            delegatorRecord = yield directory.recordWithUID(guid)
            if delegatorRecord is None:
                continue

            for proxy in write_proxies:
                delegateRecord = yield directory.recordWithUID(proxy)
                if delegateRecord is None:
                    continue
                yield Delegates.addDelegate(txn, delegatorRecord, delegateRecord, True)

            for proxy in read_proxies:
                delegateRecord = yield directory.recordWithUID(proxy)
                if delegateRecord is None:
                    continue
                yield Delegates.addDelegate(txn, delegatorRecord, delegateRecord, False)

        yield txn.commit()
| apache-2.0 |
rsnakamura/oldape | apetools/lexicographers/parametertree.py | 1 | 3506 |
# python standard library
from collections import namedtuple
# Pairs a parameter-group name with its collection of parameter values.
Parameters = namedtuple("Parameters", ["name", "parameters"])
class TreeNode(object):
    """
    A node in a tree with an arbitrary number of children.
    """
    def __init__(self, cargo, children=None):
        """
        :param:

         - `cargo`: the data carried by this node
         - `children`: a list of TreeNodes (None marks a leaf)
        """
        self.cargo = cargo
        self.children = children

    def __str__(self):
        """The string form of the node is the string form of its cargo."""
        return str(self.cargo)
# end class TreeNode
class ParameterTree(object):
    """
    A class to build a tree from iterative parameters

    The main product is the `paths` attribute which can be iterated over
    to get the parameters for a test.
    """
    def __init__(self, parameters):
        """
        :param:

         - `parameters`: list of Parameters namedtuples (each with a
           `name` and an iterable of parameter values)
        """
        self.parameters = parameters
        self._tree = None
        self._paths = None
        return

    @property
    def tree(self):
        """
        builds the tree bottoms-up from the parameters

        :return: list of trees (highest nodes are parameters[0], leaves
                 are parameters[-1])
        """
        if self._tree is None:
            # parameters is a list of Parameter(name,parameters) namedtuples
            parameters = self.parameters[:]
            leaves = parameters.pop()
            parameters.reverse()
            tree = [TreeNode(Parameters(leaves.name, leaf)) for leaf in leaves.parameters]
            for level in parameters:
                # level is a Parameters(name, parameters) namedtuple;
                # every node on this level points at the whole subtree below.
                new_tree = [TreeNode(Parameters(name=level.name, parameters=sibling), tree) for sibling in level.parameters]
                tree = new_tree
            self._tree = tree
        return self._tree

    @property
    def paths(self):
        """
        :return: list of namedtuples (one per root-to-leaf path), each with
                 a `total_count` field plus one field per parameter name
        """
        if self._paths is None:
            # create the paths with a series of tree-traversals
            self._paths = []
            for limb in self.tree:
                path = {}
                self._traverse(limb, path, self._paths)
            # convert self._paths from dicts to namedtuples
            paths = []
            for path in self._paths:
                # list(...) is required for Python 3 compatibility, where
                # dict.keys() is a view that cannot be added to a list
                # (harmless on Python 2, where keys() is already a list).
                Paths = namedtuple("Paths", ["total_count"] + list(path.keys()))
                fields = dict([(f, path[f]) for f in Paths._fields if f != "total_count"])
                fields["total_count"] = len(self._paths)
                paths.append(Paths(**fields))
            self._paths = paths
        return self._paths

    def _traverse(self, tree, path, paths):
        """
        A depth-first traversal

        :param:

         - `tree`: A Tree object to traverse
         - `path`: a name:parameter dict to contain a particular path
         - `paths`: The list to store all the paths

        :postcondition:

         - path holds tree's cargo
         - if tree is a leaf, path is appended to paths
         - if tree is not a leaf, each child is traversed with a copy of
           path so sibling branches do not share state
        """
        path[tree.cargo.name] = tree.cargo
        if tree.children is None:
            paths.append(path)
            return
        for child in tree.children:
            new_path = path.copy()
            self._traverse(child, new_path, paths)
        return
# end class Parameter_Tree
| apache-2.0 |
Bl4ckb0ne/ring-api | ring_api/server/flask/api/crypto.py | 1 | 1832 | #
# Copyright (C) 2016 Savoir-faire Linux Inc
#
# Authors: Simon Zeni <simon.zeni@savoirfairelinux.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
from flask import jsonify, request
from flask_restful import Resource
class Tls(Resource):
    """REST resource answering queries about the Ring TLS configuration."""

    def __init__(self, dring):
        # dring: handle to the Ring daemon whose config is queried.
        self.dring = dring

    def get(self):
        """Handle GET; expects a 'type' query arg of 'settings' or 'method'."""
        data = request.args

        # Reject requests carrying no query arguments at all.
        if not data:
            return jsonify({
                'status': 404,
                'message': 'data not found'
            })

        # Reject requests that have arguments but no 'type'.
        if 'type' not in data:
            return jsonify({
                'status': 404,
                'message': 'type not found in data'
            })

        tls_type = data.get('type')
        if tls_type == 'settings':
            payload = {
                'status': 200,
                'settings': self.dring.config.get_tls_default_settings()
            }
        elif tls_type == 'method':
            payload = {
                'status': 200,
                'methods': self.dring.config.get_supported_tls_method()
            }
        else:
            payload = {
                'status': 400,
                'message': 'wrong tls type'
            }
        return jsonify(payload)
| gpl-3.0 |
POFK/utilTool | vim/after/pydiction.py | 17 | 9252 | #!/usr/bin/env python
# Last modified: July 23rd, 2009
"""
pydiction.py 1.2 by Ryan Kulla (rkulla AT gmail DOT com).
Description: Creates a Vim dictionary of Python module attributes for Vim's
completion feature. The created dictionary file is used by
the Vim ftplugin "python_pydiction.vim".
Usage: pydiction.py <module> ... [-v]
Example: The following will append all the "time" and "math" modules'
attributes to a file, in the current directory, called "pydiction"
with and without the "time." and "math." prefix:
$ python pydiction.py time math
To print the output just to stdout, instead of appending to the file,
supply the -v option:
$ python pydiction.py -v time math
License: BSD.
"""
__author__ = "Ryan Kulla (rkulla AT gmail DOT com)"
__version__ = "1.2"
__copyright__ = "Copyright (c) 2003-2009 Ryan Kulla"
import os
import sys
import types
import shutil
# Path/filename of the vim dictionary file to write to:
PYDICTION_DICT = r'complete-dict'
# Path/filename of the vim dictionary backup file:
PYDICTION_DICT_BACKUP = r'complete-dict.last'
# Sentintal to test if we should only output to stdout:
STDOUT_ONLY = False
def get_submodules(module_name, submodules):
    """Build a list of all the submodules of modules.

    Appends '<module_name>.<attr>' to `submodules` (mutated in place) for
    every attribute of `module_name` that is itself a module; the same
    list is also returned.  Modules that fail to import are skipped.
    """
    # Try to import a given module, so we can dir() it:
    try:
        imported_module = my_import(module_name)
    except ImportError, err:
        # NOTE(review): `err` is unused here; import failures are reported
        # by main(), which attempts the same import first.
        return submodules
    mod_attrs = dir(imported_module)
    for mod_attr in mod_attrs:
        if type(getattr(imported_module, mod_attr)) is types.ModuleType:
            submodules.append(module_name + '.' + mod_attr)
    return submodules
def write_dictionary(module_name):
    """Write to module attributes to the vim dictionary file.

    Emits three completion flavors for the module's attributes to the
    module-global `write_to` stream: fully qualified ('mod.attr'),
    from-import qualified ('submod.attr'), and bare ('attr').  Callable
    attributes get a trailing '(' so Vim completes a call site.
    """
    prefix_on = '%s.%s'
    prefix_on_callable = '%s.%s('
    prefix_off = '%s'
    prefix_off_callable = '%s('
    try:
        imported_module = my_import(module_name)
    except ImportError, err:
        # Silently skip modules that cannot be imported at this point.
        return
    mod_attrs = dir(imported_module)

    # Generate fully-qualified module names:
    write_to.write('\n--- import %s ---\n' % module_name)
    for mod_attr in mod_attrs:
        if callable(getattr(imported_module, mod_attr)):
            # If an attribute is callable, show an opening parentheses:
            format = prefix_on_callable
        else:
            format = prefix_on
        write_to.write(format % (module_name, mod_attr) + '\n')

    # Generate submodule names by themselves, for when someone does
    # "from foo import bar" and wants to complete bar.baz.
    # This works the same no matter how many .'s are in the module.
    if module_name.count('.'):
        # Get the "from" part of the module. E.g., 'xml.parsers'
        # if the module name was 'xml.parsers.expat':
        first_part = module_name[:module_name.rfind('.')]
        # Get the "import" part of the module. E.g., 'expat'
        # if the module name was 'xml.parsers.expat'
        second_part = module_name[module_name.rfind('.') + 1:]
        write_to.write('\n--- from %s import %s ---\n' %
                       (first_part, second_part))
        for mod_attr in mod_attrs:
            if callable(getattr(imported_module, mod_attr)):
                format = prefix_on_callable
            else:
                format = prefix_on
            write_to.write(format % (second_part, mod_attr) + '\n')

    # Generate non-fully-qualified module names:
    write_to.write('\n--- from %s import * ---\n' % module_name)
    for mod_attr in mod_attrs:
        if callable(getattr(imported_module, mod_attr)):
            format = prefix_off_callable
        else:
            format = prefix_off
        write_to.write(format % mod_attr + '\n')
def my_import(name):
    """Make __import__ import "package.module" formatted names.

    __import__('a.b.c') returns the top-level package 'a'; walk the
    dotted path down to the actual leaf module and return that instead.
    """
    module = __import__(name)
    for part in name.split('.')[1:]:
        module = getattr(module, part)
    return module
def remove_duplicates(seq, keep=()):
    """
    Remove duplicates from a sequence while preserving order.

    Any element that is a member of the optional `keep` argument is
    retained even when it repeats (used to keep blank lines in the
    dictionary file).
    """
    result = []
    seen = set()
    for item in seq:
        if item in keep:
            result.append(item)
        elif item not in seen:
            seen.add(item)
            result.append(item)
    return result
def get_yesno(msg="[Y/n]?"):
"""
Returns True if user inputs 'n', 'Y', "yes", "Yes"...
Returns False if user inputs 'n', 'N', "no", "No"...
If they enter an invalid option it tells them so and asks again.
Hitting Enter is equivalent to answering Yes.
Takes an optional message to display, defaults to "[Y/n]?".
"""
while True:
answer = raw_input(msg)
if answer == '':
return True
elif len(answer):
answer = answer.lower()[0]
if answer == 'y':
return True
break
elif answer == 'n':
return False
break
else:
print "Invalid option. Please try again."
continue
def main(write_to):
    """Generate a dictionary for Vim of python module attributes.

    write_to: open file object (or sys.stdout) that write_dictionary()
    reaches through the module-global of the same name.
    """
    submodules = []
    # Drop any requested module that cannot be imported at all:
    for module_name in sys.argv[1:]:
        try:
            imported_module = my_import(module_name)
        except ImportError, err:
            print "Couldn't import: %s. %s" % (module_name, err)
            sys.argv.remove(module_name)
    cli_modules = sys.argv[1:]

    # Step through each command line argument:
    for module_name in cli_modules:
        print "Trying module: %s" % module_name
        submodules = get_submodules(module_name, submodules)

    # Step through the current module's submodules:
    # NOTE(review): get_submodules() appends to the very list being
    # iterated, which is presumably how nested submodules get picked up
    # as they are discovered -- confirm before restructuring.
    for submodule_name in submodules:
        submodules = get_submodules(submodule_name, submodules)

    # Add the top-level modules to the list too:
    for module_name in cli_modules:
        submodules.append(module_name)
    submodules.sort()

    # Step through all of the modules and submodules to create the dict file:
    for submodule_name in submodules:
        write_dictionary(submodule_name)

    if STDOUT_ONLY:
        return

    # Close and Reopen the file for reading and remove all duplicate lines:
    write_to.close()
    print "Removing duplicates..."
    f = open(PYDICTION_DICT, 'r')
    file_lines = f.readlines()
    # NOTE(review): ('\n') is a plain string, not a tuple; it still works
    # because only the bare newline line matches the `in` substring test.
    file_lines = remove_duplicates(file_lines, ('\n'))
    f.close()

    # Delete the original file:
    os.unlink(PYDICTION_DICT)

    # Recreate the file, this time it won't have any duplicates lines:
    f = open(PYDICTION_DICT, 'w')
    for attr in file_lines:
        f.write(attr)
    f.close()
    print "Done."
if __name__ == '__main__':
    """Process the command line."""
    if sys.version_info[0:2] < (2, 3):
        sys.exit("You need a Python 2.x version of at least Python 2.3")
    if len(sys.argv) <= 1:
        sys.exit("%s requires at least one argument. None given." %
                 sys.argv[0])

    # -v means "verbose": write to stdout instead of the dictionary file.
    if '-v' in sys.argv:
        write_to = sys.stdout
        sys.argv.remove('-v')
        STDOUT_ONLY = True
    elif os.path.exists(PYDICTION_DICT):
        # See if any of the given modules have already been pydiction'd:
        f = open(PYDICTION_DICT, 'r')
        file_lines = f.readlines()
        for module_name in sys.argv[1:]:
            for line in file_lines:
                if line.find('--- import %s ' % module_name) != -1:
                    print '"%s" already exists in %s. Skipping...' % \
                          (module_name, PYDICTION_DICT)
                    sys.argv.remove(module_name)
                    break
        f.close()
        if len(sys.argv) < 2:
            # Check if there's still enough command-line arguments:
            sys.exit("Nothing new to do. Aborting.")

        # Offer to back up the existing dictionary before appending.
        if os.path.exists(PYDICTION_DICT_BACKUP):
            answer = get_yesno('Overwrite existing backup "%s" [Y/n]? ' % \
                               PYDICTION_DICT_BACKUP)
            if (answer):
                print "Backing up old dictionary to: %s" % \
                      PYDICTION_DICT_BACKUP
                try:
                    shutil.copyfile(PYDICTION_DICT, PYDICTION_DICT_BACKUP)
                except IOError, err:
                    print "Couldn't back up %s. %s" % (PYDICTION_DICT, err)
            else:
                print "Skipping backup..."
            print 'Appending to: "%s"' % PYDICTION_DICT
        else:
            print "Backing up current %s to %s" % \
                  (PYDICTION_DICT, PYDICTION_DICT_BACKUP)
            try:
                shutil.copyfile(PYDICTION_DICT, PYDICTION_DICT_BACKUP)
            except IOError, err:
                print "Couldn't back up %s. %s" % (PYDICTION_DICT, err)
    else:
        print 'Creating file: "%s"' % PYDICTION_DICT

    # In file mode, open the dictionary for appending before running.
    if not STDOUT_ONLY:
        write_to = open(PYDICTION_DICT, 'a')
    main(write_to)
MounirMesselmeni/django-highlightjs | highlightjs/settings.py | 1 | 1267 | from django.conf import settings
# Built-in fallback configuration; the "{0}" placeholder in css_url is
# filled in with the configured style name by css_url() below.
HIGHLIGHTJS_DEFAULTS = {
    "jquery_url": "//code.jquery.com/jquery.min.js",
    "base_url": "//cdnjs.cloudflare.com/ajax/libs/highlight.js/8.3/highlight.min.js",
    "css_url": "//cdnjs.cloudflare.com/ajax/libs/highlight.js/8.3/styles/{0}.min.css",
    "include_jquery": False,
    "style": "monokai_sublime",
}

# Start with a copy of default settings
HIGHLIGHTJS = HIGHLIGHTJS_DEFAULTS.copy()

# Override with user settings from settings.py
def update_settings():
    """
    Merge the user's ``HIGHLIGHTJS`` dict from Django settings over the
    defaults.  Useful in tests to re-read settings when using
    ``@override_settings``.
    """
    HIGHLIGHTJS.update(getattr(settings, "HIGHLIGHTJS", {}))

# Apply the user's overrides once at import time.
update_settings()
def get_highlightjs_setting(setting, default=None):
    """
    Read a single value from the merged HIGHLIGHTJS configuration,
    falling back to `default` when the key is absent.
    """
    try:
        return HIGHLIGHTJS[setting]
    except KeyError:
        return default
def highlightjs_url():
    """
    Return the full url to the highlight.js javascript file
    (the ``base_url`` setting).
    """
    # NOTE: the previous docstring mentioned "bootstrap base url" -- a
    # copy-paste leftover; this function only returns base_url.
    return get_highlightjs_setting("base_url")
def highlightjs_jquery_url():
    """
    Return the full url to the jQuery file to use (the ``jquery_url``
    setting).
    """
    return get_highlightjs_setting("jquery_url")
def css_url():
    """
    Return the full url to the highlightjs CSS file, with the configured
    style name substituted into the ``css_url`` template.
    """
    return get_highlightjs_setting("css_url").format(get_highlightjs_setting("style"))
| mit |
shakamunyi/sahara | sahara/tests/integration/tests/gating/test_vanilla_two_gating.py | 10 | 14237 | # Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from testtools import testcase
from sahara.tests.integration.configs import config as cfg
from sahara.tests.integration.tests import base as b
from sahara.tests.integration.tests import cinder
from sahara.tests.integration.tests import cluster_configs
from sahara.tests.integration.tests import edp
from sahara.tests.integration.tests import map_reduce
from sahara.tests.integration.tests import scaling
from sahara.tests.integration.tests import swift
from sahara.utils import edp as utils_edp
class VanillaTwoGatingTest(cluster_configs.ClusterConfigTest,
map_reduce.MapReduceTest, swift.SwiftTest,
scaling.ScalingTest, cinder.CinderVolumeTest,
edp.EDPTest):
vanilla_two_config = cfg.ITConfig().vanilla_two_config
SKIP_MAP_REDUCE_TEST = vanilla_two_config.SKIP_MAP_REDUCE_TEST
SKIP_SWIFT_TEST = vanilla_two_config.SKIP_SWIFT_TEST
SKIP_SCALING_TEST = vanilla_two_config.SKIP_SCALING_TEST
SKIP_CINDER_TEST = vanilla_two_config.SKIP_CINDER_TEST
SKIP_EDP_TEST = vanilla_two_config.SKIP_EDP_TEST
    def setUp(self):
        """Reset per-test cluster identifiers before each test run."""
        super(VanillaTwoGatingTest, self).setUp()
        self.cluster_id = None
        self.cluster_template_id = None
    def get_plugin_config(self):
        """Return the Vanilla 2 section of the integration test config."""
        return cfg.ITConfig().vanilla_two_config
ng_params = {
'MapReduce': {
'yarn.app.mapreduce.am.resource.mb': 256,
'yarn.app.mapreduce.am.command-opts': '-Xmx256m'
},
'YARN': {
'yarn.scheduler.minimum-allocation-mb': 256,
'yarn.scheduler.maximum-allocation-mb': 1024,
'yarn.nodemanager.vmem-check-enabled': False
}
}
    @b.errormsg("Failure while 'nm-dn' node group template creation: ")
    def _create_nm_dn_ng_template(self):
        """Create the combined nodemanager+datanode node group template;
        stores its id and registers cleanup."""
        template = {
            'name': 'test-node-group-template-vanilla-nm-dn',
            'plugin_config': self.plugin_config,
            'description': 'test node group template for Vanilla plugin',
            'node_processes': ['nodemanager', 'datanode'],
            'floating_ip_pool': self.floating_ip_pool,
            'auto_security_group': True,
            'node_configs': self.ng_params
        }
        self.ng_tmpl_nm_dn_id = self.create_node_group_template(**template)
        self.addCleanup(self.delete_node_group_template, self.ng_tmpl_nm_dn_id)
    @b.errormsg("Failure while 'nm' node group template creation: ")
    def _create_nm_ng_template(self):
        """Create the nodemanager-only node group template (with Cinder
        volumes); stores its id and registers cleanup."""
        template = {
            'name': 'test-node-group-template-vanilla-nm',
            'plugin_config': self.plugin_config,
            'description': 'test node group template for Vanilla plugin',
            'volumes_per_node': self.volumes_per_node,
            'volumes_size': self.volumes_size,
            'node_processes': ['nodemanager'],
            'floating_ip_pool': self.floating_ip_pool,
            'auto_security_group': True,
            'node_configs': self.ng_params
        }
        self.ng_tmpl_nm_id = self.create_node_group_template(**template)
        self.addCleanup(self.delete_node_group_template, self.ng_tmpl_nm_id)
    @b.errormsg("Failure while 'dn' node group template creation: ")
    def _create_dn_ng_template(self):
        """Create the datanode-only node group template (with Cinder
        volumes); stores its id and registers cleanup."""
        template = {
            'name': 'test-node-group-template-vanilla-dn',
            'plugin_config': self.plugin_config,
            'description': 'test node group template for Vanilla plugin',
            'volumes_per_node': self.volumes_per_node,
            'volumes_size': self.volumes_size,
            'node_processes': ['datanode'],
            'floating_ip_pool': self.floating_ip_pool,
            'auto_security_group': True,
            'node_configs': self.ng_params
        }
        self.ng_tmpl_dn_id = self.create_node_group_template(**template)
        self.addCleanup(self.delete_node_group_template, self.ng_tmpl_dn_id)
    @b.errormsg("Failure while cluster template creation: ")
    def _create_cluster_template(self):
        """Create the cluster template: two master node groups defined
        inline plus the three worker groups built from the node group
        templates created earlier."""
        template = {
            'name': 'test-cluster-template-vanilla',
            'plugin_config': self.plugin_config,
            'description': 'test cluster template for Vanilla plugin',
            'cluster_configs': {
                'HDFS': {
                    # HDFS replication factor of 1 (single copy of data).
                    'dfs.replication': 1
                }
            },
            'node_groups': [
                {
                    'name': 'master-node-rm-nn',
                    'flavor_id': self.flavor_id,
                    'node_processes': ['namenode', 'resourcemanager',
                                       'hiveserver'],
                    'floating_ip_pool': self.floating_ip_pool,
                    'auto_security_group': True,
                    'count': 1,
                    'node_configs': self.ng_params
                },
                {
                    'name': 'master-node-oo-hs',
                    'flavor_id': self.flavor_id,
                    'node_processes': ['oozie', 'historyserver',
                                       'secondarynamenode'],
                    'floating_ip_pool': self.floating_ip_pool,
                    'auto_security_group': True,
                    'count': 1,
                    'node_configs': self.ng_params
                },
                {
                    'name': 'worker-node-nm-dn',
                    'node_group_template_id': self.ng_tmpl_nm_dn_id,
                    'count': 2
                },
                {
                    'name': 'worker-node-dn',
                    'node_group_template_id': self.ng_tmpl_dn_id,
                    'count': 1
                },
                {
                    'name': 'worker-node-nm',
                    'node_group_template_id': self.ng_tmpl_nm_id,
                    'count': 1
                }
            ],
            'net_id': self.internal_neutron_net
        }
        self.cluster_template_id = self.create_cluster_template(**template)
        self.addCleanup(self.delete_cluster_template, self.cluster_template_id)
    @b.errormsg("Failure while cluster creation: ")
    def _create_cluster(self):
        """Launch a cluster from the template, wait for it to become
        active, and cache its info for the later check steps."""
        cluster_name = '%s-%s-v2' % (self.common_config.CLUSTER_NAME,
                                     self.plugin_config.PLUGIN_NAME)
        cluster = {
            'name': cluster_name,
            'plugin_config': self.plugin_config,
            'cluster_template_id': self.cluster_template_id,
            'description': 'test cluster',
            'cluster_configs': {}
        }
        cluster_id = self.create_cluster(**cluster)
        self.addCleanup(self.delete_cluster, cluster_id)
        # Block until the cluster reaches its target state, then wait for
        # the datanodes to register with the namenode.
        self.poll_cluster_state(cluster_id)
        self.cluster_info = self.get_cluster_info(self.plugin_config)
        self.await_active_workers_for_namenode(self.cluster_info['node_info'],
                                               self.plugin_config)
    @b.errormsg("Failure while Cinder testing: ")
    def _check_cinder(self):
        """Run the Cinder volume checks from CinderVolumeTest."""
        self.cinder_volume_testing(self.cluster_info)
    @b.errormsg("Failure while Map Reduce testing: ")
    def _check_mapreduce(self):
        """Run the MapReduce checks from MapReduceTest."""
        self.map_reduce_testing(self.cluster_info)
    @b.errormsg("Failure during check of Swift availability: ")
    def _check_swift(self):
        """Verify the cluster can reach Swift object storage."""
        self.check_swift_availability(self.cluster_info)
    @b.errormsg("Failure while EDP testing: ")
    def _check_edp(self):
        """Launch the full EDP job suite and poll all jobs to completion."""
        self.poll_jobs_status(list(self._run_edp_tests()))
    def _run_edp_tests(self):
        """Yield one launched EDP job per type not in the plugin skip list."""
        skipped_edp_job_types = self.plugin_config.SKIP_EDP_JOB_TYPES
        if utils_edp.JOB_TYPE_PIG not in skipped_edp_job_types:
            yield self._edp_pig_test()
        if utils_edp.JOB_TYPE_MAPREDUCE not in skipped_edp_job_types:
            yield self._edp_mapreduce_test()
        if utils_edp.JOB_TYPE_MAPREDUCE_STREAMING not in skipped_edp_job_types:
            yield self._edp_mapreduce_streaming_test()
        if utils_edp.JOB_TYPE_JAVA not in skipped_edp_job_types:
            yield self._edp_java_test()
        if utils_edp.JOB_TYPE_HIVE not in skipped_edp_job_types:
            yield self._check_edp_hive()
        if utils_edp.JOB_TYPE_SHELL not in skipped_edp_job_types:
            yield self._edp_shell_test()
    # TODO(esikachev): Until fix bug 1413602
    def _run_edp_tests_after_scaling(self):
        """Yield EDP jobs to re-run after scaling.

        Same set as _run_edp_tests() except the Hive job, which is
        deliberately excluded until bug 1413602 is fixed (see TODO above).
        """
        skipped_edp_job_types = self.plugin_config.SKIP_EDP_JOB_TYPES
        if utils_edp.JOB_TYPE_PIG not in skipped_edp_job_types:
            yield self._edp_pig_test()
        if utils_edp.JOB_TYPE_MAPREDUCE not in skipped_edp_job_types:
            yield self._edp_mapreduce_test()
        if utils_edp.JOB_TYPE_MAPREDUCE_STREAMING not in skipped_edp_job_types:
            yield self._edp_mapreduce_streaming_test()
        if utils_edp.JOB_TYPE_JAVA not in skipped_edp_job_types:
            yield self._edp_java_test()
        if utils_edp.JOB_TYPE_SHELL not in skipped_edp_job_types:
            yield self._edp_shell_test()
    def _edp_pig_test(self):
        """Launch the example Pig EDP job (script + jar from Swift)."""
        pig_job = self.edp_info.read_pig_example_script()
        pig_lib = self.edp_info.read_pig_example_jar()
        return self.edp_testing(
            job_type=utils_edp.JOB_TYPE_PIG,
            job_data_list=[{'pig': pig_job}],
            lib_data_list=[{'jar': pig_lib}],
            swift_binaries=True,
            hdfs_local_output=True)
    def _edp_mapreduce_test(self):
        """Launch the example MapReduce EDP job (jar-only, no main data)."""
        mapreduce_jar = self.edp_info.read_mapreduce_example_jar()
        mapreduce_configs = self.edp_info.mapreduce_example_configs()
        return self.edp_testing(
            job_type=utils_edp.JOB_TYPE_MAPREDUCE,
            job_data_list=[],
            lib_data_list=[{'jar': mapreduce_jar}],
            configs=mapreduce_configs,
            swift_binaries=True,
            hdfs_local_output=True)
    def _edp_mapreduce_streaming_test(self):
        """Launch the MapReduce streaming EDP job (configs only)."""
        return self.edp_testing(
            job_type=utils_edp.JOB_TYPE_MAPREDUCE_STREAMING,
            job_data_list=[],
            lib_data_list=[],
            configs=self.edp_info.mapreduce_streaming_configs())
def _edp_java_test(self):
java_jar = self.edp_info.read_java_example_lib(2)
java_configs = self.edp_info.java_example_configs(2)
return self.edp_testing(
utils_edp.JOB_TYPE_JAVA,
job_data_list=[],
lib_data_list=[{'jar': java_jar}],
configs=java_configs)
    def _edp_shell_test(self):
        """Launch the example Shell EDP job (script plus text data file)."""
        shell_script_data = self.edp_info.read_shell_example_script()
        shell_file_data = self.edp_info.read_shell_example_text_file()
        return self.edp_testing(
            job_type=utils_edp.JOB_TYPE_SHELL,
            job_data_list=[{'script': shell_script_data}],
            lib_data_list=[{'text': shell_file_data}],
            configs=self.edp_info.shell_example_configs())
    def _check_edp_hive(self):
        """Delegate to the base-class Hive EDP check."""
        return self.check_edp_hive()
    @b.errormsg("Failure while cluster scaling: ")
    def _check_scaling(self):
        """Resize existing worker groups, add two new ones, re-await HDFS.

        NOTE(review): '%s' % self.ng_tmpl_*_id coerces the template ids to
        str; presumably they already are strings -- confirm before
        simplifying away the formatting.
        """
        change_list = [
            {
                'operation': 'resize',
                'info': ['worker-node-nm-dn', 1]
            },
            {
                'operation': 'resize',
                'info': ['worker-node-dn', 0]
            },
            {
                'operation': 'resize',
                'info': ['worker-node-nm', 0]
            },
            {
                'operation': 'add',
                'info': [
                    'new-worker-node-nm', 1, '%s' % self.ng_tmpl_nm_id
                ]
            },
            {
                'operation': 'add',
                'info': [
                    'new-worker-node-dn', 1, '%s' % self.ng_tmpl_dn_id
                ]
            }
        ]
        self.cluster_info = self.cluster_scaling(self.cluster_info,
                                                 change_list)
        self.await_active_workers_for_namenode(self.cluster_info['node_info'],
                                               self.plugin_config)
    @b.errormsg("Failure while Cinder testing after cluster scaling: ")
    def _check_cinder_after_scaling(self):
        """Re-run the Cinder volume check on the scaled cluster."""
        self.cinder_volume_testing(self.cluster_info)
    @b.errormsg("Failure while Map Reduce testing after cluster scaling: ")
    def _check_mapreduce_after_scaling(self):
        """Re-run the MapReduce smoke test on the scaled cluster."""
        self.map_reduce_testing(self.cluster_info)
    @b.errormsg(
        "Failure during check of Swift availability after cluster scaling: ")
    def _check_swift_after_scaling(self):
        """Re-verify Swift availability on the scaled cluster."""
        self.check_swift_availability(self.cluster_info)
    @b.errormsg("Failure while EDP testing after cluster scaling: ")
    def _check_edp_after_scaling(self):
        """Re-run the (Hive-less) EDP suite on the scaled cluster."""
        self.poll_jobs_status(list(self._run_edp_tests_after_scaling()))
    @testcase.skipIf(
        cfg.ITConfig().vanilla_two_config.SKIP_ALL_TESTS_FOR_PLUGIN,
        "All tests for Vanilla plugin were skipped")
    @testcase.attr('vanilla2')
    def test_vanilla_two_plugin_gating(self):
        """End-to-end gating scenario for the Vanilla 2 plugin.

        Builds node group templates, a cluster template and a cluster,
        then runs event-log, Cinder, MapReduce, Swift and EDP checks;
        if scaling is enabled, scales the cluster and repeats the checks.
        Steps run as one ordered test because each depends on state
        created by the previous one.
        """
        self._create_nm_dn_ng_template()
        self._create_nm_ng_template()
        self._create_dn_ng_template()
        self._create_cluster_template()
        self._create_cluster()
        self._test_event_log(self.cluster_id)
        self._check_cinder()
        self._check_mapreduce()
        self._check_swift()
        self._check_edp()
        if not self.plugin_config.SKIP_SCALING_TEST:
            self._check_scaling()
            self._test_event_log(self.cluster_id)
            self._check_cinder_after_scaling()
            self._check_mapreduce_after_scaling()
            self._check_swift_after_scaling()
            self._check_edp_after_scaling()
    def tearDown(self):
        """Standard teardown; resource deletion was queued via addCleanup."""
        super(VanillaTwoGatingTest, self).tearDown()
| apache-2.0 |
"""
Sensor for Steam account status.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.steam_online/
"""
from blumate.helpers.entity import Entity
from blumate.const import CONF_API_KEY
ICON = 'mdi:steam'
REQUIREMENTS = ['steamodd==4.21']
# pylint: disable=unused-argument
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Setup the Steam platform."""
    # Imported lazily so the dependency is only required when the
    # platform is actually configured.
    import steam as steamod
    steamod.api.key.set(config.get(CONF_API_KEY))
    sensors = []
    for account in config.get('accounts', []):
        sensors.append(SteamSensor(account, steamod))
    add_devices(sensors)
class SteamSensor(Entity):
    """A class for the Steam account."""
    # pylint: disable=abstract-method
    def __init__(self, account, steamod):
        """Initialize the sensor.

        NOTE(review): update() performs a blocking call to the Steam web
        API during construction -- confirm this is acceptable for the
        platform's setup path.
        """
        self._steamod = steamod
        self._account = account
        self.update()
    @property
    def name(self):
        """Return the name of the sensor."""
        return self._profile.persona
    @property
    def entity_id(self):
        """Return the entity ID."""
        return 'sensor.steam_{}'.format(self._account)
    @property
    def state(self):
        """Return the state of the sensor."""
        return self._state
    # pylint: disable=no-member
    def update(self):
        """Update device state."""
        # Refresh the profile from the Steam API on every poll.
        self._profile = self._steamod.user.profile(self._account)
        # current_game[2] is presumably the name of the game being
        # played (None when idle) -- based on usage here only; confirm
        # against the steamodd API docs.
        if self._profile.current_game[2] is None:
            self._game = 'None'
        else:
            self._game = self._profile.current_game[2]
        # Map the numeric Steam persona-state code to a display string;
        # any unrecognised code falls back to 'Offline'.
        self._state = {
            1: 'Online',
            2: 'Busy',
            3: 'Away',
            4: 'Snooze',
            5: 'Trade',
            6: 'Play',
        }.get(self._profile.status, 'Offline')
    @property
    def device_state_attributes(self):
        """Return the state attributes."""
        return {'Game': self._game}
    @property
    def entity_picture(self):
        """Avatar of the account."""
        return self._profile.avatar_medium
    @property
    def icon(self):
        """Return the icon to use in the frontend."""
        return ICON
| mit |
from __future__ import unicode_literals
import datetime
import re
from decimal import Decimal
from django.core.exceptions import FieldError
from django.db import connection
from django.db.models import (
F, Aggregate, Avg, Count, DecimalField, DurationField, FloatField, Func,
IntegerField, Max, Min, Sum, Value,
)
from django.test import TestCase, ignore_warnings
from django.test.utils import Approximate, CaptureQueriesContext
from django.utils import six, timezone
from django.utils.deprecation import RemovedInDjango110Warning
from .models import Author, Book, Publisher, Store
class AggregateTestCase(TestCase):
@classmethod
def setUpTestData(cls):
cls.a1 = Author.objects.create(name='Adrian Holovaty', age=34)
cls.a2 = Author.objects.create(name='Jacob Kaplan-Moss', age=35)
cls.a3 = Author.objects.create(name='Brad Dayley', age=45)
cls.a4 = Author.objects.create(name='James Bennett', age=29)
cls.a5 = Author.objects.create(name='Jeffrey Forcier', age=37)
cls.a6 = Author.objects.create(name='Paul Bissex', age=29)
cls.a7 = Author.objects.create(name='Wesley J. Chun', age=25)
cls.a8 = Author.objects.create(name='Peter Norvig', age=57)
cls.a9 = Author.objects.create(name='Stuart Russell', age=46)
cls.a1.friends.add(cls.a2, cls.a4)
cls.a2.friends.add(cls.a1, cls.a7)
cls.a4.friends.add(cls.a1)
cls.a5.friends.add(cls.a6, cls.a7)
cls.a6.friends.add(cls.a5, cls.a7)
cls.a7.friends.add(cls.a2, cls.a5, cls.a6)
cls.a8.friends.add(cls.a9)
cls.a9.friends.add(cls.a8)
cls.p1 = Publisher.objects.create(name='Apress', num_awards=3, duration=datetime.timedelta(days=1))
cls.p2 = Publisher.objects.create(name='Sams', num_awards=1, duration=datetime.timedelta(days=2))
cls.p3 = Publisher.objects.create(name='Prentice Hall', num_awards=7)
cls.p4 = Publisher.objects.create(name='Morgan Kaufmann', num_awards=9)
cls.p5 = Publisher.objects.create(name="Jonno's House of Books", num_awards=0)
cls.b1 = Book.objects.create(
isbn='159059725', name='The Definitive Guide to Django: Web Development Done Right',
pages=447, rating=4.5, price=Decimal('30.00'), contact=cls.a1, publisher=cls.p1,
pubdate=datetime.date(2007, 12, 6)
)
cls.b2 = Book.objects.create(
isbn='067232959', name='Sams Teach Yourself Django in 24 Hours',
pages=528, rating=3.0, price=Decimal('23.09'), contact=cls.a3, publisher=cls.p2,
pubdate=datetime.date(2008, 3, 3)
)
cls.b3 = Book.objects.create(
isbn='159059996', name='Practical Django Projects',
pages=300, rating=4.0, price=Decimal('29.69'), contact=cls.a4, publisher=cls.p1,
pubdate=datetime.date(2008, 6, 23)
)
cls.b4 = Book.objects.create(
isbn='013235613', name='Python Web Development with Django',
pages=350, rating=4.0, price=Decimal('29.69'), contact=cls.a5, publisher=cls.p3,
pubdate=datetime.date(2008, 11, 3)
)
cls.b5 = Book.objects.create(
isbn='013790395', name='Artificial Intelligence: A Modern Approach',
pages=1132, rating=4.0, price=Decimal('82.80'), contact=cls.a8, publisher=cls.p3,
pubdate=datetime.date(1995, 1, 15)
)
cls.b6 = Book.objects.create(
isbn='155860191', name='Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp',
pages=946, rating=5.0, price=Decimal('75.00'), contact=cls.a8, publisher=cls.p4,
pubdate=datetime.date(1991, 10, 15)
)
cls.b1.authors.add(cls.a1, cls.a2)
cls.b2.authors.add(cls.a3)
cls.b3.authors.add(cls.a4)
cls.b4.authors.add(cls.a5, cls.a6, cls.a7)
cls.b5.authors.add(cls.a8, cls.a9)
cls.b6.authors.add(cls.a8)
s1 = Store.objects.create(
name='Amazon.com',
original_opening=datetime.datetime(1994, 4, 23, 9, 17, 42),
friday_night_closing=datetime.time(23, 59, 59)
)
s2 = Store.objects.create(
name='Books.com',
original_opening=datetime.datetime(2001, 3, 15, 11, 23, 37),
friday_night_closing=datetime.time(23, 59, 59)
)
s3 = Store.objects.create(
name="Mamma and Pappa's Books",
original_opening=datetime.datetime(1945, 4, 25, 16, 24, 14),
friday_night_closing=datetime.time(21, 30)
)
s1.books.add(cls.b1, cls.b2, cls.b3, cls.b4, cls.b5, cls.b6)
s2.books.add(cls.b1, cls.b3, cls.b5, cls.b6)
s3.books.add(cls.b3, cls.b4, cls.b6)
def test_empty_aggregate(self):
self.assertEqual(Author.objects.all().aggregate(), {})
def test_single_aggregate(self):
vals = Author.objects.aggregate(Avg("age"))
self.assertEqual(vals, {"age__avg": Approximate(37.4, places=1)})
def test_multiple_aggregates(self):
vals = Author.objects.aggregate(Sum("age"), Avg("age"))
self.assertEqual(vals, {"age__sum": 337, "age__avg": Approximate(37.4, places=1)})
def test_filter_aggregate(self):
vals = Author.objects.filter(age__gt=29).aggregate(Sum("age"))
self.assertEqual(len(vals), 1)
self.assertEqual(vals["age__sum"], 254)
def test_related_aggregate(self):
vals = Author.objects.aggregate(Avg("friends__age"))
self.assertEqual(len(vals), 1)
self.assertAlmostEqual(vals["friends__age__avg"], 34.07, places=2)
vals = Book.objects.filter(rating__lt=4.5).aggregate(Avg("authors__age"))
self.assertEqual(len(vals), 1)
self.assertAlmostEqual(vals["authors__age__avg"], 38.2857, places=2)
vals = Author.objects.all().filter(name__contains="a").aggregate(Avg("book__rating"))
self.assertEqual(len(vals), 1)
self.assertEqual(vals["book__rating__avg"], 4.0)
vals = Book.objects.aggregate(Sum("publisher__num_awards"))
self.assertEqual(len(vals), 1)
self.assertEqual(vals["publisher__num_awards__sum"], 30)
vals = Publisher.objects.aggregate(Sum("book__price"))
self.assertEqual(len(vals), 1)
self.assertEqual(vals["book__price__sum"], Decimal("270.27"))
def test_aggregate_multi_join(self):
vals = Store.objects.aggregate(Max("books__authors__age"))
self.assertEqual(len(vals), 1)
self.assertEqual(vals["books__authors__age__max"], 57)
vals = Author.objects.aggregate(Min("book__publisher__num_awards"))
self.assertEqual(len(vals), 1)
self.assertEqual(vals["book__publisher__num_awards__min"], 1)
def test_aggregate_alias(self):
vals = Store.objects.filter(name="Amazon.com").aggregate(amazon_mean=Avg("books__rating"))
self.assertEqual(len(vals), 1)
self.assertAlmostEqual(vals["amazon_mean"], 4.08, places=2)
def test_annotate_basic(self):
self.assertQuerysetEqual(
Book.objects.annotate().order_by('pk'), [
"The Definitive Guide to Django: Web Development Done Right",
"Sams Teach Yourself Django in 24 Hours",
"Practical Django Projects",
"Python Web Development with Django",
"Artificial Intelligence: A Modern Approach",
"Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp"
],
lambda b: b.name
)
books = Book.objects.annotate(mean_age=Avg("authors__age"))
b = books.get(pk=self.b1.pk)
self.assertEqual(
b.name,
'The Definitive Guide to Django: Web Development Done Right'
)
self.assertEqual(b.mean_age, 34.5)
def test_annotate_defer(self):
qs = Book.objects.annotate(
page_sum=Sum("pages")).defer('name').filter(pk=self.b1.pk)
rows = [
(1, "159059725", 447, "The Definitive Guide to Django: Web Development Done Right")
]
self.assertQuerysetEqual(
qs.order_by('pk'), rows,
lambda r: (r.id, r.isbn, r.page_sum, r.name)
)
def test_annotate_defer_select_related(self):
qs = Book.objects.select_related('contact').annotate(
page_sum=Sum("pages")).defer('name').filter(pk=self.b1.pk)
rows = [
(1, "159059725", 447, "Adrian Holovaty",
"The Definitive Guide to Django: Web Development Done Right")
]
self.assertQuerysetEqual(
qs.order_by('pk'), rows,
lambda r: (r.id, r.isbn, r.page_sum, r.contact.name, r.name)
)
def test_annotate_m2m(self):
books = Book.objects.filter(rating__lt=4.5).annotate(Avg("authors__age")).order_by("name")
self.assertQuerysetEqual(
books, [
('Artificial Intelligence: A Modern Approach', 51.5),
('Practical Django Projects', 29.0),
('Python Web Development with Django', Approximate(30.3, places=1)),
('Sams Teach Yourself Django in 24 Hours', 45.0)
],
lambda b: (b.name, b.authors__age__avg),
)
books = Book.objects.annotate(num_authors=Count("authors")).order_by("name")
self.assertQuerysetEqual(
books, [
('Artificial Intelligence: A Modern Approach', 2),
('Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp', 1),
('Practical Django Projects', 1),
('Python Web Development with Django', 3),
('Sams Teach Yourself Django in 24 Hours', 1),
('The Definitive Guide to Django: Web Development Done Right', 2)
],
lambda b: (b.name, b.num_authors)
)
def test_backwards_m2m_annotate(self):
authors = Author.objects.filter(name__contains="a").annotate(Avg("book__rating")).order_by("name")
self.assertQuerysetEqual(
authors, [
('Adrian Holovaty', 4.5),
('Brad Dayley', 3.0),
('Jacob Kaplan-Moss', 4.5),
('James Bennett', 4.0),
('Paul Bissex', 4.0),
('Stuart Russell', 4.0)
],
lambda a: (a.name, a.book__rating__avg)
)
authors = Author.objects.annotate(num_books=Count("book")).order_by("name")
self.assertQuerysetEqual(
authors, [
('Adrian Holovaty', 1),
('Brad Dayley', 1),
('Jacob Kaplan-Moss', 1),
('James Bennett', 1),
('Jeffrey Forcier', 1),
('Paul Bissex', 1),
('Peter Norvig', 2),
('Stuart Russell', 1),
('Wesley J. Chun', 1)
],
lambda a: (a.name, a.num_books)
)
def test_reverse_fkey_annotate(self):
books = Book.objects.annotate(Sum("publisher__num_awards")).order_by("name")
self.assertQuerysetEqual(
books, [
('Artificial Intelligence: A Modern Approach', 7),
('Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp', 9),
('Practical Django Projects', 3),
('Python Web Development with Django', 7),
('Sams Teach Yourself Django in 24 Hours', 1),
('The Definitive Guide to Django: Web Development Done Right', 3)
],
lambda b: (b.name, b.publisher__num_awards__sum)
)
publishers = Publisher.objects.annotate(Sum("book__price")).order_by("name")
self.assertQuerysetEqual(
publishers, [
('Apress', Decimal("59.69")),
("Jonno's House of Books", None),
('Morgan Kaufmann', Decimal("75.00")),
('Prentice Hall', Decimal("112.49")),
('Sams', Decimal("23.09"))
],
lambda p: (p.name, p.book__price__sum)
)
def test_annotate_values(self):
books = list(Book.objects.filter(pk=self.b1.pk).annotate(mean_age=Avg("authors__age")).values())
self.assertEqual(
books, [
{
"contact_id": 1,
"id": 1,
"isbn": "159059725",
"mean_age": 34.5,
"name": "The Definitive Guide to Django: Web Development Done Right",
"pages": 447,
"price": Approximate(Decimal("30")),
"pubdate": datetime.date(2007, 12, 6),
"publisher_id": 1,
"rating": 4.5,
}
]
)
books = Book.objects.filter(pk=self.b1.pk).annotate(mean_age=Avg('authors__age')).values('pk', 'isbn', 'mean_age')
self.assertEqual(
list(books), [
{
"pk": 1,
"isbn": "159059725",
"mean_age": 34.5,
}
]
)
books = Book.objects.filter(pk=self.b1.pk).annotate(mean_age=Avg("authors__age")).values("name")
self.assertEqual(
list(books), [
{
"name": "The Definitive Guide to Django: Web Development Done Right"
}
]
)
books = Book.objects.filter(pk=self.b1.pk).values().annotate(mean_age=Avg('authors__age'))
self.assertEqual(
list(books), [
{
"contact_id": 1,
"id": 1,
"isbn": "159059725",
"mean_age": 34.5,
"name": "The Definitive Guide to Django: Web Development Done Right",
"pages": 447,
"price": Approximate(Decimal("30")),
"pubdate": datetime.date(2007, 12, 6),
"publisher_id": 1,
"rating": 4.5,
}
]
)
books = Book.objects.values("rating").annotate(n_authors=Count("authors__id"), mean_age=Avg("authors__age")).order_by("rating")
self.assertEqual(
list(books), [
{
"rating": 3.0,
"n_authors": 1,
"mean_age": 45.0,
},
{
"rating": 4.0,
"n_authors": 6,
"mean_age": Approximate(37.16, places=1)
},
{
"rating": 4.5,
"n_authors": 2,
"mean_age": 34.5,
},
{
"rating": 5.0,
"n_authors": 1,
"mean_age": 57.0,
}
]
)
authors = Author.objects.annotate(Avg("friends__age")).order_by("name")
self.assertEqual(len(authors), 9)
self.assertQuerysetEqual(
authors, [
('Adrian Holovaty', 32.0),
('Brad Dayley', None),
('Jacob Kaplan-Moss', 29.5),
('James Bennett', 34.0),
('Jeffrey Forcier', 27.0),
('Paul Bissex', 31.0),
('Peter Norvig', 46.0),
('Stuart Russell', 57.0),
('Wesley J. Chun', Approximate(33.66, places=1))
],
lambda a: (a.name, a.friends__age__avg)
)
def test_count(self):
vals = Book.objects.aggregate(Count("rating"))
self.assertEqual(vals, {"rating__count": 6})
vals = Book.objects.aggregate(Count("rating", distinct=True))
self.assertEqual(vals, {"rating__count": 4})
def test_fkey_aggregate(self):
explicit = list(Author.objects.annotate(Count('book__id')))
implicit = list(Author.objects.annotate(Count('book')))
self.assertEqual(explicit, implicit)
def test_annotate_ordering(self):
books = Book.objects.values('rating').annotate(oldest=Max('authors__age')).order_by('oldest', 'rating')
self.assertEqual(
list(books), [
{
"rating": 4.5,
"oldest": 35,
},
{
"rating": 3.0,
"oldest": 45
},
{
"rating": 4.0,
"oldest": 57,
},
{
"rating": 5.0,
"oldest": 57,
}
]
)
books = Book.objects.values("rating").annotate(oldest=Max("authors__age")).order_by("-oldest", "-rating")
self.assertEqual(
list(books), [
{
"rating": 5.0,
"oldest": 57,
},
{
"rating": 4.0,
"oldest": 57,
},
{
"rating": 3.0,
"oldest": 45,
},
{
"rating": 4.5,
"oldest": 35,
}
]
)
def test_aggregate_annotation(self):
vals = Book.objects.annotate(num_authors=Count("authors__id")).aggregate(Avg("num_authors"))
self.assertEqual(vals, {"num_authors__avg": Approximate(1.66, places=1)})
def test_avg_duration_field(self):
self.assertEqual(
Publisher.objects.aggregate(Avg('duration', output_field=DurationField())),
{'duration__avg': datetime.timedelta(days=1, hours=12)}
)
def test_sum_duration_field(self):
self.assertEqual(
Publisher.objects.aggregate(Sum('duration', output_field=DurationField())),
{'duration__sum': datetime.timedelta(days=3)}
)
def test_sum_distinct_aggregate(self):
"""
Sum on a distict() QuerySet should aggregate only the distinct items.
"""
authors = Author.objects.filter(book__in=[5, 6])
self.assertEqual(authors.count(), 3)
distinct_authors = authors.distinct()
self.assertEqual(distinct_authors.count(), 2)
# Selected author ages are 57 and 46
age_sum = distinct_authors.aggregate(Sum('age'))
self.assertEqual(age_sum['age__sum'], 103)
def test_filtering(self):
p = Publisher.objects.create(name='Expensive Publisher', num_awards=0)
Book.objects.create(
name='ExpensiveBook1',
pages=1,
isbn='111',
rating=3.5,
price=Decimal("1000"),
publisher=p,
contact_id=1,
pubdate=datetime.date(2008, 12, 1)
)
Book.objects.create(
name='ExpensiveBook2',
pages=1,
isbn='222',
rating=4.0,
price=Decimal("1000"),
publisher=p,
contact_id=1,
pubdate=datetime.date(2008, 12, 2)
)
Book.objects.create(
name='ExpensiveBook3',
pages=1,
isbn='333',
rating=4.5,
price=Decimal("35"),
publisher=p,
contact_id=1,
pubdate=datetime.date(2008, 12, 3)
)
publishers = Publisher.objects.annotate(num_books=Count("book__id")).filter(num_books__gt=1).order_by("pk")
self.assertQuerysetEqual(
publishers, [
"Apress",
"Prentice Hall",
"Expensive Publisher",
],
lambda p: p.name,
)
publishers = Publisher.objects.filter(book__price__lt=Decimal("40.0")).order_by("pk")
self.assertQuerysetEqual(
publishers, [
"Apress",
"Apress",
"Sams",
"Prentice Hall",
"Expensive Publisher",
],
lambda p: p.name
)
publishers = Publisher.objects.annotate(num_books=Count("book__id")).filter(num_books__gt=1, book__price__lt=Decimal("40.0")).order_by("pk")
self.assertQuerysetEqual(
publishers, [
"Apress",
"Prentice Hall",
"Expensive Publisher",
],
lambda p: p.name,
)
publishers = Publisher.objects.filter(book__price__lt=Decimal("40.0")).annotate(num_books=Count("book__id")).filter(num_books__gt=1).order_by("pk")
self.assertQuerysetEqual(
publishers, [
"Apress",
],
lambda p: p.name
)
publishers = Publisher.objects.annotate(num_books=Count("book")).filter(num_books__range=[1, 3]).order_by("pk")
self.assertQuerysetEqual(
publishers, [
"Apress",
"Sams",
"Prentice Hall",
"Morgan Kaufmann",
"Expensive Publisher",
],
lambda p: p.name
)
publishers = Publisher.objects.annotate(num_books=Count("book")).filter(num_books__range=[1, 2]).order_by("pk")
self.assertQuerysetEqual(
publishers, [
"Apress",
"Sams",
"Prentice Hall",
"Morgan Kaufmann",
],
lambda p: p.name
)
publishers = Publisher.objects.annotate(num_books=Count("book")).filter(num_books__in=[1, 3]).order_by("pk")
self.assertQuerysetEqual(
publishers, [
"Sams",
"Morgan Kaufmann",
"Expensive Publisher",
],
lambda p: p.name,
)
publishers = Publisher.objects.annotate(num_books=Count("book")).filter(num_books__isnull=True)
self.assertEqual(len(publishers), 0)
def test_annotation(self):
vals = Author.objects.filter(pk=self.a1.pk).aggregate(Count("friends__id"))
self.assertEqual(vals, {"friends__id__count": 2})
books = Book.objects.annotate(num_authors=Count("authors__name")).filter(num_authors__exact=2).order_by("pk")
self.assertQuerysetEqual(
books, [
"The Definitive Guide to Django: Web Development Done Right",
"Artificial Intelligence: A Modern Approach",
],
lambda b: b.name
)
authors = Author.objects.annotate(num_friends=Count("friends__id", distinct=True)).filter(num_friends=0).order_by("pk")
self.assertQuerysetEqual(
authors, [
"Brad Dayley",
],
lambda a: a.name
)
publishers = Publisher.objects.annotate(num_books=Count("book__id")).filter(num_books__gt=1).order_by("pk")
self.assertQuerysetEqual(
publishers, [
"Apress",
"Prentice Hall",
],
lambda p: p.name
)
publishers = Publisher.objects.filter(book__price__lt=Decimal("40.0")).annotate(num_books=Count("book__id")).filter(num_books__gt=1)
self.assertQuerysetEqual(
publishers, [
"Apress",
],
lambda p: p.name
)
books = Book.objects.annotate(num_authors=Count("authors__id")).filter(authors__name__contains="Norvig", num_authors__gt=1)
self.assertQuerysetEqual(
books, [
"Artificial Intelligence: A Modern Approach",
],
lambda b: b.name
)
def test_more_aggregation(self):
a = Author.objects.get(name__contains='Norvig')
b = Book.objects.get(name__contains='Done Right')
b.authors.add(a)
b.save()
vals = Book.objects.annotate(num_authors=Count("authors__id")).filter(authors__name__contains="Norvig", num_authors__gt=1).aggregate(Avg("rating"))
self.assertEqual(vals, {"rating__avg": 4.25})
def test_even_more_aggregate(self):
publishers = Publisher.objects.annotate(
earliest_book=Min("book__pubdate"),
).exclude(earliest_book=None).order_by("earliest_book").values(
'earliest_book',
'num_awards',
'id',
'name',
)
self.assertEqual(
list(publishers), [
{
'earliest_book': datetime.date(1991, 10, 15),
'num_awards': 9,
'id': 4,
'name': 'Morgan Kaufmann'
},
{
'earliest_book': datetime.date(1995, 1, 15),
'num_awards': 7,
'id': 3,
'name': 'Prentice Hall'
},
{
'earliest_book': datetime.date(2007, 12, 6),
'num_awards': 3,
'id': 1,
'name': 'Apress'
},
{
'earliest_book': datetime.date(2008, 3, 3),
'num_awards': 1,
'id': 2,
'name': 'Sams'
}
]
)
vals = Store.objects.aggregate(Max("friday_night_closing"), Min("original_opening"))
self.assertEqual(
vals,
{
"friday_night_closing__max": datetime.time(23, 59, 59),
"original_opening__min": datetime.datetime(1945, 4, 25, 16, 24, 14),
}
)
def test_annotate_values_list(self):
books = Book.objects.filter(pk=self.b1.pk).annotate(mean_age=Avg("authors__age")).values_list("pk", "isbn", "mean_age")
self.assertEqual(
list(books), [
(1, "159059725", 34.5),
]
)
books = Book.objects.filter(pk=self.b1.pk).annotate(mean_age=Avg("authors__age")).values_list("isbn")
self.assertEqual(
list(books), [
('159059725',)
]
)
books = Book.objects.filter(pk=self.b1.pk).annotate(mean_age=Avg("authors__age")).values_list("mean_age")
self.assertEqual(
list(books), [
(34.5,)
]
)
books = Book.objects.filter(pk=self.b1.pk).annotate(mean_age=Avg("authors__age")).values_list("mean_age", flat=True)
self.assertEqual(list(books), [34.5])
books = Book.objects.values_list("price").annotate(count=Count("price")).order_by("-count", "price")
self.assertEqual(
list(books), [
(Decimal("29.69"), 2),
(Decimal('23.09'), 1),
(Decimal('30'), 1),
(Decimal('75'), 1),
(Decimal('82.8'), 1),
]
)
def test_dates_with_aggregation(self):
"""
Test that .dates() returns a distinct set of dates when applied to a
QuerySet with aggregation.
Refs #18056. Previously, .dates() would return distinct (date_kind,
aggregation) sets, in this case (year, num_authors), so 2008 would be
returned twice because there are books from 2008 with a different
number of authors.
"""
dates = Book.objects.annotate(num_authors=Count("authors")).dates('pubdate', 'year')
self.assertQuerysetEqual(
dates, [
"datetime.date(1991, 1, 1)",
"datetime.date(1995, 1, 1)",
"datetime.date(2007, 1, 1)",
"datetime.date(2008, 1, 1)"
]
)
def test_values_aggregation(self):
# Refs #20782
max_rating = Book.objects.values('rating').aggregate(max_rating=Max('rating'))
self.assertEqual(max_rating['max_rating'], 5)
max_books_per_rating = Book.objects.values('rating').annotate(
books_per_rating=Count('id')
).aggregate(Max('books_per_rating'))
self.assertEqual(
max_books_per_rating,
{'books_per_rating__max': 3})
def test_ticket17424(self):
"""
Check that doing exclude() on a foreign model after annotate()
doesn't crash.
"""
all_books = list(Book.objects.values_list('pk', flat=True).order_by('pk'))
annotated_books = Book.objects.order_by('pk').annotate(one=Count("id"))
# The value doesn't matter, we just need any negative
# constraint on a related model that's a noop.
excluded_books = annotated_books.exclude(publisher__name="__UNLIKELY_VALUE__")
# Try to generate query tree
str(excluded_books.query)
self.assertQuerysetEqual(excluded_books, all_books, lambda x: x.pk)
# Check internal state
self.assertIsNone(annotated_books.query.alias_map["aggregation_book"].join_type)
self.assertIsNone(excluded_books.query.alias_map["aggregation_book"].join_type)
def test_ticket12886(self):
"""
Check that aggregation over sliced queryset works correctly.
"""
qs = Book.objects.all().order_by('-rating')[0:3]
vals = qs.aggregate(average_top3_rating=Avg('rating'))['average_top3_rating']
self.assertAlmostEqual(vals, 4.5, places=2)
def test_ticket11881(self):
"""
Check that subqueries do not needlessly contain ORDER BY, SELECT FOR UPDATE
or select_related() stuff.
"""
qs = Book.objects.all().select_for_update().order_by(
'pk').select_related('publisher').annotate(max_pk=Max('pk'))
with CaptureQueriesContext(connection) as captured_queries:
qs.aggregate(avg_pk=Avg('max_pk'))
self.assertEqual(len(captured_queries), 1)
qstr = captured_queries[0]['sql'].lower()
self.assertNotIn('for update', qstr)
forced_ordering = connection.ops.force_no_ordering()
if forced_ordering:
# If the backend needs to force an ordering we make sure it's
# the only "ORDER BY" clause present in the query.
self.assertEqual(
re.findall(r'order by (\w+)', qstr),
[', '.join(f[1][0] for f in forced_ordering).lower()]
)
else:
self.assertNotIn('order by', qstr)
self.assertEqual(qstr.count(' join '), 0)
def test_decimal_max_digits_has_no_effect(self):
Book.objects.all().delete()
a1 = Author.objects.first()
p1 = Publisher.objects.first()
thedate = timezone.now()
for i in range(10):
Book.objects.create(
isbn="abcde{}".format(i), name="none", pages=10, rating=4.0,
price=9999.98, contact=a1, publisher=p1, pubdate=thedate)
book = Book.objects.aggregate(price_sum=Sum('price'))
self.assertEqual(book['price_sum'], Decimal("99999.80"))
def test_nonaggregate_aggregation_throws(self):
with six.assertRaisesRegex(self, TypeError, 'fail is not an aggregate expression'):
Book.objects.aggregate(fail=F('price'))
def test_nonfield_annotation(self):
book = Book.objects.annotate(val=Max(Value(2, output_field=IntegerField()))).first()
self.assertEqual(book.val, 2)
book = Book.objects.annotate(val=Max(Value(2), output_field=IntegerField())).first()
self.assertEqual(book.val, 2)
book = Book.objects.annotate(val=Max(2, output_field=IntegerField())).first()
self.assertEqual(book.val, 2)
def test_missing_output_field_raises_error(self):
with six.assertRaisesRegex(self, FieldError, 'Cannot resolve expression type, unknown output_field'):
Book.objects.annotate(val=Max(2)).first()
def test_annotation_expressions(self):
authors = Author.objects.annotate(combined_ages=Sum(F('age') + F('friends__age'))).order_by('name')
authors2 = Author.objects.annotate(combined_ages=Sum('age') + Sum('friends__age')).order_by('name')
for qs in (authors, authors2):
self.assertEqual(len(qs), 9)
self.assertQuerysetEqual(
qs, [
('Adrian Holovaty', 132),
('Brad Dayley', None),
('Jacob Kaplan-Moss', 129),
('James Bennett', 63),
('Jeffrey Forcier', 128),
('Paul Bissex', 120),
('Peter Norvig', 103),
('Stuart Russell', 103),
('Wesley J. Chun', 176)
],
lambda a: (a.name, a.combined_ages)
)
def test_aggregation_expressions(self):
a1 = Author.objects.aggregate(av_age=Sum('age') / Count('*'))
a2 = Author.objects.aggregate(av_age=Sum('age') / Count('age'))
a3 = Author.objects.aggregate(av_age=Avg('age'))
self.assertEqual(a1, {'av_age': 37})
self.assertEqual(a2, {'av_age': 37})
self.assertEqual(a3, {'av_age': Approximate(37.4, places=1)})
def test_avg_decimal_field(self):
v = Book.objects.filter(rating=4).aggregate(avg_price=(Avg('price')))['avg_price']
self.assertIsInstance(v, float)
self.assertEqual(v, Approximate(47.39, places=2))
def test_order_of_precedence(self):
p1 = Book.objects.filter(rating=4).aggregate(avg_price=(Avg('price') + 2) * 3)
self.assertEqual(p1, {'avg_price': Approximate(148.18, places=2)})
p2 = Book.objects.filter(rating=4).aggregate(avg_price=Avg('price') + 2 * 3)
self.assertEqual(p2, {'avg_price': Approximate(53.39, places=2)})
def test_combine_different_types(self):
    # Mixing field types in one expression requires an explicit output_field;
    # without one a FieldError is raised, with one the result is coerced to
    # the requested type (int / float / Decimal).
    with six.assertRaisesRegex(self, FieldError, 'Expression contains mixed types. You must set output_field'):
        Book.objects.annotate(sums=Sum('rating') + Sum('pages') + Sum('price')).get(pk=self.b4.pk)
    b1 = Book.objects.annotate(sums=Sum(F('rating') + F('pages') + F('price'),
                                        output_field=IntegerField())).get(pk=self.b4.pk)
    self.assertEqual(b1.sums, 383)
    b2 = Book.objects.annotate(sums=Sum(F('rating') + F('pages') + F('price'),
                                        output_field=FloatField())).get(pk=self.b4.pk)
    self.assertEqual(b2.sums, 383.69)
    b3 = Book.objects.annotate(sums=Sum(F('rating') + F('pages') + F('price'),
                                        output_field=DecimalField())).get(pk=self.b4.pk)
    self.assertEqual(b3.sums, Approximate(Decimal("383.69"), places=2))
def test_complex_aggregations_require_kwarg(self):
    # Complex (non-single-field) annotations/aggregates cannot derive a
    # default alias, so passing them positionally must raise TypeError.
    with six.assertRaisesRegex(self, TypeError, 'Complex annotations require an alias'):
        Author.objects.annotate(Sum(F('age') + F('friends__age')))
    with six.assertRaisesRegex(self, TypeError, 'Complex aggregates require an alias'):
        Author.objects.aggregate(Sum('age') / Count('age'))
    with six.assertRaisesRegex(self, TypeError, 'Complex aggregates require an alias'):
        Author.objects.aggregate(Sum(1))
def test_aggregate_over_complex_annotation(self):
    # Aggregates can reference a previously annotated complex expression,
    # including arithmetic over the aggregate and multiple aggregates in one
    # call.
    qs = Author.objects.annotate(
        combined_ages=Sum(F('age') + F('friends__age')))
    age = qs.aggregate(max_combined_age=Max('combined_ages'))
    self.assertEqual(age['max_combined_age'], 176)
    age = qs.aggregate(max_combined_age_doubled=Max('combined_ages') * 2)
    self.assertEqual(age['max_combined_age_doubled'], 176 * 2)
    age = qs.aggregate(
        max_combined_age_doubled=Max('combined_ages') + Max('combined_ages'))
    self.assertEqual(age['max_combined_age_doubled'], 176 * 2)
    age = qs.aggregate(
        max_combined_age_doubled=Max('combined_ages') + Max('combined_ages'),
        sum_combined_age=Sum('combined_ages'))
    self.assertEqual(age['max_combined_age_doubled'], 176 * 2)
    self.assertEqual(age['sum_combined_age'], 954)
    age = qs.aggregate(
        max_combined_age_doubled=Max('combined_ages') + Max('combined_ages'),
        sum_combined_age_doubled=Sum('combined_ages') + Sum('combined_ages'))
    self.assertEqual(age['max_combined_age_doubled'], 176 * 2)
    self.assertEqual(age['sum_combined_age_doubled'], 954 * 2)
def test_values_annotation_with_expression(self):
    # ensure the F() is promoted to the group by clause
    qs = Author.objects.values('name').annotate(another_age=Sum('age') + F('age'))
    a = qs.get(name="Adrian Holovaty")
    self.assertEqual(a['another_age'], 68)
    qs = qs.annotate(friend_count=Count('friends'))
    a = qs.get(name="Adrian Holovaty")
    self.assertEqual(a['friend_count'], 2)
    # Adding friends__age to the expression splits the grouping per friend,
    # producing one row per friend (friend_count drops to 1 per row).
    qs = qs.annotate(combined_age=Sum('age') + F('friends__age')).filter(
        name="Adrian Holovaty").order_by('-combined_age')
    self.assertEqual(
        list(qs), [
            {
                "name": 'Adrian Holovaty',
                "another_age": 68,
                "friend_count": 1,
                "combined_age": 69
            },
            {
                "name": 'Adrian Holovaty',
                "another_age": 68,
                "friend_count": 1,
                "combined_age": 63
            }
        ]
    )
    # values() can restrict the output to a subset of annotated columns.
    vals = qs.values('name', 'combined_age')
    self.assertEqual(
        list(vals), [
            {
                "name": 'Adrian Holovaty',
                "combined_age": 69
            },
            {
                "name": 'Adrian Holovaty',
                "combined_age": 63
            }
        ]
    )
def test_annotate_values_aggregate(self):
    # Aggregating over an F()-alias annotation matches aggregating over the
    # underlying field directly.
    alias_age = Author.objects.annotate(
        age_alias=F('age')
    ).values(
        'age_alias',
    ).aggregate(sum_age=Sum('age_alias'))
    age = Author.objects.values('age').aggregate(sum_age=Sum('age'))
    self.assertEqual(alias_age['sum_age'], age['sum_age'])
def test_annotate_over_annotate(self):
    # An aggregate over an earlier F()-alias annotation behaves the same as
    # aggregating the field itself.
    author = Author.objects.annotate(
        age_alias=F('age')
    ).annotate(
        sum_age=Sum('age_alias')
    ).get(name="Adrian Holovaty")
    other_author = Author.objects.annotate(
        sum_age=Sum('age')
    ).get(name="Adrian Holovaty")
    self.assertEqual(author.sum_age, other_author.sum_age)
def test_annotated_aggregate_over_annotated_aggregate(self):
    # Aggregating an aggregate annotation is rejected, including when the
    # offending reference is only one of several source expressions of a
    # custom aggregate.
    with self.assertRaisesMessage(FieldError, "Cannot compute Sum('id__max'): 'id__max' is an aggregate"):
        Book.objects.annotate(Max('id')).annotate(Sum('id__max'))

    class MyMax(Max):
        def as_sql(self, compiler, connection):
            # Drop all but the first source expression at SQL-compile time.
            self.set_source_expressions(self.get_source_expressions()[0:1])
            return super(MyMax, self).as_sql(compiler, connection)
    with self.assertRaisesMessage(FieldError, "Cannot compute Max('id__max'): 'id__max' is an aggregate"):
        Book.objects.annotate(Max('id')).annotate(my_max=MyMax('id__max', 'price'))
def test_multi_arg_aggregate(self):
    # A multi-argument aggregate counts as complex: it requires an alias in
    # both aggregate() and annotate(), and works once an alias is supplied.
    class MyMax(Max):
        def as_sql(self, compiler, connection):
            self.set_source_expressions(self.get_source_expressions()[0:1])
            return super(MyMax, self).as_sql(compiler, connection)
    with self.assertRaisesMessage(TypeError, 'Complex aggregates require an alias'):
        Book.objects.aggregate(MyMax('pages', 'price'))
    with self.assertRaisesMessage(TypeError, 'Complex annotations require an alias'):
        Book.objects.annotate(MyMax('pages', 'price'))
    Book.objects.aggregate(max_field=MyMax('pages', 'price'))
def test_add_implementation(self):
    # A vendor-specific as_<vendor> method can override SQL generation for
    # an expression; three override styles are exercised below.
    class MySum(Sum):
        pass
    # test completely changing how the output is rendered
    def lower_case_function_override(self, compiler, connection):
        sql, params = compiler.compile(self.source_expressions[0])
        substitutions = dict(function=self.function.lower(), expressions=sql)
        substitutions.update(self.extra)
        return self.template % substitutions, params
    setattr(MySum, 'as_' + connection.vendor, lower_case_function_override)
    qs = Book.objects.annotate(
        sums=MySum(F('rating') + F('pages') + F('price'), output_field=IntegerField())
    )
    self.assertEqual(str(qs.query).count('sum('), 1)
    b1 = qs.get(pk=self.b4.pk)
    self.assertEqual(b1.sums, 383)
    # test changing the dict and delegating
    def lower_case_function_super(self, compiler, connection):
        self.extra['function'] = self.function.lower()
        return super(MySum, self).as_sql(compiler, connection)
    setattr(MySum, 'as_' + connection.vendor, lower_case_function_super)
    qs = Book.objects.annotate(
        sums=MySum(F('rating') + F('pages') + F('price'), output_field=IntegerField())
    )
    self.assertEqual(str(qs.query).count('sum('), 1)
    b1 = qs.get(pk=self.b4.pk)
    self.assertEqual(b1.sums, 383)
    # test overriding all parts of the template
    def be_evil(self, compiler, connection):
        substitutions = dict(function='MAX', expressions='2')
        substitutions.update(self.extra)
        return self.template % substitutions, ()
    setattr(MySum, 'as_' + connection.vendor, be_evil)
    qs = Book.objects.annotate(
        sums=MySum(F('rating') + F('pages') + F('price'), output_field=IntegerField())
    )
    self.assertEqual(str(qs.query).count('MAX('), 1)
    b1 = qs.get(pk=self.b4.pk)
    self.assertEqual(b1.sums, 2)
def test_complex_values_aggregation(self):
    # Expression arithmetic composes with values() grouping, both inside
    # aggregate() and over an annotated per-group value.
    max_rating = Book.objects.values('rating').aggregate(
        double_max_rating=Max('rating') + Max('rating'))
    self.assertEqual(max_rating['double_max_rating'], 5 * 2)
    max_books_per_rating = Book.objects.values('rating').annotate(
        books_per_rating=Count('id') + 5
    ).aggregate(Max('books_per_rating'))
    self.assertEqual(
        max_books_per_rating,
        {'books_per_rating__max': 3 + 5})
def test_expression_on_aggregation(self):
    # Create a plain expression
    class Greatest(Func):
        function = 'GREATEST'

        def as_sqlite(self, compiler, connection):
            # SQLite has no GREATEST; its MAX() is the scalar equivalent.
            return super(Greatest, self).as_sql(compiler, connection, function='MAX')
    # A Func may wrap aggregates, and the result is filterable against F().
    qs = Publisher.objects.annotate(
        price_or_median=Greatest(Avg('book__rating'), Avg('book__price'))
    ).filter(price_or_median__gte=F('num_awards')).order_by('num_awards')
    self.assertQuerysetEqual(
        qs, [1, 3, 7, 9], lambda v: v.num_awards)
    qs2 = Publisher.objects.annotate(
        rating_or_num_awards=Greatest(Avg('book__rating'), F('num_awards'),
                                      output_field=FloatField())
    ).filter(rating_or_num_awards__gt=F('num_awards')).order_by('num_awards')
    self.assertQuerysetEqual(
        qs2, [1, 3], lambda v: v.num_awards)
@ignore_warnings(category=RemovedInDjango110Warning)
def test_backwards_compatibility(self):
    # Legacy aggregates implementing the deprecated add_to_query() protocol
    # must still compose with expression arithmetic (removed in Django 1.10).
    from django.db.models.sql import aggregates as sql_aggregates

    class SqlNewSum(sql_aggregates.Aggregate):
        sql_function = 'SUM'

    class NewSum(Aggregate):
        name = 'Sum'

        def add_to_query(self, query, alias, col, source, is_summary):
            klass = SqlNewSum
            aggregate = klass(
                col, source=source, is_summary=is_summary, **self.extra)
            query.annotations[alias] = aggregate
    qs = Author.objects.values('name').annotate(another_age=NewSum('age') + F('age'))
    a = qs.get(name="Adrian Holovaty")
    self.assertEqual(a['another_age'], 68)
| bsd-3-clause |
david4096/ga4gh-server | ga4gh/server/auth/__init__.py | 1 | 11719 | """
Helps to implement authentication and authorization using Auth0.
Offers functions for generating the view functions needed to implement Auth0,
a login screen, callback maker, and a function decorator for protecting
endpoints.
"""
import flask
import requests
import functools
import json
import base64
import jwt
import ga4gh.server.exceptions as exceptions
def auth_decorator(app=None):
    """
    This decorator wraps a view function so that it is protected when Auth0
    is enabled. This means that any request will be expected to have a signed
    token in the authorization header if the `AUTH0_ENABLED` configuration
    setting is True.

    The authorization header will have the form:

    "authorization: Bearer eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9....."

    If a request is not properly signed, an attempt is made to provide the
    client with useful error messages. This means that if a request is not
    authorized the underlying view function will not be executed.

    When `AUTH0_ENABLED` is false, this decorator will simply execute the
    decorated view without observing the authorization header.

    :param app: Flask application whose ``config`` and ``cache`` are used.
    :return: Flask view decorator
    """
    def requires_auth(f):
        @functools.wraps(f)
        def decorated(*args, **kwargs):
            # This decorator will only apply with AUTH0_ENABLED set to True.
            if app.config.get('AUTH0_ENABLED', False):
                client_id = app.config.get("AUTH0_CLIENT_ID")
                client_secret = app.config.get("AUTH0_CLIENT_SECRET")
                auth_header = flask.request.headers.get('Authorization', None)
                # Each of these functions will throw a 401 if there is a
                # problem decoding the token with some helpful error message.
                if auth_header:
                    token, profile = _decode_header(
                        auth_header, client_id, client_secret)
                else:
                    raise exceptions.NotAuthorizedException()
                # We store the token in the session so that later
                # stages can use it to connect identity and authorization.
                flask._request_ctx_stack.top.current_user = profile
                flask.session['auth0_key'] = token
                # Now we need to make sure that on top of having a good token
                # they are authorized, and if not provide an error message.
                is_authorized(app.cache, profile['email'])
                is_active(app.cache, token)
            return f(*args, **kwargs)
        return decorated
    return requires_auth
def decode_header(auth_header, client_id, client_secret):
    """
    A function that threads the header through decoding and returns a tuple
    of the token and payload if successful. This does not fully authenticate
    a request.

    Each validator in the chain raises NotAuthorizedException on a malformed
    header; only a structurally valid "Bearer <token>" header reaches the
    JWT decoding step.

    :param auth_header: Raw value of the Authorization request header.
    :param client_id: Auth0 client id (expected JWT audience).
    :param client_secret: Auth0 client secret used to verify the signature.
    :return: (token, profile)
    """
    return _decode_header(
        _well_formed(
            _has_token(_has_bearer(_has_header(auth_header)))),
        client_id, client_secret)
def logout(cache):
    """
    Logs out the current session by removing it from the cache. This is
    expected to only occur when a session has already been authenticated
    (the session must hold an 'auth0_key' -- a KeyError is raised otherwise).

    :param cache: Application cache mapping tokens to profiles.
    :return: True
    """
    # Invalidate the token server-side, then drop all client session state.
    cache.set(flask.session['auth0_key'], None)
    flask.session.clear()
    return True
def callback_maker(
        cache=None, domain='', client_id='',
        client_secret='', redirect_uri=''):
    """
    This function will generate a view function that can be used to handle
    the return from Auth0. The "callback" is a redirected session from auth0
    that includes the token we can use to authenticate that session.

    If the session is properly authenticated Auth0 will provide a code so our
    application can identify the session. Once this has been done we ask
    for more information about the identified session from Auth0. We then use
    the email of the user logged in to Auth0 to authorize their token to make
    further requests by adding it to the application's cache.

    It sets a value in the cache that sets the current session as logged in. We
    can then refer to this id_token to later authenticate a session.

    :param cache: Application cache used to look up authorized emails and to
        register the session's id_token.
    :param domain: Auth0 tenant domain.
    :param client_id: Auth0 client id.
    :param client_secret: Auth0 client secret.
    :param redirect_uri: URI registered with Auth0 for this callback.
    :return : View function
    """
    def callback_handling():
        code = flask.request.args.get('code')
        if code is None:
            raise exceptions.NotAuthorizedException(
                'The callback expects a well '
                'formatted code, {} was provided'.format(code))
        json_header = {'content-type': 'application/json'}
        # Get auth token: exchange the one-time code for id/access tokens.
        token_url = "https://{domain}/oauth/token".format(domain=domain)
        token_payload = {
            'client_id': client_id,
            'client_secret': client_secret,
            'redirect_uri': redirect_uri,
            'code': code,
            'grant_type': 'authorization_code'}
        try:
            token_info = requests.post(
                token_url,
                data=json.dumps(token_payload),
                headers=json_header).json()
            id_token = token_info['id_token']
            access_token = token_info['access_token']
        except Exception as e:
            raise exceptions.NotAuthorizedException(
                'The callback from Auth0 did not'
                'include the expected tokens: \n'
                '{}'.format(e.message))
        # Get profile information for the session just authenticated.
        try:
            user_url = \
                "https://{domain}/userinfo?access_token={access_token}".format(
                    domain=domain, access_token=access_token)
            user_info = requests.get(user_url).json()
            email = user_info['email']
        except Exception as e:
            raise exceptions.NotAuthorizedException(
                'The user profile from Auth0 did '
                'not contain the expected data: \n {}'.format(e.message))
        # Log token in: only emails previously marked authorized (see
        # authorize_email) get their id_token registered in the cache.
        user = cache.get(email)
        if user and user['authorized']:
            cache.set(id_token, user_info)
            return flask.redirect('/login?code={}'.format(id_token))
        else:
            return flask.redirect('/login')
    return callback_handling
def render_login(
        app=None, scopes='', redirect_uri='', domain='', client_id=''):
    """
    Render the Auth0 login page.

    This fills the LOGIN_HTML jinja template, which embeds the Auth0 Lock
    widget configured for this application. After the user signs in, Auth0
    redirects to ``redirect_uri`` (handled by the view produced by
    ``callback_maker``).

    (The previous docstring here was a copy of ``callback_maker``'s and did
    not describe this function.)

    :param app: Flask application providing the jinja environment.
    :param scopes: OAuth scopes requested from Auth0.
    :param redirect_uri: URI Auth0 redirects to after login.
    :param domain: Auth0 tenant domain.
    :param client_id: Auth0 client id.
    :return : Rendered login template
    """
    return app.jinja_env.from_string(LOGIN_HTML).render(
        scopes=scopes,
        redirect_uri=redirect_uri,
        domain=domain,
        client_id=client_id)
def render_key(app, key=""):
    """
    Renders a view from the app and a key that lets the current session grab
    its token.

    :param app: Flask application providing the jinja environment.
    :param key: API token to display to the user.
    :return: Rendered view
    """
    return app.jinja_env.from_string(KEY_HTML).render(
        key=key)
def authorize_email(email='davidcs@ucsc.edu', cache=None):
    """
    Adds an email address to the list of authorized emails stored in an
    ephemeral cache.

    NOTE(review): the default email looks like a developer placeholder --
    callers should always pass an explicit email. Passing cache=None raises
    AttributeError.

    :param email: Email address to mark as authorized.
    :param cache: Application cache the authorization flag is written to.
    """
    # TODO safely access cache
    cache.set(email, {'authorized': True})
def _has_header(auth_header):
if not auth_header:
raise exceptions.NotAuthorizedException(
'Authorization header is expected.')
return auth_header
def _has_bearer(auth_header):
parts = auth_header.split()
if parts[0].lower() != 'bearer':
raise exceptions.NotAuthorizedException(
'Authorization header must start with "Bearer".')
return auth_header
def _has_token(auth_header):
parts = auth_header.split()
if len(parts) == 1:
raise exceptions.NotAuthorizedException(
'Token not found in header.')
return auth_header
def _well_formed(auth_header):
parts = auth_header.split()
if len(parts) > 2:
raise exceptions.NotAuthorizedException(
'Authorization header must be Bearer + \s + token.')
return auth_header
def _decode_header(auth_header, client_id, client_secret):
    """
    Takes the header and tries to return an active token and decoded
    payload.

    :param auth_header: Header of the form "Bearer <token>".
    :param client_id: Auth0 client id, checked against the JWT audience.
    :param client_secret: Auth0 client secret (URL-safe base64) used to
        verify the token signature.
    :return: (token, profile)
    """
    try:
        token = auth_header.split()[1]
        # Auth0 secrets use the URL-safe base64 alphabet; translate back to
        # the standard alphabet before decoding.
        # NOTE(review): no '=' padding is restored here -- assumes the secret
        # length is already a multiple of 4; confirm against Auth0 settings.
        b64secret = client_secret.replace(
            "_", "/").replace("-", "+")
        payload = jwt.decode(
            token,
            base64.b64decode(b64secret),
            audience=client_id)
    # Exception order matters: the specific PyJWT errors must be caught
    # before the catch-all below.
    except jwt.ExpiredSignature:
        raise exceptions.NotAuthorizedException(
            'Token has expired, please log in again.')
    # is valid client
    except jwt.InvalidAudienceError:
        message = 'Incorrect audience, expected: {}'.format(
            client_id)
        raise exceptions.NotAuthorizedException(message)
    # is valid token
    except jwt.DecodeError:
        raise exceptions.NotAuthorizedException(
            'Token signature could not be validated.')
    except Exception as e:
        raise exceptions.NotAuthorizedException(
            'Token signature was malformed. {}'.format(e.message))
    return token, payload
def is_authorized(cache, email):
    """
    Return the email if the cache holds an entry for it; otherwise raise
    NotAuthenticatedException. An entry is created by authorize_email().

    :param cache: Cache with a ``get`` method keyed by email.
    :param email: Email address to check.
    :return: The checked email.
    """
    if cache.get(email):
        return email
    message = '{} is not authorized to ' \
              'access this resource'.format(email)
    raise exceptions.NotAuthenticatedException(message)
def is_active(cache, token):
    """
    Accepts the cache and ID token and checks to see if the profile is
    currently logged in. If so, return the cached profile, otherwise throw
    a NotAuthenticatedException.

    :param cache: Cache with a ``get`` method keyed by id_token.
    :param token: The id_token identifying the session.
    :return: The profile stored for the token.
    """
    profile = cache.get(token)
    if profile:
        return profile
    raise exceptions.NotAuthenticatedException(
        'The token is good, but you are not logged in. Please '
        'try logging in again.')
# This HTML string is used to render the login page. It is a jinja template.
LOGIN_HTML = """<html>
<head>
<title>Log in</title></head><body><div>
<script src="https://cdn.auth0.com/js/lock/10.0/lock.min.js"></script>
<script type="text/javascript">
var lock = new Auth0Lock('{{ client_id }}', '{{ domain }}', {
auth: {
redirectUrl: '{{ redirect_uri }}',
responseType: 'code',
params: {
scope: '{{ scopes }}' // https://auth0.com/docs/scopes
}
}
});
lock.show();
</script>
</div>"""
KEY_HTML = """<html>
<head>
<title>GA4GH Server API Token</title></head><body><div>
<h1>Your API Token</h1>
<p>Your token is now active, add it as your "Authorization: bearer $TOKEN" header
when making requests to protected endpoints</p>
<textarea cols=120 rows=5 onClick='this.select()' readonly>{{ key }}</textarea>
<h3><a href="/?key={{ key }}">Visit landing page</a></h3>
</div>
""" # noqa
| apache-2.0 |
mouton5000/DiscreteEventApplicationEditor | test/testsTriggersExpressions/testRand.py | 1 | 3182 | __author__ = 'mouton'
from unittest import TestCase
from triggerExpressions import Rand, Evaluation
from database import Variable
from test.testsTriggersExpressions import simpleTests
from arithmeticExpressions import ALitteral
class TestRand(TestCase):
    # Tests for the Rand trigger expression: Rand(p) succeeds with
    # probability p and never modifies the evaluation it is given.
    # The rand_true/rand_false helpers replay a fixed random seed, so the
    # exact interleaving of random.random() calls is significant.

    @classmethod
    def setUpClass(cls):
        # Grammars must be compiled once before any expression is evaluated.
        import grammar.grammars
        grammar.grammars.compileGrammars()

    def setUp(self):
        # One empty evaluation and one populated with int/str/float/bool
        # variables, to check Rand is independent of prior bindings.
        self.eval1 = Evaluation()
        self.eval2 = Evaluation()
        self.eval2[Variable('X')] = 1
        self.eval2[Variable('Y')] = 'abc'
        self.eval2[Variable('Z')] = 12.0
        self.eval2[Variable('T')] = True

    def test_rand_one_with_empty_previous_evaluation(self):
        # Rand(1.0) always succeeds and passes the evaluation through.
        rand = Rand(ALitteral(1.0))
        for i in xrange(100):
            simpleTests.test_evaluation(self, rand, self.eval1, None, self.eval1)

    def test_rand_one_with_non_empty_previous_evaluation(self):
        rand = Rand(ALitteral(1.0))
        for i in xrange(100):
            simpleTests.test_evaluation(self, rand, self.eval2, None, self.eval2)

    def test_rand_true_with_empty_previous_evaluation(self):
        self.rand_true(self.eval1)

    def test_rand_true_with_none_empty_previous_evaluation(self):
        self.rand_true(self.eval2)

    def rand_true(self, previousEvaluation):
        # With seed 0, these positions in the random stream fall below 0.5,
        # so Rand(0.5) succeeds; the interleaved random.random() calls
        # advance the stream past the values that would fail.
        rand = Rand(ALitteral(0.5))
        import random
        random.seed(0)
        random.random()
        random.random()
        simpleTests.test_evaluation(self, rand, previousEvaluation, None, previousEvaluation)
        simpleTests.test_evaluation(self, rand, previousEvaluation, None, previousEvaluation)
        random.random()
        simpleTests.test_evaluation(self, rand, previousEvaluation, None, previousEvaluation)
        random.random()
        simpleTests.test_evaluation(self, rand, previousEvaluation, None, previousEvaluation)
        simpleTests.test_evaluation(self, rand, previousEvaluation, None, previousEvaluation)
        random.random()

    def test_rand_zero_with_empty_previous_evaluation(self):
        # Rand(0) never succeeds.
        rand = Rand(ALitteral(0))
        for i in xrange(100):
            simpleTests.test_evaluation(self, rand, self.eval1, None)

    def test_rand_zero_with_non_empty_previous_evaluation(self):
        rand = Rand(ALitteral(0))
        for i in xrange(100):
            simpleTests.test_evaluation(self, rand, self.eval2, None)

    def test_rand_false_with_empty_previous_evaluation(self):
        self.rand_false(self.eval1)

    def test_rand_false_with_non_empty_previous_evaluation(self):
        self.rand_false(self.eval2)

    def rand_false(self, previousEvaluation):
        # Mirror of rand_true: with seed 0 these positions in the stream are
        # >= 0.5, so Rand(0.5) fails at each tested point.
        rand = Rand(ALitteral(0.5))
        import random
        random.seed(0)
        simpleTests.test_evaluation(self, rand, previousEvaluation, None)
        simpleTests.test_evaluation(self, rand, previousEvaluation, None)
        random.random()
        random.random()
        simpleTests.test_evaluation(self, rand, previousEvaluation, None)
        random.random()
        simpleTests.test_evaluation(self, rand, previousEvaluation, None)
        random.random()
        random.random()
        simpleTests.test_evaluation(self, rand, previousEvaluation, None)
ESOedX/edx-platform | openedx/core/djangoapps/header_control/decorators.py | 2 | 2011 | """
Middleware decorator for removing headers.
"""
from __future__ import absolute_import
from functools import wraps
from openedx.core.djangoapps.header_control import force_header_for_response, remove_headers_from_response
def remove_headers(*headers):
    """
    Decorator that removes specific headers from the response.

    Usage:

        @remove_headers("Vary")
        def myview(request):
            ...

    The HeaderControlMiddleware must be used and placed as closely as possible
    to the top of the middleware chain, ideally after any caching middleware
    but before everything else.

    This decorator is not safe for multiple uses: each call will overwrite any
    previously set values.
    """
    def _decorator(view_func):
        """Wrap ``view_func`` so its response is post-processed."""
        @wraps(view_func)
        def _wrapped(*args, **kwargs):
            """Run the view, then mark the headers for removal."""
            response = view_func(*args, **kwargs)
            remove_headers_from_response(response, *headers)
            return response
        return _wrapped
    return _decorator
def force_header(header, value):
    """
    Decorator that forces a header in the response to have a specific value.

    Usage:

        @force_header("Vary", "Origin")
        def myview(request):
            ...

    The HeaderControlMiddleware must be used and placed as closely as possible
    to the top of the middleware chain, ideally after any caching middleware
    but before everything else.

    This decorator is not safe for multiple uses: each call will overwrite any
    previously set values.
    """
    def _decorator(view_func):
        """Wrap ``view_func`` so its response is post-processed."""
        @wraps(view_func)
        def _wrapped(*args, **kwargs):
            """Run the view, then record the forced header value."""
            response = view_func(*args, **kwargs)
            force_header_for_response(response, header, value)
            return response
        return _wrapped
    return _decorator
| agpl-3.0 |
libvirt/autotest | client/virt/ppm_utils.py | 1 | 7287 | """
Utility functions to deal with ppm (qemu screendump format) files.
@copyright: Red Hat 2008-2009
"""
import os, struct, time, re
from autotest_lib.client.bin import utils
# Some directory/filename utils, for consistency
def find_id_for_screendump(md5sum, dir):
    """
    Search dir for a PPM file whose name ends with md5sum.

    @param md5sum: md5 sum string
    @param dir: Directory that holds the PPM files. (Name kept for caller
            compatibility even though it shadows the builtin.)
    @return: The file's basename without any preceding path, e.g.
            '20080101_120000_d41d8cd98f00b204e9800998ecf8427e.ppm',
            or None when no match exists or the directory is unreadable.
    """
    # Compile once instead of once per directory entry.
    pattern = re.compile(r"(.*_)?" + md5sum + r"\.ppm", re.IGNORECASE)
    try:
        candidates = os.listdir(dir)
    except OSError:
        candidates = []
    for basename in candidates:
        if pattern.match(basename):
            return basename
def generate_id_for_screendump(md5sum, dir):
    """
    Generate a unique filename using the given MD5 sum.

    @param md5sum: md5 sum string embedded in the filename.
    @param dir: Unused; kept only for interface compatibility with callers.
    @return: Only the file basename, without any preceding path. The
            filename consists of the current date and time, the MD5 sum and
            a .ppm extension, e.g.
            '20080101_120000_d41d8cd98f00b204e9800998ecf8427e.ppm'.
    """
    return "%s_%s.ppm" % (time.strftime("%Y%m%d_%H%M%S"), md5sum)
def get_data_dir(steps_filename):
    """
    Return the data dir of the given steps filename.

    The data dir is the sibling directory ../steps_data/<basename>_data
    relative to the steps file's own directory.
    """
    basename = os.path.basename(steps_filename)
    parent = os.path.dirname(steps_filename)
    return os.path.join(parent, "..", "steps_data", basename + "_data")
# Functions for working with PPM files
def image_read_from_ppm_file(filename):
    """
    Read a PPM image.

    @param filename: Path of the PPM (P6) file.
    @return: A 3 element tuple containing the width, height and data of the
            image.
    """
    # 'with' guarantees the descriptor is released even if parsing raises
    # (the original leaked the handle on a malformed dimensions line).
    with open(filename, "rb") as fin:
        fin.readline()                  # magic number line ("P6")
        dimensions = fin.readline()     # "<width> <height>"
        fin.readline()                  # max color value line ("255")
        data = fin.read()
    (w, h) = map(int, dimensions.split())
    return (w, h, data)
def image_write_to_ppm_file(filename, width, height, data):
    """
    Write a PPM image with the given width, height and data.

    @param filename: PPM file path
    @param width: PPM file width (pixels)
    @param height: PPM file height (pixels)
    @param data: Raw pixel payload (3 bytes per pixel), written verbatim.
    """
    with open(filename, "wb") as fout:
        # Encode the text header explicitly: a binary-mode file rejects str
        # writes on Python 3 (on Python 2 the encode is a no-op on str).
        header = "P6\n%d %d\n255\n" % (width, height)
        fout.write(header.encode("ascii"))
        fout.write(data)
def image_crop(width, height, data, x1, y1, dx, dy):
    """
    Crop an image.

    @param width: Original image width
    @param height: Original image height
    @param data: Image data (str or bytes, 3 bytes per pixel)
    @param x1: Desired x coordinate of the cropped region
    @param y1: Desired y coordinate of the cropped region
    @param dx: Desired width of the cropped region
    @param dy: Desired height of the cropped region
    @return: A 3-tuple containing the width, height and data of the
            cropped image.
    """
    # Clamp the requested region so it lies inside the source image.
    if x1 > width - 1:
        x1 = width - 1
    if y1 > height - 1:
        y1 = height - 1
    if dx > width - x1:
        dx = width - x1
    if dy > height - y1:
        dy = height - y1
    # Collect one row slice per cropped line and join once at the end:
    # linear instead of quadratic concatenation, and data[0:0] (an empty
    # slice of the input) keeps the output the same type as the input, so
    # the function now works for bytes as well as str.
    rows = []
    index = (x1 + y1 * width) * 3
    for _ in range(dy):
        rows.append(data[index:(index + dx * 3)])
        index += width * 3
    return (dx, dy, data[0:0].join(rows))
def image_md5sum(width, height, data):
    """
    Return the md5sum of an image.

    The digest covers a synthesized P6 header plus the raw pixel data, so it
    matches the md5sum of the equivalent on-disk PPM file.

    @param width: PPM file width
    @param height: PPM file height
    @param data: PPM file data
    """
    header = "P6\n%d %d\n255\n" % (width, height)
    # 'digest' avoids shadowing the builtin ``hash`` that the original used.
    digest = utils.hash('md5', header)
    digest.update(data)
    return digest.hexdigest()
def get_region_md5sum(width, height, data, x1, y1, dx, dy,
                      cropped_image_filename=None):
    """
    Return the md5sum of a cropped region.

    @param width: Original image width
    @param height: Original image height
    @param data: Image data
    @param x1: Desired x coord of the cropped region
    @param y1: Desired y coord of the cropped region
    @param dx: Desired width of the cropped region
    @param dy: Desired height of the cropped region
    @param cropped_image_filename: if not None, write the resulting cropped
            image to a file with this name
    @return: md5 hex digest of the cropped region (see image_md5sum).
    """
    # image_crop clamps the region to the image bounds, so out-of-range
    # coordinates are safe here.
    (cw, ch, cdata) = image_crop(width, height, data, x1, y1, dx, dy)
    # Write cropped image for debugging
    if cropped_image_filename:
        image_write_to_ppm_file(cropped_image_filename, cw, ch, cdata)
    return image_md5sum(cw, ch, cdata)
def image_verify_ppm_file(filename):
    """
    Verify the validity of a PPM file.

    @param filename: Path of the file being verified.
    @return: True if filename is a valid PPM image file. This function
            reads only the first few bytes of the file so it should be rather
            fast.
    """
    # Explicit checks replace the original ``assert`` statements, which are
    # silently stripped under ``python -O`` and would have made every file
    # "valid". Bytes literals are compared because the file is opened in
    # binary mode (readline() returns bytes on Python 3).
    try:
        size = os.path.getsize(filename)
        with open(filename, "rb") as fin:
            if fin.readline().strip() != b"P6":
                return False
            (width, height) = map(int, fin.readline().split())
            if width <= 0 or height <= 0:
                return False
            if fin.readline().strip() != b"255":
                return False
            size_read = fin.tell()
        # The payload must be exactly width*height RGB triplets.
        return size - size_read == width * height * 3
    except Exception:
        return False
def image_comparison(width, height, data1, data2):
    """
    Generate a green-red comparison image from two given images.

    @param width: Width of both images
    @param height: Height of both images
    @param data1: Data of first image
    @param data2: Data of second image
    @return: A 3-element tuple containing the width, height and data of the
            generated comparison image.
    @note: Input images must be the same size.
    """
    pixels = []
    for i in range(0, width * height * 3, 3):
        pixel1_str = data1[i:i + 3]
        pixel2_str = data2[i:i + 3]
        # Monochromatic (average channel) value of each pixel.
        r, g, b = struct.unpack("BBB", pixel1_str)
        value1 = (r + g + b) // 3
        r, g, b = struct.unpack("BBB", pixel2_str)
        value2 = (r + g + b) // 3
        # Average the two values and scale into the upper half [128, 255] so
        # the hue stays visible even for dark pixels. Floor division keeps
        # the arithmetic identical on Python 2 and 3: the original
        # ``128 + value / 2`` produced a float on Python 3 and crashed
        # struct.pack.
        value = 128 + ((value1 + value2) // 2) // 2
        if pixel1_str == pixel2_str:
            # Equal -- give the pixel a greenish hue
            pixels.append(struct.pack("BBB", 0, value, 0))
        else:
            # Not equal -- give the pixel a reddish hue
            pixels.append(struct.pack("BBB", value, 0, 0))
    # Join once: linear instead of quadratic, and bytes-correct on Python 3
    # (struct.pack returns bytes, which cannot be += onto a str).
    return (width, height, b"".join(pixels))
def image_fuzzy_compare(width, height, data1, data2):
    """
    Return the degree of equality of two given images.

    @param width: Width of both images
    @param height: Height of both images
    @param data1: Data of first image
    @param data2: Data of second image
    @return: Ratio equal_pixel_count / total_pixel_count.
    @note: Input images must be the same size.
    """
    total = width * height
    # Count pixels (3-byte triplets) whose raw bytes match exactly.
    equal = sum(1 for i in range(0, total * 3, 3)
                if data1[i:i + 3] == data2[i:i + 3])
    return float(equal) / total
| gpl-2.0 |
shanethehat/skype4py | Skype4Py/lang/pl.py | 23 | 7712 | apiAttachAvailable = u'API jest dost\u0119pne'
# Polish (pl) localisation strings for Skype4Py enumeration values.
# Each constant name mirrors the library's internal status/type constant;
# the value is the human-readable Polish text presented to users.
# Prefix legend: api* = API attachment, bud* = buddy status, cfr* = call
# failure reason, chs* = chat status, cls* = call status, clt* = call type,
# cme* = chat message event, cms* = chat message status, con* = connection,
# cus* = user status, cvs* = call video status, grp* = group type,
# lea* = leave reason, ols* = online status, sms* = SMS, usex* = user sex,
# vmr* = voicemail result, vms* = voicemail status, vmt* = voicemail type,
# vss* = video session status.
apiAttachNotAvailable = u'Niedost\u0119pny'
apiAttachPendingAuthorization = u'Autoryzacja w toku'
apiAttachRefused = u'Odmowa'
apiAttachSuccess = u'Sukces'
apiAttachUnknown = u'Nieznany'
budDeletedFriend = u'Usuni\u0119ty z listy znajomych'
budFriend = u'Znajomy'
budNeverBeenFriend = u'Nigdy nie by\u0142 na li\u015bcie znajomych'
budPendingAuthorization = u'Autoryzacja w toku'
budUnknown = u'Nieznany'
cfrBlockedByRecipient = u'Po\u0142\u0105czenie zablokowane przez odbiorc\u0119'
cfrMiscError = u'B\u0142\u0105d'
cfrNoCommonCodec = u'Brak podstawowego kodeka'
cfrNoProxyFound = u'Nie odnaleziono serwera proksy'
cfrNotAuthorizedByRecipient = u'Ten u\u017cytkownik nie ma autoryzacji odbiorcy'
cfrRecipientNotFriend = u'Odbiorca nie jest znajomym'
cfrRemoteDeviceError = u'Problem ze zdalnym urz\u0105dzeniem d\u017awi\u0119kowym'
cfrSessionTerminated = u'Sesja zako\u0144czona'
cfrSoundIOError = u'B\u0142\u0105d d\u017awi\u0119ku przychodz\u0105cego lub wychodz\u0105cego'
cfrSoundRecordingError = u'B\u0142\u0105d nagrywania d\u017awi\u0119ku'
cfrUnknown = u'Nieznany'
cfrUserDoesNotExist = u'Taki u\u017cytkownik lub numer telefonu nie istnieje'
cfrUserIsOffline = u'Ona lub On jest niedost\u0119pny'
chsAllCalls = u'Wszystkie'
chsDialog = u'Dialog'
# NOTE(review): identical to chsMultiNeedAccept below -- looks like a
# copy/paste translation slip; verify against upstream Skype localisation.
chsIncomingCalls = u'Zaakceptuj wielu uczestnik\xf3w'
chsLegacyDialog = u'Dialog przestarza\u0142y'
chsMissedCalls = u'Nie odebrane'
chsMultiNeedAccept = u'Zaakceptuj wielu uczestnik\xf3w'
chsMultiSubscribed = u'Wielu subskrybowanych'
# NOTE(review): identical to chsMultiSubscribed above -- likely another
# copy/paste slip; verify against upstream Skype localisation.
chsOutgoingCalls = u'Wielu subskrybowanych'
chsUnknown = u'Nieznany'
chsUnsubscribed = u'Nie jest abonentem'
clsBusy = u'Zaj\u0119te'
clsCancelled = u'Anulowane'
clsEarlyMedia = u'Odtwarzanie wczesnych medi\xf3w (Early Media)'
clsFailed = u'Niestety, nieudane po\u0142\u0105czenie!'
clsFinished = u'Zako\u0144czono'
clsInProgress = u'Rozmowa w toku'
clsLocalHold = u'Zawieszona przez u\u017cytkownika'
clsMissed = u'Nieodebrana rozmowa'
clsOnHold = u'Zawieszona'
clsRefused = u'Odmowa'
clsRemoteHold = u'Zawieszona przez odbiorc\u0119'
clsRinging = u'Dzwoni'
clsRouting = u'Trasowanie'
# NOTE(review): Transferred/Transferring both fall back to 'Nieznany'
# (Unknown) -- presumably untranslated; confirm intended.
clsTransferred = u'Nieznany'
clsTransferring = u'Nieznany'
clsUnknown = u'Nieznany'
clsUnplaced = u'Nigdy nie \u0142aczono'
clsVoicemailBufferingGreeting = u'Pozdrowienia podczas buforowania'
clsVoicemailCancelled = u'Poczta g\u0142osowa anulowana'
clsVoicemailFailed = u'B\u0142\u0105d poczty g\u0142osowej'
clsVoicemailPlayingGreeting = u'Odtwarzanie pozdrowienia'
clsVoicemailRecording = u'Nagrywanie poczty g\u0142osowej'
clsVoicemailSent = u'Poczta g\u0142osowa wys\u0142ana'
clsVoicemailUploading = u'Wysy\u0142anie poczty g\u0142osowej'
cltIncomingP2P = u'Rozmowa przychodz\u0105ca peer-to-peer'
cltIncomingPSTN = u'Rozmowa przychodz\u0105ca'
cltOutgoingP2P = u'Rozmowa wychodz\u0105ca peer-to-peer'
cltOutgoingPSTN = u'Rozmowa wychodz\u0105ca'
cltUnknown = u'Nieznany'
cmeAddedMembers = u'Cz\u0142onkowie dodani'
cmeCreatedChatWith = u'Rozpocz\u0119ty czat z'
# NOTE(review): untranslated English value.
cmeEmoted = u'Emoted'
cmeLeft = u'Opusci\u0142'
cmeSaid = u'Powiedzia\u0142'
cmeSawMembers = u'Zobaczy\u0142e\u015b cz\u0142onk\xf3w'
cmeSetTopic = u'Ustaw temat'
cmeUnknown = u'Nieznany'
cmsRead = u'Przeczyta\u0142'
cmsReceived = u'Otrzyma\u0142'
cmsSending = u'Wysy\u0142am...'
cmsSent = u'Wys\u0142any'
cmsUnknown = u'Nieznany'
conConnecting = u'\u0141\u0105czenie'
conOffline = u'Niepod\u0142\u0105czony'
conOnline = u'Dost\u0119pny'
conPausing = u'Wstrzymane'
conUnknown = u'Nieznany'
cusAway = u'Zaraz wracam'
cusDoNotDisturb = u'Nie przeszkadza\u0107'
cusInvisible = u'Niewidoczny'
cusLoggedOut = u'Niepod\u0142\u0105czony'
cusNotAvailable = u'Niedost\u0119pny'
cusOffline = u'Niepod\u0142\u0105czony'
cusOnline = u'Dost\u0119pny'
cusSkypeMe = u"Tryb 'Skype Me'"
cusUnknown = u'Nieznany'
cvsBothEnabled = u'Odbierz i odbierz wideo'
cvsNone = u'Bez wideo'
cvsReceiveEnabled = u'Odbierz wideo'
cvsSendEnabled = u'Wy\u015blij wideo'
cvsUnknown = u'Nieznany'
grpAllFriends = u'Wszyscy znajomi'
grpAllUsers = u'Wszyscy u\u017cytkownicy'
grpCustomGroup = u'Niestandardowe'
grpOnlineFriends = u'Znajomi w sieci'
grpPendingAuthorizationFriends = u'Autoryzacja w toku'
grpProposedSharedGroup = u'Propozycja grupy wsp\xf3\u0142dzielonej'
grpRecentlyContactedUsers = u'Ostatnie kontakty'
grpSharedGroup = u'Wsp\xf3\u0142dzielona grupa'
grpSkypeFriends = u'Znajomi ze Skype'
grpSkypeOutFriends = u'Znajomi ze SkypeOut'
grpUngroupedFriends = u'Znajomi spoza grupy'
grpUnknown = u'Nieznany'
grpUsersAuthorizedByMe = u'Moja autoryzacja'
grpUsersBlockedByMe = u'Moja blokada'
grpUsersWaitingMyAuthorization = u'Pro\u015bba o autoryzacj\u0119'
leaAddDeclined = u'Dodawanie odrzucone'
leaAddedNotAuthorized = u'Osoba dodawana musi by\u0107 autoryzowana'
leaAdderNotFriend = u'Osoba dodaj\u0105ca musi by\u0107 znajomym'
leaUnknown = u'Nieznany'
leaUnsubscribe = u'Nie jest abonentem'
leaUserIncapable = u'U\u017cytkownik nie mo\u017ce rozmawia\u0107'
leaUserNotFound = u'U\u017cytkownik nie zosta\u0142 znaleziony'
olsAway = u'Zaraz wracam'
olsDoNotDisturb = u'Nie przeszkadza\u0107'
olsNotAvailable = u'Niedost\u0119pny'
olsOffline = u'Niepod\u0142\u0105czony'
olsOnline = u'Dost\u0119pny'
olsSkypeMe = u"Tryb 'Skype Me'"
olsSkypeOut = u'SkypeOut'
olsUnknown = u'Nieznany'
smsMessageStatusComposing = u'Tworzenie'
smsMessageStatusDelivered = u'Dor\u0119czona'
smsMessageStatusFailed = u'Nieudane'
# NOTE(review): untranslated English value.
smsMessageStatusRead = u'Read'
smsMessageStatusReceived = u'Otrzymany'
# NOTE(review): untranslated English value.
smsMessageStatusSendingToServer = u'Sending to Server'
smsMessageStatusSentToServer = u'Wys\u0142ana do serwera'
smsMessageStatusSomeTargetsFailed = u'Niekt\xf3re numery nieudane'
smsMessageStatusUnknown = u'Nieznany'
smsMessageTypeCCRequest = u'Pro\u015bba o kod potwierdzaj\u0105cy'
smsMessageTypeCCSubmit = u'Wys\u0142anie kodu potwierdzaj\u0105cego'
smsMessageTypeIncoming = u'Przychodz\u0105ca'
# NOTE(review): untranslated English values.
smsMessageTypeOutgoing = u'Outgoing'
smsMessageTypeUnknown = u'Unknown'
smsTargetStatusAcceptable = u'Akceptowalny'
smsTargetStatusAnalyzing = u'Analiza'
smsTargetStatusDeliveryFailed = u'Nieudane'
smsTargetStatusDeliveryPending = u'Oczekuje'
smsTargetStatusDeliverySuccessful = u'Dor\u0119czona'
smsTargetStatusNotRoutable = u'Brak trasy'
smsTargetStatusUndefined = u'Niezdefiniowana'
smsTargetStatusUnknown = u'Nieznany'
usexFemale = u'Kobieta'
usexMale = u'M\u0119\u017cczyzna'
usexUnknown = u'Nieznany'
vmrConnectError = u'B\u0142\u0105d po\u0142\u0105czenia'
vmrFileReadError = u'B\u0142\u0105d odczytu pliku'
vmrFileWriteError = u'B\u0142\u0105d zapisu pliku'
vmrMiscError = u'B\u0142\u0105d'
vmrNoError = u'Bez b\u0142\u0119du'
vmrNoPrivilege = u'Brak uprawnie\u0144 Voicemail'
vmrNoVoicemail = u'Taka poczta g\u0142osowa nie istnieje'
vmrPlaybackError = u'B\u0142\u0105d odtwarzania'
vmrRecordingError = u'B\u0142\u0105d nagrywania'
vmrUnknown = u'Nieznany'
vmsBlank = u'Pusty'
vmsBuffering = u'Buforowanie'
vmsDeleting = u'Usuwanie'
vmsDownloading = u'Trwa pobieranie'
vmsFailed = u'Nie powiodlo si\u0119'
vmsNotDownloaded = u'Niepobrany'
vmsPlayed = u'Odtworzony'
vmsPlaying = u'Odtwarzanie'
vmsRecorded = u'Nagrany'
vmsRecording = u'Nagrywanie poczty g\u0142osowej'
vmsUnknown = u'Nieznany'
vmsUnplayed = u'Nieodtworzony'
vmsUploaded = u'Przekazany'
vmsUploading = u'Przekazywanie'
vmtCustomGreeting = u'Pozdrowienia niestandardowe'
vmtDefaultGreeting = u'Pozdrowienia domy\u015blne'
vmtIncoming = u'przysy\u0142ana jest wiadomo\u015b\u0107 g\u0142osowa'
vmtOutgoing = u'Wychodz\u0105ca'
vmtUnknown = u'Nieznany'
vssAvailable = u'Dost\u0119pny'
vssNotAvailable = u'Niedostepny'
vssPaused = u'Wstrzymane'
vssRejected = u'Odrzucona'
vssRunning = u'Trwaj\u0105ca'
vssStarting = u'Rozpocz\u0119cie'
vssStopping = u'Zatrzymanie'
vssUnknown = u'Nieznany'
| bsd-3-clause |
sdfd/linux-socfpga | tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/SchedGui.py | 12980 | 5411 | # SchedGui.py - Python extension for perf script, basic GUI code for
# traces drawing and overview.
#
# Copyright (C) 2010 by Frederic Weisbecker <fweisbec@gmail.com>
#
# This software is distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
# Fail early with a clear message when wxPython is not installed, since
# everything below depends on it.
try:
    import wx
except ImportError:
    # Parenthesised raise form: the old "raise Exc, msg" statement syntax
    # is Python-2-only and a SyntaxError on Python 3; this form works on both.
    raise ImportError("You need to install the wxpython lib for this script")
class RootFrame(wx.Frame):
    """Top-level window that draws scheduler trace rectangles on a
    zoomable, scrollable virtual canvas and forwards mouse/keyboard
    events to the sched_tracer back-end.

    NOTE(review): written for Python 2 -- several `/` divisions below
    rely on integer floor-division semantics for int operands.
    """

    # Layout constants (pixels).
    Y_OFFSET = 100
    RECT_HEIGHT = 100
    RECT_SPACE = 50
    EVENT_MARKING_WIDTH = 5

    def __init__(self, sched_tracer, title, parent = None, id = -1):
        wx.Frame.__init__(self, parent, id, title)

        (self.screen_width, self.screen_height) = wx.GetDisplaySize()
        self.screen_width -= 10
        self.screen_height -= 10
        self.zoom = 0.5
        # One scrollbar unit corresponds to this many pixels.
        self.scroll_scale = 20
        self.sched_tracer = sched_tracer
        self.sched_tracer.set_root_win(self)
        (self.ts_start, self.ts_end) = sched_tracer.interval()
        self.update_width_virtual()
        self.nr_rects = sched_tracer.nr_rectangles() + 1
        self.height_virtual = RootFrame.Y_OFFSET + (self.nr_rects * (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE))

        # whole window panel
        self.panel = wx.Panel(self, size=(self.screen_width, self.screen_height))

        # scrollable container
        self.scroll = wx.ScrolledWindow(self.panel)
        self.scroll.SetScrollbars(self.scroll_scale, self.scroll_scale, self.width_virtual / self.scroll_scale, self.height_virtual / self.scroll_scale)
        self.scroll.EnableScrolling(True, True)
        self.scroll.SetFocus()

        # scrollable drawing area
        self.scroll_panel = wx.Panel(self.scroll, size=(self.screen_width - 15, self.screen_height / 2))
        self.scroll_panel.Bind(wx.EVT_PAINT, self.on_paint)
        self.scroll_panel.Bind(wx.EVT_KEY_DOWN, self.on_key_press)
        self.scroll_panel.Bind(wx.EVT_LEFT_DOWN, self.on_mouse_down)
        # Handlers are bound on both the inner panel and the scroll
        # window (presumably so input is caught whichever has focus).
        self.scroll.Bind(wx.EVT_PAINT, self.on_paint)
        self.scroll.Bind(wx.EVT_KEY_DOWN, self.on_key_press)
        self.scroll.Bind(wx.EVT_LEFT_DOWN, self.on_mouse_down)

        self.scroll.Fit()
        self.Fit()

        self.scroll_panel.SetDimensions(-1, -1, self.width_virtual, self.height_virtual, wx.SIZE_USE_EXISTING)

        # Summary StaticText widget; created lazily in update_summary().
        self.txt = None

        self.Show(True)

    def us_to_px(self, val):
        """Convert a timestamp delta to pixels at the current zoom.

        NOTE(review): the /1000 factor suggests input timestamps are a
        finer unit than the name implies -- confirm units with the tracer.
        """
        return val / (10 ** 3) * self.zoom

    def px_to_us(self, val):
        """Inverse of us_to_px(): convert pixels back to a timestamp delta."""
        return (val / self.zoom) * (10 ** 3)

    def scroll_start(self):
        """Return the scroll origin in pixels as an (x, y) tuple."""
        (x, y) = self.scroll.GetViewStart()
        return (x * self.scroll_scale, y * self.scroll_scale)

    def scroll_start_us(self):
        """Return the horizontal scroll origin as a timestamp offset."""
        (x, y) = self.scroll_start()
        return self.px_to_us(x)

    def paint_rectangle_zone(self, nr, color, top_color, start, end):
        """Draw one filled rectangle on row ``nr`` for [start, end].

        When ``top_color`` is given, an EVENT_MARKING_WIDTH-pixel strip of
        that colour is drawn along the rectangle's upper edge.
        """
        offset_px = self.us_to_px(start - self.ts_start)
        # NOTE(review): the width is derived from `end` relative to
        # ts_start rather than from (end - start) -- confirm intended.
        width_px = self.us_to_px(end - self.ts_start)

        offset_py = RootFrame.Y_OFFSET + (nr * (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE))
        width_py = RootFrame.RECT_HEIGHT

        dc = self.dc

        if top_color is not None:
            (r, g, b) = top_color
            top_color = wx.Colour(r, g, b)
            brush = wx.Brush(top_color, wx.SOLID)
            dc.SetBrush(brush)
            dc.DrawRectangle(offset_px, offset_py, width_px, RootFrame.EVENT_MARKING_WIDTH)
            # Shrink the main rectangle so it sits below the marker strip.
            width_py -= RootFrame.EVENT_MARKING_WIDTH
            offset_py += RootFrame.EVENT_MARKING_WIDTH

        (r ,g, b) = color
        color = wx.Colour(r, g, b)
        brush = wx.Brush(color, wx.SOLID)
        dc.SetBrush(brush)
        dc.DrawRectangle(offset_px, offset_py, width_px, width_py)

    def update_rectangles(self, dc, start, end):
        """Ask the tracer to (re)draw every rectangle inside [start, end]."""
        start += self.ts_start
        end += self.ts_start
        self.sched_tracer.fill_zone(start, end)

    def on_paint(self, event):
        """Repaint the currently visible time window."""
        dc = wx.PaintDC(self.scroll_panel)
        # Stash the device context for paint_rectangle_zone() callbacks
        # issued by the tracer during fill_zone().
        self.dc = dc

        width = min(self.width_virtual, self.screen_width)
        (x, y) = self.scroll_start()
        start = self.px_to_us(x)
        end = self.px_to_us(x + width)
        self.update_rectangles(dc, start, end)

    def rect_from_ypixel(self, y):
        """Map a y pixel coordinate to a rectangle row index.

        Returns -1 for clicks above/below the rows or inside the spacing
        between two rows.
        """
        y -= RootFrame.Y_OFFSET
        rect = y / (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE)
        height = y % (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE)

        if rect < 0 or rect > self.nr_rects - 1 or height > RootFrame.RECT_HEIGHT:
            return -1

        return rect

    def update_summary(self, txt):
        """Replace the summary text widget shown below the timeline."""
        if self.txt:
            self.txt.Destroy()
        self.txt = wx.StaticText(self.panel, -1, txt, (0, (self.screen_height / 2) + 50))

    def on_mouse_down(self, event):
        """Translate a click into (row, timestamp) and notify the tracer."""
        (x, y) = event.GetPositionTuple()
        rect = self.rect_from_ypixel(y)
        if rect == -1:
            return

        t = self.px_to_us(x) + self.ts_start

        self.sched_tracer.mouse_down(rect, t)

    def update_width_virtual(self):
        """Recompute the virtual canvas width from the trace interval."""
        self.width_virtual = self.us_to_px(self.ts_end - self.ts_start)

    def __zoom(self, x):
        """Re-apply scrollbars after a zoom change, keeping timestamp
        ``x`` at the left edge of the view."""
        self.update_width_virtual()
        (xpos, ypos) = self.scroll.GetViewStart()
        xpos = self.us_to_px(x) / self.scroll_scale
        self.scroll.SetScrollbars(self.scroll_scale, self.scroll_scale, self.width_virtual / self.scroll_scale, self.height_virtual / self.scroll_scale, xpos, ypos)
        self.Refresh()

    def zoom_in(self):
        """Double the zoom factor around the current view start."""
        x = self.scroll_start_us()
        self.zoom *= 2
        self.__zoom(x)

    def zoom_out(self):
        """Halve the zoom factor around the current view start."""
        x = self.scroll_start_us()
        self.zoom /= 2
        self.__zoom(x)

    def on_key_press(self, event):
        """Keyboard handling: '+'/'-' zoom, arrow keys scroll one unit."""
        key = event.GetRawKeyCode()
        if key == ord("+"):
            self.zoom_in()
            return
        if key == ord("-"):
            self.zoom_out()
            return

        key = event.GetKeyCode()
        (x, y) = self.scroll.GetViewStart()
        if key == wx.WXK_RIGHT:
            self.scroll.Scroll(x + 1, y)
        elif key == wx.WXK_LEFT:
            self.scroll.Scroll(x - 1, y)
        elif key == wx.WXK_DOWN:
            self.scroll.Scroll(x, y + 1)
        elif key == wx.WXK_UP:
            self.scroll.Scroll(x, y - 1)
| gpl-2.0 |
chrisdickinson/nojs | build/android/pylib/utils/decorators.py | 14 | 1080 | # Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import functools
import logging
def Memoize(f):
  """Decorator that caches return values of |f| keyed on its arguments."""
  cache = {}

  @functools.wraps(f)
  def wrapper(*args, **kwargs):
    # repr() of the argument tuple doubles as a hashable cache key, which
    # also tolerates unhashable argument values (lists, dicts, ...).
    key = repr((args, kwargs))
    try:
      return cache[key]
    except KeyError:
      result = f(*args, **kwargs)
      cache[key] = result
      return result

  return wrapper
def NoRaiseException(default_return_value=None, exception_message=''):
  """Returns decorator that catches and logs uncaught Exceptions.

  Args:
    default_return_value: Value to return in the case of uncaught Exception.
    exception_message: Message for uncaught exceptions.
  """
  def decorator(func):
    @functools.wraps(func)
    def guarded(*args, **kwargs):
      try:
        result = func(*args, **kwargs)
      except Exception:  # pylint: disable=broad-except
        # Log the full traceback, then swallow the error and fall back
        # to the configured default.
        logging.exception(exception_message)
        return default_return_value
      return result
    return guarded
  return decorator
| bsd-3-clause |
def stoprockpaperscissors():
    """End a rock-paper-scissors game: announce the result, reset the
    scores and offer the player a rematch.

    NOTE(review): relies on module-level names supplied by the MyRobotLab
    runtime (i01, rest, sleep, data, rockpaperscissors2, power_down) --
    `data` is read but never assigned here; confirm it holds the last
    voice-recognition result.
    """
    global inmoov
    global human
    rest()
    sleep(5)
    if inmoov < human:
        # Human victory: announce margin, then the full score.
        i01.mouth.speak("congratulations you won with" + str(human - inmoov) + "points")
        sleep(3)
        i01.mouth.speak(str(human) + "points to you and" + str(inmoov) + "points to me")
    elif inmoov > human:
        # Robot victory.
        i01.mouth.speak("yes yes i won with" + str(inmoov - human) + "points")
        sleep(3)
        i01.mouth.speak("i've got " + str(inmoov) + "points and you got" + str(human) + "points")
    else:
        # Equal scores: a draw.
        i01.mouth.speak("none of us won we both got" + str(inmoov) + "points")
    # Reset the score counters for the next game.
    inmoov = 0
    human = 0
    i01.mouth.speak("that was fun")
    sleep(2)
    i01.mouth.speak("do you want to play again")
    sleep(10)
    if data in ("yes let's play again", "yes"):
        rockpaperscissors2(data)
    elif data in ("no thanks", "no thank you"):
        i01.mouth.speak("maybe some other time")
        sleep(4)
        power_down()
| apache-2.0 |
pelson/conda-rpms | conda_rpms/tests/unit/test_build.py | 1 | 1052 | import textwrap
import unittest
from conda_rpms.build import name_version_release
class Test_name_version_release(unittest.TestCase):
    """Checks that name_version_release() honours only the *first*
    occurrence of each Name/Version/Release tag in an RPM spec."""

    def _check_output(self, spec):
        # Every case below expects the first-seen values (foo / 1 / 2);
        # later duplicate tags must be ignored.
        expected = {'name': 'foo', 'release':'2', 'version':'1'}
        actual = name_version_release(textwrap.dedent(spec).split('\n'))
        self.assertEqual(expected, actual)

    def test_multiple_names(self):
        spec = """
        Name: foo
        Version: 1
        Release: 2
        Name: bar
        """
        self._check_output(spec)

    def test_multiple_versions(self):
        spec = """
        Name: foo
        Version: 1
        Release: 2
        Version: 3
        """
        self._check_output(spec)

    def test_multiple_releases(self):
        spec = """
        Name: foo
        Version: 1
        Release: 2
        Release: 3
        """
        self._check_output(spec)


if __name__ == '__main__':
    unittest.main()
| bsd-3-clause |
aisworld/mailmanclient | src/mailmanclient/tests/test_domain.py | 1 | 1531 | # Copyright (C) 2015 by the Free Software Foundation, Inc.
#
# This file is part of mailman.client.
#
# mailman.client is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, version 3 of the License.
#
# mailman.client is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with mailman.client. If not, see <http://www.gnu.org/licenses/>.
"""Test domain corner cases."""
from __future__ import absolute_import, print_function, unicode_literals
import unittest
from mailmanclient import Client
from six.moves.urllib_error import HTTPError
__metaclass__ = type
__all__ = [
'TestDomains',
]
class TestDomains(unittest.TestCase):
    """Corner cases for domain lookup against the REST test server."""

    def setUp(self):
        # Credentials and port match the mailman REST test server
        # expected on localhost:9001.
        self._client = Client(
            'http://localhost:9001/3.0', 'restadmin', 'restpass')

    def test_no_domain(self):
        # Trying to get a non-existent domain returns a 404.
        #
        # We can't use `with self.assertRaises()` until we drop Python 2.6
        try:
            self._client.get_domain('example.org')
        except HTTPError as error:
            self.assertEqual(error.code, 404)
        else:
            raise AssertionError('Expected HTTPError 404')
| lgpl-3.0 |
zerebubuth/mapnik | plugins/input/csv/build.py | 2 | 2790 | #
# This file is part of Mapnik (c++ mapping toolkit)
#
# Copyright (C) 2015 Artem Pavlenko
#
# Mapnik is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
#
# SCons build script for mapnik's optional CSV datasource input plugin.
# NOTE(review): block nesting reconstructed from flattened source --
# verify the else/if extents against the upstream build.py.
Import ('env')

can_build = False

# The CSV plugin requires boost >= 1.56; BOOST_LIB_VERSION_FROM_HEADER
# looks like "1_56", so gate on the part after the underscore.
if env.get('BOOST_LIB_VERSION_FROM_HEADER'):
    boost_version_from_header = int(env['BOOST_LIB_VERSION_FROM_HEADER'].split('_')[1])
    if boost_version_from_header >= 56:
        can_build = True

if not can_build:
    print 'WARNING: skipping building the optional CSV datasource plugin which requires boost >= 1.56'
else:
    Import ('plugin_base')

    PLUGIN_NAME = 'csv'

    plugin_env = plugin_base.Clone()

    # Source files; %(PLUGIN_NAME)s expands to "csv" via locals().
    plugin_sources = Split(
        """
        %(PLUGIN_NAME)s_utils.cpp
        %(PLUGIN_NAME)s_datasource.cpp
        %(PLUGIN_NAME)s_featureset.cpp
        %(PLUGIN_NAME)s_inline_featureset.cpp
        %(PLUGIN_NAME)s_index_featureset.cpp
        """ % locals()
    )

    # Link Library to Dependencies
    libraries = []
    libraries.append('mapnik-json')
    libraries.append('mapnik-wkt')

    if env['PLUGIN_LINKING'] == 'shared':
        libraries.append('boost_system%s' % env['BOOST_APPEND'])
        libraries.insert(0,env['MAPNIK_NAME'])
        libraries.append(env['ICU_LIB_NAME'])

        TARGET = plugin_env.SharedLibrary('../%s' % PLUGIN_NAME,
                                          SHLIBPREFIX='',
                                          SHLIBSUFFIX='.input',
                                          source=plugin_sources,
                                          LIBS=libraries)

        # if the plugin links to libmapnik ensure it is built first
        Depends(TARGET, env.subst('../../../src/%s' % env['MAPNIK_LIB_NAME']))
        Depends(TARGET, env.subst('../../../src/json/libmapnik-json${LIBSUFFIX}'))
        Depends(TARGET, env.subst('../../../src/wkt/libmapnik-wkt${LIBSUFFIX}'))

        if 'uninstall' not in COMMAND_LINE_TARGETS:
            env.Install(env['MAPNIK_INPUT_PLUGINS_DEST'], TARGET)
            env.Alias('install', env['MAPNIK_INPUT_PLUGINS_DEST'])

    # Handed back to the caller for static linking of the plugin.
    plugin_obj = {
        'LIBS': libraries,
        'SOURCES': plugin_sources,
    }

    Return('plugin_obj')
| lgpl-2.1 |
benjyw/pants | src/python/pants/bin/pants_runner.py | 4 | 4366 | # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import logging
import os
import sys
import warnings
from dataclasses import dataclass
from typing import List, Mapping
from pants.base.exception_sink import ExceptionSink
from pants.base.exiter import ExitCode
from pants.bin.remote_pants_runner import RemotePantsRunner
from pants.engine.environment import CompleteEnvironment
from pants.init.logging import initialize_stdio, stdio_destination
from pants.init.util import init_workdir
from pants.option.option_value_container import OptionValueContainer
from pants.option.options_bootstrapper import OptionsBootstrapper
logger = logging.getLogger(__name__)
@dataclass(frozen=True)
class PantsRunner:
    """A higher-level runner that delegates runs to either a LocalPantsRunner or
    RemotePantsRunner."""

    # Full argument list for this run.
    args: List[str]
    # Process environment, typically os.environ.
    env: Mapping[str, str]

    # This could be a bootstrap option, but it's preferable to keep these very limited to make it
    # easier to make the daemon the default use case. Once the daemon lifecycle is stable enough we
    # should be able to avoid needing to kill it at all.
    def will_terminate_pantsd(self) -> bool:
        """Return True when any requested goal shuts the daemon down."""
        _DAEMON_KILLING_GOALS = frozenset(["kill-pantsd", "clean-all"])
        return not frozenset(self.args).isdisjoint(_DAEMON_KILLING_GOALS)

    def _should_run_with_pantsd(self, global_bootstrap_options: OptionValueContainer) -> bool:
        """Decide whether this run should be delegated to the pants daemon."""
        terminate_pantsd = self.will_terminate_pantsd()

        if terminate_pantsd:
            logger.debug(f"Pantsd terminating goal detected: {self.args}")

        # If we want concurrent pants runs, we can't have pantsd enabled.
        return (
            global_bootstrap_options.pantsd
            and not terminate_pantsd
            and not global_bootstrap_options.concurrent
        )

    @staticmethod
    def scrub_pythonpath() -> None:
        # Do not propagate any PYTHONPATH that happens to have been set in our environment
        # to our subprocesses.
        # Note that don't warn (but still scrub) if RUNNING_PANTS_FROM_SOURCES is set. This allows
        # scripts that run pants directly from sources, and therefore must set PYTHONPATH, to mute
        # this warning.
        pythonpath = os.environ.pop("PYTHONPATH", None)
        if pythonpath and not os.environ.pop("RUNNING_PANTS_FROM_SOURCES", None):
            logger.debug(f"Scrubbed PYTHONPATH={pythonpath} from the environment.")

    def run(self, start_time: float) -> ExitCode:
        """Run pants once: via the daemon when enabled, else in-process."""
        self.scrub_pythonpath()

        options_bootstrapper = OptionsBootstrapper.create(
            env=self.env, args=self.args, allow_pantsrc=True
        )
        with warnings.catch_warnings(record=True):
            bootstrap_options = options_bootstrapper.bootstrap_options
            global_bootstrap_options = bootstrap_options.for_global_scope()

        # We enable logging here, and everything before it will be routed through regular
        # Python logging.
        stdin_fileno = sys.stdin.fileno()
        stdout_fileno = sys.stdout.fileno()
        stderr_fileno = sys.stderr.fileno()
        with initialize_stdio(global_bootstrap_options), stdio_destination(
            stdin_fileno=stdin_fileno,
            stdout_fileno=stdout_fileno,
            stderr_fileno=stderr_fileno,
        ):
            if self._should_run_with_pantsd(global_bootstrap_options):
                try:
                    remote_runner = RemotePantsRunner(self.args, self.env, options_bootstrapper)
                    return remote_runner.run()
                except RemotePantsRunner.Fallback as e:
                    logger.warning(f"Client exception: {e!r}, falling back to non-daemon mode")

            # N.B. Inlining this import speeds up the python thin client run by about 100ms.
            from pants.bin.local_pants_runner import LocalPantsRunner

            # We only install signal handling via ExceptionSink if the run will execute in this process.
            ExceptionSink.install(
                log_location=init_workdir(global_bootstrap_options), pantsd_instance=False
            )
            runner = LocalPantsRunner.create(
                env=CompleteEnvironment(self.env), options_bootstrapper=options_bootstrapper
            )
            return runner.run(start_time)
| apache-2.0 |
nebulans/testfixtures | testfixtures/tests/test_comparison.py | 1 | 20153 | from unittest import TestCase
import sys
from testfixtures import Comparison as C, TempDirectory, compare, diff
from testfixtures.compat import PY2, PY3, exception_module
from testfixtures.shouldraise import ShouldAssert
from testfixtures.tests.sample1 import SampleClassA, a_function
import pytest
class AClass:
    """Sample type for comparison tests: always stores ``x``; stores
    ``y`` only when it is truthy (a falsy ``y`` leaves the attribute
    unset entirely)."""

    def __init__(self, x, y=None):
        self.x = x
        if y:
            self.y = y

    def __repr__(self):
        return '<{}>'.format(self.__class__.__name__)
class BClass(AClass):
    """AClass subclass used to exercise wrong-type comparison failures."""
    pass
class WeirdException(Exception):
    """Exception whose state lives in attributes rather than ``args``.

    Deliberately does not call Exception.__init__, so ``args`` handling
    is left to the base class (presumably set by Exception.__new__ on
    modern Pythons -- see the version check in the tests).
    """

    def __init__(self, x, y):
        self.x, self.y = x, y
class X(object):
    """Sample slotted class: instances carry no __dict__, only ``x``."""
    __slots__ = ['x']
class FussyDefineComparison(object):
    """Type whose equality raises TypeError for foreign types and is
    never equal even to its own kind -- used to exercise comparison
    edge cases."""

    def __init__(self, attr):
        self.attr = attr

    def __eq__(self, other):
        if isinstance(other, self.__class__):  # pragma: no cover
            return False
        raise TypeError()  # pragma: no cover

    def __ne__(self, other):
        return not self == other  # pragma: no cover
def compare_repr(obj, expected):
    # Assert that repr(obj) matches the expected string, raising
    # AssertionError with testfixtures' diff output on mismatch.
    actual = diff(expected, repr(obj))
    if actual:  # pragma: no cover
        raise AssertionError(actual)
class TestC(TestCase):
def test_example(self):
# In this pattern, we want to check a sequence is
# of the correct type and order.
r = a_function()
self.assertEqual(r, (
C('testfixtures.tests.sample1.SampleClassA'),
C('testfixtures.tests.sample1.SampleClassB'),
C('testfixtures.tests.sample1.SampleClassA'),
))
# We also want to check specific parts of some
# of the returned objects' attributes
self.assertEqual(r[0].args[0], 1)
self.assertEqual(r[1].args[0], 2)
self.assertEqual(r[2].args[0], 3)
def test_example_with_object(self):
# Here we see compare an object with a Comparison
# based on an object of the same type and with the
# same attributes:
self.assertEqual(
C(AClass(1, 2)),
AClass(1, 2),
)
# ...even though the original class doesn't support
# meaningful comparison:
self.assertNotEqual(
AClass(1, 2),
AClass(1, 2),
)
def test_example_with_vars(self):
# Here we use a Comparison to make sure both the
# type and attributes of an object are correct.
self.assertEqual(
C('testfixtures.tests.test_comparison.AClass',
x=1, y=2),
AClass(1, 2),
)
def test_example_with_odd_vars(self):
# If the variable names class with parameters to the
# Comparison constructor, they can be specified in a
# dict:
self.assertEqual(
C('testfixtures.tests.test_comparison.AClass',
{'x': 1, 'y': 2}),
AClass(1, 2),
)
def test_example_not_strict(self):
# Here, we only care about the 'x' attribute of
# the AClass object, so we turn strict mode off.
# With strict mode off, only attributes specified
# in the Comparison object will be checked, and
# any others will be ignored.
self.assertEqual(
C('testfixtures.tests.test_comparison.AClass',
x=1,
strict=False),
AClass(1, 2),
)
def test_example_dont_use_c_wrappers_on_both_sides(self):
# NB: don't use C wrappers on both sides!
e = ValueError('some message')
x, y = C(e), C(e)
assert x != y
compare_repr(x, "<C(failed):{mod}.ValueError>wrong type</C>".format(
mod=exception_module))
compare_repr(
y,
"<C:{mod}.ValueError>args: ('some message',)</C>".format(
mod=exception_module)
)
def test_repr_module(self):
compare_repr(C('datetime'), '<C:datetime>')
def test_repr_class(self):
compare_repr(C('testfixtures.tests.sample1.SampleClassA'),
'<C:testfixtures.tests.sample1.SampleClassA>')
def test_repr_function(self):
compare_repr(C('testfixtures.tests.sample1.z'),
'<C:testfixtures.tests.sample1.z>')
def test_repr_instance(self):
compare_repr(C(SampleClassA('something')),
"<C:testfixtures.tests.sample1.SampleClassA>"
"args: ('something',)"
"</C>"
)
def test_repr_exception(self):
compare_repr(C(ValueError('something')),
("<C:{0}.ValueError>args: ('something',)</C>"
).format(exception_module))
def test_repr_exception_not_args(self):
if sys.version_info >= (3, 2, 4):
# in PY3, even args that aren't set still appear to be there
args = "args: (1, 2)\n"
else:
args = "args: ()\n"
compare_repr(
C(WeirdException(1, 2)),
"\n<C:testfixtures.tests.test_comparison.WeirdException>\n"
+ args +
"x: 1\n"
"y: 2\n"
"</C>"
)
def test_repr_class_and_vars(self):
compare_repr(
C(SampleClassA, {'args': (1,)}),
"<C:testfixtures.tests.sample1.SampleClassA>args: (1,)</C>"
)
def test_repr_nested(self):
compare_repr(
C(SampleClassA, y=C(AClass), z=C(BClass(1, 2))),
"\n"
"<C:testfixtures.tests.sample1.SampleClassA>\n"
"y: <C:testfixtures.tests.test_comparison.AClass>\n"
"z: \n"
" <C:testfixtures.tests.test_comparison.BClass>\n"
" x: 1\n"
" y: 2\n"
" </C>\n"
"</C>"
)
def test_repr_failed_wrong_class(self):
c = C('testfixtures.tests.test_comparison.AClass', x=1, y=2)
assert c != BClass(1, 2)
compare_repr(c,
"<C(failed):testfixtures.tests.test_comparison.AClass>"
"wrong type</C>"
)
def test_repr_failed_all_reasons_in_one(self):
c = C('testfixtures.tests.test_comparison.AClass',
y=5, z='missing')
assert c != AClass(1, 2)
compare_repr(c,
"\n"
"<C(failed):testfixtures.tests.test_comparison.AClass>\n"
"attributes in Comparison but not actual:\n"
"'z': 'missing'\n\n"
"attributes in actual but not Comparison:\n"
"'x': 1\n\n"
"attributes differ:\n"
"'y': 5 (Comparison) != 2 (actual)\n"
"</C>",
)
def test_repr_failed_not_in_other(self):
c = C('testfixtures.tests.test_comparison.AClass',
x=1, y=2, z=(3, ))
assert c != AClass(1, 2)
compare_repr(c ,
"\n"
"<C(failed):testfixtures.tests.test_comparison.AClass>\n"
"attributes same:\n"
"['x', 'y']\n\n"
"attributes in Comparison but not actual:\n"
"'z': (3,)\n"
"</C>",
)
def test_repr_failed_not_in_self_strict(self):
c = C('testfixtures.tests.test_comparison.AClass', y=2)
assert c != AClass((1, ), 2)
compare_repr(c,
"\n"
"<C(failed):testfixtures.tests.test_comparison.AClass>\n"
"attributes same:\n"
"['y']\n\n"
"attributes in actual but not Comparison:\n"
"'x': (1,)\n"
"</C>",
)
def test_repr_failed_not_in_self_not_strict(self):
c = C('testfixtures.tests.test_comparison.AClass',
x=1, y=2, z=(3, ))
assert c != AClass(1, 2)
compare_repr(c,
"\n"
"<C(failed):testfixtures.tests.test_comparison.AClass>\n"
"attributes same:\n"
"['x', 'y']\n\n"
"attributes in Comparison but not actual:\n"
"'z': (3,)\n"
"</C>",
)
def test_repr_failed_one_attribute_not_equal(self):
c = C('testfixtures.tests.test_comparison.AClass', x=1, y=(2, ))
assert c != AClass(1, (3, ))
compare_repr(c,
"\n"
"<C(failed):testfixtures.tests.test_comparison.AClass>\n"
"attributes same:\n"
"['x']\n\n"
"attributes differ:\n"
"'y': (2,) (Comparison) != (3,) (actual)\n"
"</C>",
)
def test_repr_failed_nested(self):
left_side = [C(AClass, x=1, y=2),
C(BClass, x=C(AClass, x=1, y=2), y=C(AClass))]
right_side = [AClass(1, 3), AClass(1, 3)]
# do the comparison
left_side == right_side
compare_repr(
left_side,
"[\n"
"<C(failed):testfixtures.tests.test_comparison.AClass>\n"
"attributes same:\n"
"['x']\n\n"
"attributes differ:\n"
"'y': 2 (Comparison) != 3 (actual)\n"
"</C>, \n"
"<C:testfixtures.tests.test_comparison.BClass>\n"
"x: \n"
" <C:testfixtures.tests.test_comparison.AClass>\n"
" x: 1\n"
" y: 2\n"
" </C>\n"
"y: <C:testfixtures.tests.test_comparison.AClass>\n"
"</C>]"
)
compare_repr(right_side, "[<AClass>, <AClass>]")
def test_repr_failed_nested_failed(self):
left_side = [C(AClass, x=1, y=2),
C(BClass,
x=C(AClass, x=1, strict=False),
y=C(AClass, z=2))]
right_side = [AClass(1, 2),
BClass(AClass(1, 2), AClass(1, 2))]
# do the comparison
left_side == right_side
compare_repr(
left_side,
"[\n"
"<C:testfixtures.tests.test_comparison.AClass>\n"
"x: 1\n"
"y: 2\n"
"</C>, \n"
"<C(failed):testfixtures.tests.test_comparison.BClass>\n"
"attributes same:\n"
"['x']\n\n"
"attributes differ:\n"
"'y': \n"
"<C(failed):testfixtures.tests.test_comparison.AClass>\n"
"attributes in Comparison but not actual:\n"
"'z': 2\n\n"
"attributes in actual but not Comparison:\n"
"'x': 1\n"
"'y': 2\n"
"</C> (Comparison) != <AClass> (actual)\n"
"</C>]",
)
compare_repr(right_side, '[<AClass>, <BClass>]')
    def test_repr_failed_passed_failed(self):
        """repr() always reflects the most recent *failed* comparison,
        even when a successful comparison happens in between."""
        c = C('testfixtures.tests.test_comparison.AClass', x=1, y=2)
        assert c != AClass(1, 3)
        compare_repr(c,
                     "\n"
                     "<C(failed):testfixtures.tests.test_comparison.AClass>\n"
                     "attributes same:\n"
                     "['x']\n\n"
                     "attributes differ:\n"
                     "'y': 2 (Comparison) != 3 (actual)\n"
                     "</C>",
                     )
        # a successful comparison followed by a different failure...
        assert c == AClass(1, 2)
        assert c != AClass(3, 2)
        # ...updates the recorded detail to the new failure
        compare_repr(c,
                     "\n"
                     "<C(failed):testfixtures.tests.test_comparison.AClass>\n"
                     "attributes same:\n"
                     "['y']\n\n"
                     "attributes differ:\n"
                     "'x': 1 (Comparison) != 3 (actual)\n"
                     "</C>",
                     )
def test_first(self):
self.assertEqual(
C('testfixtures.tests.sample1.SampleClassA'),
SampleClassA()
)
def test_second(self):
self.assertEqual(
SampleClassA(),
C('testfixtures.tests.sample1.SampleClassA'),
)
def test_not_same_first(self):
self.assertNotEqual(
C('datetime'),
SampleClassA()
)
def test_not_same_second(self):
self.assertNotEqual(
SampleClassA(),
C('datetime')
)
def test_object_supplied(self):
self.assertEqual(
SampleClassA(1),
C(SampleClassA(1))
)
def test_class_and_vars(self):
self.assertEqual(
SampleClassA(1),
C(SampleClassA, {'args': (1,)})
)
def test_class_and_kw(self):
self.assertEqual(
SampleClassA(1),
C(SampleClassA, args=(1,))
)
def test_class_and_vars_and_kw(self):
self.assertEqual(
AClass(1, 2),
C(AClass, {'x': 1}, y=2)
)
def test_object_and_vars(self):
# vars passed are used instead of the object's
self.assertEqual(
SampleClassA(1),
C(SampleClassA(), {'args': (1,)})
)
def test_object_and_kw(self):
# kws passed are used instead of the object's
self.assertEqual(
SampleClassA(1),
C(SampleClassA(), args=(1,))
)
def test_object_not_strict(self):
# only attributes on comparison object
# are used
self.assertEqual(
C(AClass(1), strict=False),
AClass(1, 2),
)
def test_exception(self):
self.assertEqual(
ValueError('foo'),
C(ValueError('foo'))
)
def test_exception_class_and_args(self):
self.assertEqual(
ValueError('foo'),
C(ValueError, args=('foo', ))
)
def test_exception_instance_and_args(self):
self.assertEqual(
ValueError('foo'),
C(ValueError('bar'), args=('foo', ))
)
def test_exception_not_same(self):
self.assertNotEqual(
ValueError('foo'),
C(ValueError('bar'))
)
def test_exception_no_args_different(self):
self.assertNotEqual(
WeirdException(1, 2),
C(WeirdException(1, 3))
)
def test_exception_no_args_same(self):
self.assertEqual(
C(WeirdException(1, 2)),
WeirdException(1, 2)
)
    def test_repr_file_different(self):
        """A failed comparison against a file object reports the differing
        'closed' attribute alongside the matching ones; the file type's
        qualified name differs between Python 2 and 3."""
        with TempDirectory() as d:
            path = d.write('file', b'stuff')
            f = open(path)
            f.close()
            if PY3:
                c = C('io.TextIOWrapper', name=path, mode='r', closed=False,
                      strict=False)
                assert f != c
                compare_repr(c,
                             "\n"
                             "<C(failed):_io.TextIOWrapper>\n"
                             "attributes same:\n"
                             "['mode', 'name']\n\n"
                             "attributes differ:\n"
                             "'closed': False (Comparison) != True (actual)\n"
                             "</C>",
                             )
            else:
                # Python 2: plain file objects are of the builtin 'file' type
                c = C(file, name=path, mode='r', closed=False, strict=False)
                assert f != c
                compare_repr(c,
                             "\n"
                             "<C(failed):__builtin__.file>\n"
                             "attributes same:\n"
                             "['mode', 'name']\n\n"
                             "attributes differ:\n"
                             "'closed': False (Comparison) != True (actual)\n"
                             "</C>",
                             )
    def test_file_same(self):
        """A closed file matches a non-strict Comparison of its attributes."""
        with TempDirectory() as d:
            path = d.write('file', b'stuff')
            f = open(path)
            f.close()
            if PY3:
                self.assertEqual(
                    f,
                    C('io.TextIOWrapper', name=path, mode='r', closed=True,
                      strict=False)
                )
            else:
                self.assertEqual(
                    f,
                    C(file, name=path, mode='r', closed=True, strict=False)
                )
    def test_no___dict___strict(self):
        """Strict comparison fails when the expected attribute is absent
        from the actual object."""
        c = C(X, x=1)
        assert c != X()
        compare_repr(c, "\n"
                        "<C(failed):testfixtures.tests.test_comparison.X>\n"
                        "attributes in Comparison but not actual:\n"
                        "'x': 1\n"
                        "</C>")
def test_no___dict___not_strict_same(self):
x = X()
x.x = 1
self.assertEqual(C(X, x=1, strict=False), x)
    def test_no___dict___not_strict_missing_attr(self):
        """Even non-strict comparison fails when the expected attribute
        is missing from the actual object."""
        c = C(X, x=1, strict=False)
        assert c != X()
        compare_repr(c,
                     "\n"
                     "<C(failed):testfixtures.tests.test_comparison.X>\n"
                     "attributes in Comparison but not actual:\n"
                     "'x': 1\n"
                     "</C>",
                     )
    def test_no___dict___not_strict_different(self):
        """Missing and differing attributes are both reported in the repr."""
        x = X()
        x.x = 2
        c = C(X, x=1, y=2, strict=False)
        assert c != x
        compare_repr(c,
                     "\n"
                     "<C(failed):testfixtures.tests.test_comparison.X>\n"
                     "attributes in Comparison but not actual:\n"
                     "'y': 2\n\n"
                     "attributes differ:\n"
                     "'x': 1 (Comparison) != 2 (actual)\n"
                     "</C>",
                     )
    def test_compared_object_defines_eq(self):
        """Operand order matters when the compared object defines __eq__:
        the Comparison must be on the left for its logic to run."""
        # If an object defines eq, such as Django instances,
        # things become tricky
        class Annoying:
            def __init__(self):
                self.eq_called = 0
            def __eq__(self, other):
                self.eq_called += 1
                if isinstance(other, Annoying):
                    return True
                return False
        self.assertEqual(Annoying(), Annoying())
        # Suddenly, order matters.
        # This order is wrong, as it uses the class's __eq__:
        self.assertFalse(Annoying() == C(Annoying))
        if PY2:
            # although this, which is subtly different, does not:
            self.assertFalse(Annoying() != C(Annoying))
        else:
            # but on PY3 __eq__ is used as a fallback:
            self.assertTrue(Annoying() != C(Annoying))
        # This is the right ordering:
        self.assertTrue(C(Annoying) == Annoying())
        self.assertFalse(C(Annoying) != Annoying())
        # When the ordering is right, you still get the useful
        # comparison representation afterwards
        c = C(Annoying, eq_called=1)
        c == Annoying()
        compare_repr(
            c,
            '\n<C(failed):testfixtures.tests.test_comparison.Annoying>\n'
            'attributes differ:\n'
            "'eq_called': 1 (Comparison) != 0 (actual)\n"
            '</C>'
        )
def test_importerror(self):
assert C(ImportError('x')) == ImportError('x')
def test_class_defines_comparison_strictly(self):
self.assertEqual(
C('testfixtures.tests.test_comparison.FussyDefineComparison',
attr=1),
FussyDefineComparison(1)
)
def test_cant_resolve(self):
try:
C('testfixtures.bonkers')
except Exception as e:
self.failUnless(isinstance(e, AttributeError))
self.assertEqual(
e.args,
("'testfixtures.bonkers' could not be resolved", )
)
else:
self.fail('No exception raised!')
    def test_no_name(self):
        """repr() copes with classes whose __name__ and __module__ are
        empty; the resulting class repr differs between Python 2 and 3."""
        class NoName(object):
            pass
        NoName.__name__ = ''
        NoName.__module__ = ''
        c = C(NoName)
        if PY3:
            expected = "<C:<class '.TestC.test_no_name.<locals>.NoName'>>"
        else:
            expected = "<C:<class '.'>>"
        self.assertEqual(repr(c), expected)
| mit |
pupboss/xndian | deploy/site-packages/pip/cmdoptions.py | 361 | 9507 | """
shared options and groups
The principle here is to define options once, but *not* instantiate them globally.
One reason being that options with action='append' can carry state between parses.
pip parses its general options twice internally, and shouldn't pass on state.
To be consistent, all options will follow this design.
"""
import copy
from optparse import OptionGroup, SUPPRESS_HELP, Option
from pip.locations import build_prefix, default_log_file
def make_option_group(group, parser):
    """Create and populate an optparse OptionGroup.

    group  -- dict with 'name' and 'options' keys; 'options' is a sequence
              of OptionMaker-like objects exposing make()
    parser -- the optparse parser the group is attached to
    """
    option_group = OptionGroup(parser, group['name'])
    for maker in group['options']:
        option_group.add_option(maker.make())
    return option_group
class OptionMaker(object):
    """Stores the args/kwargs needed to build an optparse Option and
    constructs a fresh Option on every make() call.

    deepcopy is used so that options with action='append' cannot carry
    accumulated state from one parse to the next.
    """

    def __init__(self, *args, **kwargs):
        self.args = args
        self.kwargs = kwargs

    def make(self):
        """Return a brand-new Option built from deep copies of the
        stored arguments."""
        return Option(*copy.deepcopy(self.args), **copy.deepcopy(self.kwargs))
###########
# options #
###########
help_ = OptionMaker(
    '-h', '--help',
    dest='help',
    action='help',
    help='Show help.')
require_virtualenv = OptionMaker(
    # Run only if inside a virtualenv, bail if not.
    '--require-virtualenv', '--require-venv',
    dest='require_venv',
    action='store_true',
    default=False,
    help=SUPPRESS_HELP)
verbose = OptionMaker(
    '-v', '--verbose',
    dest='verbose',
    action='count',
    default=0,
    help='Give more output. Option is additive, and can be used up to 3 times.')
version = OptionMaker(
    '-V', '--version',
    dest='version',
    action='store_true',
    help='Show version and exit.')
quiet = OptionMaker(
    '-q', '--quiet',
    dest='quiet',
    action='count',
    default=0,
    help='Give less output.')
log = OptionMaker(
    '--log',
    dest='log',
    metavar='path',
    help='Path to a verbose appending log. This log is inactive by default.')
log_explicit_levels = OptionMaker(
    # Writes the log levels explicitly to the log
    '--log-explicit-levels',
    dest='log_explicit_levels',
    action='store_true',
    default=False,
    help=SUPPRESS_HELP)
log_file = OptionMaker(
    # The default log file
    '--log-file', '--local-log',
    dest='log_file',
    metavar='path',
    default=default_log_file,
    help='Path to a verbose non-appending log, that only logs failures. This log is active by default at %default.')
no_input = OptionMaker(
    # Don't ask for input
    '--no-input',
    dest='no_input',
    action='store_true',
    default=False,
    help=SUPPRESS_HELP)
proxy = OptionMaker(
    '--proxy',
    dest='proxy',
    type='str',
    default='',
    help="Specify a proxy in the form [user:passwd@]proxy.server:port.")
timeout = OptionMaker(
    '--timeout', '--default-timeout',
    metavar='sec',
    dest='timeout',
    type='float',
    default=15,
    help='Set the socket timeout (default %default seconds).')
default_vcs = OptionMaker(
    # The default version control system for editables, e.g. 'svn'
    '--default-vcs',
    dest='default_vcs',
    type='str',
    default='',
    help=SUPPRESS_HELP)
skip_requirements_regex = OptionMaker(
    # A regex to be used to skip requirements
    '--skip-requirements-regex',
    dest='skip_requirements_regex',
    type='str',
    default='',
    help=SUPPRESS_HELP)
exists_action = OptionMaker(
    # Option when path already exist
    '--exists-action',
    dest='exists_action',
    type='choice',
    choices=['s', 'i', 'w', 'b'],
    default=[],
    action='append',
    metavar='action',
    help="Default action when a path already exists: "
         "(s)witch, (i)gnore, (w)ipe, (b)ackup.")
cert = OptionMaker(
    '--cert',
    dest='cert',
    type='str',
    default='',
    metavar='path',
    # PEP 8: no spaces around '=' in keyword arguments
    help="Path to alternate CA bundle.")
index_url = OptionMaker(
    '-i', '--index-url', '--pypi-url',
    dest='index_url',
    metavar='URL',
    default='https://pypi.python.org/simple/',
    help='Base URL of Python Package Index (default %default).')
extra_index_url = OptionMaker(
    '--extra-index-url',
    dest='extra_index_urls',
    metavar='URL',
    action='append',
    default=[],
    help='Extra URLs of package indexes to use in addition to --index-url.')
no_index = OptionMaker(
    '--no-index',
    dest='no_index',
    action='store_true',
    default=False,
    help='Ignore package index (only looking at --find-links URLs instead).')
find_links = OptionMaker(
    '-f', '--find-links',
    dest='find_links',
    action='append',
    default=[],
    metavar='url',
    help="If a url or path to an html file, then parse for links to archives. If a local path or file:// url that's a directory, then look for archives in the directory listing.")
# TODO: Remove after 1.6
use_mirrors = OptionMaker(
    '-M', '--use-mirrors',
    dest='use_mirrors',
    action='store_true',
    default=False,
    help=SUPPRESS_HELP)
# TODO: Remove after 1.6
mirrors = OptionMaker(
    '--mirrors',
    dest='mirrors',
    metavar='URL',
    action='append',
    default=[],
    help=SUPPRESS_HELP)
allow_external = OptionMaker(
    "--allow-external",
    dest="allow_external",
    action="append",
    default=[],
    metavar="PACKAGE",
    help="Allow the installation of externally hosted files",
)
allow_all_external = OptionMaker(
    "--allow-all-external",
    dest="allow_all_external",
    action="store_true",
    default=False,
    help="Allow the installation of all externally hosted files",
)
# Remove after 1.7
no_allow_external = OptionMaker(
    "--no-allow-external",
    dest="allow_all_external",
    action="store_false",
    default=False,
    help=SUPPRESS_HELP,
)
# Remove --allow-insecure after 1.7
allow_unsafe = OptionMaker(
    "--allow-unverified", "--allow-insecure",
    dest="allow_unverified",
    action="append",
    default=[],
    metavar="PACKAGE",
    help="Allow the installation of insecure and unverifiable files",
)
# Remove after 1.7
no_allow_unsafe = OptionMaker(
    "--no-allow-insecure",
    # NOTE(review): dest here is "allow_all_insecure" while the paired
    # --allow-unverified option above uses dest="allow_unverified", so this
    # flag does not actually negate it — confirm against consumers before
    # changing (the option is slated for removal anyway).
    dest="allow_all_insecure",
    action="store_false",
    default=False,
    help=SUPPRESS_HELP
)
# Remove after 1.5
process_dependency_links = OptionMaker(
    "--process-dependency-links",
    dest="process_dependency_links",
    action="store_true",
    default=False,
    help="Enable the processing of dependency links.",
)
requirements = OptionMaker(
    '-r', '--requirement',
    dest='requirements',
    action='append',
    default=[],
    metavar='file',
    help='Install from the given requirements file. '
         'This option can be used multiple times.')
use_wheel = OptionMaker(
    '--use-wheel',
    dest='use_wheel',
    action='store_true',
    help=SUPPRESS_HELP,
)
no_use_wheel = OptionMaker(
    '--no-use-wheel',
    dest='use_wheel',
    action='store_false',
    default=True,
    help=('Do not Find and prefer wheel archives when searching indexes and '
          'find-links locations.'),
)
download_cache = OptionMaker(
    '--download-cache',
    dest='download_cache',
    metavar='dir',
    default=None,
    help='Cache downloaded packages in <dir>.')
no_deps = OptionMaker(
    '--no-deps', '--no-dependencies',
    dest='ignore_dependencies',
    action='store_true',
    default=False,
    help="Don't install package dependencies.")
build_dir = OptionMaker(
    '-b', '--build', '--build-dir', '--build-directory',
    dest='build_dir',
    metavar='dir',
    default=build_prefix,
    help='Directory to unpack packages into and build in. '
         'The default in a virtualenv is "<venv path>/build". '
         'The default for global installs is "<OS temp dir>/pip_build_<username>".')
install_options = OptionMaker(
    '--install-option',
    dest='install_options',
    action='append',
    metavar='options',
    help="Extra arguments to be supplied to the setup.py install "
         "command (use like --install-option=\"--install-scripts=/usr/local/bin\"). "
         "Use multiple --install-option options to pass multiple options to setup.py install. "
         "If you are using an option with a directory path, be sure to use absolute path.")
global_options = OptionMaker(
    '--global-option',
    dest='global_options',
    action='append',
    metavar='options',
    help="Extra global options to be supplied to the setup.py "
         "call before the install command.")
no_clean = OptionMaker(
    '--no-clean',
    action='store_true',
    default=False,
    help="Don't clean up build directories.")
##########
# groups #
##########
general_group = {
    'name': 'General Options',
    'options': [
        help_,
        require_virtualenv,
        verbose,
        version,
        quiet,
        log_file,
        log,
        log_explicit_levels,
        no_input,
        proxy,
        timeout,
        default_vcs,
        skip_requirements_regex,
        exists_action,
        cert,
        ]
    }
index_group = {
    'name': 'Package Index Options',
    'options': [
        index_url,
        extra_index_url,
        no_index,
        find_links,
        use_mirrors,
        mirrors,
        allow_external,
        allow_all_external,
        no_allow_external,
        allow_unsafe,
        no_allow_unsafe,
        process_dependency_links,
        ]
    }
| mit |
Justin-Yuan/Image2Music-Generator | library/jython2.5.3/Lib/test/test_sax.py | 9 | 21680 | # -*- coding: iso-8859-1 -*-
# regression test for SAX 2.0
# $Id: test_sax.py,v 1.13 2004/03/20 07:46:04 fdrake Exp $
import urllib
from xml.sax import handler, make_parser, ContentHandler, \
SAXException, SAXReaderNotAvailable, SAXParseException
# Probe once at import time: skip this whole test module when no parser
# implementation is available on the current platform.
try:
    make_parser()
except SAXReaderNotAvailable:
    # don't try to test this module if we cannot create a parser
    raise ImportError("no XML parsers available")
from xml.sax.saxutils import XMLGenerator, escape, unescape, quoteattr, \
XMLFilterBase, Location
from xml.sax.xmlreader import InputSource, AttributesImpl, AttributesNSImpl
from cStringIO import StringIO
from test.test_support import is_jython, verbose, TestFailed, findfile
# ===== Utilities
# Module-level pass/fail bookkeeping used by confirm() and the driver loop
# at the bottom of the module (this is Python 2 code).
tests = 0
failures = []
def confirm(outcome, name):
    """Record the result of one test, printing its status as we go."""
    global tests
    tests = tests + 1
    if outcome:
        if verbose:
            print "Passed", name
    else:
        print "Failed", name
        failures.append(name)
def test_make_parser2():
    """Regression check: repeated parser creation must keep succeeding."""
    try:
        # Creating parsers several times in a row should succeed.
        # Testing this because there have been failures of this kind
        # before.
        from xml.sax import make_parser
        p = make_parser()
        from xml.sax import make_parser
        p = make_parser()
        from xml.sax import make_parser
        p = make_parser()
        from xml.sax import make_parser
        p = make_parser()
        from xml.sax import make_parser
        p = make_parser()
        from xml.sax import make_parser
        p = make_parser()
    except:
        return 0
    else:
        return p
# ===========================================================================
#
# saxutils tests
#
# ===========================================================================
# ===== escape
# NOTE(review): several expected strings in this region look as though their
# XML entity references (&amp;, &lt;, &gt;, &quot;) were rendered away when
# this copy of the file was made (some lines below no longer even parse);
# compare against the upstream test_sax.py before trusting the literals.
def test_escape_basic():
    return escape("Donald Duck & Co") == "Donald Duck & Co"
def test_escape_all():
    return escape("<Donald Duck & Co>") == "<Donald Duck & Co>"
def test_escape_extra():
    return escape("Hei på deg", {"å" : "å"}) == "Hei på deg"
# ===== unescape
def test_unescape_basic():
    return unescape("Donald Duck & Co") == "Donald Duck & Co"
def test_unescape_all():
    return unescape("<Donald Duck & Co>") == "<Donald Duck & Co>"
def test_unescape_extra():
    return unescape("Hei på deg", {"å" : "å"}) == "Hei på deg"
def test_unescape_amp_extra():
    return unescape("&foo;", {"&foo;": "splat"}) == "&foo;"
# ===== quoteattr
def test_quoteattr_basic():
    return quoteattr("Donald Duck & Co") == '"Donald Duck & Co"'
def test_single_quoteattr():
    # double quotes in the value force single-quote wrapping
    return (quoteattr('Includes "double" quotes')
            == '\'Includes "double" quotes\'')
def test_double_quoteattr():
    # single quotes in the value keep the default double-quote wrapping
    return (quoteattr("Includes 'single' quotes")
            == "\"Includes 'single' quotes\"")
def test_single_double_quoteattr():
    return (quoteattr("Includes 'single' and \"double\" quotes")
            == "\"Includes 'single' and "double" quotes\"")
# ===== make_parser
def test_make_parser():
    """A bogus parser list should fall back to the expatreader."""
    try:
        # Creating a parser should succeed - it should fall back
        # to the expatreader
        p = make_parser(['xml.parsers.no_such_parser'])
    except:
        return 0
    else:
        return p
# ===== XMLGenerator
# Expected XML declaration emitted by every generator test below.
start = '<?xml version="1.0" encoding="iso-8859-1"?>\n'
def test_xmlgen_basic():
    """An empty element round-trips through XMLGenerator."""
    result = StringIO()
    gen = XMLGenerator(result)
    gen.startDocument()
    gen.startElement("doc", {})
    gen.endElement("doc")
    gen.endDocument()
    return result.getvalue() == start + "<doc></doc>"
def test_xmlgen_content():
    """Character data is written between the element tags."""
    result = StringIO()
    gen = XMLGenerator(result)
    gen.startDocument()
    gen.startElement("doc", {})
    gen.characters("huhei")
    gen.endElement("doc")
    gen.endDocument()
    return result.getvalue() == start + "<doc>huhei</doc>"
def test_xmlgen_escaped_content():
    # NOTE(review): expected output appears mangled by a transcoding of the
    # original iso-8859-1 source — verify against upstream.
    result = StringIO()
    gen = XMLGenerator(result)
    gen.startDocument()
    gen.startElement("doc", {})
    gen.characters(unicode("\xa0\\u3042", "unicode-escape"))
    gen.endElement("doc")
    gen.endDocument()
    return result.getvalue() == start + "<doc>\xa0あ</doc>"
def test_xmlgen_escaped_attr():
    result = StringIO()
    gen = XMLGenerator(result)
    gen.startDocument()
    gen.startElement("doc", {"x": unicode("\\u3042", "unicode-escape")})
    gen.endElement("doc")
    gen.endDocument()
    return result.getvalue() == start + '<doc x="あ"></doc>'
def test_xmlgen_pi():
    """Processing instructions are emitted before the root element."""
    result = StringIO()
    gen = XMLGenerator(result)
    gen.startDocument()
    gen.processingInstruction("test", "data")
    gen.startElement("doc", {})
    gen.endElement("doc")
    gen.endDocument()
    return result.getvalue() == start + "<?test data?><doc></doc>"
def test_xmlgen_content_escape():
    # NOTE(review): the expected string appears to have lost its entity
    # escaping (&lt;huhei&amp;) in transit.
    result = StringIO()
    gen = XMLGenerator(result)
    gen.startDocument()
    gen.startElement("doc", {})
    gen.characters("<huhei&")
    gen.endElement("doc")
    gen.endDocument()
    return result.getvalue() == start + "<doc><huhei&</doc>"
def test_xmlgen_attr_escape():
    # NOTE(review): the expected string below appears corrupted (a &quot;
    # reference was rendered away), leaving an unterminated literal.
    result = StringIO()
    gen = XMLGenerator(result)
    gen.startDocument()
    gen.startElement("doc", {"a": '"'})
    gen.startElement("e", {"a": "'"})
    gen.endElement("e")
    gen.startElement("e", {"a": "'\""})
    gen.endElement("e")
    gen.endElement("doc")
    gen.endDocument()
    return result.getvalue() == start \
        + "<doc a='\"'><e a=\"'\"></e><e a=\"'"\"></e></doc>"
def test_xmlgen_attr_escape_manydouble():
    result = StringIO()
    gen = XMLGenerator(result)
    gen.startDocument()
    gen.startElement("doc", {"a": '"\'"'})
    gen.endElement("doc")
    gen.endDocument()
    return result.getvalue() == start + "<doc a='\"'\"'></doc>"
def test_xmlgen_attr_escape_manysingle():
    result = StringIO()
    gen = XMLGenerator(result)
    gen.startDocument()
    gen.startElement("doc", {"a": "'\"'"})
    gen.endElement("doc")
    gen.endDocument()
    return result.getvalue() == start + '<doc a="\'"\'"></doc>'
def test_xmlgen_ignorable():
    """ignorableWhitespace is written through verbatim."""
    result = StringIO()
    gen = XMLGenerator(result)
    gen.startDocument()
    gen.startElement("doc", {})
    gen.ignorableWhitespace(" ")
    gen.endElement("doc")
    gen.endDocument()
    return result.getvalue() == start + "<doc> </doc>"
# Namespace URI shared by the namespace-aware tests below.
ns_uri = "http://www.python.org/xml-ns/saxtest/"
def test_xmlgen_ns():
    """Prefix mappings produce xmlns declarations and qualified names."""
    result = StringIO()
    gen = XMLGenerator(result)
    gen.startDocument()
    gen.startPrefixMapping("ns1", ns_uri)
    gen.startElementNS((ns_uri, "doc"), "ns1:doc", {})
    # add an unqualified name
    gen.startElementNS((None, "udoc"), None, {})
    gen.endElementNS((None, "udoc"), None)
    gen.endElementNS((ns_uri, "doc"), "ns1:doc")
    gen.endPrefixMapping("ns1")
    gen.endDocument()
    return result.getvalue() == start + \
           ('<ns1:doc xmlns:ns1="%s"><udoc></udoc></ns1:doc>' %
                                         ns_uri)
# ===== XMLFilterBase
def test_filter_basic():
    """A bare XMLFilterBase passes all events through to its handler."""
    result = StringIO()
    gen = XMLGenerator(result)
    filter = XMLFilterBase()
    filter.setContentHandler(gen)
    filter.startDocument()
    filter.startElement("doc", {})
    filter.characters("content")
    filter.ignorableWhitespace(" ")
    filter.endElement("doc")
    filter.endDocument()
    return result.getvalue() == start + "<doc>content </doc>"
# ===========================================================================
#
# expatreader tests
#
# ===========================================================================
# ===== XMLReader support
def test_expat_file():
    """Parsing the reference file reproduces the saved expected output."""
    parser = make_parser()
    result = StringIO()
    xmlgen = XMLGenerator(result)
    parser.setContentHandler(xmlgen)
    parser.parse(open(findfile("test.xml")))
    return result.getvalue() == xml_test_out
# ===== DTDHandler support
class TestDTDHandler:
    """DTD handler that simply records the declarations it receives."""
    def __init__(self):
        self._notations = []
        self._entities = []
    def notationDecl(self, name, publicId, systemId):
        # recorded as a (name, publicId, systemId) 3-tuple
        self._notations.append((name, publicId, systemId))
    def unparsedEntityDecl(self, name, publicId, systemId, ndata):
        # recorded as a (name, publicId, systemId, ndata) 4-tuple
        self._entities.append((name, publicId, systemId, ndata))
def test_expat_dtdhandler():
    """Notation and unparsed-entity declarations reach the DTD handler."""
    parser = make_parser()
    handler = TestDTDHandler()
    parser.setDTDHandler(handler)
    parser.parse(StringIO('''<!DOCTYPE doc [
  <!ENTITY img SYSTEM "expat.gif" NDATA GIF>
  <!NOTATION GIF PUBLIC "-//CompuServe//NOTATION Graphics Interchange Format 89a//EN">
]>
<doc></doc>'''))
    if len(handler._entities) != 1 or len(handler._entities[0]) != 4:
        return 0
    name, pubId, sysId, ndata = handler._entities[0]
    if name != 'img' or not pubId is None or not sysId.endswith('expat.gif') or ndata != 'GIF':
        return 0
    return handler._notations == [("GIF", "-//CompuServe//NOTATION Graphics Interchange Format 89a//EN", None)]
# ===== EntityResolver support
class TestEntityResolver:
    """Entity resolver that answers every request with a canned document."""
    def resolveEntity(self, publicId, systemId):
        inpsrc = InputSource()
        inpsrc.setByteStream(StringIO("<entity/>"))
        return inpsrc
def test_expat_entityresolver():
    """External entity references are routed through the resolver."""
    parser = make_parser()
    parser.setEntityResolver(TestEntityResolver())
    result = StringIO()
    parser.setContentHandler(XMLGenerator(result))
    parser.parse(StringIO('''<!DOCTYPE doc [
  <!ENTITY test SYSTEM "whatever">
]>
<doc>&test;</doc>'''))
    return result.getvalue() == start + "<doc><entity></entity></doc>"
# ===== Attributes support
class AttrGatherer(ContentHandler):
    """Content handler that captures the attributes of the last element."""
    def startElement(self, name, attrs):
        self._attrs = attrs
    def startElementNS(self, name, qname, attrs):
        self._attrs = attrs
def test_expat_attrs_empty():
    parser = make_parser()
    gather = AttrGatherer()
    parser.setContentHandler(gather)
    parser.parse(StringIO("<doc/>"))
    return verify_empty_attrs(gather._attrs)
def test_expat_attrs_wattr():
    parser = make_parser()
    gather = AttrGatherer()
    parser.setContentHandler(gather)
    parser.parse(StringIO("<doc attr='val'/>"))
    return verify_attrs_wattr(gather._attrs)
def test_expat_nsattrs_empty():
    parser = make_parser()
    parser.setFeature(handler.feature_namespaces, 1)
    gather = AttrGatherer()
    parser.setContentHandler(gather)
    parser.parse(StringIO("<doc/>"))
    return verify_empty_nsattrs(gather._attrs)
def test_expat_nsattrs_wattr():
    """Namespaced attributes are keyed by (uri, localname) pairs."""
    parser = make_parser()
    parser.setFeature(handler.feature_namespaces, 1)
    gather = AttrGatherer()
    parser.setContentHandler(gather)
    a_name = "id" ; a_val = "val"
    parser.parse(StringIO("<doc xmlns:ns='%s' ns:%s='%s'/>" % (ns_uri, a_name, a_val) ))
    attrs = gather._attrs
    return attrs.getLength() == 1 and \
           attrs.getNames() == [(ns_uri, a_name)] and \
           attrs.getQNames() == ["ns:%s" % a_name] and \
           len(attrs) == 1 and \
           attrs.has_key((ns_uri, a_name)) and \
           attrs.keys() == [(ns_uri, a_name)] and \
           attrs.get((ns_uri, a_name)) == a_val and \
           attrs.get((ns_uri, a_name), 25) == a_val and \
           attrs.items() == [((ns_uri, a_name), a_val)] and \
           attrs.values() == [a_val] and \
           attrs.getValue((ns_uri, a_name)) == a_val and \
           attrs[(ns_uri, a_name)] == a_val
def test_expat_nsattrs_no_namespace():
    """Un-prefixed attributes get a None namespace URI."""
    parser = make_parser()
    parser.setFeature(handler.feature_namespaces, 1)
    gather = AttrGatherer()
    parser.setContentHandler(gather)
    a_name = "id" ; a_val = "val"
    parser.parse(StringIO("<doc %s='%s'/>" % (a_name, a_val) ))
    attrs = gather._attrs
    return attrs.getLength() == 1 and \
           attrs.getNames() == [(None, a_name)] and \
           attrs.getQNames() == [a_name] and \
           len(attrs) == 1 and \
           attrs.has_key((None, a_name)) and \
           attrs.keys() == [(None, a_name)] and \
           attrs.get((None, a_name)) == a_val and \
           attrs.get((None, a_name), 25) == a_val and \
           attrs.items() == [((None, a_name), a_val)] and \
           attrs.values() == [a_val] and \
           attrs.getValue((None, a_name)) == a_val and \
           attrs[(None, a_name)] == a_val
# ===== InputSource support
# Reference output regenerated by make_test_output() at the bottom.
xml_test_out = open(findfile("test.xml.out")).read()
def test_expat_inpsource_filename():
    """parse() accepts a plain filename."""
    parser = make_parser()
    result = StringIO()
    xmlgen = XMLGenerator(result)
    parser.setContentHandler(xmlgen)
    parser.parse(findfile("test.xml"))
    return result.getvalue() == xml_test_out
def test_expat_inpsource_sysid():
    """parse() accepts an InputSource built from a system id."""
    parser = make_parser()
    result = StringIO()
    xmlgen = XMLGenerator(result)
    parser.setContentHandler(xmlgen)
    parser.parse(InputSource(findfile("test.xml")))
    return result.getvalue() == xml_test_out
def test_expat_inpsource_stream():
    """parse() accepts an InputSource wrapping a byte stream."""
    parser = make_parser()
    result = StringIO()
    xmlgen = XMLGenerator(result)
    parser.setContentHandler(xmlgen)
    inpsrc = InputSource()
    inpsrc.setByteStream(open(findfile("test.xml")))
    parser.parse(inpsrc)
    return result.getvalue() == xml_test_out
# ===== Locator support
class LocatorTest(XMLGenerator):
    """XMLGenerator that snapshots the document locator it is given.

    Location is imported from xml.sax.saxutils above — presumably a
    Jython-specific extension; verify availability on CPython.
    """
    def __init__(self, out=None, encoding="iso-8859-1"):
        XMLGenerator.__init__(self, out, encoding)
        self.location = None
    def setDocumentLocator(self, locator):
        XMLGenerator.setDocumentLocator(self, locator)
        self.location = Location(self._locator)
def test_expat_locator_noinfo():
    """With no system id, the locator still tracks line numbers."""
    result = StringIO()
    xmlgen = LocatorTest(result)
    parser = make_parser()
    parser.setContentHandler(xmlgen)
    parser.parse(StringIO("<doc></doc>"))
    return xmlgen.location.getSystemId() is None and \
           xmlgen.location.getPublicId() is None and \
           xmlgen.location.getLineNumber() == 1
def test_expat_locator_withinfo():
    """Parsing a file exposes its (URL-quoted) path as the system id."""
    result = StringIO()
    xmlgen = LocatorTest(result)
    parser = make_parser()
    parser.setContentHandler(xmlgen)
    testfile = findfile("test.xml")
    parser.parse(testfile)
    if is_jython:
        # In Jython, the system id is a URL with forward slashes, and
        # under Windows findfile returns a path with backslashes, so
        # replace the backslashes with forward
        testfile = testfile.replace('\\', '/')
    # urllib.quote isn't the exact encoder (e.g. ':' isn't escaped)
    expected = urllib.quote(testfile).replace('%3A', ':')
    return xmlgen.location.getSystemId().endswith(expected) and \
           xmlgen.location.getPublicId() is None
# ===========================================================================
#
# error reporting
#
# ===========================================================================
def test_expat_incomplete():
    """An unterminated document must raise SAXParseException."""
    parser = make_parser()
    parser.setContentHandler(ContentHandler()) # do nothing
    try:
        parser.parse(StringIO("<foo>"))
    except SAXParseException:
        return 1 # ok, error found
    else:
        return 0
def test_sax_location_str():
    """str(Location) must tolerate None line/column numbers."""
    # pass various values from a locator to the SAXParseException to
    # make sure that the __str__() doesn't fall apart when None is
    # passed instead of an integer line and column number
    #
    # use "normal" values for the locator:
    str(Location(DummyLocator(1, 1)))
    # use None for the line number:
    str(Location(DummyLocator(None, 1)))
    # use None for the column number:
    str(Location(DummyLocator(1, None)))
    # use None for both:
    str(Location(DummyLocator(None, None)))
    return 1
def test_sax_parse_exception_str():
    """str(SAXParseException) must tolerate None line/column numbers."""
    # pass various values from a locator to the SAXParseException to
    # make sure that the __str__() doesn't fall apart when None is
    # passed instead of an integer line and column number
    #
    # use "normal" values for the locator:
    str(SAXParseException("message", None,
                          DummyLocator(1, 1)))
    # use None for the line number:
    str(SAXParseException("message", None,
                          DummyLocator(None, 1)))
    # use None for the column number:
    str(SAXParseException("message", None,
                          DummyLocator(1, None)))
    # use None for both:
    str(SAXParseException("message", None,
                          DummyLocator(None, None)))
    return 1
class DummyLocator:
    """Locator stand-in that answers every query with canned values."""

    def __init__(self, lineno, colno):
        self._line = lineno
        self._col = colno

    def getPublicId(self):
        return "pubid"

    def getSystemId(self):
        return "sysid"

    def getLineNumber(self):
        return self._line

    def getColumnNumber(self):
        return self._col
# ===========================================================================
#
# xmlreader tests
#
# ===========================================================================
# ===== AttributesImpl
def verify_empty_attrs(attrs):
    """Exhaustively check the behaviour of an empty Attributes object,
    including that every lookup raises KeyError (Python 2 API: has_key)."""
    try:
        attrs.getValue("attr")
        gvk = 0
    except KeyError:
        gvk = 1
    try:
        attrs.getValueByQName("attr")
        gvqk = 0
    except KeyError:
        gvqk = 1
    try:
        attrs.getNameByQName("attr")
        gnqk = 0
    except KeyError:
        gnqk = 1
    try:
        attrs.getQNameByName("attr")
        gqnk = 0
    except KeyError:
        gqnk = 1
    try:
        attrs["attr"]
        gik = 0
    except KeyError:
        gik = 1
    return attrs.getLength() == 0 and \
           attrs.getNames() == [] and \
           attrs.getQNames() == [] and \
           len(attrs) == 0 and \
           not attrs.has_key("attr") and \
           attrs.keys() == [] and \
           attrs.get("attrs") is None and \
           attrs.get("attrs", 25) == 25 and \
           attrs.items() == [] and \
           attrs.values() == [] and \
           gvk and gvqk and gnqk and gik and gqnk
def verify_attrs_wattr(attrs):
    """Check every access path for a single attr='val' attribute."""
    return attrs.getLength() == 1 and \
           attrs.getNames() == ["attr"] and \
           attrs.getQNames() == ["attr"] and \
           len(attrs) == 1 and \
           attrs.has_key("attr") and \
           attrs.keys() == ["attr"] and \
           attrs.get("attr") == "val" and \
           attrs.get("attr", 25) == "val" and \
           attrs.items() == [("attr", "val")] and \
           attrs.values() == ["val"] and \
           attrs.getValue("attr") == "val" and \
           attrs.getValueByQName("attr") == "val" and \
           attrs.getNameByQName("attr") == "attr" and \
           attrs["attr"] == "val" and \
           attrs.getQNameByName("attr") == "attr"
def test_attrs_empty():
    return verify_empty_attrs(AttributesImpl({}))
def test_attrs_wattr():
    return verify_attrs_wattr(AttributesImpl({"attr" : "val"}))
# ===== AttributesNSImpl
def verify_empty_nsattrs(attrs):
    """Same as verify_empty_attrs, but for namespaced attributes keyed by
    (uri, localname) tuples."""
    try:
        attrs.getValue((ns_uri, "attr"))
        gvk = 0
    except KeyError:
        gvk = 1
    try:
        attrs.getValueByQName("ns:attr")
        gvqk = 0
    except KeyError:
        gvqk = 1
    try:
        attrs.getNameByQName("ns:attr")
        gnqk = 0
    except KeyError:
        gnqk = 1
    try:
        attrs.getQNameByName((ns_uri, "attr"))
        gqnk = 0
    except KeyError:
        gqnk = 1
    try:
        attrs[(ns_uri, "attr")]
        gik = 0
    except KeyError:
        gik = 1
    return attrs.getLength() == 0 and \
           attrs.getNames() == [] and \
           attrs.getQNames() == [] and \
           len(attrs) == 0 and \
           not attrs.has_key((ns_uri, "attr")) and \
           attrs.keys() == [] and \
           attrs.get((ns_uri, "attr")) is None and \
           attrs.get((ns_uri, "attr"), 25) == 25 and \
           attrs.items() == [] and \
           attrs.values() == [] and \
           gvk and gvqk and gnqk and gik and gqnk
def test_nsattrs_empty():
    return verify_empty_nsattrs(AttributesNSImpl({}, {}))
def test_nsattrs_wattr():
    """Check every access path for a single namespaced attribute."""
    attrs = AttributesNSImpl({(ns_uri, "attr") : "val"},
                             {(ns_uri, "attr") : "ns:attr"})
    return attrs.getLength() == 1 and \
           attrs.getNames() == [(ns_uri, "attr")] and \
           attrs.getQNames() == ["ns:attr"] and \
           len(attrs) == 1 and \
           attrs.has_key((ns_uri, "attr")) and \
           attrs.keys() == [(ns_uri, "attr")] and \
           attrs.get((ns_uri, "attr")) == "val" and \
           attrs.get((ns_uri, "attr"), 25) == "val" and \
           attrs.items() == [((ns_uri, "attr"), "val")] and \
           attrs.values() == ["val"] and \
           attrs.getValue((ns_uri, "attr")) == "val" and \
           attrs.getValueByQName("ns:attr") == "val" and \
           attrs.getNameByQName("ns:attr") == (ns_uri, "attr") and \
           attrs[(ns_uri, "attr")] == "val" and \
           attrs.getQNameByName((ns_uri, "attr")) == "ns:attr"
# ===== Main program
def make_test_output():
    """Regenerate the reference file test.xml.out.

    Parses test.xml through an XMLGenerator and writes the serialized
    result to test.xml.out for use as the round-trip expected output.
    """
    parser = make_parser()
    result = StringIO()
    xmlgen = XMLGenerator(result)
    parser.setContentHandler(xmlgen)
    parser.parse(findfile("test.xml"))
    outf = open(findfile("test.xml.out"), "w")
    # Close the handle even if the write raises, so it is never leaked.
    try:
        outf.write(result.getvalue())
    finally:
        outf.close()
import sys
# Detect Jython 1.4 so the expat-backed tests can be skipped there.
java_14 = sys.platform.startswith("java1.4")
del sys
# Collect every module-level name and run the test_* callables in sorted
# (deterministic) order, recording results through confirm().
items = locals().items()
items.sort()
for (name, value) in items:
    if name.startswith('test_expat') and java_14:
        #skip expat tests on java14 since the crimson parser is so crappy
        continue
    if name[:5] == "test_":
        confirm(value(), name)
# Summary / failure reporting (Python 2 print-statement syntax).
if verbose:
    print "%d tests, %d failures" % (tests, len(failures))
if failures:
    raise TestFailed("%d of %d tests failed: %s"
                     % (len(failures), tests, ", ".join(failures)))
| gpl-2.0 |
basicthinker/THNVM | configs/ruby/MI_example.py | 13 | 7567 | # Copyright (c) 2006-2007 The Regents of The University of Michigan
# Copyright (c) 2009 Advanced Micro Devices, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Brad Beckmann
import math
import m5
from m5.objects import *
from m5.defines import buildEnv
from Ruby import create_topology
from Ruby import send_evicts
#
# Note: the cache latency is only used by the sequencer on fast path hits
#
class Cache(RubyCache):
    # Cache access latency in cycles; per the file's note, only used by the
    # sequencer on fast-path hits.
    latency = 3
def define_options(parser):
    """Hook for protocol-specific CLI options; MI_example registers none."""
    pass
def create_system(options, full_system, system, dma_ports, ruby_system):
    """Build the MI_example Ruby memory system.

    Creates one unified L1 controller + sequencer per CPU, one directory
    controller per memory module, and one DMA controller per DMA port
    (plus an extra I/O DMA controller in full-system mode), wires them
    all to the Ruby network, and returns
    (cpu_sequencers, dir_cntrl_nodes, topology).
    """
    if buildEnv['PROTOCOL'] != 'MI_example':
        panic("This script requires the MI_example protocol to be built.")
    cpu_sequencers = []
    #
    # The ruby network creation expects the list of nodes in the system to be
    # consistent with the NetDest list. Therefore the l1 controller nodes must be
    # listed before the directory nodes and directory nodes before dma nodes, etc.
    #
    l1_cntrl_nodes = []
    dir_cntrl_nodes = []
    dma_cntrl_nodes = []
    #
    # Must create the individual controllers before the network to ensure the
    # controller constructors are called before the network constructor
    #
    block_size_bits = int(math.log(options.cacheline_size, 2))
    for i in xrange(options.num_cpus):
        #
        # First create the Ruby objects associated with this cpu
        # Only one cache exists for this protocol, so by default use the L1D
        # config parameters.
        #
        cache = Cache(size = options.l1d_size,
                      assoc = options.l1d_assoc,
                      start_index_bit = block_size_bits)
        #
        # Only one unified L1 cache exists. Can cache instructions and data.
        #
        l1_cntrl = L1Cache_Controller(version = i,
                                      cacheMemory = cache,
                                      send_evictions = send_evicts(options),
                                      transitions_per_cycle = options.ports,
                                      clk_domain=system.cpu[i].clk_domain,
                                      ruby_system = ruby_system)
        cpu_seq = RubySequencer(version = i,
                                icache = cache,
                                dcache = cache,
                                clk_domain=system.cpu[i].clk_domain,
                                ruby_system = ruby_system)
        l1_cntrl.sequencer = cpu_seq
        # exec() gives each controller a distinct attribute name on
        # ruby_system (l1_cntrl0, l1_cntrl1, ...).
        exec("ruby_system.l1_cntrl%d = l1_cntrl" % i)
        # Add controllers and sequencers to the appropriate lists
        cpu_sequencers.append(cpu_seq)
        l1_cntrl_nodes.append(l1_cntrl)
        # Connect the L1 controllers and the network
        l1_cntrl.requestFromCache = ruby_system.network.slave
        l1_cntrl.responseFromCache = ruby_system.network.slave
        l1_cntrl.forwardToCache = ruby_system.network.master
        l1_cntrl.responseToCache = ruby_system.network.master
    # Memory must divide evenly across the directories.
    phys_mem_size = sum(map(lambda r: r.size(), system.mem_ranges))
    assert(phys_mem_size % options.num_dirs == 0)
    mem_module_size = phys_mem_size / options.num_dirs
    # Run each of the ruby memory controllers at a ratio of the frequency of
    # the ruby system.
    # clk_divider value is a fix to pass regression.
    ruby_system.memctrl_clk_domain = DerivedClockDomain(
        clk_domain=ruby_system.clk_domain,
        clk_divider=3)
    for i in xrange(options.num_dirs):
        dir_size = MemorySize('0B')
        dir_size.value = mem_module_size
        dir_cntrl = Directory_Controller(version = i,
                                         directory = RubyDirectoryMemory(
                                             version = i, size = dir_size),
                                         transitions_per_cycle = options.ports,
                                         ruby_system = ruby_system)
        exec("ruby_system.dir_cntrl%d = dir_cntrl" % i)
        dir_cntrl_nodes.append(dir_cntrl)
        # Connect the directory controllers and the network
        dir_cntrl.requestToDir = ruby_system.network.master
        dir_cntrl.dmaRequestToDir = ruby_system.network.master
        dir_cntrl.responseFromDir = ruby_system.network.slave
        dir_cntrl.dmaResponseFromDir = ruby_system.network.slave
        dir_cntrl.forwardFromDir = ruby_system.network.slave
    for i, dma_port in enumerate(dma_ports):
        #
        # Create the Ruby objects associated with the dma controller
        #
        dma_seq = DMASequencer(version = i,
                               ruby_system = ruby_system)
        dma_cntrl = DMA_Controller(version = i,
                                   dma_sequencer = dma_seq,
                                   transitions_per_cycle = options.ports,
                                   ruby_system = ruby_system)
        exec("ruby_system.dma_cntrl%d = dma_cntrl" % i)
        exec("ruby_system.dma_cntrl%d.dma_sequencer.slave = dma_port" % i)
        dma_cntrl_nodes.append(dma_cntrl)
        # Connect the dma controllers and the network
        dma_cntrl.requestToDir = ruby_system.network.slave
        dma_cntrl.responseFromDir = ruby_system.network.master
    all_cntrls = l1_cntrl_nodes + dir_cntrl_nodes + dma_cntrl_nodes
    # Create the io controller and the sequencer
    if full_system:
        io_seq = DMASequencer(version=len(dma_ports), ruby_system=ruby_system)
        ruby_system._io_port = io_seq
        io_controller = DMA_Controller(version = len(dma_ports),
                                       dma_sequencer = io_seq,
                                       ruby_system = ruby_system)
        ruby_system.io_controller = io_controller
        # Connect the dma controller to the network
        io_controller.responseFromDir = ruby_system.network.master
        io_controller.requestToDir = ruby_system.network.slave
        all_cntrls = all_cntrls + [io_controller]
    topology = create_topology(all_cntrls, options)
    return (cpu_sequencers, dir_cntrl_nodes, topology)
| bsd-3-clause |
gnumdk/eolie | eolie/toolbar.py | 1 | 2783 | # Copyright (c) 2017 Cedric Bellegarde <cedric.bellegarde@adishatz.org>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from gi.repository import Gtk
from eolie.toolbar_actions import ToolbarActions
from eolie.toolbar_title import ToolbarTitle
from eolie.toolbar_end import ToolbarEnd
class Toolbar(Gtk.EventBox):
    """
    Eolie toolbar
    """
    def __init__(self, window, fullscreen=False):
        """
            Init toolbar
            @param window as Window
            @param fullscreen as bool
        """
        Gtk.EventBox.__init__(self)
        self.__window = window
        self.__headerbar = Gtk.HeaderBar()
        self.__headerbar.show()
        self.__headerbar.set_title("Eolie")
        # Three sub-toolbars: actions packed at the start, title as the
        # headerbar's custom title widget, end tools packed at the end.
        self.__toolbar_actions = ToolbarActions(window, fullscreen)
        self.__toolbar_actions.show()
        self.__toolbar_title = ToolbarTitle(window)
        self.__toolbar_title.show()
        self.__toolbar_end = ToolbarEnd(window, fullscreen)
        self.__toolbar_end.show()
        self.__headerbar.pack_start(self.__toolbar_actions)
        self.__headerbar.set_custom_title(self.__toolbar_title)
        self.__headerbar.pack_end(self.__toolbar_end)
        # Any click on the toolbar background dismisses open popovers.
        self.connect("button-press-event", self.__on_button_press)
        self.add(self.__headerbar)
    @property
    def headerbar(self):
        """
            Get headerbar
            @return Gtk.HeaderBar
        """
        return self.__headerbar
    @property
    def title(self):
        """
            Toolbar title
            @return ToolbarTitle
        """
        return self.__toolbar_title
    @property
    def end(self):
        """
            Toolbar end
            @return ToolbarEnd
        """
        return self.__toolbar_end
    @property
    def actions(self):
        """
            Toolbar actions
            @return ToolbarActions
        """
        return self.__toolbar_actions
    #######################
    # PRIVATE             #
    #######################
    def __on_button_press(self, widget, event):
        """
            Hide popover if visible
            @param widget as Gtk.Widget
            @param event as Gdk.Event
        """
        self.__window.close_popovers()
| gpl-3.0 |
Kongsea/tensorflow | tensorflow/examples/speech_commands/freeze.py | 37 | 6965 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
r"""Converts a trained checkpoint into a frozen model for mobile inference.
Once you've trained a model using the `train.py` script, you can use this tool
to convert it into a binary GraphDef file that can be loaded into the Android,
iOS, or Raspberry Pi example code. Here's an example of how to run it:
bazel run tensorflow/examples/speech_commands/freeze -- \
--sample_rate=16000 --dct_coefficient_count=40 --window_size_ms=20 \
--window_stride_ms=10 --clip_duration_ms=1000 \
--model_architecture=conv \
--start_checkpoint=/tmp/speech_commands_train/conv.ckpt-1300 \
--output_file=/tmp/my_frozen_graph.pb
One thing to watch out for is that you need to pass in the same arguments for
`sample_rate` and other command line variables here as you did for the training
script.
The resulting graph has an input for WAV-encoded data named 'wav_data', one for
raw PCM data (as floats in the range -1.0 to 1.0) called 'decoded_sample_data',
and the output is called 'labels_softmax'.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import os.path
import sys
import tensorflow as tf
from tensorflow.contrib.framework.python.ops import audio_ops as contrib_audio
import input_data
import models
from tensorflow.python.framework import graph_util
FLAGS = None
def create_inference_graph(wanted_words, sample_rate, clip_duration_ms,
                           clip_stride_ms, window_size_ms, window_stride_ms,
                           dct_coefficient_count, model_architecture):
  """Creates an audio model with the nodes needed for inference.

  Uses the supplied arguments to create a model, and inserts the input and
  output nodes that are needed to use the graph for inference. The graph gets
  inputs named 'wav_data' and 'decoded_sample_data' and an output named
  'labels_softmax' (see the names assigned below).

  Args:
    wanted_words: Comma-separated list of the words we're trying to recognize.
    sample_rate: How many samples per second are in the input audio files.
    clip_duration_ms: How many samples to analyze for the audio pattern.
    clip_stride_ms: How often to run recognition. Useful for models with cache.
    window_size_ms: Time slice duration to estimate frequencies from.
    window_stride_ms: How far apart time slices should be.
    dct_coefficient_count: Number of frequency bands to analyze.
    model_architecture: Name of the kind of model to generate.
  """
  words_list = input_data.prepare_words_list(wanted_words.split(','))
  model_settings = models.prepare_model_settings(
      len(words_list), sample_rate, clip_duration_ms, window_size_ms,
      window_stride_ms, dct_coefficient_count)
  runtime_settings = {'clip_stride_ms': clip_stride_ms}
  # WAV-encoded input, decoded to mono float PCM of fixed length.
  wav_data_placeholder = tf.placeholder(tf.string, [], name='wav_data')
  decoded_sample_data = contrib_audio.decode_wav(
      wav_data_placeholder,
      desired_channels=1,
      desired_samples=model_settings['desired_samples'],
      name='decoded_sample_data')
  # Feature extraction: spectrogram then MFCC fingerprint.
  spectrogram = contrib_audio.audio_spectrogram(
      decoded_sample_data.audio,
      window_size=model_settings['window_size_samples'],
      stride=model_settings['window_stride_samples'],
      magnitude_squared=True)
  fingerprint_input = contrib_audio.mfcc(
      spectrogram,
      decoded_sample_data.sample_rate,
      dct_coefficient_count=dct_coefficient_count)
  fingerprint_frequency_size = model_settings['dct_coefficient_count']
  fingerprint_time_size = model_settings['spectrogram_length']
  # Flatten time x frequency features into one vector per clip.
  reshaped_input = tf.reshape(fingerprint_input, [
      -1, fingerprint_time_size * fingerprint_frequency_size
  ])
  logits = models.create_model(
      reshaped_input, model_settings, model_architecture, is_training=False,
      runtime_settings=runtime_settings)
  # Create an output to use for inference.
  tf.nn.softmax(logits, name='labels_softmax')
def main(_):
  """Builds the inference graph, restores weights, and writes a frozen graph.

  Reads all configuration from the module-level FLAGS parsed in __main__.
  """
  # Create the model and load its weights.
  sess = tf.InteractiveSession()
  create_inference_graph(FLAGS.wanted_words, FLAGS.sample_rate,
                         FLAGS.clip_duration_ms, FLAGS.clip_stride_ms,
                         FLAGS.window_size_ms, FLAGS.window_stride_ms,
                         FLAGS.dct_coefficient_count, FLAGS.model_architecture)
  models.load_variables_from_checkpoint(sess, FLAGS.start_checkpoint)
  # Turn all the variables into inline constants inside the graph and save it.
  frozen_graph_def = graph_util.convert_variables_to_constants(
      sess, sess.graph_def, ['labels_softmax'])
  tf.train.write_graph(
      frozen_graph_def,
      os.path.dirname(FLAGS.output_file),
      os.path.basename(FLAGS.output_file),
      as_text=False)
  tf.logging.info('Saved frozen graph to %s', FLAGS.output_file)
if __name__ == '__main__':
  # Audio/feature flags below must match the values used during training.
  parser = argparse.ArgumentParser()
  parser.add_argument(
      '--sample_rate',
      type=int,
      default=16000,
      help='Expected sample rate of the wavs',)
  parser.add_argument(
      '--clip_duration_ms',
      type=int,
      default=1000,
      help='Expected duration in milliseconds of the wavs',)
  parser.add_argument(
      '--clip_stride_ms',
      type=int,
      default=30,
      help='How often to run recognition. Useful for models with cache.',)
  parser.add_argument(
      '--window_size_ms',
      type=float,
      default=30.0,
      help='How long each spectrogram timeslice is',)
  parser.add_argument(
      '--window_stride_ms',
      type=float,
      default=10.0,
      help='How long the stride is between spectrogram timeslices',)
  parser.add_argument(
      '--dct_coefficient_count',
      type=int,
      default=40,
      help='How many bins to use for the MFCC fingerprint',)
  parser.add_argument(
      '--start_checkpoint',
      type=str,
      default='',
      help='If specified, restore this pretrained model before any training.')
  parser.add_argument(
      '--model_architecture',
      type=str,
      default='conv',
      help='What model architecture to use')
  parser.add_argument(
      '--wanted_words',
      type=str,
      default='yes,no,up,down,left,right,on,off,stop,go',
      help='Words to use (others will be added to an unknown label)',)
  parser.add_argument(
      '--output_file', type=str, help='Where to save the frozen graph.')
  # Unrecognized args are forwarded to tf.app.run untouched.
  FLAGS, unparsed = parser.parse_known_args()
  tf.app.run(main=main, argv=[sys.argv[0]] + unparsed)
| apache-2.0 |
ChetnaChaudhari/hadoop | hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/job_history_summary.py | 323 | 3444 | #!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import sys
# name="value" attribute pairs as they appear in job history event lines.
pat = re.compile('(?P<name>[^=]+)="(?P<value>[^"]*)" *')
# name:value pairs inside a COUNTERS attribute string.
counterPat = re.compile('(?P<name>[^:]+):(?P<value>[^,]*),?')
def parse(tail):
  """Turn the name="value" attribute tail of an event line into a dict."""
  # pat.findall yields (name, value) tuples; dict() keeps the last value
  # for a repeated name, exactly like the assignment loop it replaces.
  return dict(pat.findall(tail))
# Per-task timing tables, keyed by TASKID (times in whole seconds).
mapStartTime = {}
mapEndTime = {}
reduceStartTime = {}
reduceShuffleTime = {}
reduceSortTime = {}
reduceEndTime = {}
reduceBytes = {}
# First pass: collect per-attempt timestamps from the history stream.
# (Python 2 script: uses dict.has_key, print statements, builtin reduce.)
for line in sys.stdin:
  words = line.split(" ",1)
  event = words[0]
  attrs = parse(words[1])
  if event == 'MapAttempt':
    if attrs.has_key("START_TIME"):
      mapStartTime[attrs["TASKID"]] = int(attrs["START_TIME"])/1000
    elif attrs.has_key("FINISH_TIME"):
      mapEndTime[attrs["TASKID"]] = int(attrs["FINISH_TIME"])/1000
  elif event == 'ReduceAttempt':
    if attrs.has_key("START_TIME"):
      reduceStartTime[attrs["TASKID"]] = int(attrs["START_TIME"]) / 1000
    elif attrs.has_key("FINISH_TIME"):
      reduceShuffleTime[attrs["TASKID"]] = int(attrs["SHUFFLE_FINISHED"])/1000
      reduceSortTime[attrs["TASKID"]] = int(attrs["SORT_FINISHED"])/1000
      reduceEndTime[attrs["TASKID"]] = int(attrs["FINISH_TIME"])/1000
  elif event == 'Task':
    if attrs["TASK_TYPE"] == "REDUCE" and attrs.has_key("COUNTERS"):
      for n,v in re.findall(counterPat, attrs["COUNTERS"]):
        if n == "File Systems.HDFS bytes written":
          reduceBytes[attrs["TASKID"]] = int(v)
# Second pass: build per-second concurrency histograms over the job's span.
runningMaps = {}
shufflingReduces = {}
sortingReduces = {}
runningReduces = {}
startTime = min(reduce(min, mapStartTime.values()),
                reduce(min, reduceStartTime.values()))
endTime = max(reduce(max, mapEndTime.values()),
              reduce(max, reduceEndTime.values()))
reduces = reduceBytes.keys()
reduces.sort()
print "Name reduce-output-bytes shuffle-finish reduce-finish"
for r in reduces:
  print r, reduceBytes[r], reduceShuffleTime[r] - startTime,
  print reduceEndTime[r] - startTime
print
for t in range(startTime, endTime):
  runningMaps[t] = 0
  shufflingReduces[t] = 0
  sortingReduces[t] = 0
  runningReduces[t] = 0
# NOTE(review): the loop variables below shadow the builtins `map` and
# `reduce`; harmless here because reduce() is no longer called after this
# point, but worth renaming.
for map in mapStartTime.keys():
  for t in range(mapStartTime[map], mapEndTime[map]):
    runningMaps[t] += 1
for reduce in reduceStartTime.keys():
  for t in range(reduceStartTime[reduce], reduceShuffleTime[reduce]):
    shufflingReduces[t] += 1
  for t in range(reduceShuffleTime[reduce], reduceSortTime[reduce]):
    sortingReduces[t] += 1
  for t in range(reduceSortTime[reduce], reduceEndTime[reduce]):
    runningReduces[t] += 1
print "time maps shuffle merge reduce"
for t in range(startTime, endTime):
  print t - startTime, runningMaps[t], shufflingReduces[t], sortingReduces[t],
  print runningReduces[t]
| apache-2.0 |
GabrielBrascher/cloudstack | plugins/network-elements/stratosphere-ssp/sspmock/sspmock.py | 8 | 3571 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import json
import uuid
from flask import Flask,request,make_response
from beaker.middleware import SessionMiddleware
app = Flask(__name__)
# In-memory stores for the mock: lists of dicts, one per created resource.
tenant_networks = []
tenant_ports = []
@app.route("/ws.v1/login", methods=["POST",])
def login():
assert "username" in request.form
assert "password" in request.form
request.environ["beaker.session"]["login"] = True
res = make_response("", 200)
res.headers["Content-type"] = "application/json"
return res
@app.route("/ssp.v1/tenant-networks", methods=["POST",])
def create_tenant_network():
if "login" not in request.environ["beaker.session"]:
return make_response("", 401)
obj = request.json
obj["uuid"] = str(uuid.uuid1())
tenant_networks.append(obj)
res = make_response(json.dumps(obj), 201)
res.headers["Content-type"] = "application/json"
return res
@app.route("/ssp.v1/tenant-networks/<tenant_net_uuid>", methods=["DELETE",])
def delete_tenant_network(tenant_net_uuid):
if "login" not in request.environ["beaker.session"]:
return make_response("", 401)
for net in tenant_networks:
if net["uuid"] == tenant_net_uuid:
tenant_networks.remove(net)
return make_response("", 204)
return make_response("", 404)
@app.route("/ssp.v1/tenant-ports", methods=["POST",])
def create_tenant_port():
if "login" not in request.environ["beaker.session"]:
return make_response("", 401)
obj = request.json
obj["uuid"] = str(uuid.uuid1())
tenant_ports.append(obj)
res = make_response(json.dumps(obj), 201)
res.headers["Content-type"] = "application/json"
return res
@app.route("/ssp.v1/tenant-ports/<tenant_port_uuid>", methods=["DELETE",])
def delete_tenant_port(tenant_port_uuid):
if "login" not in request.environ["beaker.session"]:
return make_response("", 401)
for port in tenant_ports:
if port["uuid"] == tenant_port_uuid:
tenant_ports.remove(port)
return make_response("", 204)
return make_response("", 404)
@app.route("/ssp.v1/tenant-ports/<tenant_port_uuid>", methods=["PUT",])
def update_tenant_port(tenant_port_uuid):
if "login" not in request.environ["beaker.session"]:
return make_response("", 401)
for port in tenant_ports:
if port["uuid"] == tenant_port_uuid:
obj = request.json
obj["uuid"] = tenant_port_uuid
obj["vlan_id"] = 100
tenant_ports.remove(port)
tenant_ports.append(obj)
res = make_response(json.dumps(obj), 200)
res.headers["Content-type"] = "application/json"
return res
return make_response("", 404)
if __name__=="__main__":
app.wsgi_app = SessionMiddleware(app.wsgi_app, {
"session.auto":True,
"session.type":"cookie",
"session.validate_key":"hoge"})
app.run(host="0.0.0.0", port=9080, debug=True)
| apache-2.0 |
yoer/hue | desktop/core/ext-py/Django-1.6.10/django/contrib/gis/management/commands/ogrinspect.py | 104 | 6065 | from optparse import make_option
from django.contrib.gis import gdal
from django.core.management.base import LabelCommand, CommandError
def layer_option(option, opt, value, parser):
    """
    `optparse` callback for the `ogrinspect` `layer_key` option: store the
    value as an int when it parses as one, otherwise keep the raw string.
    """
    try:
        layer_key = int(value)
    except ValueError:
        layer_key = value
    setattr(parser.values, option.dest, layer_key)
def list_option(option, opt, value, parser):
    """
    Callback for `make_option` for `ogrinspect` keywords that require
    a string list. If the string is 'True'/'true' (case-insensitive) then
    the option value will be the boolean True instead.
    """
    if value.lower() == 'true':
        dest = True
    else:
        # split() already returns a new list; the previous comprehension
        # copy of it was redundant.
        dest = value.split(',')
    setattr(parser.values, option.dest, dest)
class Command(LabelCommand):
    """Management command inspecting an OGR data source into a GeoDjango model."""
    help = ('Inspects the given OGR-compatible data source (e.g., a shapefile) and outputs\n'
            'a GeoDjango model with the given model name.  For example:\n'
            ' ./manage.py ogrinspect zipcode.shp Zipcode')
    args = '[data_source] [model_name]'

    option_list = LabelCommand.option_list + (
        make_option('--blank', dest='blank', type='string', action='callback',
                    callback=list_option, default=False,
                    help='Use a comma separated list of OGR field names to add '
                    'the `blank=True` option to the field definition. Set with'
                    '`true` to apply to all applicable fields.'),
        make_option('--decimal', dest='decimal', type='string', action='callback',
                    callback=list_option, default=False,
                    help='Use a comma separated list of OGR float fields to '
                    'generate `DecimalField` instead of the default '
                    '`FloatField`. Set to `true` to apply to all OGR float fields.'),
        make_option('--geom-name', dest='geom_name', type='string', default='geom',
                    help='Specifies the model name for the Geometry Field '
                    '(defaults to `geom`)'),
        make_option('--layer', dest='layer_key', type='string', action='callback',
                    callback=layer_option, default=0,
                    help='The key for specifying which layer in the OGR data '
                    'source to use. Defaults to 0 (the first layer). May be '
                    'an integer or a string identifier for the layer.'),
        make_option('--multi-geom', action='store_true', dest='multi_geom', default=False,
                    help='Treat the geometry in the data source as a geometry collection.'),
        make_option('--name-field', dest='name_field',
                    help='Specifies a field name to return for the `__unicode__`/`__str__` function.'),
        make_option('--no-imports', action='store_false', dest='imports', default=True,
                    help='Do not include `from django.contrib.gis.db import models` '
                    'statement.'),
        make_option('--null', dest='null', type='string', action='callback',
                    callback=list_option, default=False,
                    help='Use a comma separated list of OGR field names to add '
                    'the `null=True` option to the field definition. Set with'
                    '`true` to apply to all applicable fields.'),
        make_option('--srid', dest='srid',
                    help='The SRID to use for the Geometry Field. If it can be '
                    'determined, the SRID of the data source is used.'),
        make_option('--mapping', action='store_true', dest='mapping',
                    help='Generate mapping dictionary for use with `LayerMapping`.')
    )

    requires_model_validation = False

    def handle(self, *args, **options):
        """Validate arguments, then emit the generated model (and mapping).

        Raises CommandError on bad arguments or when GDAL is unavailable.
        """
        try:
            data_source, model_name = args
        except ValueError:
            raise CommandError('Invalid arguments, must provide: %s' % self.args)

        if not gdal.HAS_GDAL:
            raise CommandError('GDAL is required to inspect geospatial data sources.')

        # Removing options with `None` values.
        options = dict((k, v) for k, v in options.items() if v is not None)

        # Getting the OGR DataSource from the string parameter.
        try:
            ds = gdal.DataSource(data_source)
        except gdal.OGRException as msg:
            raise CommandError(msg)

        # Whether the user wants to generate the LayerMapping dictionary as well.
        show_mapping = options.pop('mapping', False)

        # Getting rid of settings that `_ogrinspect` doesn't like.
        verbosity = options.pop('verbosity', False)
        settings = options.pop('settings', False)

        # Returning the output of ogrinspect with the given arguments
        # and options.
        from django.contrib.gis.utils.ogrinspect import _ogrinspect, mapping
        output = list(_ogrinspect(ds, model_name, **options))
        if show_mapping:
            # Constructing the keyword arguments for `mapping`, and
            # calling it on the data source.
            kwargs = {'geom_name' : options['geom_name'],
                      'layer_key' : options['layer_key'],
                      'multi_geom' : options['multi_geom'],
                      }
            mapping_dict = mapping(ds, **kwargs)
            # This extra legwork is so that the dictionary definition comes
            # out in the same order as the fields in the model definition.
            rev_mapping = dict((v, k) for k, v in mapping_dict.items())
            output.extend(['', '# Auto-generated `LayerMapping` dictionary for %s model' % model_name,
                           '%s_mapping = {' % model_name.lower()])
            output.extend(["    '%s' : '%s'," % (rev_mapping[ogr_fld], ogr_fld) for ogr_fld in ds[options['layer_key']].fields])
            output.extend(["    '%s' : '%s'," % (options['geom_name'], mapping_dict[options['geom_name']]), '}'])
        return '\n'.join(output) + '\n'
| apache-2.0 |
AyoubZahid/odoo | addons/website_forum/tests/test_forum.py | 27 | 12005 | # -*- coding: utf-8 -*-
from .common import KARMA, TestForumCommon
from ..models.forum import KarmaError
from openerp.exceptions import UserError, AccessError
from openerp.tools import mute_logger
class TestForum(TestForumCommon):
    @mute_logger('openerp.addons.base.ir.ir_model', 'openerp.models')
    def test_ask(self):
        """Question creation rights and karma rewards per user class."""
        Post = self.env['forum.post']
        # Public user asks a question: not allowed
        with self.assertRaises(AccessError):
            Post.sudo(self.user_public).create({
                'name': " Question ?",
                'forum_id': self.forum.id,
            })
        # Portal user asks a question with tags: not allowed, unsufficient karma
        with self.assertRaises(KarmaError):
            Post.sudo(self.user_portal).create({
                'name': " Q_0",
                'forum_id': self.forum.id,
                'tag_ids': [(0, 0, {'name': 'Tag0', 'forum_id': self.forum.id})]
            })
        # Portal user asks a question with tags: ok if enough karma
        self.user_portal.karma = KARMA['ask']
        Post.sudo(self.user_portal).create({
            'name': " Q0",
            'forum_id': self.forum.id,
            'tag_ids': [(0, 0, {'name': 'Tag1', 'forum_id': self.forum.id})]
        })
        self.assertEqual(self.user_portal.karma, KARMA['ask'], 'website_forum: wrong karma generation when asking question')
        self.user_portal.karma = KARMA['post']
        Post.sudo(self.user_portal).create({
            'name': " Q0",
            'forum_id': self.forum.id,
            'tag_ids': [(0, 0, {'name': 'Tag42', 'forum_id': self.forum.id})]
        })
        self.assertEqual(self.user_portal.karma, KARMA['post'] + KARMA['gen_que_new'], 'website_forum: wrong karma generation when asking question')
    @mute_logger('openerp.addons.base.ir.ir_model', 'openerp.models')
    def test_answer(self):
        """Answer creation requires KARMA['ans']; no karma gain afterwards."""
        Post = self.env['forum.post']
        # Answers its own question: not allowed, unsufficient karma
        with self.assertRaises(KarmaError):
            Post.sudo(self.user_employee).create({
                'name': " A0",
                'forum_id': self.forum.id,
                'parent_id': self.post.id,
            })
        # Answers on question: ok if enough karma
        self.user_employee.karma = KARMA['ans']
        Post.sudo(self.user_employee).create({
            'name': " A0",
            'forum_id': self.forum.id,
            'parent_id': self.post.id,
        })
        self.assertEqual(self.user_employee.karma, KARMA['ans'], 'website_forum: wrong karma generation when answering question')
    @mute_logger('openerp.addons.base.ir.ir_model', 'openerp.models')
    def test_vote_crash(self):
        """Upvote rejected on one's own post and without sufficient karma."""
        Post = self.env['forum.post']
        self.user_employee.karma = KARMA['ans']
        emp_answer = Post.sudo(self.user_employee).create({
            'name': 'TestAnswer',
            'forum_id': self.forum.id,
            'parent_id': self.post.id})
        # upvote its own post
        with self.assertRaises(UserError):
            emp_answer.vote(upvote=True)
        # not enough karma
        with self.assertRaises(KarmaError):
            self.post.sudo(self.user_portal).vote(upvote=True)
    def test_vote(self):
        """An upvote credits the question author with KARMA['gen_que_upv']."""
        self.post.create_uid.karma = KARMA['ask']
        self.user_portal.karma = KARMA['upv']
        self.post.sudo(self.user_portal).vote(upvote=True)
        self.assertEqual(self.post.create_uid.karma, KARMA['ask'] + KARMA['gen_que_upv'], 'website_forum: wrong karma generation of upvoted question author')
    @mute_logger('openerp.addons.base.ir.ir_model', 'openerp.models')
    def test_downvote_crash(self):
        """Downvote rejected on one's own post and without sufficient karma."""
        Post = self.env['forum.post']
        self.user_employee.karma = KARMA['ans']
        emp_answer = Post.sudo(self.user_employee).create({
            'name': 'TestAnswer',
            'forum_id': self.forum.id,
            'parent_id': self.post.id})
        # downvote its own post
        with self.assertRaises(UserError):
            emp_answer.vote(upvote=False)
        # not enough karma
        with self.assertRaises(KarmaError):
            self.post.sudo(self.user_portal).vote(upvote=False)
    def test_downvote(self):
        """A downvote applies KARMA['gen_que_dwv'] to the question author."""
        self.post.create_uid.karma = 50
        self.user_portal.karma = KARMA['dwv']
        self.post.sudo(self.user_portal).vote(upvote=False)
        self.assertEqual(self.post.create_uid.karma, 50 + KARMA['gen_que_dwv'], 'website_forum: wrong karma generation of downvoted question author')
    def test_comment_crash(self):
        """Commenting without the required karma raises KarmaError."""
        with self.assertRaises(KarmaError):
            self.post.sudo(self.user_portal).message_post(body='Should crash', message_type='comment')
    def test_comment(self):
        """Notifications post freely; comments need KARMA['com_all']."""
        self.post.sudo(self.user_employee).message_post(body='Test0', message_type='notification')
        self.user_employee.karma = KARMA['com_all']
        self.post.sudo(self.user_employee).message_post(body='Test1', message_type='comment')
        # Expected total includes messages created by the fixture setup.
        self.assertEqual(len(self.post.message_ids), 4, 'website_forum: wrong behavior of message_post')
    def test_flag_a_post(self):
        # Flagging requires KARMA['flag']; a flagged active post moves to
        # state 'flagged'.
        Post = self.env['forum.post']
        self.user_portal.karma = KARMA['ask']
        post = Post.sudo(self.user_portal).create({
            'name': "Q0",
            'forum_id': self.forum.id,
        })
        # portal user flags a post: not allowed, insufficient karma
        with self.assertRaises(KarmaError):
            post.sudo(self.user_portal).flag()
        # portal user flags a post: ok if enough karma
        self.user_portal.karma = KARMA['flag']
        post.state = 'active'
        post.sudo(self.user_portal).flag()
        self.assertEqual(post.state, 'flagged', 'website_forum: wrong state when flagging a post')
def test_validate_a_post(self):
Post = self.env['forum.post']
self.user_portal.karma = KARMA['ask']
post = Post.sudo(self.user_portal).create({
'name': "Q0",
'forum_id': self.forum.id,
})
# portal user validate a post: not allowed, unsufficient karma
with self.assertRaises(KarmaError):
post.sudo(self.user_portal).validate()
# portal user validate a pending post
self.user_portal.karma = KARMA['moderate']
post.state = 'pending'
init_karma = post.create_uid.karma
post.sudo(self.user_portal).validate()
self.assertEqual(post.state, 'active', 'website_forum: wrong state when validate a post after pending')
self.assertEqual(post.create_uid.karma, init_karma + KARMA['gen_que_new'], 'website_forum: wrong karma when validate a post after pending')
# portal user validate a flagged post: ok if enough karma
self.user_portal.karma = KARMA['moderate']
post.state = 'flagged'
post.sudo(self.user_portal).validate()
self.assertEqual(post.state, 'active', 'website_forum: wrong state when validate a post after flagged')
# portal user validate an offensive post: ok if enough karma
self.user_portal.karma = KARMA['moderate']
post.state = 'offensive'
init_karma = post.create_uid.karma
post.sudo(self.user_portal).validate()
self.assertEqual(post.state, 'active', 'website_forum: wrong state when validate a post after offensive')
    def test_refuse_a_post(self):
        # Refusing requires KARMA['moderate']; it records the moderator
        # and must NOT change the author's karma.
        Post = self.env['forum.post']
        self.user_portal.karma = KARMA['ask']
        post = Post.sudo(self.user_portal).create({
            'name': "Q0",
            'forum_id': self.forum.id,
        })
        # portal user refuses a post: not allowed, insufficient karma
        with self.assertRaises(KarmaError):
            post.sudo(self.user_portal).refuse()
        # portal user refuses a pending post
        self.user_portal.karma = KARMA['moderate']
        post.state = 'pending'
        init_karma = post.create_uid.karma
        post.sudo(self.user_portal).refuse()
        self.assertEqual(post.moderator_id, self.user_portal, 'website_forum: wrong moderator_id when refusing')
        self.assertEqual(post.create_uid.karma, init_karma, 'website_forum: wrong karma when refusing a post')
    def test_mark_a_post_as_offensive(self):
        # Marking as offensive requires KARMA['moderate']; the post moves
        # to 'offensive' and the author is debited KARMA['gen_ans_flag'].
        Post = self.env['forum.post']
        self.user_portal.karma = KARMA['ask']
        post = Post.sudo(self.user_portal).create({
            'name': "Q0",
            'forum_id': self.forum.id,
        })
        # portal user mark a post as offensive: not allowed, insufficient karma
        with self.assertRaises(KarmaError):
            post.sudo(self.user_portal).mark_as_offensive(12)
        # portal user mark a post as offensive
        # 12: id of the close/offensive reason record — presumably demo
        # data; confirm against the forum reason fixtures.
        self.user_portal.karma = KARMA['moderate']
        post.state = 'flagged'
        init_karma = post.create_uid.karma
        post.sudo(self.user_portal).mark_as_offensive(12)
        self.assertEqual(post.state, 'offensive', 'website_forum: wrong state when marking a post as offensive')
        self.assertEqual(post.create_uid.karma, init_karma + KARMA['gen_ans_flag'], 'website_forum: wrong karma when marking a post as offensive')
    def test_convert_answer_to_comment_crash(self):
        # Converting a question (a post without parent) is a no-op; doing
        # the real conversion without karma raises.
        Post = self.env['forum.post']
        # converting a question does nothing
        msg_ids = self.post.sudo(self.user_portal).convert_answer_to_comment()
        self.assertEqual(msg_ids[0], False, 'website_forum: question to comment conversion failed')
        self.assertEqual(Post.search([('name', '=', 'TestQuestion')])[0].forum_id.name, 'TestForum', 'website_forum: question to comment conversion failed')
        with self.assertRaises(KarmaError):
            self.answer.sudo(self.user_portal).convert_answer_to_comment()
    def test_convert_answer_to_comment(self):
        # With KARMA['com_conv_all'] the answer becomes a single comment
        # keeping the original author and body.
        self.user_portal.karma = KARMA['com_conv_all']
        post_author = self.answer.create_uid.partner_id
        msg_ids = self.answer.sudo(self.user_portal).convert_answer_to_comment()
        self.assertEqual(len(msg_ids), 1, 'website_forum: wrong answer to comment conversion')
        msg = self.env['mail.message'].browse(msg_ids[0])
        self.assertEqual(msg.author_id, post_author, 'website_forum: wrong answer to comment conversion')
        # 'I am an anteater' — presumably the answer body set in setUp.
        self.assertIn('I am an anteater', msg.body, 'website_forum: wrong answer to comment conversion')
    def test_edit_post_crash(self):
        # Editing someone else's post without karma must raise.
        with self.assertRaises(KarmaError):
            self.post.sudo(self.user_portal).write({'name': 'I am not your father.'})
    def test_edit_post(self):
        # Own posts need KARMA['edit_own']; others' posts need KARMA['edit_all'].
        self.post.create_uid.karma = KARMA['edit_own']
        self.post.write({'name': 'Actually I am your dog.'})
        self.user_portal.karma = KARMA['edit_all']
        self.post.sudo(self.user_portal).write({'name': 'Actually I am your cat.'})
    def test_close_post_crash(self):
        # Closing without karma must raise.
        with self.assertRaises(KarmaError):
            self.post.sudo(self.user_portal).close(None)
    def test_close_post_own(self):
        # The author can close their own post with KARMA['close_own'].
        self.post.create_uid.karma = KARMA['close_own']
        self.post.close(None)
    def test_close_post_all(self):
        # Any user can close others' posts with KARMA['close_all'].
        self.user_portal.karma = KARMA['close_all']
        self.post.sudo(self.user_portal).close(None)
    def test_deactivate_post_crash(self):
        # Archiving (active=False) without karma must raise.
        with self.assertRaises(KarmaError):
            self.post.sudo(self.user_portal).write({'active': False})
    def test_deactivate_post_own(self):
        # Archiving one's own post uses the same karma as unlink_own.
        self.post.create_uid.karma = KARMA['unlink_own']
        self.post.write({'active': False})
    def test_deactivate_post_all(self):
        # Archiving others' posts uses the same karma as unlink_all.
        self.user_portal.karma = KARMA['unlink_all']
        self.post.sudo(self.user_portal).write({'active': False})
    def test_unlink_post_crash(self):
        # Deleting without karma must raise.
        with self.assertRaises(KarmaError):
            self.post.sudo(self.user_portal).unlink()
    def test_unlink_post_own(self):
        # The author can delete their own post with KARMA['unlink_own'].
        self.post.create_uid.karma = KARMA['unlink_own']
        self.post.unlink()
    def test_unlink_post_all(self):
        # Any user can delete others' posts with KARMA['unlink_all'].
        self.user_portal.karma = KARMA['unlink_all']
        self.post.sudo(self.user_portal).unlink()
| gpl-3.0 |
sschiau/swift | utils/swift_build_support/tests/test_targets.py | 60 | 1427 | # test_targets.py - Unit tests for swift_build_support.targets -*- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
import unittest
from swift_build_support.targets import StdlibDeploymentTarget
class HostTargetTestCase(unittest.TestCase):
    def test_is_not_none_on_this_platform(self):
        # Smoke test: the build support must be able to detect a stdlib
        # deployment target for whatever host it is running on.
        self.assertIsNotNone(StdlibDeploymentTarget.host_target())
class PlatformTargetsTestCase(unittest.TestCase):
    def test_platform_contains(self):
        """
        Checks that Platform.contains(target_name)
        matches all of its targets' names and rejects non-matching names.
        """
        # A sampling of platforms that carry many deployment targets.
        sampled_platforms = (StdlibDeploymentTarget.Linux,
                             StdlibDeploymentTarget.iOS,
                             StdlibDeploymentTarget.iOSSimulator)
        for sampled in sampled_platforms:
            # Every target the platform owns must be reported as contained.
            for known_target in sampled.targets:
                self.assertTrue(sampled.contains(known_target.name))
            # Made-up target names must be rejected.
            self.assertFalse(sampled.contains("fakeCPU-MSDOS"))
            self.assertFalse(sampled.contains("singleTransistor-fakeOS"))
# Allow running this test module directly, outside the full suite.
if __name__ == '__main__':
    unittest.main()
| apache-2.0 |
nelseric/qmk_firmware | lib/python/qmk/cli/c2json.py | 4 | 2196 | """Generate a keymap.json from a keymap.c file.
"""
import json
from milc import cli
import qmk.keymap
import qmk.path
@cli.argument('--no-cpp', arg_only=True, action='store_false', help='Do not use \'cpp\' on keymap.c')
@cli.argument('-o', '--output', arg_only=True, type=qmk.path.normpath, help='File to write to')
@cli.argument('-q', '--quiet', arg_only=True, action='store_true', help="Quiet mode, only output error messages")
@cli.argument('-kb', '--keyboard', arg_only=True, required=True, help='The keyboard\'s name')
@cli.argument('-km', '--keymap', arg_only=True, required=True, help='The keymap\'s name')
@cli.argument('filename', arg_only=True, help='keymap.c file')
@cli.subcommand('Creates a keymap.json from a keymap.c file.')
def c2json(cli):
    """Generate a keymap.json from a keymap.c file.
    This command uses the `qmk.keymap` module to generate a keymap.json from a keymap.c file. The generated keymap is written to stdout, or to a file if -o is provided.
    """
    # '-' means "read from stdin"; only normalize and check real paths.
    if cli.args.filename != '-':
        cli.args.filename = qmk.path.normpath(cli.args.filename)
        # Error checking
        if not cli.args.filename.exists():
            cli.log.error('C file does not exist!')
            cli.print_usage()
            return False
    # Environment processing: '-' as output means stdout.
    if cli.args.output == '-':
        cli.args.output = None
    # Parse the keymap.c
    keymap_json = qmk.keymap.c2json(cli.args.keyboard, cli.args.keymap, cli.args.filename, use_cpp=cli.args.no_cpp)
    # Generate the keymap.json
    try:
        keymap_json = qmk.keymap.generate_json(keymap_json['keymap'], keymap_json['keyboard'], keymap_json['layout'], keymap_json['layers'])
    except KeyError:
        cli.log.error('Something went wrong. Try to use --no-cpp.')
        return False
    if cli.args.output:
        cli.args.output.parent.mkdir(parents=True, exist_ok=True)
        if cli.args.output.exists():
            # Back up the old file NEXT TO the target. The previous code
            # passed a bare filename to Path.replace(), which moves the
            # backup into the current working directory instead.
            cli.args.output.replace(cli.args.output.with_name(cli.args.output.name + '.bak'))
        cli.args.output.write_text(json.dumps(keymap_json))
        if not cli.args.quiet:
            cli.log.info('Wrote keymap to %s.', cli.args.output)
    else:
        print(json.dumps(keymap_json))
| gpl-2.0 |
dulems/hue | desktop/core/ext-py/Django-1.6.10/django/contrib/auth/hashers.py | 95 | 17370 | from __future__ import unicode_literals
import base64
import binascii
import hashlib
from django.dispatch import receiver
from django.conf import settings
from django.test.signals import setting_changed
from django.utils import importlib
from django.utils.datastructures import SortedDict
from django.utils.encoding import force_bytes, force_str, force_text
from django.core.exceptions import ImproperlyConfigured
from django.utils.crypto import (
pbkdf2, constant_time_compare, get_random_string)
from django.utils.module_loading import import_by_path
from django.utils.translation import ugettext_noop as _
UNUSABLE_PASSWORD_PREFIX = '!' # This will never be a valid encoded hash
UNUSABLE_PASSWORD_SUFFIX_LENGTH = 40 # number of random chars to add after UNUSABLE_PASSWORD_PREFIX
HASHERS = None # lazily loaded from PASSWORD_HASHERS
PREFERRED_HASHER = None # defaults to first item in PASSWORD_HASHERS
@receiver(setting_changed)
def reset_hashers(**kwargs):
    # Invalidate the lazily-built hasher caches whenever the
    # PASSWORD_HASHERS setting changes (e.g. via override_settings in
    # tests) so they are rebuilt on next use.
    if kwargs['setting'] == 'PASSWORD_HASHERS':
        global HASHERS, PREFERRED_HASHER
        HASHERS = None
        PREFERRED_HASHER = None
def is_password_usable(encoded):
    """Return True if ``encoded`` looks like a hash a loaded hasher can check."""
    # Unusable passwords are stored as None or with the "!" prefix.
    if encoded is None:
        return False
    if encoded.startswith(UNUSABLE_PASSWORD_PREFIX):
        return False
    # A usable hash must be recognisable by one of the loaded hashers.
    try:
        identify_hasher(encoded)
    except ValueError:
        return False
    else:
        return True
def check_password(password, encoded, setter=None, preferred='default'):
    """
    Returns a boolean of whether the raw password matches the three
    part encoded digest.
    If setter is specified, it'll be called when you need to
    regenerate the password.
    """
    if password is None or not is_password_usable(encoded):
        return False
    preferred = get_hasher(preferred)
    hasher = identify_hasher(encoded)
    # Upgrade is needed when the stored hash uses a non-preferred
    # algorithm, or when the preferred hasher's parameters (e.g. the
    # PBKDF2 iteration count) have been raised since it was stored.
    must_update = hasher.algorithm != preferred.algorithm
    if not must_update:
        must_update = preferred.must_update(encoded)
    is_correct = hasher.verify(password, encoded)
    # Only rehash (via setter) when the password is correct; rehashing a
    # wrong password would overwrite the real credential.
    if setter and is_correct and must_update:
        setter(password)
    return is_correct
def make_password(password, salt=None, hasher='default'):
    """
    Turn a plain-text password into a hash for database storage
    Same as encode() but generates a new random salt.
    If password is None then a concatenation of
    UNUSABLE_PASSWORD_PREFIX and a random string will be returned
    which disallows logins. Additional random string reduces chances
    of gaining access to staff or superuser accounts.
    See ticket #20079 for more info.
    """
    if password is None:
        # The random suffix makes every unusable password unique, so two
        # disabled accounts never compare equal.
        return UNUSABLE_PASSWORD_PREFIX + get_random_string(UNUSABLE_PASSWORD_SUFFIX_LENGTH)
    hasher = get_hasher(hasher)
    if not salt:
        salt = hasher.salt()
    return hasher.encode(password, salt)
def load_hashers(password_hashers=None):
    # Populate the module-level HASHERS map and PREFERRED_HASHER from
    # settings.PASSWORD_HASHERS (or an explicit list, used by tests).
    global HASHERS
    global PREFERRED_HASHER
    hashers = []
    if not password_hashers:
        password_hashers = settings.PASSWORD_HASHERS
    for backend in password_hashers:
        hasher = import_by_path(backend)()
        if not getattr(hasher, 'algorithm'):
            raise ImproperlyConfigured("hasher doesn't specify an "
                                       "algorithm name: %s" % backend)
        hashers.append(hasher)
    HASHERS = dict([(hasher.algorithm, hasher) for hasher in hashers])
    # By convention the first configured hasher is the preferred one.
    PREFERRED_HASHER = hashers[0]
def get_hasher(algorithm='default'):
    """
    Returns an instance of a loaded password hasher.
    If algorithm is 'default', the default hasher will be returned.
    This function will also lazy import hashers specified in your
    settings file if needed.
    """
    # Passing a hasher instance through is allowed; detected duck-typed
    # via its 'algorithm' attribute.
    if hasattr(algorithm, 'algorithm'):
        return algorithm
    elif algorithm == 'default':
        if PREFERRED_HASHER is None:
            load_hashers()
        return PREFERRED_HASHER
    else:
        if HASHERS is None:
            load_hashers()
        if algorithm not in HASHERS:
            raise ValueError("Unknown password hashing algorithm '%s'. "
                             "Did you specify it in the PASSWORD_HASHERS "
                             "setting?" % algorithm)
        return HASHERS[algorithm]
def identify_hasher(encoded):
    """
    Returns an instance of a loaded password hasher.
    Identifies hasher algorithm by examining encoded hash, and calls
    get_hasher() to return hasher. Raises ValueError if
    algorithm cannot be identified, or if hasher is not loaded.
    """
    # Ancient versions of Django created plain MD5 passwords and accepted
    # MD5 passwords with an empty salt.
    # (32 = bare md5 hexdigest; 37 = len('md5$$') + 32.)
    if ((len(encoded) == 32 and '$' not in encoded) or
            (len(encoded) == 37 and encoded.startswith('md5$$'))):
        algorithm = 'unsalted_md5'
    # Ancient versions of Django accepted SHA1 passwords with an empty salt.
    # (46 = len('sha1$$') + 40-char sha1 hexdigest.)
    elif len(encoded) == 46 and encoded.startswith('sha1$$'):
        algorithm = 'unsalted_sha1'
    else:
        # Modern format: "algorithm$..." -- the prefix names the hasher.
        algorithm = encoded.split('$', 1)[0]
    return get_hasher(algorithm)
def mask_hash(hash, show=6, char="*"):
    """
    Returns the given hash, with only the first ``show`` number shown. The
    rest are masked with ``char`` for security reasons.
    """
    # Keep the visible prefix and pad the remainder with the mask
    # character; slicing handles hashes shorter than `show` gracefully.
    return hash[:show] + char * len(hash[show:])
class BasePasswordHasher(object):
    """
    Abstract base class for password hashers
    When creating your own hasher, you need to override algorithm,
    verify(), encode() and safe_summary().
    PasswordHasher objects are immutable.
    """
    # Unique identifier stored as the prefix of every encoded hash.
    algorithm = None
    # Optional third-party module (name, or (name, import path) pair)
    # required by the hasher; loaded lazily by _load_library().
    library = None
    def _load_library(self):
        # Import and return the module named by `library`, raising a
        # descriptive ValueError if it is missing or unset.
        if self.library is not None:
            if isinstance(self.library, (tuple, list)):
                name, mod_path = self.library
            else:
                name = mod_path = self.library
            try:
                module = importlib.import_module(mod_path)
            except ImportError as e:
                raise ValueError("Couldn't load %r algorithm library: %s" %
                                 (self.__class__.__name__, e))
            return module
        raise ValueError("Hasher %r doesn't specify a library attribute" %
                         self.__class__.__name__)
    def salt(self):
        """
        Generates a cryptographically secure nonce salt in ascii
        """
        return get_random_string()
    def verify(self, password, encoded):
        """
        Checks if the given password is correct
        """
        raise NotImplementedError()
    def encode(self, password, salt):
        """
        Creates an encoded database value
        The result is normally formatted as "algorithm$salt$hash" and
        must be fewer than 128 characters.
        """
        raise NotImplementedError()
    def safe_summary(self, encoded):
        """
        Returns a summary of safe values
        The result is a dictionary and will be used where the password field
        must be displayed to construct a safe representation of the password.
        """
        raise NotImplementedError()
    def must_update(self, encoded):
        # Subclasses may report that a stored hash should be re-encoded
        # (e.g. because the work factor was raised).
        return False
class PBKDF2PasswordHasher(BasePasswordHasher):
    """
    Secure password hashing using the PBKDF2 algorithm (recommended)
    Configured to use PBKDF2 + HMAC + SHA256 with 12000 iterations.
    The result is a 64 byte binary string. Iterations may be changed
    safely but you must rename the algorithm if you change SHA256.
    """
    algorithm = "pbkdf2_sha256"
    iterations = 12000
    digest = hashlib.sha256
    def encode(self, password, salt, iterations=None):
        # Encoded format: "pbkdf2_sha256$<iterations>$<salt>$<b64 hash>".
        assert password is not None
        assert salt and '$' not in salt
        if not iterations:
            iterations = self.iterations
        hash = pbkdf2(password, salt, iterations, digest=self.digest)
        hash = base64.b64encode(hash).decode('ascii').strip()
        return "%s$%d$%s$%s" % (self.algorithm, iterations, salt, hash)
    def verify(self, password, encoded):
        # Re-encode with the stored salt/iterations and compare in
        # constant time to avoid timing attacks.
        algorithm, iterations, salt, hash = encoded.split('$', 3)
        assert algorithm == self.algorithm
        encoded_2 = self.encode(password, salt, int(iterations))
        return constant_time_compare(encoded, encoded_2)
    def safe_summary(self, encoded):
        algorithm, iterations, salt, hash = encoded.split('$', 3)
        assert algorithm == self.algorithm
        return SortedDict([
            (_('algorithm'), algorithm),
            (_('iterations'), iterations),
            (_('salt'), mask_hash(salt)),
            (_('hash'), mask_hash(hash)),
        ])
    def must_update(self, encoded):
        # Rehash whenever the configured iteration count changes.
        algorithm, iterations, salt, hash = encoded.split('$', 3)
        return int(iterations) != self.iterations
class PBKDF2SHA1PasswordHasher(PBKDF2PasswordHasher):
    """
    Alternate PBKDF2 hasher which uses SHA1, the default PRF
    recommended by PKCS #5. This is compatible with other
    implementations of PBKDF2, such as openssl's
    PKCS5_PBKDF2_HMAC_SHA1().
    """
    # Only the algorithm tag and digest differ; everything else is
    # inherited from PBKDF2PasswordHasher.
    algorithm = "pbkdf2_sha1"
    digest = hashlib.sha1
class BCryptSHA256PasswordHasher(BasePasswordHasher):
    """
    Secure password hashing using the bcrypt algorithm (recommended)
    This is considered by many to be the most secure algorithm but you
    must first install the bcrypt library. Please be warned that
    this library depends on native C code and might cause portability
    issues.
    """
    algorithm = "bcrypt_sha256"
    digest = hashlib.sha256
    library = ("bcrypt", "bcrypt")
    # bcrypt work factor (log2 of the iteration count).
    rounds = 12
    def salt(self):
        bcrypt = self._load_library()
        return bcrypt.gensalt(self.rounds)
    def encode(self, password, salt):
        bcrypt = self._load_library()
        # Need to reevaluate the force_bytes call once bcrypt is supported on
        # Python 3
        # Hash the password prior to using bcrypt to prevent password truncation
        # See: https://code.djangoproject.com/ticket/20138
        if self.digest is not None:
            # We use binascii.hexlify here because Python3 decided that a hex encoded
            # bytestring is somehow a unicode.
            password = binascii.hexlify(self.digest(force_bytes(password)).digest())
        else:
            password = force_bytes(password)
        data = bcrypt.hashpw(password, salt)
        return "%s$%s" % (self.algorithm, force_text(data))
    def verify(self, password, encoded):
        algorithm, data = encoded.split('$', 1)
        assert algorithm == self.algorithm
        bcrypt = self._load_library()
        # Hash the password prior to using bcrypt to prevent password truncation
        # See: https://code.djangoproject.com/ticket/20138
        if self.digest is not None:
            # We use binascii.hexlify here because Python3 decided that a hex encoded
            # bytestring is somehow a unicode.
            password = binascii.hexlify(self.digest(force_bytes(password)).digest())
        else:
            password = force_bytes(password)
        # Ensure that our data is a bytestring
        data = force_bytes(data)
        # force_bytes() necessary for py-bcrypt compatibility
        hashpw = force_bytes(bcrypt.hashpw(password, data))
        # Constant-time comparison prevents timing side channels.
        return constant_time_compare(data, hashpw)
    def safe_summary(self, encoded):
        algorithm, empty, algostr, work_factor, data = encoded.split('$', 4)
        assert algorithm == self.algorithm
        # bcrypt packs a 22-char salt and the checksum into one field.
        salt, checksum = data[:22], data[22:]
        return SortedDict([
            (_('algorithm'), algorithm),
            (_('work factor'), work_factor),
            (_('salt'), mask_hash(salt)),
            (_('checksum'), mask_hash(checksum)),
        ])
class BCryptPasswordHasher(BCryptSHA256PasswordHasher):
    """
    Secure password hashing using the bcrypt algorithm
    This is considered by many to be the most secure algorithm but you
    must first install the bcrypt library. Please be warned that
    this library depends on native C code and might cause portability
    issues.
    This hasher does not first hash the password which means it is subject to
    the 72 character bcrypt password truncation, most use cases should prefer
    the BCryptSHA256PasswordHasher.
    See: https://code.djangoproject.com/ticket/20138
    """
    algorithm = "bcrypt"
    # digest=None disables the SHA256 pre-hash in the parent class.
    digest = None
class SHA1PasswordHasher(BasePasswordHasher):
    """
    The SHA1 password hashing algorithm (not recommended)
    """
    algorithm = "sha1"
    def encode(self, password, salt):
        # Encoded format: "sha1$<salt>$<hexdigest of salt+password>".
        assert password is not None
        assert salt and '$' not in salt
        hash = hashlib.sha1(force_bytes(salt + password)).hexdigest()
        return "%s$%s$%s" % (self.algorithm, salt, hash)
    def verify(self, password, encoded):
        # Re-encode with the stored salt and compare in constant time.
        algorithm, salt, hash = encoded.split('$', 2)
        assert algorithm == self.algorithm
        encoded_2 = self.encode(password, salt)
        return constant_time_compare(encoded, encoded_2)
    def safe_summary(self, encoded):
        algorithm, salt, hash = encoded.split('$', 2)
        assert algorithm == self.algorithm
        return SortedDict([
            (_('algorithm'), algorithm),
            (_('salt'), mask_hash(salt, show=2)),
            (_('hash'), mask_hash(hash)),
        ])
class MD5PasswordHasher(BasePasswordHasher):
    """
    The Salted MD5 password hashing algorithm (not recommended)
    """
    algorithm = "md5"
    def encode(self, password, salt):
        # Encoded format: "md5$<salt>$<hexdigest of salt+password>".
        assert password is not None
        assert salt and '$' not in salt
        hash = hashlib.md5(force_bytes(salt + password)).hexdigest()
        return "%s$%s$%s" % (self.algorithm, salt, hash)
    def verify(self, password, encoded):
        # Re-encode with the stored salt and compare in constant time.
        algorithm, salt, hash = encoded.split('$', 2)
        assert algorithm == self.algorithm
        encoded_2 = self.encode(password, salt)
        return constant_time_compare(encoded, encoded_2)
    def safe_summary(self, encoded):
        algorithm, salt, hash = encoded.split('$', 2)
        assert algorithm == self.algorithm
        return SortedDict([
            (_('algorithm'), algorithm),
            (_('salt'), mask_hash(salt, show=2)),
            (_('hash'), mask_hash(hash)),
        ])
class UnsaltedSHA1PasswordHasher(BasePasswordHasher):
    """
    Very insecure algorithm that you should *never* use; stores SHA1 hashes
    with an empty salt.
    This class is implemented because Django used to accept such password
    hashes. Some older Django installs still have these values lingering
    around so we need to handle and upgrade them properly.
    """
    algorithm = "unsalted_sha1"
    def salt(self):
        # Deliberately empty: this legacy format is unsalted.
        return ''
    def encode(self, password, salt):
        assert salt == ''
        hash = hashlib.sha1(force_bytes(password)).hexdigest()
        return 'sha1$$%s' % hash
    def verify(self, password, encoded):
        encoded_2 = self.encode(password, '')
        return constant_time_compare(encoded, encoded_2)
    def safe_summary(self, encoded):
        assert encoded.startswith('sha1$$')
        # Strip the 'sha1$$' prefix to expose the raw digest for masking.
        hash = encoded[6:]
        return SortedDict([
            (_('algorithm'), self.algorithm),
            (_('hash'), mask_hash(hash)),
        ])
class UnsaltedMD5PasswordHasher(BasePasswordHasher):
    """
    Incredibly insecure algorithm that you should *never* use; stores unsalted
    MD5 hashes without the algorithm prefix, also accepts MD5 hashes with an
    empty salt.
    This class is implemented because Django used to store passwords this way
    and to accept such password hashes. Some older Django installs still have
    these values lingering around so we need to handle and upgrade them
    properly.
    """
    algorithm = "unsalted_md5"
    def salt(self):
        # Deliberately empty: this legacy format is unsalted.
        return ''
    def encode(self, password, salt):
        assert salt == ''
        return hashlib.md5(force_bytes(password)).hexdigest()
    def verify(self, password, encoded):
        # Also accept the historical 'md5$$<digest>' form (37 chars).
        if len(encoded) == 37 and encoded.startswith('md5$$'):
            encoded = encoded[5:]
        encoded_2 = self.encode(password, '')
        return constant_time_compare(encoded, encoded_2)
    def safe_summary(self, encoded):
        return SortedDict([
            (_('algorithm'), self.algorithm),
            (_('hash'), mask_hash(encoded, show=3)),
        ])
class CryptPasswordHasher(BasePasswordHasher):
    """
    Password hashing using UNIX crypt (not recommended)
    The crypt module is not supported on all platforms.
    """
    algorithm = "crypt"
    library = "crypt"
    def salt(self):
        # Classic crypt(3) uses a two-character salt.
        return get_random_string(2)
    def encode(self, password, salt):
        crypt = self._load_library()
        assert len(salt) == 2
        data = crypt.crypt(force_str(password), salt)
        # we don't need to store the salt, but Django used to do this
        return "%s$%s$%s" % (self.algorithm, '', data)
    def verify(self, password, encoded):
        crypt = self._load_library()
        algorithm, salt, data = encoded.split('$', 2)
        assert algorithm == self.algorithm
        # crypt() can take the full previous output as salt; compare in
        # constant time.
        return constant_time_compare(data, crypt.crypt(force_str(password), data))
    def safe_summary(self, encoded):
        algorithm, salt, data = encoded.split('$', 2)
        assert algorithm == self.algorithm
        return SortedDict([
            (_('algorithm'), algorithm),
            (_('salt'), salt),
            (_('hash'), mask_hash(data, show=3)),
        ])
| apache-2.0 |
facelessuser/Pywin32 | lib/x32/win32com/demos/connect.py | 10 | 3658 | # Implements _both_ a connectable client, and a connectable server.
#
# Note that we cheat just a little - the Server in this demo is not created
# via Normal COM - this means we can avoid registering the server.
# However, the server _is_ accessed as a COM object - just the creation
# is cheated on - so this is still working as a fully-fledged server.
import pythoncom
import win32com.server.util
import win32com.server.connect
from win32com.server.exception import Exception
from pywin32_testutil import str2bytes
# This is the IID of the Events interface both Client and Server support.
IID_IConnectDemoEvents = pythoncom.MakeIID("{A4988850-49C3-11d0-AE5D-52342E000000}")
# The server which implements
# Create a connectable class, that has a single public method
# 'DoIt', which echos to a single sink 'DoneIt'
class ConnectableServer(win32com.server.connect.ConnectableServer):
    # COM server exposing one method, DoIt, which broadcasts a 'DoneIt'
    # event to every connected sink.
    _public_methods_ = ["DoIt"] + win32com.server.connect.ConnectableServer._public_methods_
    _connect_interfaces_ = [IID_IConnectDemoEvents]
    # The single public method that the client can call on us
    # (ie, as a normal COM server, this exposes just this single method.
    def DoIt(self,arg):
        # Simply broadcast a notification.
        self._BroadcastNotify(self.NotifyDoneIt, (arg,))
    def NotifyDoneIt(self, interface, arg):
        # 1000 is the DISPID of the sink's first public method (OnDoneIt).
        interface.Invoke(1000, 0, pythoncom.DISPATCH_METHOD, 1, arg)
# Here is the client side of the connection world.
# Define a COM object which implements the methods defined by the
# IConnectDemoEvents interface.
class ConnectableClient:
    # This is another cheat - I _know_ the server defines the "DoneIt" event
    # as DISPID==1000 - I also know from the implementation details of COM
    # that the first method in _public_methods_ gets 1000.
    # Normally some explicit DISPID->Method mapping is required.
    _public_methods_ = ["OnDoneIt"]
    def __init__(self):
        # Remember the last event payload so callers can verify delivery.
        self.last_event_arg = None
    # A client must implement QI, and respond to a query for the Event interface.
    # In addition, it must provide a COM object (which server.util.wrap) does.
    def _query_interface_(self, iid):
        import win32com.server.util
        # Note that this seems like a necessary hack. I am responding to IID_IConnectDemoEvents
        # but only creating an IDispatch gateway object.
        if iid==IID_IConnectDemoEvents: return win32com.server.util.wrap(self)
        # Implicit None return signals E_NOINTERFACE for any other IID.
    # And here is our event method which gets called.
    def OnDoneIt(self, arg):
        self.last_event_arg = arg
def CheckEvent(server, client, val, verbose):
    # Fire DoIt(val) on the server and check the client's sink saw the
    # same value come back through the connection point.
    client.last_event_arg = None
    server.DoIt(val)
    received = client.last_event_arg
    if received != val:
        raise RuntimeError("Sent %r, but got back %r" % (val, received))
    if verbose:
        print("Sent and received %r" % val)
# A simple test script for all this.
# In the real world, it is likely that the code controlling the server
# will be in the same class as that getting the notifications.
def test(verbose=0):
import win32com.client.dynamic, win32com.client.connect
import win32com.server.policy
server = win32com.client.dynamic.Dispatch(win32com.server.util.wrap(ConnectableServer()))
connection = win32com.client.connect.SimpleConnection()
client = ConnectableClient()
connection.Connect(server, client, IID_IConnectDemoEvents)
CheckEvent(server, client, "Hello", verbose)
CheckEvent(server, client, str2bytes("Here is a null>\x00<"), verbose)
CheckEvent(server, client, "Here is a null>\x00<", verbose)
val = "test-\xe0\xf2" # 2 extended characters.
CheckEvent(server, client, val, verbose)
if verbose:
print("Everything seemed to work!")
# Aggressive memory leak checking (ie, do nothing!) :-) All should cleanup OK???
# Run the demo verbosely when executed as a script.
if __name__=='__main__':
    test(1)
| bsd-3-clause |
aselle/tensorflow | tensorflow/python/keras/wrappers/scikit_learn.py | 26 | 12799 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Wrapper for using the Scikit-Learn API with Keras models.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
import types
import numpy as np
from tensorflow.python.keras.models import Sequential
from tensorflow.python.keras.utils.generic_utils import has_arg
from tensorflow.python.keras.utils.np_utils import to_categorical
from tensorflow.python.util.tf_export import tf_export
class BaseWrapper(object):
"""Base class for the Keras scikit-learn wrapper.
Warning: This class should not be used directly.
Use descendant classes instead.
Arguments:
build_fn: callable function or class instance
**sk_params: model parameters & fitting parameters
The `build_fn` should construct, compile and return a Keras model, which
will then be used to fit/predict. One of the following
three values could be passed to `build_fn`:
1. A function
2. An instance of a class that implements the `__call__` method
3. None. This means you implement a class that inherits from either
`KerasClassifier` or `KerasRegressor`. The `__call__` method of the
present class will then be treated as the default `build_fn`.
`sk_params` takes both model parameters and fitting parameters. Legal model
parameters are the arguments of `build_fn`. Note that like all other
estimators in scikit-learn, `build_fn` should provide default values for
its arguments, so that you could create the estimator without passing any
values to `sk_params`.
`sk_params` could also accept parameters for calling `fit`, `predict`,
`predict_proba`, and `score` methods (e.g., `epochs`, `batch_size`).
fitting (predicting) parameters are selected in the following order:
1. Values passed to the dictionary arguments of
`fit`, `predict`, `predict_proba`, and `score` methods
2. Values passed to `sk_params`
3. The default values of the `keras.models.Sequential`
`fit`, `predict`, `predict_proba` and `score` methods
When using scikit-learn's `grid_search` API, legal tunable parameters are
those you could pass to `sk_params`, including fitting parameters.
In other words, you could use `grid_search` to search for the best
`batch_size` or `epochs` as well as the model parameters.
"""
def __init__(self, build_fn=None, **sk_params):
self.build_fn = build_fn
self.sk_params = sk_params
self.check_params(sk_params)
def check_params(self, params):
"""Checks for user typos in `params`.
Arguments:
params: dictionary; the parameters to be checked
Raises:
ValueError: if any member of `params` is not a valid argument.
"""
legal_params_fns = [
Sequential.fit, Sequential.predict, Sequential.predict_classes,
Sequential.evaluate
]
if self.build_fn is None:
legal_params_fns.append(self.__call__)
elif (not isinstance(self.build_fn, types.FunctionType) and
not isinstance(self.build_fn, types.MethodType)):
legal_params_fns.append(self.build_fn.__call__)
else:
legal_params_fns.append(self.build_fn)
for params_name in params:
for fn in legal_params_fns:
if has_arg(fn, params_name):
break
else:
if params_name != 'nb_epoch':
raise ValueError('{} is not a legal parameter'.format(params_name))
def get_params(self, **params): # pylint: disable=unused-argument
"""Gets parameters for this estimator.
Arguments:
**params: ignored (exists for API compatibility).
Returns:
Dictionary of parameter names mapped to their values.
"""
res = copy.deepcopy(self.sk_params)
res.update({'build_fn': self.build_fn})
return res
  def set_params(self, **params):
    """Sets the parameters of this estimator.

    Arguments:
        **params: Dictionary of parameter names mapped to their values.

    Returns:
        self
    """
    # Validate first so an illegal name leaves sk_params untouched.
    self.check_params(params)
    self.sk_params.update(params)
    return self
  def fit(self, x, y, **kwargs):
    """Constructs a new model with `build_fn` & fit the model to `(x, y)`.

    Arguments:
        x : array-like, shape `(n_samples, n_features)`
            Training samples where `n_samples` is the number of samples
            and `n_features` is the number of features.
        y : array-like, shape `(n_samples,)` or `(n_samples, n_outputs)`
            True labels for `x`.
        **kwargs: dictionary arguments
            Legal arguments are the arguments of `Sequential.fit`

    Returns:
        history : object
            details about the training history at each epoch.
    """
    # Three ways to build the model, mirroring check_params: subclass
    # __call__, a callable object, or a plain function/method.
    if self.build_fn is None:
      self.model = self.__call__(**self.filter_sk_params(self.__call__))
    elif (not isinstance(self.build_fn, types.FunctionType) and
          not isinstance(self.build_fn, types.MethodType)):
      self.model = self.build_fn(
          **self.filter_sk_params(self.build_fn.__call__))
    else:
      self.model = self.build_fn(**self.filter_sk_params(self.build_fn))
    # The compiled loss may be a callable; compare by name in either case.
    loss_name = self.model.loss
    if hasattr(loss_name, '__name__'):
      loss_name = loss_name.__name__
    # One-hot encode integer labels when the loss expects 2-D targets.
    if loss_name == 'categorical_crossentropy' and len(y.shape) != 2:
      y = to_categorical(y)
    # Deep-copy so per-call kwargs never leak back into sk_params.
    fit_args = copy.deepcopy(self.filter_sk_params(Sequential.fit))
    fit_args.update(kwargs)
    history = self.model.fit(x, y, **fit_args)
    return history
def filter_sk_params(self, fn, override=None):
"""Filters `sk_params` and returns those in `fn`'s arguments.
Arguments:
fn : arbitrary function
override: dictionary, values to override `sk_params`
Returns:
res : dictionary containing variables
in both `sk_params` and `fn`'s arguments.
"""
override = override or {}
res = {}
for name, value in self.sk_params.items():
if has_arg(fn, name):
res.update({name: value})
res.update(override)
return res
@tf_export('keras.wrappers.scikit_learn.KerasClassifier')
class KerasClassifier(BaseWrapper):
  """Implementation of the scikit-learn classifier API for Keras.
  """

  def fit(self, x, y, **kwargs):
    """Constructs a new model with `build_fn` & fit the model to `(x, y)`.

    Arguments:
        x : array-like, shape `(n_samples, n_features)`
            Training samples where `n_samples` is the number of samples
            and `n_features` is the number of features.
        y : array-like, shape `(n_samples,)` or `(n_samples, n_outputs)`
            True labels for `x`.
        **kwargs: dictionary arguments
            Legal arguments are the arguments of `Sequential.fit`

    Returns:
        history : object
            details about the training history at each epoch.

    Raises:
        ValueError: In case of invalid shape for `y` argument.
    """
    y = np.array(y)
    if len(y.shape) == 2 and y.shape[1] > 1:
      # One-hot / multi-output targets: classes are the column indices.
      self.classes_ = np.arange(y.shape[1])
    elif (len(y.shape) == 2 and y.shape[1] == 1) or len(y.shape) == 1:
      # Plain label vector (or single column): remap labels to 0..K-1.
      self.classes_ = np.unique(y)
      y = np.searchsorted(self.classes_, y)
    else:
      raise ValueError('Invalid shape for y: ' + str(y.shape))
    self.n_classes_ = len(self.classes_)
    return super(KerasClassifier, self).fit(x, y, **kwargs)

  def predict(self, x, **kwargs):
    """Returns the class predictions for the given test data.

    Arguments:
        x: array-like, shape `(n_samples, n_features)`
            Test samples where `n_samples` is the number of samples
            and `n_features` is the number of features.
        **kwargs: dictionary arguments
            Legal arguments are the arguments
            of `Sequential.predict_classes`.

    Returns:
        preds: array-like, shape `(n_samples,)`
            Class predictions.
    """
    kwargs = self.filter_sk_params(Sequential.predict_classes, kwargs)
    classes = self.model.predict_classes(x, **kwargs)
    # Map predicted indices back to the original labels seen in fit().
    return self.classes_[classes]

  def predict_proba(self, x, **kwargs):
    """Returns class probability estimates for the given test data.

    Arguments:
        x: array-like, shape `(n_samples, n_features)`
            Test samples where `n_samples` is the number of samples
            and `n_features` is the number of features.
        **kwargs: dictionary arguments
            Legal arguments are the arguments
            of `Sequential.predict_proba`.

    Returns:
        proba: array-like, shape `(n_samples, n_outputs)`
            Class probability estimates.
            In the case of binary classification,
            to match the scikit-learn API,
            will return an array of shape `(n_samples, 2)`
            (instead of `(n_sample, 1)` as in Keras).
    """
    kwargs = self.filter_sk_params(Sequential.predict_proba, kwargs)
    probs = self.model.predict_proba(x, **kwargs)
    # check if binary classification
    if probs.shape[1] == 1:
      # first column is probability of class 0 and second is of class 1
      probs = np.hstack([1 - probs, probs])
    return probs

  def score(self, x, y, **kwargs):
    """Returns the mean accuracy on the given test data and labels.

    Arguments:
        x: array-like, shape `(n_samples, n_features)`
            Test samples where `n_samples` is the number of samples
            and `n_features` is the number of features.
        y: array-like, shape `(n_samples,)` or `(n_samples, n_outputs)`
            True labels for `x`.
        **kwargs: dictionary arguments
            Legal arguments are the arguments of `Sequential.evaluate`.

    Returns:
        score: float
            Mean accuracy of predictions on `x` wrt. `y`.

    Raises:
        ValueError: If the underlying model isn't configured to
            compute accuracy. You should pass `metrics=["accuracy"]` to
            the `.compile()` method of the model.
    """
    # Translate labels into the 0..K-1 index space used at fit() time.
    y = np.searchsorted(self.classes_, y)
    kwargs = self.filter_sk_params(Sequential.evaluate, kwargs)
    # The compiled loss may be a callable; compare by name in either case.
    loss_name = self.model.loss
    if hasattr(loss_name, '__name__'):
      loss_name = loss_name.__name__
    if loss_name == 'categorical_crossentropy' and len(y.shape) != 2:
      y = to_categorical(y)
    outputs = self.model.evaluate(x, y, **kwargs)
    # evaluate() returns a scalar when there is a single output/metric.
    if not isinstance(outputs, list):
      outputs = [outputs]
    for name, output in zip(self.model.metrics_names, outputs):
      if name == 'acc':
        return output
    raise ValueError('The model is not configured to compute accuracy. '
                     'You should pass `metrics=["accuracy"]` to '
                     'the `model.compile()` method.')
@tf_export('keras.wrappers.scikit_learn.KerasRegressor')
class KerasRegressor(BaseWrapper):
  """Implementation of the scikit-learn regressor API for Keras.
  """

  def predict(self, x, **kwargs):
    """Returns predictions for the given test data.

    Arguments:
        x: array-like, shape `(n_samples, n_features)`
            Test samples where `n_samples` is the number of samples
            and `n_features` is the number of features.
        **kwargs: dictionary arguments
            Legal arguments are the arguments of `Sequential.predict`.

    Returns:
        preds: array-like, shape `(n_samples,)`
            Predictions.
    """
    kwargs = self.filter_sk_params(Sequential.predict, kwargs)
    # Squeeze away the trailing singleton dimension Keras produces.
    return np.squeeze(self.model.predict(x, **kwargs))

  def score(self, x, y, **kwargs):
    """Returns the mean loss on the given test data and labels.

    Arguments:
        x: array-like, shape `(n_samples, n_features)`
            Test samples where `n_samples` is the number of samples
            and `n_features` is the number of features.
        y: array-like, shape `(n_samples,)`
            True labels for `x`.
        **kwargs: dictionary arguments
            Legal arguments are the arguments of `Sequential.evaluate`.

    Returns:
        score: float
            Negated mean loss of predictions on `x` wrt. `y` (negated so
            that larger is better, per scikit-learn's score convention).
    """
    kwargs = self.filter_sk_params(Sequential.evaluate, kwargs)
    loss = self.model.evaluate(x, y, **kwargs)
    if isinstance(loss, list):
      # evaluate() returned [loss, *metrics]; the loss comes first.
      return -loss[0]
    return -loss
| apache-2.0 |
fangxingli/hue | desktop/core/ext-py/boto-2.38.0/boto/beanstalk/__init__.py | 145 | 1680 | # Copyright (c) 2013 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
from boto.regioninfo import RegionInfo, get_regions
def regions():
    """
    Get all available regions for the AWS Elastic Beanstalk service.

    :rtype: list
    :return: A list of :class:`boto.regioninfo.RegionInfo`
    """
    # Imported lazily to avoid a circular import at module load time.
    from boto.beanstalk import layer1
    return get_regions('elasticbeanstalk', connection_cls=layer1.Layer1)
def connect_to_region(region_name, **kw_params):
    """Connect to the named region, or return ``None`` if it is unknown."""
    match = next((r for r in regions() if r.name == region_name), None)
    if match is None:
        return None
    return match.connect(**kw_params)
| apache-2.0 |
surgebiswas/poker | PokerBots_2017/Johnny/yaml/representer.py | 64 | 17711 |
__all__ = ['BaseRepresenter', 'SafeRepresenter', 'Representer',
'RepresenterError']
from error import *
from nodes import *
import datetime
import sys, copy_reg, types
# Raised when a native object cannot be converted to a YAML node.
class RepresenterError(YAMLError):
    pass
class BaseRepresenter(object):
    """Core machinery that turns native Python objects into YAML nodes.

    Converters are registered per type via ``add_representer`` (exact type
    match) or ``add_multi_representer`` (matched along the type's MRO).
    A registration under key ``None`` acts as a catch-all fallback.
    """

    # Class-level registries; copied on first per-subclass registration so
    # a subclass never mutates its parent's tables (see add_representer).
    yaml_representers = {}
    yaml_multi_representers = {}

    def __init__(self, default_style=None, default_flow_style=None):
        # Defaults applied to produced nodes when no explicit style given.
        self.default_style = default_style
        self.default_flow_style = default_flow_style
        # Per-document state for aliasing (anchors) of shared objects.
        self.represented_objects = {}
        self.object_keeper = []
        self.alias_key = None

    def represent(self, data):
        """Represent `data` as a node tree, serialize it, reset state."""
        node = self.represent_data(data)
        self.serialize(node)
        self.represented_objects = {}
        self.object_keeper = []
        self.alias_key = None

    def get_classobj_bases(self, cls):
        # Old-style (classic) classes lack __mro__; walk bases recursively.
        bases = [cls]
        for base in cls.__bases__:
            bases.extend(self.get_classobj_bases(base))
        return bases

    def represent_data(self, data):
        """Dispatch `data` to the registered representer; returns a node."""
        if self.ignore_aliases(data):
            self.alias_key = None
        else:
            # id() keys alias detection for shared/recursive objects.
            self.alias_key = id(data)
        if self.alias_key is not None:
            if self.alias_key in self.represented_objects:
                node = self.represented_objects[self.alias_key]
                #if node is None:
                #    raise RepresenterError("recursive objects are not allowed: %r" % data)
                return node
            #self.represented_objects[alias_key] = None
            # Keep the object alive so its id() cannot be recycled while
            # the document is being built.
            self.object_keeper.append(data)
        data_types = type(data).__mro__
        if type(data) is types.InstanceType:
            # Classic-class instance: prepend its class hierarchy.
            data_types = self.get_classobj_bases(data.__class__)+list(data_types)
        if data_types[0] in self.yaml_representers:
            # Exact-type representer wins.
            node = self.yaml_representers[data_types[0]](self, data)
        else:
            # Otherwise try multi-representers along the MRO, then the
            # None catch-alls, then fall back to a plain scalar.
            for data_type in data_types:
                if data_type in self.yaml_multi_representers:
                    node = self.yaml_multi_representers[data_type](self, data)
                    break
            else:
                if None in self.yaml_multi_representers:
                    node = self.yaml_multi_representers[None](self, data)
                elif None in self.yaml_representers:
                    node = self.yaml_representers[None](self, data)
                else:
                    node = ScalarNode(None, unicode(data))
        #if alias_key is not None:
        #    self.represented_objects[alias_key] = node
        return node

    def add_representer(cls, data_type, representer):
        # Copy-on-write: give this class its own registry before mutating.
        if not 'yaml_representers' in cls.__dict__:
            cls.yaml_representers = cls.yaml_representers.copy()
        cls.yaml_representers[data_type] = representer
    add_representer = classmethod(add_representer)

    def add_multi_representer(cls, data_type, representer):
        # Same copy-on-write scheme as add_representer.
        if not 'yaml_multi_representers' in cls.__dict__:
            cls.yaml_multi_representers = cls.yaml_multi_representers.copy()
        cls.yaml_multi_representers[data_type] = representer
    add_multi_representer = classmethod(add_multi_representer)

    def represent_scalar(self, tag, value, style=None):
        """Build a ScalarNode and record it for alias resolution."""
        if style is None:
            style = self.default_style
        node = ScalarNode(tag, value, style=style)
        if self.alias_key is not None:
            self.represented_objects[self.alias_key] = node
        return node

    def represent_sequence(self, tag, sequence, flow_style=None):
        """Build a SequenceNode, representing each item recursively."""
        value = []
        node = SequenceNode(tag, value, flow_style=flow_style)
        # Record the node BEFORE recursing so cycles resolve to it.
        if self.alias_key is not None:
            self.represented_objects[self.alias_key] = node
        # flow style is only "best" if every child is a plain scalar.
        best_style = True
        for item in sequence:
            node_item = self.represent_data(item)
            if not (isinstance(node_item, ScalarNode) and not node_item.style):
                best_style = False
            value.append(node_item)
        if flow_style is None:
            if self.default_flow_style is not None:
                node.flow_style = self.default_flow_style
            else:
                node.flow_style = best_style
        return node

    def represent_mapping(self, tag, mapping, flow_style=None):
        """Build a MappingNode with sorted keys (for deterministic output)."""
        value = []
        node = MappingNode(tag, value, flow_style=flow_style)
        # Record the node BEFORE recursing so cycles resolve to it.
        if self.alias_key is not None:
            self.represented_objects[self.alias_key] = node
        best_style = True
        if hasattr(mapping, 'items'):
            mapping = mapping.items()
            mapping.sort()
        for item_key, item_value in mapping:
            node_key = self.represent_data(item_key)
            node_value = self.represent_data(item_value)
            if not (isinstance(node_key, ScalarNode) and not node_key.style):
                best_style = False
            if not (isinstance(node_value, ScalarNode) and not node_value.style):
                best_style = False
            value.append((node_key, node_value))
        if flow_style is None:
            if self.default_flow_style is not None:
                node.flow_style = self.default_flow_style
            else:
                node.flow_style = best_style
        return node

    def ignore_aliases(self, data):
        # Base policy: alias everything; subclasses exempt immutables.
        return False
class SafeRepresenter(BaseRepresenter):
    """Representer limited to plain, safe-to-load Python types.

    NOTE(review): this is Python 2 code (`unicode`, the 'base64' str
    codec) and must not be modernized without porting the whole module.
    """

    def ignore_aliases(self, data):
        # Small immutable values gain nothing from anchors/aliases.
        # Falls through (returns None, i.e. falsy) for all other types.
        if data is None:
            return True
        if isinstance(data, tuple) and data == ():
            return True
        if isinstance(data, (str, unicode, bool, int, float)):
            return True

    def represent_none(self, data):
        return self.represent_scalar(u'tag:yaml.org,2002:null',
                u'null')

    def represent_str(self, data):
        tag = None
        style = None
        # Prefer ASCII, then UTF-8 text; otherwise emit binary (base64)
        # with literal block style.
        try:
            data = unicode(data, 'ascii')
            tag = u'tag:yaml.org,2002:str'
        except UnicodeDecodeError:
            try:
                data = unicode(data, 'utf-8')
                tag = u'tag:yaml.org,2002:str'
            except UnicodeDecodeError:
                data = data.encode('base64')
                tag = u'tag:yaml.org,2002:binary'
                style = '|'
        return self.represent_scalar(tag, data, style=style)

    def represent_unicode(self, data):
        return self.represent_scalar(u'tag:yaml.org,2002:str', data)

    def represent_bool(self, data):
        if data:
            value = u'true'
        else:
            value = u'false'
        return self.represent_scalar(u'tag:yaml.org,2002:bool', value)

    def represent_int(self, data):
        return self.represent_scalar(u'tag:yaml.org,2002:int', unicode(data))

    def represent_long(self, data):
        return self.represent_scalar(u'tag:yaml.org,2002:int', unicode(data))

    # Compute a value that compares equal to float infinity by repeated
    # squaring, without relying on float('inf') (old-platform safety).
    inf_value = 1e300
    while repr(inf_value) != repr(inf_value*inf_value):
        inf_value *= inf_value

    def represent_float(self, data):
        # `data != data` detects NaN; the second clause catches broken
        # platforms where NaN compares equal to everything.
        if data != data or (data == 0.0 and data == 1.0):
            value = u'.nan'
        elif data == self.inf_value:
            value = u'.inf'
        elif data == -self.inf_value:
            value = u'-.inf'
        else:
            value = unicode(repr(data)).lower()
            # Note that in some cases `repr(data)` represents a float number
            # without the decimal parts.  For instance:
            #   >>> repr(1e17)
            #   '1e17'
            # Unfortunately, this is not a valid float representation according
            # to the definition of the `!!float` tag.  We fix this by adding
            # '.0' before the 'e' symbol.
            if u'.' not in value and u'e' in value:
                value = value.replace(u'e', u'.0e', 1)
        return self.represent_scalar(u'tag:yaml.org,2002:float', value)

    def represent_list(self, data):
        #pairs = (len(data) > 0 and isinstance(data, list))
        #if pairs:
        #    for item in data:
        #        if not isinstance(item, tuple) or len(item) != 2:
        #            pairs = False
        #            break
        #if not pairs:
            return self.represent_sequence(u'tag:yaml.org,2002:seq', data)
        #value = []
        #for item_key, item_value in data:
        #    value.append(self.represent_mapping(u'tag:yaml.org,2002:map',
        #        [(item_key, item_value)]))
        #return SequenceNode(u'tag:yaml.org,2002:pairs', value)

    def represent_dict(self, data):
        return self.represent_mapping(u'tag:yaml.org,2002:map', data)

    def represent_set(self, data):
        # A YAML set is a mapping whose values are all null.
        value = {}
        for key in data:
            value[key] = None
        return self.represent_mapping(u'tag:yaml.org,2002:set', value)

    def represent_date(self, data):
        value = unicode(data.isoformat())
        return self.represent_scalar(u'tag:yaml.org,2002:timestamp', value)

    def represent_datetime(self, data):
        value = unicode(data.isoformat(' '))
        return self.represent_scalar(u'tag:yaml.org,2002:timestamp', value)

    def represent_yaml_object(self, tag, data, cls, flow_style=None):
        # Serialize via __getstate__ when available, else the instance dict.
        if hasattr(data, '__getstate__'):
            state = data.__getstate__()
        else:
            state = data.__dict__.copy()
        return self.represent_mapping(tag, state, flow_style=flow_style)

    def represent_undefined(self, data):
        raise RepresenterError("cannot represent an object: %s" % data)
# Default type -> representer wiring for SafeRepresenter.  The final
# registration under `None` makes unknown types an error.
SafeRepresenter.add_representer(type(None),
        SafeRepresenter.represent_none)

SafeRepresenter.add_representer(str,
        SafeRepresenter.represent_str)

SafeRepresenter.add_representer(unicode,
        SafeRepresenter.represent_unicode)

SafeRepresenter.add_representer(bool,
        SafeRepresenter.represent_bool)

SafeRepresenter.add_representer(int,
        SafeRepresenter.represent_int)

SafeRepresenter.add_representer(long,
        SafeRepresenter.represent_long)

SafeRepresenter.add_representer(float,
        SafeRepresenter.represent_float)

SafeRepresenter.add_representer(list,
        SafeRepresenter.represent_list)

# Tuples serialize as plain sequences, same as lists.
SafeRepresenter.add_representer(tuple,
        SafeRepresenter.represent_list)

SafeRepresenter.add_representer(dict,
        SafeRepresenter.represent_dict)

SafeRepresenter.add_representer(set,
        SafeRepresenter.represent_set)

SafeRepresenter.add_representer(datetime.date,
        SafeRepresenter.represent_date)

SafeRepresenter.add_representer(datetime.datetime,
        SafeRepresenter.represent_datetime)

SafeRepresenter.add_representer(None,
        SafeRepresenter.represent_undefined)
class Representer(SafeRepresenter):
    """Full (unsafe) representer: adds Python-specific tags so arbitrary
    objects round-trip via `!!python/...` nodes.

    NOTE(review): Python 2 only (`unicode`, `long`, `copy_reg`,
    `types.InstanceType`); loading these tags executes arbitrary code,
    hence "unsafe".
    """

    def represent_str(self, data):
        tag = None
        style = None
        # Same ladder as SafeRepresenter, but non-ASCII UTF-8 text gets a
        # python/str tag so its concrete type is preserved on load.
        try:
            data = unicode(data, 'ascii')
            tag = u'tag:yaml.org,2002:str'
        except UnicodeDecodeError:
            try:
                data = unicode(data, 'utf-8')
                tag = u'tag:yaml.org,2002:python/str'
            except UnicodeDecodeError:
                data = data.encode('base64')
                tag = u'tag:yaml.org,2002:binary'
                style = '|'
        return self.represent_scalar(tag, data, style=style)

    def represent_unicode(self, data):
        tag = None
        # ASCII-only unicode is tagged python/unicode to survive a
        # round-trip as `unicode` rather than `str`.
        try:
            data.encode('ascii')
            tag = u'tag:yaml.org,2002:python/unicode'
        except UnicodeEncodeError:
            tag = u'tag:yaml.org,2002:str'
        return self.represent_scalar(tag, data)

    def represent_long(self, data):
        tag = u'tag:yaml.org,2002:int'
        # Only values too large for `int` keep the python/long tag.
        if int(data) is not data:
            tag = u'tag:yaml.org,2002:python/long'
        return self.represent_scalar(tag, unicode(data))

    def represent_complex(self, data):
        # Emit the shortest textual form that parses back to the value.
        if data.imag == 0.0:
            data = u'%r' % data.real
        elif data.real == 0.0:
            data = u'%rj' % data.imag
        elif data.imag > 0:
            data = u'%r+%rj' % (data.real, data.imag)
        else:
            data = u'%r%rj' % (data.real, data.imag)
        return self.represent_scalar(u'tag:yaml.org,2002:python/complex', data)

    def represent_tuple(self, data):
        return self.represent_sequence(u'tag:yaml.org,2002:python/tuple', data)

    def represent_name(self, data):
        # Classes and functions serialize as their dotted import path.
        name = u'%s.%s' % (data.__module__, data.__name__)
        return self.represent_scalar(u'tag:yaml.org,2002:python/name:'+name, u'')

    def represent_module(self, data):
        return self.represent_scalar(
                u'tag:yaml.org,2002:python/module:'+data.__name__, u'')

    def represent_instance(self, data):
        # For instances of classic classes, we use __getinitargs__ and
        # __getstate__ to serialize the data.
        # If data.__getinitargs__ exists, the object must be reconstructed by
        # calling cls(**args), where args is a tuple returned by
        # __getinitargs__. Otherwise, the cls.__init__ method should never be
        # called and the class instance is created by instantiating a trivial
        # class and assigning to the instance's __class__ variable.
        # If data.__getstate__ exists, it returns the state of the object.
        # Otherwise, the state of the object is data.__dict__.
        # We produce either a !!python/object or !!python/object/new node.
        # If data.__getinitargs__ does not exist and state is a dictionary, we
        # produce a !!python/object node . Otherwise we produce a
        # !!python/object/new node.
        cls = data.__class__
        class_name = u'%s.%s' % (cls.__module__, cls.__name__)
        args = None
        state = None
        if hasattr(data, '__getinitargs__'):
            args = list(data.__getinitargs__())
        if hasattr(data, '__getstate__'):
            state = data.__getstate__()
        else:
            state = data.__dict__
        if args is None and isinstance(state, dict):
            return self.represent_mapping(
                    u'tag:yaml.org,2002:python/object:'+class_name, state)
        if isinstance(state, dict) and not state:
            return self.represent_sequence(
                    u'tag:yaml.org,2002:python/object/new:'+class_name, args)
        value = {}
        if args:
            value['args'] = args
        value['state'] = state
        return self.represent_mapping(
                u'tag:yaml.org,2002:python/object/new:'+class_name, value)

    def represent_object(self, data):
        # We use __reduce__ API to save the data. data.__reduce__ returns
        # a tuple of length 2-5:
        #   (function, args, state, listitems, dictitems)
        # For reconstructing, we calls function(*args), then set its state,
        # listitems, and dictitems if they are not None.
        # A special case is when function.__name__ == '__newobj__'. In this
        # case we create the object with args[0].__new__(*args).
        # Another special case is when __reduce__ returns a string - we don't
        # support it.
        # We produce a !!python/object, !!python/object/new or
        # !!python/object/apply node.
        cls = type(data)
        if cls in copy_reg.dispatch_table:
            reduce = copy_reg.dispatch_table[cls](data)
        elif hasattr(data, '__reduce_ex__'):
            reduce = data.__reduce_ex__(2)
        elif hasattr(data, '__reduce__'):
            reduce = data.__reduce__()
        else:
            raise RepresenterError("cannot represent object: %r" % data)
        # Pad the reduce tuple to exactly five elements.
        reduce = (list(reduce)+[None]*5)[:5]
        function, args, state, listitems, dictitems = reduce
        args = list(args)
        if state is None:
            state = {}
        if listitems is not None:
            listitems = list(listitems)
        if dictitems is not None:
            dictitems = dict(dictitems)
        if function.__name__ == '__newobj__':
            function = args[0]
            args = args[1:]
            tag = u'tag:yaml.org,2002:python/object/new:'
            newobj = True
        else:
            tag = u'tag:yaml.org,2002:python/object/apply:'
            newobj = False
        function_name = u'%s.%s' % (function.__module__, function.__name__)
        # Collapse to the most compact node form that loses no information.
        if not args and not listitems and not dictitems \
                and isinstance(state, dict) and newobj:
            return self.represent_mapping(
                    u'tag:yaml.org,2002:python/object:'+function_name, state)
        if not listitems and not dictitems  \
                and isinstance(state, dict) and not state:
            return self.represent_sequence(tag+function_name, args)
        value = {}
        if args:
            value['args'] = args
        if state or not isinstance(state, dict):
            value['state'] = state
        if listitems:
            value['listitems'] = listitems
        if dictitems:
            value['dictitems'] = dictitems
        return self.represent_mapping(tag+function_name, value)
# Wiring for the unsafe Representer: override the text/number handlers and
# add Python-specific types.  The `object` multi-representer is the
# catch-all that routes everything else through the __reduce__ protocol.
Representer.add_representer(str,
        Representer.represent_str)

Representer.add_representer(unicode,
        Representer.represent_unicode)

Representer.add_representer(long,
        Representer.represent_long)

Representer.add_representer(complex,
        Representer.represent_complex)

Representer.add_representer(tuple,
        Representer.represent_tuple)

Representer.add_representer(type,
        Representer.represent_name)

Representer.add_representer(types.ClassType,
        Representer.represent_name)

Representer.add_representer(types.FunctionType,
        Representer.represent_name)

Representer.add_representer(types.BuiltinFunctionType,
        Representer.represent_name)

Representer.add_representer(types.ModuleType,
        Representer.represent_module)

Representer.add_multi_representer(types.InstanceType,
        Representer.represent_instance)

Representer.add_multi_representer(object,
        Representer.represent_object)
| mit |
GunoH/intellij-community | python/helpers/pydev/pydev_tests_runfiles/test_pydevd_property.py | 26 | 3422 | '''
Created on Aug 22, 2011
@author: hussain.bohra
@author: fabioz
'''
import os
import sys
import unittest
#=======================================================================================================================
# Test
#=======================================================================================================================
class Test(unittest.TestCase):
    """Test cases to validate custom property implementation in pydevd
    """

    def setUp(self, nused=None):
        # Make the pydev root importable, then swap the builtin `property`
        # for pydevd's traceable DebugProperty for the duration of the test.
        self.tempdir = os.path.join(os.path.dirname(os.path.dirname(__file__)))
        sys.path.insert(0, self.tempdir)
        from _pydevd_bundle import pydevd_traceproperty
        self.old = pydevd_traceproperty.replace_builtin_property()

    def tearDown(self, unused=None):
        # Restore the original builtin `property` and undo the path change.
        from _pydevd_bundle import pydevd_traceproperty
        pydevd_traceproperty.replace_builtin_property(self.old)
        sys.path.remove(self.tempdir)

    def test_property(self):
        """Test case to validate custom property
        """
        from _pydevd_bundle import pydevd_traceproperty

        class TestProperty(object):

            def __init__(self):
                # Counters recording how many times each accessor ran.
                self._get = 0
                self._set = 0
                self._del = 0

            def get_name(self):
                self._get += 1
                return self.__name

            def set_name(self, value):
                self._set += 1
                self.__name = value

            def del_name(self):
                self._del += 1
                del self.__name
            name = property(get_name, set_name, del_name, "name's docstring")
            # `property` was replaced in setUp, so the descriptor created
            # above must be pydevd's DebugProperty.
            self.assertEqual(name.__class__, pydevd_traceproperty.DebugProperty)

        testObj = TestProperty()
        self._check(testObj)

    def test_property2(self):
        """Test case to validate custom property
        """

        class TestProperty(object):

            def __init__(self):
                self._get = 0
                self._set = 0
                self._del = 0

            def name(self):
                self._get += 1
                return self.__name
            name = property(name)

            def set_name(self, value):
                self._set += 1
                self.__name = value
            # NOTE(review): no reassignment here — this appears to rely on
            # DebugProperty.setter/deleter mutating in place, unlike the
            # builtin property which returns a new descriptor.
            name.setter(set_name)

            def del_name(self):
                self._del += 1
                del self.__name
            name.deleter(del_name)

        testObj = TestProperty()
        self._check(testObj)

    def test_property3(self):
        """Test case to validate custom property
        """

        class TestProperty(object):

            def __init__(self):
                self._name = 'foo'

            def name(self):
                return self._name
            name = property(name)

        testObj = TestProperty()
        # Read-only property: writes and deletes must raise.
        self.assertRaises(AttributeError, setattr, testObj, 'name', 'bar')
        self.assertRaises(AttributeError, delattr, testObj, 'name')

    def _check(self, testObj):
        # Shared assertions: each accessor runs exactly once per operation.
        testObj.name = "Custom"
        self.assertEqual(1, testObj._set)

        self.assertEqual(testObj.name, "Custom")
        self.assertEqual(1, testObj._get)

        self.assertTrue(hasattr(testObj, 'name'))
        del testObj.name
        self.assertEqual(1, testObj._del)

        self.assertTrue(not hasattr(testObj, 'name'))
        testObj.name = "Custom2"
        self.assertEqual(testObj.name, "Custom2")
| apache-2.0 |
SOKP/external_chromium_org | tools/perf/metrics/timeline_unittest.py | 32 | 7170 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
from metrics import test_page_test_results
from metrics import timeline
from telemetry.timeline import model as model_module
from telemetry.web_perf import timeline_interaction_record as tir_module
def _GetInteractionRecord(start, end):
  # Helper: builds a uniformly-named interaction record over [start, end].
  return tir_module.TimelineInteractionRecord("test-record", start, end)
class LoadTimesTimelineMetric(unittest.TestCase):
  """Tests for timeline.LoadTimesTimelineMetric over synthetic models."""

  def GetResults(self, metric, model, renderer_thread, interaction_records):
    # Runs the metric against a fresh results sink and returns the sink.
    results = test_page_test_results.TestPageTestResults(self)
    metric.AddResults(model, renderer_thread, interaction_records, results)
    return results

  def testSanitizing(self):
    # Event names containing '.' must be sanitized ('x.y' -> 'x_y').
    model = model_module.TimelineModel()
    renderer_main = model.GetOrCreateProcess(1).GetOrCreateThread(2)
    renderer_main.name = 'CrRendererMain'

    # [      X       ]
    # [      Y       ]
    renderer_main.BeginSlice('cat1', 'x.y', 10, 0)
    renderer_main.EndSlice(20, 20)
    model.FinalizeImport()

    metric = timeline.LoadTimesTimelineMetric()
    results = self.GetResults(
        metric, model=model, renderer_thread=renderer_main,
        interaction_records=[_GetInteractionRecord(0, float('inf'))])
    results.AssertHasPageSpecificScalarValue(
        'CrRendererMain|x_y', 'ms', 10)
    results.AssertHasPageSpecificScalarValue(
        'CrRendererMain|x_y_max', 'ms', 10)
    results.AssertHasPageSpecificScalarValue(
        'CrRendererMain|x_y_avg', 'ms', 10)

  def testTimelineBetweenRange(self):
    # Slices outside the interaction record's range must be excluded.
    model = model_module.TimelineModel()
    renderer_main = model.GetOrCreateProcess(1).GetOrCreateThread(2)
    renderer_main.name = 'CrRendererMain'

    # [      X       ]        [   Z   ]
    # [      Y       ]        [   T   ]
    # [ interaction record ]
    renderer_main.BeginSlice('cat1', 'x.y', 10, 0)
    renderer_main.EndSlice(20, 20)
    renderer_main.BeginSlice('cat1', 'z.t', 30, 0)
    renderer_main.EndSlice(35, 35)
    model.FinalizeImport()

    metric = timeline.LoadTimesTimelineMetric()
    results = self.GetResults(
        metric, model=model, renderer_thread=renderer_main,
        interaction_records=[_GetInteractionRecord(10, 20)])
    results.AssertHasPageSpecificScalarValue(
        'CrRendererMain|x_y', 'ms', 10)
    results.AssertHasPageSpecificScalarValue(
        'CrRendererMain|x_y_max', 'ms', 10)
    results.AssertHasPageSpecificScalarValue(
        'CrRendererMain|x_y_avg', 'ms', 10)

  def testCounterSanitizing(self):
    # Counter names are sanitized the same way as slice names.
    model = model_module.TimelineModel()
    renderer_main = model.GetOrCreateProcess(1).GetOrCreateThread(2)
    renderer_main.name = 'CrRendererMain'

    x_counter = renderer_main.parent.GetOrCreateCounter('cat', 'x.y')
    x_counter.samples += [1, 2]
    x_counter.series_names += ['a']
    x_counter.timestamps += [0, 1]
    model.FinalizeImport()

    metric = timeline.LoadTimesTimelineMetric()
    results = self.GetResults(
        metric, model=model, renderer_thread=renderer_main,
        interaction_records=[_GetInteractionRecord(0, float('inf'))])
    results.AssertHasPageSpecificScalarValue(
        'cat_x_y', 'count', 3)
    results.AssertHasPageSpecificScalarValue(
        'cat_x_y_avg', 'count', 1.5)
class ThreadTimesTimelineMetricUnittest(unittest.TestCase):
  """Tests for timeline.ThreadTimesTimelineMetric.

  NOTE(review): uses dict.iteritems(), i.e. this file predates Python 3.
  """

  def GetResults(self, metric, model, renderer_thread, interaction_record):
    # Runs the metric against a fresh results sink and returns the sink.
    results = test_page_test_results.TestPageTestResults(self)
    metric.AddResults(model, renderer_thread, interaction_record,
                      results)
    return results

  def testResults(self):
    # Even with no activity, every thread category must produce a value.
    model = model_module.TimelineModel()
    renderer_main = model.GetOrCreateProcess(1).GetOrCreateThread(2)
    renderer_main.name = 'CrRendererMain'

    metric = timeline.ThreadTimesTimelineMetric()
    metric.details_to_report = timeline.ReportMainThreadOnly
    results = self.GetResults(metric, model, renderer_main.parent,
                              [_GetInteractionRecord(1,2)])

    # Test that all result thread categories exist
    for name in timeline.TimelineThreadCategories.values():
      results.GetPageSpecificValueNamed(timeline.ThreadCpuTimeResultName(name))

  def testBasic(self):
    model = model_module.TimelineModel()
    renderer_main = model.GetOrCreateProcess(1).GetOrCreateThread(2)
    renderer_main.name = 'CrRendererMain'

    # Create two frame swaps (Results times should be divided by two)
    cc_main = model.GetOrCreateProcess(1).GetOrCreateThread(3)
    cc_main.name = 'Compositor'
    cc_main.BeginSlice('cc_cat', timeline.FrameTraceName, 10, 10)
    cc_main.EndSlice(11, 11)
    cc_main.BeginSlice('cc_cat', timeline.FrameTraceName, 12, 12)
    cc_main.EndSlice(13, 13)

    # [      X       ]   [ Z ]
    #    [  Y  ]
    renderer_main.BeginSlice('cat1', 'X', 10, 0)
    renderer_main.BeginSlice('cat2', 'Y', 15, 5)
    renderer_main.EndSlice(16, 5.5)
    renderer_main.EndSlice(30, 19.5)
    renderer_main.BeginSlice('cat1', 'Z', 31, 20)
    renderer_main.BeginSlice('cat1', 'Z', 33, 21)
    model.FinalizeImport()

    # Exclude 'Z' using an action-range.
    metric = timeline.ThreadTimesTimelineMetric()
    metric.details_to_report = timeline.ReportMainThreadOnly
    results = self.GetResults(metric, model, renderer_main.parent,
                              [_GetInteractionRecord(10, 30)])

    # Test a couple specific results.
    assert_results = {
        timeline.ThreadCpuTimeResultName('renderer_main') : 9.75,
        timeline.ThreadDetailResultName('renderer_main','cat1') : 9.5,
        timeline.ThreadDetailResultName('renderer_main','cat2') : 0.5,
        timeline.ThreadDetailResultName('renderer_main','idle') : 0
    }
    for name, value in assert_results.iteritems():
      results.AssertHasPageSpecificScalarValue(name, 'ms', value)

  def testOverheadIsRemoved(self):
    model = model_module.TimelineModel()
    renderer_main = model.GetOrCreateProcess(1).GetOrCreateThread(2)
    renderer_main.name = 'CrRendererMain'

    # Create one frame swap.
    cc_main = model.GetOrCreateProcess(1).GetOrCreateThread(3)
    cc_main.name = 'Compositor'
    cc_main.BeginSlice('cc_cat', timeline.FrameTraceName, 10, 10)
    cc_main.EndSlice(11, 11)

    # [      X      ]
    #   [Overhead]
    overhead_category = timeline.OverheadTraceCategory
    overhead_name = timeline.OverheadTraceName
    renderer_main.BeginSlice('cat1', 'X', 10, 0)
    renderer_main.BeginSlice(overhead_category, overhead_name, 15, 5)
    renderer_main.EndSlice(16, 6)
    renderer_main.EndSlice(30, 10)
    model.FinalizeImport()

    # Include everything in an action-range.
    metric = timeline.ThreadTimesTimelineMetric()
    metric.details_to_report = timeline.ReportMainThreadOnly
    results = self.GetResults(metric, model, renderer_main.parent,
                              [_GetInteractionRecord(10, 30)])

    # Test a couple specific results.
    assert_results = {
        timeline.ThreadCpuTimeResultName('renderer_main') : 9.0,
    }
    for name, value in assert_results.iteritems():
      results.AssertHasPageSpecificScalarValue(name, 'ms', value)
| bsd-3-clause |
XiaosongWei/blink-crosswalk | Tools/Scripts/webkitpy/common/checksvnconfigfile.py | 37 | 2672 | # Copyright (C) 2012 Balazs Ankes (bank@inf.u-szeged.hu) University of Szeged
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# This file is used by:
# webkitpy/style/checkers/png.py
import os
import re
def check(host, fs):
    """Check the user's SVN config file for png-related settings.

    Returns a tuple of three booleans:
    (config file missing, auto-props missing, svn:mime-type for png missing)
    """
    cfg_file_path = config_file_path(host, fs)
    try:
        config_file = fs.read_text_file(cfg_file_path)
    except IOError:
        # No readable config file at all: report everything as missing.
        return (True, True, True)
    # Raw strings: the patterns contain \s and \* escapes, which are invalid
    # escape sequences in a plain string literal (DeprecationWarning on py3).
    errorcode_autoprop = not re.search(r"^\s*enable-auto-props\s*=\s*yes", config_file, re.MULTILINE)
    errorcode_png = not re.search(r"^\s*\*\.png\s*=\s*svn:mime-type=image/png", config_file, re.MULTILINE)
    return (False, errorcode_autoprop, errorcode_png)
def config_file_path(host, fs):
    """Return the platform-specific path of the user's Subversion config file."""
    if host.platform.is_win():
        # Windows keeps the Subversion config under %APPDATA%.
        return fs.join(os.environ['APPDATA'], "Subversion", "config")
    return fs.join(fs.expanduser("~"), ".subversion", "config")
def errorstr_autoprop(config_file_path):
    """Build the user-facing message for a missing enable-auto-props setting."""
    template = 'Have to enable auto props in the subversion config file (%s "enable-auto-props = yes"). '
    return template % config_file_path
def errorstr_png(config_file_path):
    """Build the user-facing message for a missing png svn:mime-type rule."""
    template = 'Have to set the svn:mime-type in the subversion config file (%s "*.png = svn:mime-type=image/png").'
    return template % config_file_path
| bsd-3-clause |
sysadminmatmoz/OCB | addons/account_budget/__openerp__.py | 27 | 2220 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Budgets Management',
'version': '1.0',
'category': 'Accounting & Finance',
'description': """
This module allows accountants to manage analytic and crossovered budgets.
==========================================================================
Once the Budgets are defined (in Invoicing/Budgets/Budgets), the Project Managers
can set the planned amount on each Analytic Account.
The accountant has the possibility to see the total of amount planned for each
Budget in order to ensure the total planned is not greater/lower than what he
planned for this Budget. Each list of record can also be switched to a graphical
view of it.
Three reports are available:
----------------------------
1. The first is available from a list of Budgets. It gives the spreading, for
these Budgets, of the Analytic Accounts.
2. The second is a summary of the previous one, it only gives the spreading,
for the selected Budgets, of the Analytic Accounts.
3. The last one is available from the Analytic Chart of Accounts. It gives
the spreading, for the selected Analytic Accounts of Budgets.
""",
'website': 'https://www.odoo.com/page/accounting',
'depends': ['account'],
'data': [
'security/ir.model.access.csv',
'security/account_budget_security.xml',
'account_budget_view.xml',
'account_budget_report.xml',
'account_budget_workflow.xml',
'wizard/account_budget_analytic_view.xml',
'wizard/account_budget_report_view.xml',
'wizard/account_budget_crossovered_summary_report_view.xml',
'wizard/account_budget_crossovered_report_view.xml',
'views/report_analyticaccountbudget.xml',
'views/report_budget.xml',
'views/report_crossoveredbudget.xml',
],
'demo': ['account_budget_demo.xml', 'account_budget_demo.yml'],
'test': [
'../account/test/account_minimal_test.xml',
'account_budget_demo.yml',
'test/account_budget.yml',
'test/account_budget_report.yml',
],
'installable': True,
'auto_install': False,
}
| agpl-3.0 |
ansible/ansible | test/lib/ansible_test/_internal/ci/__init__.py | 21 | 8224 | """Support code for CI environments."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import abc
import base64
import json
import os
import tempfile
from .. import types as t
from ..encoding import (
to_bytes,
to_text,
)
from ..io import (
read_text_file,
write_text_file,
)
from ..config import (
CommonConfig,
TestConfig,
)
from ..util import (
ABC,
ApplicationError,
display,
get_subclasses,
import_plugins,
raw_command,
)
class ChangeDetectionNotSupported(ApplicationError):
    """Exception raised when the active CI provider cannot perform change detection."""
class AuthContext:
    """Context information required for Ansible Core CI authentication."""
    def __init__(self):  # type: () -> None
        # Currently stateless; exists so providers share a common signature.
        pass
class CIProvider(ABC):
    """Base class for CI provider plugins.

    Subclasses are discovered via ``import_plugins('ci')`` and selected by
    ``get_ci_provider()`` in ascending (priority, class name) order; the first
    one whose ``is_supported()`` returns True wins.
    """
    # Lower values are tried first during provider selection.
    priority = 500

    @staticmethod
    @abc.abstractmethod
    def is_supported():  # type: () -> bool
        """Return True if this provider is supported in the current running environment."""

    @property
    @abc.abstractmethod
    def code(self):  # type: () -> str
        """Return a unique code representing this provider."""

    @property
    @abc.abstractmethod
    def name(self):  # type: () -> str
        """Return descriptive name for this provider."""

    @abc.abstractmethod
    def generate_resource_prefix(self):  # type: () -> str
        """Return a resource prefix specific to this CI provider."""

    @abc.abstractmethod
    def get_base_branch(self):  # type: () -> str
        """Return the base branch or an empty string."""

    @abc.abstractmethod
    def detect_changes(self, args):  # type: (TestConfig) -> t.Optional[t.List[str]]
        """Initialize change detection."""

    @abc.abstractmethod
    def supports_core_ci_auth(self, context):  # type: (AuthContext) -> bool
        """Return True if Ansible Core CI is supported."""

    @abc.abstractmethod
    def prepare_core_ci_auth(self, context):  # type: (AuthContext) -> t.Dict[str, t.Any]
        """Return authentication details for Ansible Core CI."""

    @abc.abstractmethod
    def get_git_details(self, args):  # type: (CommonConfig) -> t.Optional[t.Dict[str, t.Any]]
        """Return details about git in the current environment."""
def get_ci_provider():  # type: () -> CIProvider
    """Return a CI provider instance for the current environment.

    The detected provider is cached as an attribute on this function, so
    detection only runs once per process.
    """
    try:
        return get_ci_provider.provider
    except AttributeError:
        pass

    provider = None

    import_plugins('ci')

    # Candidates are tried in ascending priority, ties broken by class name.
    candidates = sorted(get_subclasses(CIProvider), key=lambda c: (c.priority, c.__name__))

    for candidate in candidates:
        if candidate.is_supported():
            provider = candidate()
            break

    if provider is None:
        # Previously this fell through to an opaque AttributeError on
        # `provider.code`; fail with a meaningful error instead.
        raise ApplicationError('No supported CI provider was detected.')

    if provider.code:
        display.info('Detected CI provider: %s' % provider.name)

    get_ci_provider.provider = provider

    return provider
class AuthHelper(ABC):
    """Public key based authentication helper for Ansible Core CI."""
    def sign_request(self, request):  # type: (t.Dict[str, t.Any]) -> None
        """Sign the given auth request and make the public key available.

        The request dict is serialized deterministically (sorted keys), signed,
        and the base64-encoded signature is added to the dict in place.
        """
        payload_bytes = to_bytes(json.dumps(request, sort_keys=True))
        signature_raw_bytes = self.sign_bytes(payload_bytes)
        signature = to_text(base64.b64encode(signature_raw_bytes))

        request.update(signature=signature)

    def initialize_private_key(self):  # type: () -> str
        """
        Initialize and publish a new key pair (if needed) and return the private key.
        The private key is cached across ansible-test invocations so it is only generated and published once per CI job.
        """
        path = os.path.expanduser('~/.ansible-core-ci-private.key')

        if os.path.exists(to_bytes(path)):
            private_key_pem = read_text_file(path)
        else:
            private_key_pem = self.generate_private_key()
            # NOTE(review): the key file appears to be written with default
            # permissions; confirm write_text_file restricts access (0600).
            write_text_file(path, private_key_pem)

        return private_key_pem

    @abc.abstractmethod
    def sign_bytes(self, payload_bytes):  # type: (bytes) -> bytes
        """Sign the given payload and return the signature, initializing a new key pair if required."""

    @abc.abstractmethod
    def publish_public_key(self, public_key_pem):  # type: (str) -> None
        """Publish the given public key."""

    @abc.abstractmethod
    def generate_private_key(self):  # type: () -> str
        """Generate a new key pair, publishing the public key and returning the private key."""
class CryptographyAuthHelper(AuthHelper, ABC):  # pylint: disable=abstract-method
    """Cryptography based public key based authentication helper for Ansible Core CI.

    Uses ECDSA over the P-384 (SECP384R1) curve with SHA-256 digests.
    """
    def sign_bytes(self, payload_bytes):  # type: (bytes) -> bytes
        """Sign the given payload and return the signature, initializing a new key pair if required."""
        # import cryptography here to avoid overhead and failures in environments which do not use/provide it
        from cryptography.hazmat.backends import default_backend
        from cryptography.hazmat.primitives import hashes
        from cryptography.hazmat.primitives.asymmetric import ec
        from cryptography.hazmat.primitives.serialization import load_pem_private_key

        private_key_pem = self.initialize_private_key()
        private_key = load_pem_private_key(to_bytes(private_key_pem), None, default_backend())

        signature_raw_bytes = private_key.sign(payload_bytes, ec.ECDSA(hashes.SHA256()))

        return signature_raw_bytes

    def generate_private_key(self):  # type: () -> str
        """Generate a new key pair, publishing the public key and returning the private key."""
        # import cryptography here to avoid overhead and failures in environments which do not use/provide it
        from cryptography.hazmat.backends import default_backend
        from cryptography.hazmat.primitives import serialization
        from cryptography.hazmat.primitives.asymmetric import ec

        private_key = ec.generate_private_key(ec.SECP384R1(), default_backend())
        public_key = private_key.public_key()

        # noinspection PyUnresolvedReferences
        private_key_pem = to_text(private_key.private_bytes(
            encoding=serialization.Encoding.PEM,
            format=serialization.PrivateFormat.PKCS8,
            encryption_algorithm=serialization.NoEncryption(),
        ))

        # noinspection PyTypeChecker
        public_key_pem = to_text(public_key.public_bytes(
            encoding=serialization.Encoding.PEM,
            format=serialization.PublicFormat.SubjectPublicKeyInfo,
        ))

        self.publish_public_key(public_key_pem)

        return private_key_pem
class OpenSSLAuthHelper(AuthHelper, ABC):  # pylint: disable=abstract-method
    """OpenSSL based public key based authentication helper for Ansible Core CI.

    Fallback implementation that shells out to the ``openssl`` binary instead
    of requiring the ``cryptography`` package.
    """
    def sign_bytes(self, payload_bytes):  # type: (bytes) -> bytes
        """Sign the given payload and return the signature, initializing a new key pair if required."""
        private_key_pem = self.initialize_private_key()

        # openssl only operates on files, so the key, payload and signature
        # are all round-tripped through temporary files.
        with tempfile.NamedTemporaryFile() as private_key_file:
            private_key_file.write(to_bytes(private_key_pem))
            private_key_file.flush()

            with tempfile.NamedTemporaryFile() as payload_file:
                payload_file.write(payload_bytes)
                payload_file.flush()

                with tempfile.NamedTemporaryFile() as signature_file:
                    raw_command(['openssl', 'dgst', '-sha256', '-sign', private_key_file.name, '-out', signature_file.name, payload_file.name], capture=True)
                    signature_raw_bytes = signature_file.read()

                    return signature_raw_bytes

    def generate_private_key(self):  # type: () -> str
        """Generate a new key pair, publishing the public key and returning the private key."""
        private_key_pem = raw_command(['openssl', 'ecparam', '-genkey', '-name', 'secp384r1', '-noout'], capture=True)[0]
        public_key_pem = raw_command(['openssl', 'ec', '-pubout'], data=private_key_pem, capture=True)[0]

        self.publish_public_key(public_key_pem)

        return private_key_pem
| gpl-3.0 |
rbarlow/pulp_puppet | pulp_puppet_plugins/test/unit/plugins/importer/downloaders/test_web.py | 4 | 5848 | import os
import mock
from nectar.report import DownloadReport
import base_downloader
from pulp_puppet.common import constants
from pulp_puppet.plugins.importers.downloaders import exceptions, web
from pulp_puppet.plugins.importers.downloaders.web import HttpDownloader
TEST_SOURCE = 'http://forge.puppetlabs.com/'
class HttpDownloaderTests(base_downloader.BaseDownloaderTests):
    """Unit tests for HttpDownloader.

    All network traffic is intercepted at the nectar layer
    (HTTPThreadedDownloader.download), so no real HTTP requests are made.
    """

    def setUp(self):
        super(HttpDownloaderTests, self).setUp()
        # Point the importer at the test feed and build the downloader under test.
        self.config.repo_plugin_config[constants.CONFIG_FEED] = TEST_SOURCE
        self.downloader = HttpDownloader(self.repo, None, self.config)

    @mock.patch('nectar.config.DownloaderConfig.finalize')
    @mock.patch('nectar.downloaders.threaded.HTTPThreadedDownloader.download')
    def test_retrieve_metadata(self, mock_downloader_download, mock_finalize):
        docs = self.downloader.retrieve_metadata(self.mock_progress_report)

        # One metadata document, fetched with a single (batched) download call.
        self.assertEqual(len(docs), 1)
        self.assertEqual(mock_downloader_download.call_count, 1)
        mock_finalize.assert_called_once()

    @mock.patch('nectar.downloaders.threaded.HTTPThreadedDownloader.download')
    def test_retrieve_metadata_multiple_queries(self, mock_downloader_download):
        self.config.repo_plugin_config[constants.CONFIG_QUERIES] = ['a', ['b', 'c']]

        docs = self.downloader.retrieve_metadata(self.mock_progress_report)

        # One document per query, still a single download call.
        self.assertEqual(2, len(docs))
        self.assertEqual(mock_downloader_download.call_count, 1)

    @mock.patch('pulp_puppet.plugins.importers.downloaders.web.HTTPMetadataDownloadEventListener')
    @mock.patch('nectar.downloaders.threaded.HTTPThreadedDownloader.download')
    def test_retrieve_metadata_with_error(self, mock_downloader_download, mock_listener_constructor):
        # Setup
        mock_listener = mock.MagicMock()
        report = DownloadReport(None, None)
        report.error_msg = 'oops'
        mock_listener.failed_reports = [report]
        mock_listener_constructor.return_value = mock_listener

        # Test: a failed report must surface as FileRetrievalException.
        try:
            self.downloader.retrieve_metadata(self.mock_progress_report)
            self.fail()
        except exceptions.FileRetrievalException:
            pass

    @mock.patch.object(HttpDownloader, 'retrieve_modules')
    def test_retrieve_module(self, mock_retrieve_modules):
        mock_retrieve_modules.return_value = ['foo', 'bar']
        try:
            stored_filename = self.downloader.retrieve_module(self.mock_progress_report, self.module)
        except:
            self.fail()
        # retrieve_module delegates to retrieve_modules and returns the first result.
        mock_retrieve_modules.assert_called_once_with(self.mock_progress_report, [self.module])
        self.assertEqual(stored_filename, 'foo')

    @mock.patch('pulp_puppet.plugins.importers.downloaders.web.HTTPModuleDownloadEventListener')
    @mock.patch('nectar.downloaders.threaded.HTTPThreadedDownloader.download')
    def test_retrieve_module_missing_module(self, mock_downloader_download, mock_listener_constructor):
        # Setup
        self.module.author = 'asdf'
        self.module.puppet_standard_filename.return_value = 'puppet-filename.tar.gz'

        mock_listener = mock.MagicMock()
        report = DownloadReport(None, None)
        report.error_msg = 'oops'
        mock_listener.failed_reports = [report]
        mock_listener_constructor.return_value = mock_listener

        # Test
        try:
            self.downloader.retrieve_module(self.mock_progress_report, self.module)
            self.fail()
        except exceptions.FileRetrievalException:
            expected_filename = web._create_download_tmp_dir(self.working_dir)
            expected_filename = os.path.join(expected_filename, self.module.filename())

    @mock.patch('nectar.downloaders.threaded.HTTPThreadedDownloader.download')
    def test_cleanup_module(self, mock_downloader_download):
        self.module.author = 'asdf'
        self.module.puppet_standard_filename.return_value = 'puppet-filename.tar.gz'
        stored_filename = self.downloader.retrieve_module(self.mock_progress_report, self.module)

        self.downloader.cleanup_module(self.module)

        # The temporary file created during retrieval must be removed.
        self.assertTrue(not os.path.exists(stored_filename))

    def test_create_metadata_download_urls(self):
        # Setup
        self.config.repo_plugin_config[constants.CONFIG_QUERIES] = ['a', ['b', 'c']]

        # Test
        urls = self.downloader._create_metadata_download_urls()

        # Verify: nested query lists become repeated q= parameters.
        self.assertEqual(2, len(urls))
        self.assertEqual(urls[0], TEST_SOURCE + 'modules.json?q=a')
        self.assertEqual(urls[1], TEST_SOURCE + 'modules.json?q=b&q=c')

    def test_create_metadata_download_urls_no_queries(self):
        # Test
        urls = self.downloader._create_metadata_download_urls()

        # Verify
        self.assertEqual(1, len(urls))
        self.assertEqual(urls[0], TEST_SOURCE + 'modules.json')

    def test_create_module_url(self):
        # Setup
        self.module.author = 'asdf'
        self.module.puppet_standard_filename.return_value = 'puppet-filename.tar.gz'
        self.module.filename.return_value = 'puppet-filename.tar.gz'

        # Test
        # Strip the trailing / off to make sure that branch is followed
        self.config.repo_plugin_config[constants.CONFIG_FEED] = TEST_SOURCE[:-1]
        url = self.downloader._create_module_url(self.module)

        # Verify
        expected = TEST_SOURCE + \
            constants.HOSTED_MODULE_FILE_RELATIVE_PATH % (self.module.author[0], self.module.author) + \
            self.module.filename()
        self.assertEqual(url, expected)

    def test_create_download_tmp_dir(self):
        # Test
        created = web._create_download_tmp_dir(self.working_dir)

        # Verify
        self.assertTrue(os.path.exists(created))
        self.assertEqual(created, os.path.join(self.working_dir, web.DOWNLOAD_TMP_DIR))
| gpl-2.0 |
andrewtron3000/hacdc-ros-pkg | irobot_sensor_simulator/src/sensor_simulator.py | 1 | 4289 | #! /usr/bin/env python
#*********************************************************************
# Software License Agreement (BSD License)
#
# Copyright (c) 2011 andrewtron3000
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of the Willow Garage nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#********************************************************************/
#!/usr/bin/env python
import roslib; roslib.load_manifest('irobot_sensor_simulator')
import rospy
import dynamic_reconfigure.server
from irobot_sensor_simulator.cfg import SensorConfig
from irobot_create_2_1.msg import SensorPacket
import threading
data_lock = threading.Lock()
sensor_data = None
def getSimulatedData():
    """Thread-safely fetch the most recent simulated sensor data.

    Returns the last value passed to setSimulatedData(), or None if no
    reconfigure callback has fired yet.
    """
    # `with` guarantees the lock is released even if an exception occurs,
    # unlike the manual acquire()/release() pair it replaces.
    with data_lock:
        return sensor_data
def setSimulatedData(data):
    """Thread-safely replace the shared simulated sensor data."""
    global sensor_data
    # `with` guarantees the lock is released even if an exception occurs.
    with data_lock:
        sensor_data = data
def generateSensorMessage(simulated_data):
    """Build a SensorPacket from a mapping of simulated sensor values.

    Every field listed below is read from *simulated_data* under the identical
    key, so the message can be constructed with a single keyword expansion.
    """
    field_names = (
        'wheeldropCaster', 'wheeldropLeft', 'wheeldropRight',
        'bumpLeft', 'bumpRight', 'wall',
        # 'cliffFronLeft' (sic) matches the field name in the SensorPacket message.
        'cliffLeft', 'cliffFronLeft', 'cliffFrontRight', 'cliffRight',
        'virtualWall', 'infraredByte', 'advance', 'play',
        'distance', 'angle', 'chargingState', 'voltage', 'current',
        'batteryTemperature', 'batteryCharge', 'batteryCapacity',
        'wallSignal', 'cliffLeftSignal', 'cliffFrontLeftSignal',
        'cliffFrontRightSignal', 'cliffRightSignal',
        'homeBase', 'internalCharger', 'songNumber', 'songPlaying',
    )
    return SensorPacket(**dict((name, simulated_data[name]) for name in field_names))
def main():
    """Publish simulated SensorPacket messages at ~10 Hz until ROS shutdown."""
    rospy.init_node("irobot_sensor_simulator")
    publisher = rospy.Publisher('sensorPacket', SensorPacket)
    # The reconfigure callback stores incoming values via setSimulatedData().
    dynamic_reconfigure.server.Server(SensorConfig, reconfigure)
    while not rospy.is_shutdown():
        data = getSimulatedData()
        # `is not None`, not `!= None`: the reconfigure Config object may
        # overload comparison operators; identity states the intent exactly.
        if data is not None:
            publisher.publish(generateSensorMessage(data))
        rospy.sleep(0.1)
def reconfigure(config, level):
    """dynamic_reconfigure callback: store the new values for the publisher loop."""
    setSimulatedData(config)
    return config  # Returns the updated configuration.


if __name__ == '__main__':
    main()
| bsd-2-clause |
lociii/googleads-python-lib | examples/adspygoogle/dfp/v201208/create_mobile_line_item.py | 4 | 3764 | #!/usr/bin/python
#
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example creates a new mobile line item. Mobile features needs to be
enabled in your account to use mobile targeting. To determine which line
items exist, run get_all_line_items.py. To determine which orders exist, run
get_all_orders.py. To determine which placements exist, run
get_all_placements.py."""
__author__ = 'api.shamjeff@gmail.com (Jeff Sham)'

from datetime import date
import os

# Locate the client library. If module was installed via "setup.py" script, then
# the following two lines are not needed.
import sys
sys.path.insert(0, os.path.join('..', '..', '..', '..'))

# Import appropriate classes from the client library.
from adspygoogle import DfpClient
from adspygoogle.common import Utils

# Initialize client object.
client = DfpClient(path=os.path.join('..', '..', '..', '..'))

# Initialize appropriate service.
line_item_service = client.GetService('LineItemService', version='v201208')

# Set order that the created line item will belong to and add the id of a
# placement containing ad units with a mobile target platform.
order_id = 'INSERT_ORDER_ID_HERE'
targeted_placement_ids = ['INSERT_MOBILE_PLACEMENT_ID_HERE']

# Create the line item.
# Target the line items in the following manner:
# Target the Google device manufacturer (40100) but exclude the Nexus One
# device (604046).
# Target the iPhone 4 device submodel (640003).
line_item = {
    'name': 'Mobile line item #%s' % Utils.GetUniqueName(),
    'orderId': order_id,
    'targetPlatform': 'MOBILE',
    'targeting': {
        'inventoryTargeting': {
            'targetedPlacementIds': targeted_placement_ids
        },
        'technologyTargeting': {
            'deviceManufacturerTargeting': {
                'deviceManufacturers': [{'id': '40100'}],
                'isTargeted': 'true'
            },
            'mobileDeviceTargeting': {
                'targetedMobileDevices': [],
                'excludedMobileDevices': [{'id': '604046'}]
            },
            'mobileDeviceSubmodelTargeting': {
                'targetedMobileDeviceSubmodels': [{'id': '640003'}],
                'excludedMobileDeviceSubmodels': []
            }
        }
    },
    # Creative slot the line item will serve into (300x250 medium rectangle).
    'creativePlaceholders': [
        {
            'size': {
                'width': '300',
                'height': '250'
            }
        }
    ],
    'startDateTimeType': 'IMMEDIATELY',
    'lineItemType': 'STANDARD',
    # Runs until Sept 30 of next year.
    'endDateTime': {
        'date': {
            'year': str(date.today().year + 1),
            'month': '9',
            'day': '30'
        },
        'hour': '0',
        'minute': '0',
        'second': '0'
    },
    'costType': 'CPM',
    # Micro amounts: 2000000 == $2.00 CPM.
    'costPerUnit': {
        'currencyCode': 'USD',
        'microAmount': '2000000'
    },
    'creativeRotationType': 'EVEN',
    'discountType': 'PERCENTAGE',
    'unitsBought': '500000',
    'unitType': 'IMPRESSIONS'
}

line_item = line_item_service.CreateLineItem(line_item)[0]

# Display results.
print ('Line item with id \'%s\', belonging to order id \'%s\', and named '
       '\'%s\' was created.' % (line_item['id'], line_item['orderId'],
                                line_item['name']))
| apache-2.0 |
hermes-jr/npui | netprofile_entities/netprofile_entities/__init__.py | 2 | 4304 | #!/usr/bin/env python
# -*- coding: utf-8; tab-width: 4; indent-tabs-mode: t -*-
#
# NetProfile: Entities module
# © Copyright 2013-2014 Alex 'Unik' Unigovsky
#
# This file is part of NetProfile.
# NetProfile is free software: you can redistribute it and/or
# modify it under the terms of the GNU Affero General Public
# License as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later
# version.
#
# NetProfile is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General
# Public License along with NetProfile. If not, see
# <http://www.gnu.org/licenses/>.
from __future__ import (
unicode_literals,
print_function,
absolute_import,
division
)
from netprofile.common.modules import ModuleBase
from sqlalchemy.orm.exc import NoResultFound
from pyramid.i18n import TranslationStringFactory
_ = TranslationStringFactory('netprofile_entities')
class Module(ModuleBase):
    """NetProfile module descriptor for the Entities module.

    Registers translations, declares dependencies, exported models/SQL views,
    and the initial privilege and log-type data installed with the module.
    """
    def __init__(self, mmgr):
        self.mmgr = mmgr
        mmgr.cfg.add_translation_dirs('netprofile_entities:locale/')
        mmgr.cfg.scan()

    @classmethod
    def get_deps(cls):
        # Addresses reference geographical data provided by the 'geo' module.
        return ('geo',)

    @classmethod
    def get_models(cls):
        """Return all ORM models contributed by this module."""
        from netprofile_entities import models
        return (
            models.Address,
            models.Phone,
            models.Entity,
            models.EntityComment,
            models.EntityFile,
            models.EntityFlag,
            models.EntityFlagType,
            models.EntityState,
            models.PhysicalEntity,
            models.LegalEntity,
            models.StructuralEntity,
            models.ExternalEntity
        )

    @classmethod
    def get_sql_views(cls):
        from netprofile_entities import models
        return (
            models.EntitiesBaseView,
        )

    @classmethod
    def get_sql_data(cls, modobj, sess):
        """Install initial data: log type, privileges, admin capabilities and
        a default entity state."""
        from netprofile_entities import models
        from netprofile_core.models import (
            Group,
            GroupCapability,
            LogType,
            Privilege
        )

        sess.add(LogType(
            id=2,
            name='Entities'
        ))
        # Flush so the log type gets its ID before privileges are added.
        sess.flush()

        privs = (
            Privilege(
                code='BASE_ENTITIES',
                name='Access: Entities'
            ),
            Privilege(
                code='ENTITIES_LIST',
                name='Entities: List'
            ),
            Privilege(
                code='ENTITIES_CREATE',
                name='Entities: Create'
            ),
            Privilege(
                code='ENTITIES_EDIT',
                name='Entities: Edit'
            ),
            Privilege(
                code='ENTITIES_DELETE',
                name='Entities: Delete'
            ),
            Privilege(
                code='FILES_ATTACH_2ENTITIES',
                name='Files: Attach to entities'
            ),
            Privilege(
                code='ENTITIES_STATES_CREATE',
                name='Entities: Create states'
            ),
            Privilege(
                code='ENTITIES_STATES_EDIT',
                name='Entities: Edit states'
            ),
            Privilege(
                code='ENTITIES_STATES_DELETE',
                name='Entities: Delete states'
            ),
            Privilege(
                code='ENTITIES_FLAGTYPES_CREATE',
                name='Entities: Create flag types'
            ),
            Privilege(
                code='ENTITIES_FLAGTYPES_EDIT',
                name='Entities: Edit flag types'
            ),
            Privilege(
                code='ENTITIES_FLAGTYPES_DELETE',
                name='Entities: Delete flag types'
            ),
            Privilege(
                code='ENTITIES_COMMENT',
                name='Entities: Add comments'
            ),
            Privilege(
                code='ENTITIES_COMMENTS_EDIT',
                name='Entities: Edit comments'
            ),
            Privilege(
                code='ENTITIES_COMMENTS_DELETE',
                name='Entities: Delete comments'
            ),
            Privilege(
                code='ENTITIES_COMMENTS_MARK',
                name='Entities: Mark comments as obsolete'
            ),
        )
        for priv in privs:
            priv.module = modobj
            sess.add(priv)
        try:
            # Grant every privilege to the Administrators group, if it exists.
            grp_admins = sess.query(Group).filter(Group.name == 'Administrators').one()
            for priv in privs:
                cap = GroupCapability()
                cap.group = grp_admins
                cap.privilege = priv
        except NoResultFound:
            pass
        sess.add(models.EntityState(
            name='Default',
            description='Default entity state. You can safely rename and/or delete it if you wish.'
        ))

    def get_local_js(self, request, lang):
        return (
            'netprofile_entities:static/webshell/locale/webshell-lang-' + lang + '.js',
        )

    def get_autoload_js(self, request):
        return (
            'NetProfile.entities.grid.HistoryGrid',
        )

    def get_css(self, request):
        return (
            'netprofile_entities:static/css/main.css',
        )

    @property
    def name(self):
        # Translatable module display name.
        return _('Entities')
| agpl-3.0 |
vkomenda/linux-sunxi | arch/ia64/scripts/unwcheck.py | 13143 | 1714 | #!/usr/bin/python
#
# Usage: unwcheck.py FILE
#
# This script checks the unwind info of each function in file FILE
# and verifies that the sum of the region-lengths matches the total
# length of the function.
#
# Based on a shell/awk script originally written by Harish Patil,
# which was converted to Perl by Matthew Chapman, which was converted
# to Python by David Mosberger.
#
import os
import re
import sys
# Python 2 script (kernel tooling): print statements and long() are intentional.
if len(sys.argv) != 2:
    print "Usage: %s FILE" % sys.argv[0]
    sys.exit(2)

# Allow overriding the readelf binary (e.g. a cross toolchain) via $READELF.
readelf = os.getenv("READELF", "readelf")

# Matches the "<func>: [0xstart-0xend]" header emitted by `readelf -u`.
start_pattern = re.compile("<([^>]*)>: \[0x([0-9a-f]+)-0x([0-9a-f]+)\]")
# Matches per-region "rlen=N" annotations within a function's unwind info.
rlen_pattern = re.compile(".*rlen=([0-9]+)")
def check_func (func, slots, rlen_sum):
    """Verify that the summed region lengths equal the function's slot count.

    On mismatch, prints an error (falling back to the [start-end] address
    range from the enclosing scan loop when the name is unknown) and bumps
    the global error counter.
    """
    if slots != rlen_sum:
        global num_errors
        num_errors += 1
        if not func: func = "[%#x-%#x]" % (start, end)
        print "ERROR: %s: %lu slots, total region length = %lu" % (func, slots, rlen_sum)
    return
num_funcs = 0
num_errors = 0
func = False
slots = 0
rlen_sum = 0
# Scan `readelf -u` output: each function header flushes the previous
# function's accumulated totals through check_func().
for line in os.popen("%s -u %s" % (readelf, sys.argv[1])):
    m = start_pattern.match(line)
    if m:
        check_func(func, slots, rlen_sum)
        func = m.group(1)
        start = long(m.group(2), 16)
        end = long(m.group(3), 16)
        # Three instruction slots per 16-byte ia64 bundle.
        slots = 3 * (end - start) / 16
        rlen_sum = 0L
        num_funcs += 1
    else:
        m = rlen_pattern.match(line)
        if m:
            rlen_sum += long(m.group(1))
# Flush the totals of the final function in the file.
check_func(func, slots, rlen_sum)

if num_errors == 0:
    print "No errors detected in %u functions." % num_funcs
else:
    if num_errors > 1:
        err="errors"
    else:
        err="error"
    print "%u %s detected in %u functions." % (num_errors, err, num_funcs)
    sys.exit(1)
| gpl-2.0 |
marissazhou/django | django/contrib/auth/__init__.py | 69 | 7305 | import inspect
import re
from django.apps import apps as django_apps
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured, PermissionDenied
from django.middleware.csrf import rotate_token
from django.utils.crypto import constant_time_compare
from django.utils.module_loading import import_string
from django.utils.translation import LANGUAGE_SESSION_KEY
from .signals import user_logged_in, user_logged_out, user_login_failed
SESSION_KEY = '_auth_user_id'
BACKEND_SESSION_KEY = '_auth_user_backend'
HASH_SESSION_KEY = '_auth_user_hash'
REDIRECT_FIELD_NAME = 'next'
def load_backend(path):
    """Import the authentication backend at dotted *path* and return an instance."""
    return import_string(path)()
def _get_backends(return_tuples=False):
    """Instantiate every backend listed in settings.AUTHENTICATION_BACKENDS.

    When *return_tuples* is True, each entry is ``(instance, dotted_path)``;
    otherwise entries are bare backend instances. Raises ImproperlyConfigured
    if no backends are configured.
    """
    paths = settings.AUTHENTICATION_BACKENDS
    if return_tuples:
        backends = [(load_backend(path), path) for path in paths]
    else:
        backends = [load_backend(path) for path in paths]
    if not backends:
        raise ImproperlyConfigured(
            'No authentication backends have been defined. Does '
            'AUTHENTICATION_BACKENDS contain anything?'
        )
    return backends
def get_backends():
    """Return instances of all configured authentication backends."""
    return _get_backends(return_tuples=False)
def _clean_credentials(credentials):
"""
Cleans a dictionary of credentials of potentially sensitive info before
sending to less secure functions.
Not comprehensive - intended for user_login_failed signal
"""
SENSITIVE_CREDENTIALS = re.compile('api|token|key|secret|password|signature', re.I)
CLEANSED_SUBSTITUTE = '********************'
for key in credentials:
if SENSITIVE_CREDENTIALS.search(key):
credentials[key] = CLEANSED_SUBSTITUTE
return credentials
def _get_user_session_key(request):
    """Return the primary key of the user stored in the request's session."""
    # This value in the session is always serialized to a string, so we need
    # to convert it back to Python whenever we access it.
    return get_user_model()._meta.pk.to_python(request.session[SESSION_KEY])
def authenticate(**credentials):
    """
    If the given credentials are valid, return a User object.

    Each configured backend is tried in order; the first one that accepts
    the credential keyword arguments and returns a user wins. Returns None
    (implicitly) when every backend fails, after firing user_login_failed.
    """
    for backend, backend_path in _get_backends(return_tuples=True):
        try:
            # Check the backend's authenticate() signature against the
            # supplied keyword arguments without actually calling it.
            inspect.getcallargs(backend.authenticate, **credentials)
        except TypeError:
            # This backend doesn't accept these credentials as arguments. Try the next one.
            continue
        try:
            user = backend.authenticate(**credentials)
        except PermissionDenied:
            # This backend says to stop in our tracks - this user should not be allowed in at all.
            return None
        if user is None:
            continue
        # Annotate the user object with the path of the backend.
        user.backend = backend_path
        return user

    # The credentials supplied are invalid to all backends, fire signal
    user_login_failed.send(sender=__name__,
                           credentials=_clean_credentials(credentials))
def login(request, user):
    """
    Persist a user id and a backend in the request. This way a user doesn't
    have to reauthenticate on every request. Note that data set during
    the anonymous session is retained when the user logs in.

    *user* may be None, in which case the user already attached to the
    request is (re-)logged in.
    """
    session_auth_hash = ''
    if user is None:
        user = request.user
    if hasattr(user, 'get_session_auth_hash'):
        session_auth_hash = user.get_session_auth_hash()

    if SESSION_KEY in request.session:
        if _get_user_session_key(request) != user.pk or (
                session_auth_hash and
                request.session.get(HASH_SESSION_KEY) != session_auth_hash):
            # To avoid reusing another user's session, create a new, empty
            # session if the existing session corresponds to a different
            # authenticated user.
            request.session.flush()
    else:
        request.session.cycle_key()
    request.session[SESSION_KEY] = user._meta.pk.value_to_string(user)
    request.session[BACKEND_SESSION_KEY] = user.backend
    request.session[HASH_SESSION_KEY] = session_auth_hash
    if hasattr(request, 'user'):
        request.user = user
    # Rotate the CSRF token on privilege change (login) to prevent fixation.
    rotate_token(request)
    user_logged_in.send(sender=user.__class__, request=request, user=user)
def logout(request):
    """
    Remove the authenticated user's ID from the request and flush the
    session data.
    """
    # Fire the signal before flushing so receivers still know *who*
    # logged out; anonymous users are reported as None.
    user = getattr(request, 'user', None)
    if hasattr(user, 'is_authenticated') and not user.is_authenticated():
        user = None
    user_logged_out.send(sender=user.__class__, request=request, user=user)
    # Preserve the visitor's language choice across the session wipe.
    saved_language = request.session.get(LANGUAGE_SESSION_KEY)
    request.session.flush()
    if saved_language is not None:
        request.session[LANGUAGE_SESSION_KEY] = saved_language
    if hasattr(request, 'user'):
        from django.contrib.auth.models import AnonymousUser
        request.user = AnonymousUser()
def get_user_model():
    """
    Return the User model class that is active in this project.

    Raises ImproperlyConfigured when AUTH_USER_MODEL is malformed or
    refers to a model that is not installed.
    """
    model_name = settings.AUTH_USER_MODEL
    try:
        return django_apps.get_model(model_name)
    except ValueError:
        raise ImproperlyConfigured("AUTH_USER_MODEL must be of the form 'app_label.model_name'")
    except LookupError:
        raise ImproperlyConfigured(
            "AUTH_USER_MODEL refers to model '%s' that has not been installed" % model_name
        )
def get_user(request):
    """
    Return the user model instance associated with the request session,
    or an `AnonymousUser` instance when no valid user can be retrieved.
    """
    from .models import AnonymousUser
    user = None
    try:
        user_id = _get_user_session_key(request)
        backend_path = request.session[BACKEND_SESSION_KEY]
    except KeyError:
        # No user/backend recorded in this session.
        pass
    else:
        if backend_path in settings.AUTHENTICATION_BACKENDS:
            user = load_backend(backend_path).get_user(user_id)
            # Reject the session when its stored auth hash no longer
            # matches the user's current one (e.g. password changed).
            if hasattr(user, 'get_session_auth_hash'):
                stored_hash = request.session.get(HASH_SESSION_KEY)
                verified = stored_hash and constant_time_compare(
                    stored_hash,
                    user.get_session_auth_hash()
                )
                if not verified:
                    request.session.flush()
                    user = None
    return user or AnonymousUser()
def get_permission_codename(action, opts):
    """
    Return the codename of the permission for *action* on the model
    described by *opts* (e.g. 'add' + 'user' -> 'add_user').
    """
    return '{0}_{1}'.format(action, opts.model_name)
def update_session_auth_hash(request, user):
    """
    Refresh the session's auth hash after a password change.

    Changing a user's password normally invalidates every session for
    that user; re-deriving the hash here keeps the *current* session
    (the one the password was changed from) alive.
    """
    if hasattr(user, 'get_session_auth_hash') and request.user == user:
        # Only the session of the user being updated is refreshed.
        request.session[HASH_SESSION_KEY] = user.get_session_auth_hash()
# Default AppConfig used when the app is referenced without an explicit one.
default_app_config = 'django.contrib.auth.apps.AuthConfig'
| bsd-3-clause |
SuperTux/flexlay | flexlay/objmap_sprite_object.py | 1 | 2634 | # Flexlay - A Generic 2D Game Editor
# Copyright (C) 2014 Ingo Ruhnke <grumbel@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from flexlay import ObjMapObject
from flexlay.math import Point, Origin, Size, Sizef, Rectf
class ObjMapSpriteObject(ObjMapObject):
    """A map object that is rendered as a sprite anchored at ``pos``.

    ``metadata`` is an opaque payload the editor carries along with the
    object; it is stored but never interpreted here.
    """

    def __init__(self, sprite, pos, metadata):
        super().__init__(pos, metadata)
        self.sprite = sprite
        self.pos = pos
        self.metadata = metadata

    def draw(self, gc):
        """Draw the sprite at the object's current position."""
        self.sprite.draw(self.pos.x, self.pos.y, gc)

    def get_bound_rect(self):
        """Return the sprite's bounding rectangle in map coordinates."""
        align = Point(0, 0)
        # get_alignment() yields the origin enum plus the x/y offsets.
        # (The previous dead `origin_e = Origin()` placeholder was removed;
        # the unpacking below always rebinds it.)
        origin_e, align.x, align.y = self.sprite.get_alignment()
        origin = Origin.calc_origin(origin_e, Size(self.sprite.width,
                                                   self.sprite.height))
        align.x = -align.x
        # NOTE(review): only align.x is negated, align.y is left as-is --
        # looks asymmetric; confirm against the sprite alignment semantics.
        # FIXME: This looks a bit hacky
        scale_x, scale_y = self.sprite.get_scale()
        if scale_x < 0:
            align.x += self.sprite.width
        if scale_y < 0:
            align.y += self.sprite.height
        # if (scale_x > 1.0f && scale_y > 1.0f)
        #     return Rectf(pos - origin - align,
        #                  Sizef(sprite.width * scale_x, sprite.height * scale_y))
        # else
        return Rectf(self.pos - origin - align,
                     Sizef(self.sprite.width, self.sprite.height))

    def flip_vertical(self):
        """Mirror the sprite vertically, shifting pos so it stays in place."""
        scale_x, scale_y = self.sprite.get_scale()
        self.sprite.set_scale(scale_x, -scale_y)
        # Compensate for the flip so the sprite stays visually anchored.
        if scale_y < 0:
            self.pos.y -= self.sprite.height
        else:
            self.pos.y += self.sprite.height

    def flip_horizontal(self):
        """Mirror the sprite horizontally, shifting pos so it stays in place."""
        scale_x, scale_y = self.sprite.get_scale()
        self.sprite.set_scale(-scale_x, scale_y)
        if scale_x < 0:
            self.pos.x -= self.sprite.width
        else:
            self.pos.x += self.sprite.width

    def set_sprite(self, sprite):
        """Replace the sprite used for rendering."""
        self.sprite = sprite

    def set_rotate(self, angle):
        """Set the sprite's rotation angle."""
        self.sprite.set_angle(angle)
# EOF #
| gpl-3.0 |
jiazichenzhan/Server_Manage_Plugin | ironic-plugin-pike/ironic/tests/unit/common/test_cinder.py | 4 | 36754 | # Copyright 2016 Hewlett Packard Enterprise Development Company LP.
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import json
from cinderclient import exceptions as cinder_exceptions
import cinderclient.v3 as cinderclient
import mock
from oslo_utils import uuidutils
from six.moves import http_client
from ironic.common import cinder
from ironic.common import exception
from ironic.common import keystone
from ironic.conductor import task_manager
from ironic.tests import base
from ironic.tests.unit.conductor import mgr_utils
from ironic.tests.unit.db import base as db_base
from ironic.tests.unit.objects import utils as object_utils
@mock.patch.object(keystone, 'get_auth', autospec=True)
@mock.patch.object(keystone, 'get_session', autospec=True)
class TestCinderSession(base.TestCase):
    """Tests for the module-level cinder keystone-session cache."""
    def setUp(self):
        super(TestCinderSession, self).setUp()
        self.config(timeout=1,
                    retries=2,
                    group='cinder')
    def test__get_cinder_session(self, mock_keystone_session, mock_auth):
        """Check establishing new session when no session exists."""
        mock_keystone_session.return_value = 'session1'
        self.assertEqual('session1', cinder._get_cinder_session())
        mock_keystone_session.assert_called_once_with(
            'cinder', auth=mock_auth.return_value)
        mock_auth.assert_called_once_with('cinder')
        # Second phase of the same test: the cached session must be
        # reused, so keystone must not be consulted again.
        """Check if existing session is used."""
        mock_keystone_session.reset_mock()
        mock_auth.reset_mock()
        mock_keystone_session.return_value = 'session2'
        self.assertEqual('session1', cinder._get_cinder_session())
        self.assertFalse(mock_keystone_session.called)
        self.assertFalse(mock_auth.called)
@mock.patch.object(cinder, '_get_cinder_session', autospec=True)
@mock.patch.object(cinderclient.Client, '__init__', autospec=True)
class TestCinderClient(base.TestCase):
    """Tests for cinder.get_client() constructor-argument plumbing."""
    def setUp(self):
        super(TestCinderClient, self).setUp()
        self.config(timeout=1,
                    retries=2,
                    group='cinder')
    def test_get_client(self, mock_client_init, mock_session):
        """Client is built with the session and retry count."""
        mock_session_obj = mock.Mock()
        expected = {'connect_retries': 2,
                    'session': mock_session_obj}
        mock_session.return_value = mock_session_obj
        mock_client_init.return_value = None
        cinder.get_client()
        mock_session.assert_called_once_with()
        mock_client_init.assert_called_once_with(mock.ANY, **expected)
    def test_get_client_with_endpoint_override(
            self, mock_client_init, mock_session):
        """[cinder]/url config is forwarded as endpoint_override."""
        self.config(url='http://test-url', group='cinder')
        mock_session_obj = mock.Mock()
        expected = {'connect_retries': 2,
                    'endpoint_override': 'http://test-url',
                    'session': mock_session_obj}
        mock_session.return_value = mock_session_obj
        mock_client_init.return_value = None
        cinder.get_client()
        mock_client_init.assert_called_once_with(mock.ANY, **expected)
        mock_session.assert_called_once_with()
    def test_get_client_with_region(self, mock_client_init, mock_session):
        """[keystone]/region_name config is forwarded as region_name."""
        mock_session_obj = mock.Mock()
        expected = {'connect_retries': 2,
                    'region_name': 'test-region',
                    'session': mock_session_obj}
        mock_session.return_value = mock_session_obj
        self.config(region_name='test-region',
                    group='keystone')
        mock_client_init.return_value = None
        cinder.get_client()
        mock_client_init.assert_called_once_with(mock.ANY, **expected)
        mock_session.assert_called_once_with()
class TestCinderUtils(db_base.DbTestCase):
    """Tests for the small pure helpers in ironic.common.cinder."""
    def setUp(self):
        super(TestCinderUtils, self).setUp()
        mgr_utils.mock_the_extension_manager(driver='fake')
        self.config(enabled_drivers=['fake'])
        self.node = object_utils.create_test_node(
            self.context,
            instance_uuid=uuidutils.generate_uuid())
    def test_is_volume_available(self):
        """Available = status AVAILABLE, or IN_USE with multiattach."""
        available_volumes = [
            mock.Mock(status=cinder.AVAILABLE, multiattach=False),
            mock.Mock(status=cinder.IN_USE, multiattach=True)]
        unavailable_volumes = [
            mock.Mock(status=cinder.IN_USE, multiattach=False),
            mock.Mock(status='fake-non-status', multiattach=True)]
        for vol in available_volumes:
            result = cinder.is_volume_available(vol)
            self.assertTrue(result,
                            msg="Failed for status '%s'." % vol.status)
        for vol in unavailable_volumes:
            result = cinder.is_volume_available(vol)
            self.assertFalse(result,
                             msg="Failed for status '%s'." % vol.status)
    def test_is_volume_attached(self):
        """Attachment matches on either node uuid or instance uuid."""
        attached_vol = mock.Mock(id='foo', attachments=[
            {'server_id': self.node.uuid, 'attachment_id': 'meow'}])
        attached_vol2 = mock.Mock(id='bar', attachments=[
            {'server_id': self.node.instance_uuid, 'attachment_id': 'meow'}],)
        unattached = mock.Mock(attachments=[])
        self.assertTrue(cinder.is_volume_attached(self.node, attached_vol))
        self.assertTrue(cinder.is_volume_attached(self.node, attached_vol2))
        self.assertFalse(cinder.is_volume_attached(self.node, unattached))
    def test__get_attachment_id(self):
        """attachment_id is returned only for this node's attachments."""
        expectation = 'meow'
        attached_vol = mock.Mock(attachments=[
            {'server_id': self.node.instance_uuid, 'attachment_id': 'meow'}])
        attached_vol2 = mock.Mock(attachments=[
            {'server_id': self.node.uuid, 'attachment_id': 'meow'}])
        unattached = mock.Mock(attachments=[])
        no_attachment = mock.Mock(attachments=[
            {'server_id': 'cat', 'id': 'cat'}])
        self.assertEqual(expectation,
                         cinder._get_attachment_id(self.node, attached_vol))
        self.assertEqual(expectation,
                         cinder._get_attachment_id(self.node, attached_vol2))
        self.assertIsNone(cinder._get_attachment_id(self.node, unattached))
        self.assertIsNone(cinder._get_attachment_id(self.node, no_attachment))
    @mock.patch.object(datetime, 'datetime')
    def test__create_metadata_dictionary(self, mock_datetime):
        """Metadata is keyed by node uuid and JSON-serialized."""
        fake_time = '2017-06-05T00:33:26.574676'
        mock_utcnow = mock.Mock()
        mock_datetime.utcnow.return_value = mock_utcnow
        mock_utcnow.isoformat.return_value = fake_time
        expected_key = ("ironic_node_%s" % self.node.uuid)
        expected_data = {
            'instance_uuid': self.node.instance_uuid,
            'last_seen': fake_time,
            'last_action': 'meow'
        }
        result = cinder._create_metadata_dictionary(self.node, 'meow')
        data = json.loads(result[expected_key])
        self.assertEqual(expected_data, data)
@mock.patch.object(cinder, '_get_cinder_session', autospec=True)
@mock.patch.object(cinderclient.volumes.VolumeManager, 'set_metadata',
autospec=True)
@mock.patch.object(cinderclient.volumes.VolumeManager, 'get', autospec=True)
class TestCinderActions(db_base.DbTestCase):
def setUp(self):
super(TestCinderActions, self).setUp()
mgr_utils.mock_the_extension_manager(driver='fake')
self.config(enabled_drivers=['fake'])
self.node = object_utils.create_test_node(
self.context,
instance_uuid=uuidutils.generate_uuid())
@mock.patch.object(cinderclient.volumes.VolumeManager, 'attach',
autospec=True)
@mock.patch.object(cinderclient.volumes.VolumeManager,
'initialize_connection', autospec=True)
@mock.patch.object(cinderclient.volumes.VolumeManager, 'reserve',
autospec=True)
@mock.patch.object(cinder, 'is_volume_attached', autospec=True)
@mock.patch.object(cinder, '_create_metadata_dictionary', autospec=True)
def test_attach_volumes(self, mock_create_meta, mock_is_attached,
mock_reserve, mock_init, mock_attach, mock_get,
mock_set_meta, mock_session):
"""Iterate once on a single volume with success."""
volume_id = '111111111-0000-0000-0000-000000000003'
expected = [{
'driver_volume_type': 'iscsi',
'data': {
'target_iqn': 'iqn.2010-10.org.openstack:volume-00000002',
'target_portal': '127.0.0.0.1:3260',
'volume_id': volume_id,
'target_lun': 2,
'ironic_volume_uuid': '000-001'}}]
volumes = [volume_id]
connector = {'foo': 'bar'}
mock_create_meta.return_value = {'bar': 'baz'}
mock_is_attached.return_value = False
mock_get.return_value = mock.Mock(attachments=[], id='000-001')
mock_init.return_value = {
'driver_volume_type': 'iscsi',
'data': {
'target_iqn': 'iqn.2010-10.org.openstack:volume-00000002',
'target_portal': '127.0.0.0.1:3260',
'target_lun': 2}}
with task_manager.acquire(self.context, self.node.uuid) as task:
attachments = cinder.attach_volumes(task, volumes, connector)
self.assertEqual(expected, attachments)
mock_reserve.assert_called_once_with(mock.ANY, volume_id)
mock_init.assert_called_once_with(mock.ANY, volume_id, connector)
mock_attach.assert_called_once_with(mock.ANY, volume_id,
self.node.instance_uuid, None)
mock_set_meta.assert_called_once_with(mock.ANY, volume_id,
{'bar': 'baz'})
mock_get.assert_called_once_with(mock.ANY, volume_id)
@mock.patch.object(cinderclient.volumes.VolumeManager, 'attach',
autospec=True)
@mock.patch.object(cinderclient.volumes.VolumeManager,
'initialize_connection', autospec=True)
@mock.patch.object(cinderclient.volumes.VolumeManager, 'reserve',
autospec=True)
@mock.patch.object(cinder, '_create_metadata_dictionary', autospec=True)
def test_attach_volumes_one_attached(
self, mock_create_meta, mock_reserve, mock_init, mock_attach,
mock_get, mock_set_meta, mock_session):
"""Iterate with two volumes, one already attached."""
volume_id = '111111111-0000-0000-0000-000000000003'
expected = [
{'driver_volume_type': 'iscsi',
'data': {
'target_iqn': 'iqn.2010-10.org.openstack:volume-00000002',
'target_portal': '127.0.0.0.1:3260',
'volume_id': volume_id,
'target_lun': 2,
'ironic_volume_uuid': '000-000'}},
{'already_attached': True,
'data': {
'volume_id': 'already_attached',
'ironic_volume_uuid': '000-001'}}]
volumes = [volume_id, 'already_attached']
connector = {'foo': 'bar'}
mock_create_meta.return_value = {'bar': 'baz'}
mock_get.side_effect = [
mock.Mock(attachments=[], id='000-000'),
mock.Mock(attachments=[{'server_id': self.node.uuid}],
id='000-001')
]
mock_init.return_value = {
'driver_volume_type': 'iscsi',
'data': {
'target_iqn': 'iqn.2010-10.org.openstack:volume-00000002',
'target_portal': '127.0.0.0.1:3260',
'target_lun': 2}}
with task_manager.acquire(self.context, self.node.uuid) as task:
attachments = cinder.attach_volumes(task, volumes, connector)
self.assertEqual(expected, attachments)
mock_reserve.assert_called_once_with(mock.ANY, volume_id)
mock_init.assert_called_once_with(mock.ANY, volume_id, connector)
mock_attach.assert_called_once_with(mock.ANY, volume_id,
self.node.instance_uuid, None)
mock_set_meta.assert_called_once_with(mock.ANY, volume_id,
{'bar': 'baz'})
@mock.patch.object(cinderclient.Client, '__init__', autospec=True)
def test_attach_volumes_client_init_failure(
self, mock_client, mock_get, mock_set_meta, mock_session):
connector = {'foo': 'bar'}
volumes = ['111111111-0000-0000-0000-000000000003']
mock_client.side_effect = cinder_exceptions.BadRequest(
http_client.BAD_REQUEST)
with task_manager.acquire(self.context, self.node.uuid) as task:
self.assertRaises(exception.StorageError,
cinder.attach_volumes,
task,
volumes,
connector)
@mock.patch.object(cinderclient.volumes.VolumeManager, 'attach',
autospec=True)
@mock.patch.object(cinderclient.volumes.VolumeManager,
'initialize_connection', autospec=True)
@mock.patch.object(cinderclient.volumes.VolumeManager, 'reserve',
autospec=True)
@mock.patch.object(cinder, '_create_metadata_dictionary', autospec=True)
def test_attach_volumes_vol_not_found(
self, mock_create_meta, mock_reserve, mock_init, mock_attach,
mock_get, mock_set_meta, mock_session):
"""Raise an error if the volume lookup fails"""
def __mock_get_side_effect(client, volume_id):
if volume_id == 'not_found':
raise cinder_exceptions.NotFound(
http_client.NOT_FOUND, message='error')
else:
return mock.Mock(attachments=[], uuid='000-000')
volumes = ['111111111-0000-0000-0000-000000000003',
'not_found',
'not_reached']
connector = {'foo': 'bar'}
mock_get.side_effect = __mock_get_side_effect
mock_create_meta.return_value = {'bar': 'baz'}
with task_manager.acquire(self.context, self.node.uuid) as task:
self.assertRaises(exception.StorageError,
cinder.attach_volumes,
task,
volumes,
connector)
mock_get.assert_any_call(mock.ANY,
'111111111-0000-0000-0000-000000000003')
mock_get.assert_any_call(mock.ANY, 'not_found')
self.assertEqual(2, mock_get.call_count)
mock_reserve.assert_called_once_with(
mock.ANY, '111111111-0000-0000-0000-000000000003')
mock_init.assert_called_once_with(
mock.ANY, '111111111-0000-0000-0000-000000000003', connector)
mock_attach.assert_called_once_with(
mock.ANY, '111111111-0000-0000-0000-000000000003',
self.node.instance_uuid, None)
mock_set_meta.assert_called_once_with(
mock.ANY, '111111111-0000-0000-0000-000000000003', {'bar': 'baz'})
@mock.patch.object(cinderclient.volumes.VolumeManager, 'reserve',
autospec=True)
@mock.patch.object(cinder, 'is_volume_attached', autospec=True)
def test_attach_volumes_reserve_failure(self, mock_is_attached,
mock_reserve, mock_get,
mock_set_meta, mock_session):
volumes = ['111111111-0000-0000-0000-000000000003']
connector = {'foo': 'bar'}
volume = mock.Mock(attachments=[])
mock_get.return_value = volume
mock_is_attached.return_value = False
mock_reserve.side_effect = cinder_exceptions.NotAcceptable(
http_client.NOT_ACCEPTABLE)
with task_manager.acquire(self.context, self.node.uuid) as task:
self.assertRaises(exception.StorageError,
cinder.attach_volumes,
task,
volumes,
connector)
mock_is_attached.assert_called_once_with(mock.ANY, volume)
@mock.patch.object(cinderclient.volumes.VolumeManager,
'initialize_connection', autospec=True)
@mock.patch.object(cinderclient.volumes.VolumeManager, 'reserve',
autospec=True)
@mock.patch.object(cinder, 'is_volume_attached', autospec=True)
@mock.patch.object(cinder, '_create_metadata_dictionary', autospec=True)
def test_attach_volumes_initialize_connection_failure(
self, mock_create_meta, mock_is_attached, mock_reserve, mock_init,
mock_get, mock_set_meta, mock_session):
"""Fail attachment upon an initialization failure."""
volume_id = '111111111-0000-0000-0000-000000000003'
volumes = [volume_id]
connector = {'foo': 'bar'}
mock_create_meta.return_value = {'bar': 'baz'}
mock_is_attached.return_value = False
mock_get.return_value = mock.Mock(attachments=[])
mock_init.side_effect = cinder_exceptions.NotAcceptable(
http_client.NOT_ACCEPTABLE)
with task_manager.acquire(self.context, self.node.uuid) as task:
self.assertRaises(exception.StorageError,
cinder.attach_volumes,
task,
volumes,
connector)
mock_get.assert_called_once_with(mock.ANY, volume_id)
mock_reserve.assert_called_once_with(mock.ANY, volume_id)
mock_init.assert_called_once_with(mock.ANY, volume_id, connector)
@mock.patch.object(cinderclient.volumes.VolumeManager, 'attach',
autospec=True)
@mock.patch.object(cinderclient.volumes.VolumeManager,
'initialize_connection', autospec=True)
@mock.patch.object(cinderclient.volumes.VolumeManager, 'reserve',
autospec=True)
@mock.patch.object(cinder, 'is_volume_attached', autospec=True)
@mock.patch.object(cinder, '_create_metadata_dictionary', autospec=True)
def test_attach_volumes_attach_record_failure(
self, mock_create_meta, mock_is_attached, mock_reserve,
mock_init, mock_attach, mock_get, mock_set_meta, mock_session):
"""Attach a volume and fail if final record failure occurs"""
volume_id = '111111111-0000-0000-0000-000000000003'
volumes = [volume_id]
connector = {'foo': 'bar'}
mock_create_meta.return_value = {'bar': 'baz'}
mock_is_attached.return_value = False
mock_get.return_value = mock.Mock(attachments=[], id='000-003')
mock_init.return_value = {
'driver_volume_type': 'iscsi',
'data': {
'target_iqn': 'iqn.2010-10.org.openstack:volume-00000002',
'target_portal': '127.0.0.0.1:3260',
'target_lun': 2}}
mock_attach.side_effect = cinder_exceptions.ClientException(
http_client.NOT_ACCEPTABLE, 'error')
with task_manager.acquire(self.context, self.node.uuid) as task:
self.assertRaises(exception.StorageError, cinder.attach_volumes,
task, volumes, connector)
mock_reserve.assert_called_once_with(mock.ANY, volume_id)
mock_init.assert_called_once_with(mock.ANY, volume_id, connector)
mock_attach.assert_called_once_with(mock.ANY, volume_id,
self.node.instance_uuid, None)
mock_get.assert_called_once_with(mock.ANY, volume_id)
mock_is_attached.assert_called_once_with(mock.ANY,
mock_get.return_value)
@mock.patch.object(cinderclient.volumes.VolumeManager, 'attach',
autospec=True)
@mock.patch.object(cinderclient.volumes.VolumeManager,
'initialize_connection', autospec=True)
@mock.patch.object(cinderclient.volumes.VolumeManager, 'reserve',
autospec=True)
@mock.patch.object(cinder, 'is_volume_attached', autospec=True)
@mock.patch.object(cinder, '_create_metadata_dictionary', autospec=True)
@mock.patch.object(cinder, 'LOG', autospec=True)
def test_attach_volumes_attach_set_meta_failure(
self, mock_log, mock_create_meta, mock_is_attached,
mock_reserve, mock_init, mock_attach, mock_get, mock_set_meta,
mock_session):
"""Attach a volume and tolerate set_metadata failure."""
expected = [{
'driver_volume_type': 'iscsi',
'data': {
'target_iqn': 'iqn.2010-10.org.openstack:volume-00000002',
'target_portal': '127.0.0.0.1:3260',
'volume_id': '111111111-0000-0000-0000-000000000003',
'target_lun': 2,
'ironic_volume_uuid': '000-000'}}]
volume_id = '111111111-0000-0000-0000-000000000003'
volumes = [volume_id]
connector = {'foo': 'bar'}
mock_create_meta.return_value = {'bar': 'baz'}
mock_is_attached.return_value = False
mock_get.return_value = mock.Mock(attachments=[], id='000-000')
mock_init.return_value = {
'driver_volume_type': 'iscsi',
'data': {
'target_iqn': 'iqn.2010-10.org.openstack:volume-00000002',
'target_portal': '127.0.0.0.1:3260',
'target_lun': 2}}
mock_set_meta.side_effect = cinder_exceptions.NotAcceptable(
http_client.NOT_ACCEPTABLE)
with task_manager.acquire(self.context, self.node.uuid) as task:
attachments = cinder.attach_volumes(task, volumes, connector)
self.assertEqual(expected, attachments)
mock_reserve.assert_called_once_with(mock.ANY, volume_id)
mock_init.assert_called_once_with(mock.ANY, volume_id, connector)
mock_attach.assert_called_once_with(mock.ANY, volume_id,
self.node.instance_uuid, None)
mock_set_meta.assert_called_once_with(mock.ANY, volume_id,
{'bar': 'baz'})
mock_get.assert_called_once_with(mock.ANY, volume_id)
mock_is_attached.assert_called_once_with(mock.ANY,
mock_get.return_value)
self.assertTrue(mock_log.warning.called)
@mock.patch.object(cinderclient.volumes.VolumeManager, 'detach',
autospec=True)
@mock.patch.object(cinderclient.volumes.VolumeManager,
'terminate_connection', autospec=True)
@mock.patch.object(cinderclient.volumes.VolumeManager, 'begin_detaching',
autospec=True)
@mock.patch.object(cinder, 'is_volume_attached', autospec=True)
@mock.patch.object(cinder, '_create_metadata_dictionary', autospec=True)
def test_detach_volumes(
self, mock_create_meta, mock_is_attached, mock_begin, mock_term,
mock_detach, mock_get, mock_set_meta, mock_session):
"""Iterate once and detach a volume without issues."""
volume_id = '111111111-0000-0000-0000-000000000003'
volumes = [volume_id]
connector = {'foo': 'bar'}
mock_create_meta.return_value = {'bar': 'baz'}
mock_is_attached.return_value = True
mock_get.return_value = mock.Mock(attachments=[
{'server_id': self.node.uuid, 'attachment_id': 'qux'}])
with task_manager.acquire(self.context, self.node.uuid) as task:
cinder.detach_volumes(task, volumes, connector, allow_errors=False)
mock_begin.assert_called_once_with(mock.ANY, volume_id)
mock_term.assert_called_once_with(mock.ANY, volume_id, {'foo': 'bar'})
mock_detach.assert_called_once_with(mock.ANY, volume_id, 'qux')
mock_set_meta.assert_called_once_with(mock.ANY, volume_id,
{'bar': 'baz'})
@mock.patch.object(cinderclient.volumes.VolumeManager, 'detach',
autospec=True)
@mock.patch.object(cinderclient.volumes.VolumeManager,
'terminate_connection', autospec=True)
@mock.patch.object(cinderclient.volumes.VolumeManager, 'begin_detaching',
autospec=True)
@mock.patch.object(cinder, '_create_metadata_dictionary', autospec=True)
def test_detach_volumes_one_detached(
self, mock_create_meta, mock_begin, mock_term, mock_detach,
mock_get, mock_set_meta, mock_session):
"""Iterate with two volumes, one already detached."""
volume_id = '111111111-0000-0000-0000-000000000003'
volumes = [volume_id, 'detached']
connector = {'foo': 'bar'}
mock_create_meta.return_value = {'bar': 'baz'}
mock_get.side_effect = [
mock.Mock(attachments=[
{'server_id': self.node.uuid, 'attachment_id': 'qux'}]),
mock.Mock(attachments=[])
]
with task_manager.acquire(self.context, self.node.uuid) as task:
cinder.detach_volumes(task, volumes, connector, allow_errors=False)
mock_begin.assert_called_once_with(mock.ANY, volume_id)
mock_term.assert_called_once_with(mock.ANY, volume_id, {'foo': 'bar'})
mock_detach.assert_called_once_with(mock.ANY, volume_id, 'qux')
mock_set_meta.assert_called_once_with(mock.ANY, volume_id,
{'bar': 'baz'})
@mock.patch.object(cinderclient.Client, '__init__', autospec=True)
def test_detach_volumes_client_init_failure_bad_request(
self, mock_client, mock_get, mock_set_meta, mock_session):
connector = {'foo': 'bar'}
volumes = ['111111111-0000-0000-0000-000000000003']
with task_manager.acquire(self.context, self.node.uuid) as task:
mock_client.side_effect = cinder_exceptions.BadRequest(
http_client.BAD_REQUEST)
self.assertRaises(exception.StorageError,
cinder.detach_volumes,
task,
volumes,
connector)
@mock.patch.object(cinderclient.Client, '__init__', autospec=True)
def test_detach_volumes_client_init_failure_invalid_parameter_value(
self, mock_client, mock_get, mock_set_meta, mock_session):
connector = {'foo': 'bar'}
volumes = ['111111111-0000-0000-0000-000000000003']
with task_manager.acquire(self.context, self.node.uuid) as task:
# While we would be permitting failures, this is an exception that
# must be raised since the client cannot be initialized.
mock_client.side_effect = exception.InvalidParameterValue('error')
self.assertRaises(exception.StorageError,
cinder.detach_volumes, task, volumes,
connector, allow_errors=True)
def test_detach_volumes_vol_not_found(self, mock_get, mock_set_meta,
mock_session):
"""Raise an error if the volume lookup fails"""
volumes = ['vol1']
connector = {'foo': 'bar'}
mock_get.side_effect = cinder_exceptions.NotFound(
http_client.NOT_FOUND, message='error')
with task_manager.acquire(self.context, self.node.uuid) as task:
self.assertRaises(exception.StorageError,
cinder.detach_volumes,
task,
volumes,
connector)
self.assertFalse(mock_set_meta.called)
# We should not raise any exception when issuing a command
# with errors being permitted.
cinder.detach_volumes(task, volumes, connector, allow_errors=True)
self.assertFalse(mock_set_meta.called)
@mock.patch.object(cinderclient.volumes.VolumeManager, 'detach',
autospec=True)
@mock.patch.object(cinderclient.volumes.VolumeManager,
'terminate_connection', autospec=True)
@mock.patch.object(cinderclient.volumes.VolumeManager, 'begin_detaching',
autospec=True)
@mock.patch.object(cinder, 'is_volume_attached', autospec=True)
@mock.patch.object(cinder, '_create_metadata_dictionary', autospec=True)
def test_detach_volumes_begin_detaching_failure(
self, mock_create_meta, mock_is_attached, mock_begin, mock_term,
mock_detach, mock_get, mock_set_meta, mock_session):
volume_id = '111111111-0000-0000-0000-000000000003'
volumes = [volume_id]
connector = {'foo': 'bar'}
volume = mock.Mock(attachments=[])
mock_get.return_value = volume
mock_create_meta.return_value = {'bar': 'baz'}
mock_is_attached.return_value = True
mock_begin.side_effect = cinder_exceptions.NotAcceptable(
http_client.NOT_ACCEPTABLE)
with task_manager.acquire(self.context, self.node.uuid) as task:
self.assertRaises(exception.StorageError,
cinder.detach_volumes,
task,
volumes,
connector)
mock_is_attached.assert_called_once_with(mock.ANY, volume)
cinder.detach_volumes(task, volumes, connector, allow_errors=True)
mock_term.assert_called_once_with(mock.ANY, volume_id,
{'foo': 'bar'})
mock_detach.assert_called_once_with(mock.ANY, volume_id, None)
mock_set_meta.assert_called_once_with(mock.ANY, volume_id,
{'bar': 'baz'})
@mock.patch.object(cinderclient.volumes.VolumeManager,
'terminate_connection', autospec=True)
@mock.patch.object(cinderclient.volumes.VolumeManager, 'begin_detaching',
autospec=True)
@mock.patch.object(cinder, 'is_volume_attached', autospec=True)
@mock.patch.object(cinder, '_create_metadata_dictionary', autospec=True)
def test_detach_volumes_term_failure(
self, mock_create_meta, mock_is_attached, mock_begin, mock_term,
mock_get, mock_set_meta, mock_session):
volume_id = '111111111-0000-0000-0000-000000000003'
volumes = [volume_id]
connector = {'foo': 'bar'}
mock_create_meta.return_value = {'bar': 'baz'}
mock_is_attached.return_value = True
mock_get.return_value = {'id': volume_id, 'attachments': []}
mock_term.side_effect = cinder_exceptions.NotAcceptable(
http_client.NOT_ACCEPTABLE)
with task_manager.acquire(self.context, self.node.uuid) as task:
self.assertRaises(exception.StorageError,
cinder.detach_volumes,
task,
volumes,
connector)
mock_begin.assert_called_once_with(mock.ANY, volume_id)
mock_term.assert_called_once_with(mock.ANY, volume_id, connector)
cinder.detach_volumes(task, volumes, connector, allow_errors=True)
self.assertFalse(mock_set_meta.called)
@mock.patch.object(cinderclient.volumes.VolumeManager, 'detach',
autospec=True)
@mock.patch.object(cinderclient.volumes.VolumeManager,
'terminate_connection', autospec=True)
@mock.patch.object(cinderclient.volumes.VolumeManager, 'begin_detaching',
autospec=True)
@mock.patch.object(cinder, 'is_volume_attached', autospec=True)
@mock.patch.object(cinder, '_create_metadata_dictionary', autospec=True)
def test_detach_volumes_detach_failure_errors_not_allowed(
self, mock_create_meta, mock_is_attached, mock_begin, mock_term,
mock_detach, mock_get, mock_set_meta, mock_session):
volume_id = '111111111-0000-0000-0000-000000000003'
volumes = [volume_id]
connector = {'foo': 'bar'}
mock_create_meta.return_value = {'bar': 'baz'}
mock_is_attached.return_value = True
mock_get.return_value = mock.Mock(attachments=[
{'server_id': self.node.uuid, 'attachment_id': 'qux'}])
mock_detach.side_effect = cinder_exceptions.NotAcceptable(
http_client.NOT_ACCEPTABLE)
with task_manager.acquire(self.context, self.node.uuid) as task:
self.assertRaises(exception.StorageError,
cinder.detach_volumes,
task,
volumes,
connector,
allow_errors=False)
mock_detach.assert_called_once_with(mock.ANY, volume_id, 'qux')
self.assertFalse(mock_set_meta.called)
@mock.patch.object(cinderclient.volumes.VolumeManager, 'detach',
autospec=True)
@mock.patch.object(cinderclient.volumes.VolumeManager,
'terminate_connection', autospec=True)
@mock.patch.object(cinderclient.volumes.VolumeManager, 'begin_detaching',
autospec=True)
@mock.patch.object(cinder, 'is_volume_attached', autospec=True)
@mock.patch.object(cinder, '_create_metadata_dictionary', autospec=True)
def test_detach_volumes_detach_failure_errors_allowed(
self, mock_create_meta, mock_is_attached, mock_begin, mock_term,
mock_detach, mock_get, mock_set_meta, mock_session):
volume_id = '111111111-0000-0000-0000-000000000003'
volumes = [volume_id]
connector = {'foo': 'bar'}
mock_create_meta.return_value = {'bar': 'baz'}
mock_is_attached.return_value = True
mock_get.return_value = mock.Mock(attachments=[
{'server_id': self.node.uuid, 'attachment_id': 'qux'}])
mock_set_meta.side_effect = cinder_exceptions.NotAcceptable(
http_client.NOT_ACCEPTABLE)
with task_manager.acquire(self.context, self.node.uuid) as task:
cinder.detach_volumes(task, volumes, connector, allow_errors=True)
mock_detach.assert_called_once_with(mock.ANY, volume_id, 'qux')
mock_set_meta.assert_called_once_with(mock.ANY, volume_id,
{'bar': 'baz'})
    @mock.patch.object(cinderclient.volumes.VolumeManager, 'detach',
                       autospec=True)
    @mock.patch.object(cinderclient.volumes.VolumeManager,
                       'terminate_connection', autospec=True)
    @mock.patch.object(cinderclient.volumes.VolumeManager, 'begin_detaching',
                       autospec=True)
    @mock.patch.object(cinder, 'is_volume_attached', autospec=True)
    @mock.patch.object(cinder, '_create_metadata_dictionary', autospec=True)
    def test_detach_volumes_detach_meta_failure_errors_not_allowed(
            self, mock_create_meta, mock_is_attached, mock_begin, mock_term,
            mock_detach, mock_get, mock_set_meta, mock_session):
        """A metadata-update failure does not abort detach_volumes.

        Even with allow_errors=False no exception is expected here --
        presumably metadata-save failures are only logged by the code
        under test rather than raised; confirm against cinder.detach_volumes.
        """
        volume_id = '111111111-0000-0000-0000-000000000003'
        volumes = [volume_id]
        connector = {'foo': 'bar'}
        mock_create_meta.return_value = {'bar': 'baz'}
        # Volume is currently attached to this node.
        mock_is_attached.return_value = True
        mock_get.return_value = mock.Mock(attachments=[
            {'server_id': self.node.uuid, 'attachment_id': 'qux'}])
        # Inject a client error into the metadata-save step only.
        mock_set_meta.side_effect = cinder_exceptions.NotAcceptable(
            http_client.NOT_ACCEPTABLE)
        with task_manager.acquire(self.context, self.node.uuid) as task:
            cinder.detach_volumes(task, volumes, connector, allow_errors=False)
        mock_detach.assert_called_once_with(mock.ANY, volume_id, 'qux')
        mock_set_meta.assert_called_once_with(mock.ANY, volume_id,
                                              {'bar': 'baz'})
| apache-2.0 |
toshywoshy/ansible | lib/ansible/modules/network/f5/bigip_device_dns.py | 23 | 16644 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2017, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: bigip_device_dns
short_description: Manage BIG-IP device DNS settings
description:
- Manage BIG-IP device DNS settings.
version_added: 2.2
options:
cache:
description:
- Specifies whether the system caches DNS lookups or performs the
operation each time a lookup is needed. Please note that this applies
only to Access Policy Manager features, such as ACLs, web application
rewrites, and authentication.
type: str
choices:
- enabled
- disabled
- enable
- disable
name_servers:
description:
- A list of name servers that the system uses to validate DNS lookups
type: list
search:
description:
- A list of domains that the system searches for local domain lookups,
to resolve local host names.
type: list
ip_version:
description:
- Specifies whether the DNS specifies IP addresses using IPv4 or IPv6.
type: int
choices:
- 4
- 6
state:
description:
- The state of the variable on the system. When C(present), guarantees
that an existing variable is set to C(value).
type: str
choices:
- absent
- present
default: present
extends_documentation_fragment: f5
author:
- Tim Rupp (@caphrim007)
- Wojciech Wypior (@wojtek0806)
'''
EXAMPLES = r'''
- name: Set the DNS settings on the BIG-IP
bigip_device_dns:
name_servers:
- 208.67.222.222
- 208.67.220.220
search:
- localdomain
- lab.local
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
'''
RETURN = r'''
cache:
description: The new value of the DNS caching
returned: changed
type: str
sample: enabled
name_servers:
description: List of name servers that were set
returned: changed
type: list
sample: ['192.0.2.10', '172.17.12.10']
search:
description: List of search domains that were set
returned: changed
type: list
sample: ['192.0.2.10', '172.17.12.10']
ip_version:
description: IP version that was set that DNS will specify IP addresses in
returned: changed
type: int
sample: 4
warnings:
description: The list of warnings (if any) generated by module based on arguments
returned: always
type: list
sample: ['...', '...']
'''
from ansible.module_utils.basic import AnsibleModule
try:
from library.module_utils.network.f5.bigip import F5RestClient
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import fq_name
from library.module_utils.network.f5.common import f5_argument_spec
from library.module_utils.network.f5.common import is_empty_list
except ImportError:
from ansible.module_utils.network.f5.bigip import F5RestClient
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import fq_name
from ansible.module_utils.network.f5.common import f5_argument_spec
from ansible.module_utils.network.f5.common import is_empty_list
class Parameters(AnsibleF5Parameters):
    """Shared parameter metadata for the BIG-IP device DNS module."""
    # Maps BIG-IP REST attribute names to module parameter names.
    api_map = {
        'dns.cache': 'cache',
        'nameServers': 'name_servers',
        'include': 'ip_version',
    }
    # Attributes sent to the device on update (API-side names).
    api_attributes = [
        'nameServers', 'search', 'include',
    ]
    # Parameters compared between desired and current state.
    updatables = [
        'cache', 'name_servers', 'search', 'ip_version',
    ]
    # Parameters echoed back to the user when changed.
    returnables = [
        'cache', 'name_servers', 'search', 'ip_version',
    ]
    # Parameters that can be removed when state=absent.
    absentables = [
        'name_servers', 'search',
    ]
class ApiParameters(Parameters):
    """Parameters as returned by the BIG-IP REST API; no translation needed."""
    pass
class ModuleParameters(Parameters):
    """Normalizes user-supplied module arguments into API-ready values."""

    @property
    def search(self):
        """Search domains as a list; '' stays None, empty lists collapse to []."""
        value = self._values['search']
        if value is None:
            return None
        if isinstance(value, str) and value != "":
            # A bare string becomes a single-element list.
            return [str(value)]
        if is_empty_list(value):
            return []
        return value

    @property
    def name_servers(self):
        """Name servers as a list, using the same normalization as search."""
        value = self._values['name_servers']
        if value is None:
            return None
        if isinstance(value, str) and value != "":
            return [str(value)]
        if is_empty_list(value):
            return []
        return value

    @property
    def cache(self):
        """Map enabled/enable -> 'enable', everything else -> 'disable'."""
        raw = self._values['cache']
        if raw is None:
            return None
        return 'enable' if str(raw) in ('enabled', 'enable') else 'disable'

    @property
    def ip_version(self):
        """Translate 4/6 into the resolv.conf-style API value."""
        version = self._values['ip_version']
        if version == 6:
            return "options inet6"
        if version == 4:
            return ""
        return None
class Changes(Parameters):
    """Holds the set of values that differ between desired and actual state."""
    def to_return(self):
        """Return a dict of changed, returnable parameters for the result."""
        result = {}
        try:
            for returnable in self.returnables:
                change = getattr(self, returnable)
                if isinstance(change, dict):
                    result.update(change)
                else:
                    result[returnable] = change
            result = self._filter_params(result)
        except Exception:
            # Deliberately broad: any failure while collecting a property
            # yields a partial (possibly empty) result instead of failing
            # the whole module run.
            pass
        return result
class UsableChanges(Changes):
    """Changes in the form sent to the BIG-IP REST API."""
    pass
class ReportableChanges(Changes):
    """Changes translated back into user-facing values for the result."""

    @property
    def ip_version(self):
        """Reverse-map the API resolv.conf option back to 4/6."""
        raw = self._values['ip_version']
        if raw == 'options inet6':
            return 6
        if raw == "":
            return 4
        return None
class Difference(object):
    """Computes per-parameter differences between desired and current state."""
    def __init__(self, want, have=None):
        self.want = want
        self.have = have
    def compare(self, param):
        """Return the changed value for *param*, or None when unchanged."""
        try:
            result = getattr(self, param)
            return result
        except AttributeError:
            return self.__default(param)
    def __default(self, param):
        # Fallback: plain inequality between desired and current value.
        attr1 = getattr(self.want, param)
        try:
            attr2 = getattr(self.have, param)
            if attr1 != attr2:
                return attr1
        except AttributeError:
            return attr1
    @property
    def ip_version(self):
        # "" (IPv4) against a device that never set the option counts as
        # no change.
        if self.want.ip_version is None:
            return None
        if self.want.ip_version == "" and self.have.ip_version is None:
            return None
        if self.want.ip_version == self.have.ip_version:
            return None
        if self.want.ip_version != self.have.ip_version:
            return self.want.ip_version
    @property
    def name_servers(self):
        state = self.want.state
        if self.want.name_servers is None:
            return None
        if state == 'absent':
            # NOTE(review): if have.name_servers is None while
            # want.name_servers is an empty list, neither guard fires and
            # set(None) below raises TypeError -- confirm this combination
            # cannot occur (required_one_of may not prevent it).
            if self.have.name_servers is None and self.want.name_servers:
                return None
            if set(self.want.name_servers) == set(self.have.name_servers):
                return []
            if set(self.want.name_servers) != set(self.have.name_servers):
                return list(set(self.want.name_servers).difference(self.have.name_servers))
        if not self.want.name_servers:
            if self.have.name_servers is None:
                return None
            if self.have.name_servers is not None:
                return self.want.name_servers
        if self.have.name_servers is None:
            return self.want.name_servers
        if set(self.want.name_servers) != set(self.have.name_servers):
            return self.want.name_servers
    @property
    def search(self):
        # Same shape as name_servers, but the empty-want check precedes
        # the absent handling here (ordering differs between the two).
        state = self.want.state
        if self.want.search is None:
            return None
        if not self.want.search:
            if self.have.search is None:
                return None
            if self.have.search is not None:
                return self.want.search
        if state == 'absent':
            if self.have.search is None and self.want.search:
                return None
            if set(self.want.search) == set(self.have.search):
                return []
            if set(self.want.search) != set(self.have.search):
                return list(set(self.want.search).difference(self.have.search))
        if self.have.search is None:
            return self.want.search
        if set(self.want.search) != set(self.have.search):
            return self.want.search
class ModuleManager(object):
    """Drives the module: diffs desired vs. device state and applies changes."""
    def __init__(self, *args, **kwargs):
        self.module = kwargs.pop('module', None)
        self.client = F5RestClient(**self.module.params)
        self.want = ModuleParameters(params=self.module.params)
        self.have = ApiParameters()
        self.changes = UsableChanges()
    def _announce_deprecations(self, result):
        """Forward any deprecation warnings collected in the result."""
        warnings = result.pop('__warnings', [])
        for warning in warnings:
            self.module.deprecate(
                msg=warning['msg'],
                version=warning['version']
            )
    def _update_changed_options(self):
        """Diff updatable params; store changes and report whether any exist."""
        diff = Difference(self.want, self.have)
        updatables = Parameters.updatables
        changed = dict()
        for k in updatables:
            change = diff.compare(k)
            if change is None:
                continue
            else:
                if isinstance(change, dict):
                    changed.update(change)
                else:
                    changed[k] = change
        if changed:
            self.changes = UsableChanges(params=changed)
            return True
        return False
    def _absent_changed_options(self):
        """Same as _update_changed_options, but over the absentable params."""
        diff = Difference(self.want, self.have)
        absentables = Parameters.absentables
        changed = dict()
        for k in absentables:
            change = diff.compare(k)
            if change is None:
                continue
            else:
                if isinstance(change, dict):
                    changed.update(change)
                else:
                    changed[k] = change
        if changed:
            self.changes = UsableChanges(params=changed)
            return True
        return False
    def exec_module(self):
        """Entry point: dispatch on state and build the Ansible result dict."""
        changed = False
        result = dict()
        state = self.want.state
        if state == "present":
            changed = self.update()
        elif state == "absent":
            changed = self.absent()
        reportable = ReportableChanges(params=self.changes.to_return())
        changes = reportable.to_return()
        result.update(**changes)
        result.update(dict(changed=changed))
        self._announce_deprecations(result)
        return result
    def update(self):
        """Apply desired settings; honors check mode. Returns changed flag."""
        self.have = self.read_current_from_device()
        if not self.should_update():
            return False
        if self.module.check_mode:
            return True
        self.update_on_device()
        return True
    def should_update(self):
        result = self._update_changed_options()
        if result:
            return True
        return False
    def should_absent(self):
        result = self._absent_changed_options()
        if result:
            return True
        return False
    def absent(self):
        """Remove listed values from device settings; honors check mode."""
        self.have = self.read_current_from_device()
        if not self.should_absent():
            return False
        if self.module.check_mode:
            return True
        self.absent_on_device()
        return True
    def read_dns_cache_setting(self):
        """GET the dns.cache db variable; raises F5ModuleError on API errors."""
        uri = "https://{0}:{1}/mgmt/tm/sys/db/{2}".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            'dns.cache'
        )
        resp = self.client.api.get(uri)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'code' in response and response['code'] == 400:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)
        return response
    def read_current_from_device(self):
        """GET sys/dns settings, merging in the dns.cache db value."""
        cache = self.read_dns_cache_setting()
        uri = "https://{0}:{1}/mgmt/tm/sys/dns/".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
        )
        resp = self.client.api.get(uri)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'code' in response and response['code'] == 400:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)
        if cache:
            response['cache'] = cache['value']
        return ApiParameters(params=response)
    def update_on_device(self):
        """PATCH sys/dns with the diff, then dns.cache separately if set."""
        params = self.changes.api_params()
        if params:
            uri = "https://{0}:{1}/mgmt/tm/sys/dns/".format(
                self.client.provider['server'],
                self.client.provider['server_port'],
            )
            resp = self.client.api.patch(uri, json=params)
            try:
                response = resp.json()
            except ValueError as ex:
                raise F5ModuleError(str(ex))
            if 'code' in response and response['code'] == 400:
                if 'message' in response:
                    raise F5ModuleError(response['message'])
                else:
                    raise F5ModuleError(resp.content)
        if self.want.cache:
            # cache lives in the sys/db namespace, not sys/dns.
            uri = "https://{0}:{1}/mgmt/tm/sys/db/{2}".format(
                self.client.provider['server'],
                self.client.provider['server_port'],
                'dns.cache'
            )
            payload = {"value": self.want.cache}
            resp = self.client.api.patch(uri, json=payload)
            try:
                response = resp.json()
            except ValueError as ex:
                raise F5ModuleError(str(ex))
            if 'code' in response and response['code'] == 400:
                if 'message' in response:
                    raise F5ModuleError(response['message'])
                else:
                    raise F5ModuleError(resp.content)
    def absent_on_device(self):
        """PATCH sys/dns with the reduced lists computed by the diff."""
        params = self.changes.api_params()
        uri = "https://{0}:{1}/mgmt/tm/sys/dns/".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
        )
        resp = self.client.api.patch(uri, json=params)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'code' in response and response['code'] == 400:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)
class ArgumentSpec(object):
    """Builds the AnsibleModule argument specification for this module."""

    def __init__(self):
        self.supports_check_mode = True
        module_args = dict(
            cache=dict(
                choices=['disabled', 'enabled', 'disable', 'enable']
            ),
            name_servers=dict(type='list'),
            search=dict(type='list'),
            ip_version=dict(
                choices=[4, 6],
                type='int'
            ),
            state=dict(
                default='present',
                choices=['absent', 'present']
            ),
        )
        # Common F5 provider options first, then module-specific ones.
        spec = {}
        spec.update(f5_argument_spec)
        spec.update(module_args)
        self.argument_spec = spec
        # At least one setting must be supplied for the module to act on.
        self.required_one_of = [
            ['name_servers', 'search', 'ip_version', 'cache']
        ]
def main():
    """Module entry point: build the module, run the manager, report."""
    spec = ArgumentSpec()
    module = AnsibleModule(
        argument_spec=spec.argument_spec,
        supports_check_mode=spec.supports_check_mode,
        required_one_of=spec.required_one_of
    )
    try:
        manager = ModuleManager(module=module)
        module.exit_json(**manager.exec_module())
    except F5ModuleError as ex:
        # Surface any F5-layer failure as a module failure message.
        module.fail_json(msg=str(ex))
if __name__ == '__main__':
    main()
| gpl-3.0 |
stuartbfox/Diamond | src/collectors/sockstat/test/testsockstat.py | 31 | 2201 | #!/usr/bin/python
# coding=utf-8
##########################################################################
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
from mock import call
from collections import Iterator
from diamond.collector import Collector
from sockstat import SockstatCollector
##########################################################################
class TestSockstatCollector(CollectorTestCase):
    """Unit tests for SockstatCollector (written against Python 2)."""
    def setUp(self):
        # Minimal collector config; no handlers are attached.
        config = get_collector_config('SockstatCollector', {
            'interval': 10
        })
        self.collector = SockstatCollector(config, None)
    def test_import(self):
        """The collector class is importable."""
        self.assertTrue(SockstatCollector)
    # NOTE(review): '__builtin__.open' is the Python 2 spelling; under
    # Python 3 this target would be 'builtins.open'.
    @patch('__builtin__.open')
    @patch('os.access', Mock(return_value=True))
    @patch.object(Collector, 'publish')
    def test_should_open_proc_net_sockstat(self, publish_mock, open_mock):
        """collect() opens both the IPv4 and IPv6 sockstat proc files."""
        # Fake file object: an iterator that immediately signals EOF.
        # Uses the Python 2 iterator protocol (next()); collections.Iterator
        # was removed from modern Python (use collections.abc.Iterator).
        class Klass(Iterator):
            def close(self):
                pass
            def next(self):
                raise StopIteration
        open_mock.return_value = Klass()
        self.collector.collect()
        calls = [call('/proc/net/sockstat'), call('/proc/net/sockstat6')]
        open_mock.assert_has_calls(calls)
    @patch.object(Collector, 'publish')
    def test_should_work_with_real_data(self, publish_mock):
        """collect() parses the fixture files into the expected metrics."""
        SockstatCollector.PROCS = [self.getFixturePath('proc_net_sockstat'),
                                   self.getFixturePath('proc_net_sockstat6')]
        self.collector.collect()
        metrics = {
            'used': 118,
            'tcp_inuse': 61,
            'tcp_orphan': 0,
            'tcp_tw': 1,
            'tcp_alloc': 13,
            'tcp_mem': 1,
            'udp_inuse': 6,
            'udp_mem': 0
        }
        self.setDocExample(collector=self.collector.__class__.__name__,
                           metrics=metrics,
                           defaultpath=self.collector.config['path'])
        self.assertPublishedMany(publish_mock, metrics)
##########################################################################
if __name__ == "__main__":
unittest.main()
| mit |
Vogtinator/micropython | tests/pyb/modtime.py | 9 | 1487 | import time
DAYS_PER_MONTH = [0, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
def is_leap(year):
    """Return True if *year* is a leap year (proleptic Gregorian rule).

    Century years are leap only when divisible by 400 (2000 is a leap
    year, 1900 and 2100 are not).  For the 2000-2033 range exercised by
    test() this matches the original ``year % 4 == 0`` shortcut, but it
    is now also correct outside that window.
    """
    return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)
def test():
    """Cross-check time.mktime() and time.localtime() for 2000-2033.

    Walks every calendar day, verifying that mktime() of the date equals
    a running epoch-second counter, that localtime() round-trips through
    mktime(), and that the year-day and week-day fields are correct.
    Relies on the MicroPython time API: 8-element time tuples and an
    epoch of 2000-01-01 (a Saturday).
    """
    seconds = 0
    wday = 5  # Jan 1, 2000 was a Saturday
    for year in range(2000, 2034):
        print("Testing %d" % year)
        yday = 1
        for month in range(1, 13):
            # Patch February's length in the shared table for leap years.
            if month == 2 and is_leap(year):
                DAYS_PER_MONTH[2] = 29
            else:
                DAYS_PER_MONTH[2] = 28
            for day in range(1, DAYS_PER_MONTH[month] + 1):
                secs = time.mktime((year, month, day, 0, 0, 0, 0, 0))
                if secs != seconds:
                    print("mktime failed for %d-%02d-%02d got %d expected %d" % (year, month, day, secs, seconds))
                tuple = time.localtime(seconds)
                secs = time.mktime(tuple)
                if secs != seconds:
                    # (message typo "locatime" fixed to "localtime")
                    print("localtime failed for %d-%02d-%02d got %d expected %d" % (year, month, day, secs, seconds))
                    return
                seconds += 86400
                if yday != tuple[7]:
                    print("localtime for %d-%02d-%02d got yday %d, expecting %d" % (year, month, day, tuple[7], yday))
                    return
                if wday != tuple[6]:
                    print("localtime for %d-%02d-%02d got wday %d, expecting %d" % (year, month, day, tuple[6], wday))
                    return
                yday += 1
                wday = (wday + 1) % 7
test()
| mit |
barseghyanartur/magic-wormhole | src/wormhole/blocking/transit.py | 4 | 15256 | from __future__ import print_function
import re, time, threading, socket, SocketServer
from binascii import hexlify, unhexlify
from nacl.secret import SecretBox
from ..util import ipaddrs
from ..util.hkdf import HKDF
class TransitError(Exception):
    """Base class for errors raised by the transit layer."""
    pass
# The beginning of each TCP connection consists of the following handshake
# messages. The sender transmits the same text regardless of whether it is on
# the initiating/connecting end of the TCP connection, or on the
# listening/accepting side. Same for the receiver.
#
# sender -> receiver: transit sender TXID_HEX ready\n\n
# receiver -> sender: transit receiver RXID_HEX ready\n\n
#
# Any deviations from this result in the socket being closed. The handshake
# messages are designed to provoke an invalid response from other sorts of
# servers (HTTP, SMTP, echo).
#
# If the sender is satisfied with the handshake, and this is the first socket
# to complete negotiation, the sender does:
#
# sender -> receiver: go\n
#
# and the next byte on the wire will be from the application.
#
# If this is not the first socket, the sender does:
#
# sender -> receiver: nevermind\n
#
# and closes the socket.
# So the receiver looks for "transit sender TXID_HEX ready\n\ngo\n" and hangs
# up upon the first wrong byte. The sender lookgs for "transit receiver
# RXID_HEX ready\n\n" and then makes a first/not-first decision about sending
# "go\n" or "nevermind\n"+close().
def build_receiver_handshake(key):
    """Build the receiver's handshake line from the shared transit key.

    NOTE(review): under Python 3 hexlify() returns bytes, so "%s" would
    embed "b'..'" in the string -- this module appears to target Python 2.
    """
    hexid = HKDF(key, 32, CTXinfo=b"transit_receiver")
    return "transit receiver %s ready\n\n" % hexlify(hexid)
def build_sender_handshake(key):
    """Build the sender's handshake line from the shared transit key.

    Same Python 2 string/bytes caveat as build_receiver_handshake.
    """
    hexid = HKDF(key, 32, CTXinfo=b"transit_sender")
    return "transit sender %s ready\n\n" % hexlify(hexid)
def build_relay_handshake(key):
    """Build the relay-server token line derived from the transit key."""
    token = HKDF(key, 32, CTXinfo=b"transit_relay_token")
    return "please relay %s\n" % hexlify(token)
TIMEOUT=15
# 1: sender only transmits, receiver only accepts, both wait forever
# 2: sender also accepts, receiver also transmits
# 3: timeouts / stop when no more progress can be made
# 4: add relay
# 5: accelerate shutdown of losing sockets
class BadHandshake(Exception):
    """Raised when a peer sends bytes that deviate from the handshake."""
    pass
def force_ascii(s):
    """Return *s* as an ASCII byte string; byte input passes through."""
    is_text = isinstance(s, type(u""))
    return s.encode("ascii") if is_text else s
def send_to(skt, data):
    """Write all of *data* to *skt*, looping over partial sends."""
    offset = 0
    total = len(data)
    while offset < total:
        # send() may accept fewer bytes than offered; advance by its count.
        offset += skt.send(data[offset:])
def wait_for(skt, expected, description):
    """Read one byte at a time until *expected* arrives in full.

    Raises BadHandshake as soon as any received prefix deviates, so a
    misbehaving peer is rejected on the first wrong byte.
    """
    received = b""
    target = len(expected)
    while len(received) < target:
        received += skt.recv(1)
        if received != expected[:len(received)]:
            raise BadHandshake("got '%r' want '%r' on %s" %
                               (received, expected, description))
# The hint format is: TYPE,VALUE= /^([a-zA-Z0-9]+):(.*)$/ . VALUE depends
# upon TYPE, and it can have more colons in it. For TYPE=tcp (the only one
# currently defined), ADDR,PORT = /^(.*):(\d+)$/ , so ADDR can have colons.
# ADDR can be a hostname, ipv4 dotted-quad, or ipv6 colon-hex. If the hint
# publisher wants anonymity, their only hint's ADDR will end in .onion .
def parse_hint_tcp(hint):
    """Parse a "tcp:ADDR:PORT" hint string into an (addr, port) tuple.

    Returns None (after printing a diagnostic) for anything that is not
    a well-formed TCP hint.  ADDR may itself contain colons (IPv6,
    .onion hostnames); the port is the final colon-separated field.
    """
    mo = re.search(r'^([a-zA-Z0-9]+):(.*)$', hint)
    if not mo:
        print("unparseable hint '%s'" % (hint,))
        return None
    if mo.group(1) != "tcp":
        print("unknown hint type '%s' in '%s'" % (mo.group(1), hint))
        return None
    addr_port = re.search(r'^(.*):(\d+)$', mo.group(2))
    if not addr_port:
        print("unparseable TCP hint '%s'" % (hint,))
        return None
    try:
        port = int(addr_port.group(2))
    except ValueError:
        # Defensive: the regex already guarantees digits here.
        print("non-numeric port in TCP hint '%s'" % (hint,))
        return None
    return addr_port.group(1), port
def debug(msg):
    """Print *msg* when transit debugging is enabled (hard-wired off)."""
    enabled = False
    if enabled:
        print(msg)
def since(start):
    """Return seconds elapsed since *start* (a time.time() timestamp)."""
    now = time.time()
    return now - start
def connector(owner, hint, description,
              send_handshake, expected_handshake, relay_handshake=None):
    """Thread target: dial one connection hint and run the handshake.

    On success, hands the connected socket to owner._negotiation_finished;
    on any failure the socket is closed and owner._connector_failed(hint)
    is called instead.  When relay_handshake is given, the relay token is
    sent and an "ok" awaited before the normal handshake.
    """
    start = time.time()
    parsed_hint = parse_hint_tcp(hint)
    if not parsed_hint:
        return # unparseable
    addr,port = parsed_hint
    skt = None
    debug("+ connector(%s)" % hint)
    try:
        skt = socket.create_connection((addr,port),
                                       TIMEOUT) # timeout or ECONNREFUSED
        skt.settimeout(TIMEOUT)
        debug(" - socket(%s) connected CT+%.1f" % (description, since(start)))
        if relay_handshake:
            debug(" - sending relay_handshake")
            send_to(skt, relay_handshake)
            wait_for(skt, "ok\n", description)
            debug(" - relay ready CT+%.1f" % (since(start),))
        send_to(skt, send_handshake)
        wait_for(skt, expected_handshake, description)
        debug(" + connector(%s) ready CT+%.1f" % (hint, since(start)))
    except Exception as e:
        debug(" - timeout(%s) CT+%.1f" % (hint, since(start)))
        try:
            # shutdown() raises if the socket is already closed; ignore.
            if skt:
                skt.shutdown(socket.SHUT_WR)
        except socket.error:
            pass
        if skt:
            skt.close()
        # ignore socket errors, warn about coding errors
        if not isinstance(e, (socket.error, socket.timeout, BadHandshake)):
            raise
        debug(" - notifying owner._connector_failed(%s) CT+%.1f" % (hint, since(start)))
        owner._connector_failed(hint)
        return
    # owner is now responsible for the socket
    owner._negotiation_finished(skt, description) # note thread
def handle(skt, client_address, owner, description,
           send_handshake, expected_handshake):
    """Thread target for an inbound connection: run the handshake.

    Sends our handshake, then reads the peer's byte-by-byte, hanging up
    on the first deviation.  On success the socket is handed to
    owner._negotiation_finished; on failure it is closed quietly.
    """
    try:
        debug("handle %r" %  (skt,))
        skt.settimeout(TIMEOUT)
        send_to(skt, send_handshake)
        got = b""
        # for the receiver, this includes the "go\n"
        while len(got) < len(expected_handshake):
            more = skt.recv(1)
            if not more:
                raise BadHandshake("disconnect after merely '%r'" % got)
            got += more
            if expected_handshake[:len(got)] != got:
                raise BadHandshake("got '%r' want '%r'" %
                                   (got, expected_handshake))
        debug("handler negotiation finished %r" % (client_address,))
    except Exception as e:
        debug("handler failed %r" % (client_address,))
        try:
            # this raises socket.err(EBADF) if the socket was already closed
            skt.shutdown(socket.SHUT_WR)
        except socket.error:
            pass
        skt.close() # this appears to be idempotent
        # ignore socket errors, warn about coding errors
        if not isinstance(e, (socket.error, socket.timeout, BadHandshake)):
            raise
        return
    # owner is now responsible for the socket
    owner._negotiation_finished(skt, description) # note thread
class MyTCPServer(SocketServer.TCPServer):
    """Listener whose handler threads wait for the transit key to arrive."""
    allow_reuse_address = True
    def process_request(self, request, client_address):
        """Spawn a daemon handler thread once the transit key is known."""
        description = "<-tcp:%s:%d" % (client_address[0], client_address[1])
        kc = self.owner._have_transit_key
        kc.acquire()
        while not self.owner._transit_key:
            kc.wait()
        # owner._transit_key is either None or set to a value. We don't
        # modify it from here, so we can release the condition lock before
        # grabbing the key.
        kc.release()
        # Once it is set, we can get handler_(send|receive)_handshake, which
        # is what we actually care about.
        t = threading.Thread(target=handle,
                             args=(request, client_address,
                                   self.owner, description,
                                   self.owner.handler_send_handshake,
                                   self.owner.handler_expected_handshake))
        t.daemon = True
        t.start()
class TransitClosed(TransitError):
    """Raised when the peer closes the transit connection mid-read."""
    pass
class BadNonce(TransitError):
    """Raised when a record arrives with an out-of-order nonce."""
    pass
class ReceiveBuffer:
    """Buffers socket reads so callers can request exact byte counts."""

    def __init__(self, skt):
        self.skt = skt
        self.buf = b""

    def read(self, count):
        """Return exactly *count* bytes, raising TransitClosed on EOF."""
        while len(self.buf) < count:
            chunk = self.skt.recv(4096)
            if not chunk:
                raise TransitClosed
            self.buf += chunk
        result, self.buf = self.buf[:count], self.buf[count:]
        return result
class RecordPipe:
    """Encrypted, framed record channel over a negotiated transit socket.

    Each record is NaCl SecretBox-encrypted with a monotonically
    increasing nonce and sent as a 4-byte big-endian length prefix
    followed by the ciphertext.  Received nonces must arrive strictly
    in order or BadNonce is raised.
    """
    def __init__(self, skt, send_key, receive_key):
        self.skt = skt
        self.send_box = SecretBox(send_key)
        self.send_nonce = 0
        self.receive_buf = ReceiveBuffer(self.skt)
        self.receive_box = SecretBox(receive_key)
        self.next_receive_nonce = 0
    def send_record(self, record):
        """Encrypt *record* and write it as one length-prefixed frame."""
        assert SecretBox.NONCE_SIZE == 24
        assert self.send_nonce < 2**(8*24)
        assert len(record) < 2**(8*4)
        nonce = unhexlify("%048x" % self.send_nonce) # big-endian
        self.send_nonce += 1
        encrypted = self.send_box.encrypt(record, nonce)
        length = unhexlify("%08x" % len(encrypted)) # always 4 bytes long
        send_to(self.skt, length)
        send_to(self.skt, encrypted)
    def receive_record(self):
        """Read one frame, enforce nonce ordering, and return the plaintext."""
        length_buf = self.receive_buf.read(4)
        length = int(hexlify(length_buf), 16)
        encrypted = self.receive_buf.read(length)
        nonce_buf = encrypted[:SecretBox.NONCE_SIZE] # assume it's prepended
        nonce = int(hexlify(nonce_buf), 16)
        if nonce != self.next_receive_nonce:
            raise BadNonce("received out-of-order record")
        self.next_receive_nonce += 1
        record = self.receive_box.decrypt(encrypted)
        return record
    def close(self):
        """Close the underlying socket."""
        self.skt.close()
class Common:
    """Shared machinery for both transit endpoints.

    Listens on a local TCP port, races outbound connectors against
    inbound handlers (direct hints first, relay as fallback), and the
    first socket to finish the handshake wins.
    """
    def __init__(self, transit_relay):
        self._transit_relay = transit_relay
        self.winning = threading.Event()
        self._negotiation_check_lock = threading.Lock()
        self._have_transit_key = threading.Condition()
        self._transit_key = None
        self._start_server()
    def _start_server(self):
        """Start the inbound listener on an ephemeral port (daemon thread)."""
        server = MyTCPServer(("", 0), None)
        _, port = server.server_address
        self.my_direct_hints = ["tcp:%s:%d" % (addr, port)
                                for addr in ipaddrs.find_addresses()]
        server.owner = self
        server_thread = threading.Thread(target=server.serve_forever)
        server_thread.daemon = True
        server_thread.start()
        self.listener = server
    def get_direct_hints(self):
        """Hints the peer can use to connect to us directly."""
        return self.my_direct_hints
    def get_relay_hints(self):
        """Hints for the shared relay server."""
        return [self._transit_relay]
    def add_their_direct_hints(self, hints):
        self._their_direct_hints = [force_ascii(h) for h in hints]
    def add_their_relay_hints(self, hints):
        self._their_relay_hints = [force_ascii(h) for h in hints]
    def _send_this(self):
        """The handshake string we transmit (role-dependent)."""
        if self.is_sender:
            return build_sender_handshake(self._transit_key)
        else:
            return build_receiver_handshake(self._transit_key)
    def _expect_this(self):
        """The handshake we expect; the receiver also waits for "go"."""
        if self.is_sender:
            return build_receiver_handshake(self._transit_key)
        else:
            return build_sender_handshake(self._transit_key) + "go\n"
    def _sender_record_key(self):
        # Keys are direction-specific so the two record streams never
        # share a (key, nonce) pair.
        if self.is_sender:
            return HKDF(self._transit_key, SecretBox.KEY_SIZE,
                        CTXinfo=b"transit_record_sender_key")
        else:
            return HKDF(self._transit_key, SecretBox.KEY_SIZE,
                        CTXinfo=b"transit_record_receiver_key")
    def _receiver_record_key(self):
        if self.is_sender:
            return HKDF(self._transit_key, SecretBox.KEY_SIZE,
                        CTXinfo=b"transit_record_receiver_key")
        else:
            return HKDF(self._transit_key, SecretBox.KEY_SIZE,
                        CTXinfo=b"transit_record_sender_key")
    def set_transit_key(self, key):
        # This _have_transit_key condition/lock protects us against the race
        # where the sender knows the hints and the key, and connects to the
        # receiver's transit socket before the receiver gets relay message
        # (and thus the key).
        self._have_transit_key.acquire()
        self._transit_key = key
        self.handler_send_handshake = self._send_this() # no "go"
        self.handler_expected_handshake = self._expect_this()
        self._have_transit_key.notify_all()
        self._have_transit_key.release()
    def _start_outbound(self):
        """Kick off one connector thread per direct hint (relay if none)."""
        self._active_connectors = set(self._their_direct_hints)
        for hint in self._their_direct_hints:
            self._start_connector(hint)
        if not self._their_direct_hints:
            self._start_relay_connectors()
    def _start_connector(self, hint, is_relay=False):
        description = "->%s" % (hint,)
        if is_relay:
            description = "->relay:%s" % (hint,)
        args = (self, hint, description,
                self._send_this(), self._expect_this())
        if is_relay:
            args = args + (build_relay_handshake(self._transit_key),)
        t = threading.Thread(target=connector, args=args)
        t.daemon = True
        t.start()
    def _start_relay_connectors(self):
        # NOTE(review): this re-adds the *direct* hints to
        # _active_connectors; presumably _their_relay_hints was intended
        # (it is the set being dialed below) -- confirm.
        self._active_connectors.update(self._their_direct_hints)
        for hint in self._their_relay_hints:
            self._start_connector(hint, is_relay=True)
    def establish_socket(self):
        """Block until some connection wins negotiation; return its socket."""
        start = time.time()
        self.winning_skt = None
        self.winning_skt_description = None
        self._start_outbound()
        # we sit here until one of our inbound or outbound sockets succeeds
        flag = self.winning.wait(2*TIMEOUT)
        debug("wait returned at %.1f" % (since(start),))
        if not flag:
            # timeout: self.winning_skt will not be set. ish. race.
            pass
        if self.listener:
            self.listener.shutdown() # TODO: waits up to 0.5s. push to thread
        if self.winning_skt:
            return self.winning_skt
        raise TransitError
    def describe(self):
        """Human-readable description of the winning connection."""
        if not self.winning_skt_description:
            return "not yet established"
        return self.winning_skt_description
    def _connector_failed(self, hint):
        """Called by connector threads; fall back to relay when all fail."""
        debug("- failed connector %s" % hint)
        self._active_connectors.remove(hint)
        if not self._active_connectors:
            self._start_relay_connectors()
    def _negotiation_finished(self, skt, description):
        # inbound/outbound sockets call this when they finish negotiation.
        # The first one wins and gets a "go". Any subsequent ones lose and
        # get a "nevermind" before being closed.
        with self._negotiation_check_lock:
            if self.winning_skt:
                is_winner = False
            else:
                is_winner = True
                self.winning_skt = skt
                self.winning_skt_description = description
        if is_winner:
            if self.is_sender:
                send_to(skt, "go\n")
            self.winning.set()
        else:
            if self.is_sender:
                send_to(skt, "nevermind\n")
            skt.close()
    def connect(self):
        """Establish the winning socket and wrap it in a RecordPipe."""
        skt = self.establish_socket()
        return RecordPipe(skt, self._sender_record_key(),
                          self._receiver_record_key())
class TransitSender(Common):
    """Transit endpoint for the sending side (transmits "go"/"nevermind")."""
    is_sender = True
class TransitReceiver(Common):
    """Transit endpoint for the receiving side (waits for sender's "go")."""
    is_sender = False
| mit |
smurfix/gevent-socketio | examples/twitterstream/twitterstream/serve.py | 12 | 2293 | from gevent import monkey; monkey.patch_all()
import gevent
import tweetstream
import getpass
from socketio import socketio_manage
from socketio.server import SocketIOServer
from socketio.namespace import BaseNamespace
def broadcast_msg(server, ns_name, event, *args):
    """Send a socket.io event packet to every connected client.

    NOTE: uses dict.iteritems(), i.e. this module targets Python 2.
    """
    pkt = dict(type="event",
               name=event,
               args=args,
               endpoint=ns_name)
    for sessid, socket in server.sockets.iteritems():
        socket.send_packet(pkt)
def send_tweets(server, user, password):
    """Stream the Twitter sample firehose and broadcast each tweet.

    Runs forever (one greenlet); each tweet is forwarded to all clients
    on the '/tweets' namespace.
    """
    stream = tweetstream.SampleStream(user, password)
    for tweet in stream:
        broadcast_msg(server, '/tweets', 'tweet', tweet)
def get_credentials():
    """Prompt for Twitter credentials; password input is not echoed.

    NOTE: raw_input() is Python 2 only (input() on Python 3).
    """
    user = raw_input("Twitter username: ")
    password = getpass.getpass("Twitter password: ")
    return (user, password)
class Application(object):
    """Minimal WSGI app: serves static assets and dispatches socket.io."""
    def __init__(self):
        self.buffer = []
    def __call__(self, environ, start_response):
        # Default bare "/" to the index page.
        path = environ['PATH_INFO'].strip('/') or 'index.html'
        if path.startswith('static/') or path == 'index.html':
            try:
                data = open(path).read()
            except Exception:
                return not_found(start_response)
            # Pick a content type from the file extension.
            if path.endswith(".js"):
                content_type = "text/javascript"
            elif path.endswith(".css"):
                content_type = "text/css"
            elif path.endswith(".swf"):
                content_type = "application/x-shockwave-flash"
            else:
                content_type = "text/html"
            start_response('200 OK', [('Content-Type', content_type)])
            return [data]
        if path.startswith("socket.io"):
            # NOTE(review): no explicit return here -- socketio_manage
            # drives the response itself, and the implicit None appears
            # to be tolerated by SocketIOServer; confirm.
            socketio_manage(environ, {'/tweets': BaseNamespace})
        else:
            return not_found(start_response)
def not_found(start_response):
    """WSGI helper: emit a 404 status and return a tiny HTML body."""
    body = '<h1>Not Found</h1>'
    start_response('404 Not Found', [])
    return [body]
if __name__ == '__main__':
    # Prompt for Twitter credentials, start the combined socket.io/static
    # server (plus the Flash policy server on 10843), and spawn the tweet
    # firehose greenlet.  Python 2 print-statement syntax below.
    user, password = get_credentials()
    print 'Listening on port http://0.0.0.0:8080 and on port 10843 (flash policy server)'
    server = SocketIOServer(('0.0.0.0', 8080), Application(),
                            resource="socket.io", policy_server=True,
                            policy_listener=('0.0.0.0', 10843))
    gevent.spawn(send_tweets, server, user, password)
    server.serve_forever()
| bsd-3-clause |
mahak/cinder | cinder/volume/drivers/remotefs.py | 2 | 91230 | # Copyright (c) 2012 NetApp, Inc.
# Copyright (c) 2014 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import binascii
import collections
import errno
import functools
import inspect
import json
import math
import os
import re
import shutil
import string
import tempfile
import time
from castellan import key_manager
from oslo_config import cfg
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_utils.secretutils import md5
from oslo_utils import units
import six
from cinder import compute
from cinder import coordination
from cinder import db
from cinder import exception
from cinder.i18n import _
from cinder.image import image_utils
from cinder import objects
from cinder.objects import fields
from cinder import utils
from cinder.volume import configuration
from cinder.volume import driver
from cinder.volume import volume_utils
# Module-level logger for this driver family.
LOG = logging.getLogger(__name__)

# Options describing how to reach and secure the backing NAS system.
nas_opts = [
    cfg.StrOpt('nas_host',
               default='',
               help='IP address or Hostname of NAS system.'),
    cfg.StrOpt('nas_login',
               default='admin',
               help='User name to connect to NAS system.'),
    cfg.StrOpt('nas_password',
               default='',
               help='Password to connect to NAS system.',
               secret=True),
    cfg.PortOpt('nas_ssh_port',
                default=22,
                help='SSH port to use to connect to NAS system.'),
    cfg.StrOpt('nas_private_key',
               default='',
               help='Filename of private key to use for SSH authentication.'),
    cfg.StrOpt('nas_secure_file_operations',
               default='auto',
               help=('Allow network-attached storage systems to operate in a '
                     'secure environment where root level access is not '
                     'permitted. If set to False, access is as the root user '
                     'and insecure. If set to True, access is not as root. '
                     'If set to auto, a check is done to determine if this is '
                     'a new installation: True is used if so, otherwise '
                     'False. Default is auto.')),
    cfg.StrOpt('nas_secure_file_permissions',
               default='auto',
               help=('Set more secure file permissions on network-attached '
                     'storage volume files to restrict broad other/world '
                     'access. If set to False, volumes are created with open '
                     'permissions. If set to True, volumes are created with '
                     'permissions for the cinder user and group (660). If '
                     'set to auto, a check is done to determine if '
                     'this is a new installation: True is used if so, '
                     'otherwise False. Default is auto.')),
    cfg.StrOpt('nas_share_path',
               default='',
               help=('Path to the share to use for storing Cinder volumes. '
                     'For example: "/srv/export1" for an NFS server export '
                     'available at 10.0.5.10:/srv/export1 .')),
    cfg.StrOpt('nas_mount_options',
               help=('Options used to mount the storage backend file system '
                     'where Cinder volumes are stored.')),
]

# Per-volume provisioning behavior (thin vs. fully preallocated).
volume_opts = [
    cfg.StrOpt('nas_volume_prov_type',
               default='thin',
               choices=['thin', 'thick'],
               help=('Provisioning type that will be used when '
                     'creating volumes.')),
]

CONF = cfg.CONF
# Registered under the shared backend-defaults group so every backend
# section inherits these options.
CONF.register_opts(nas_opts, group=configuration.SHARED_CONF_GROUP)
CONF.register_opts(volume_opts, group=configuration.SHARED_CONF_GROUP)
def locked_volume_id_operation(f):
    """Lock decorator for volume operations.

    Takes a named lock prior to executing the operation. The lock is named
    with the id of the volume. This lock can be used by driver methods
    to prevent conflicts with other operations modifying the same volume.

    May be applied to methods that take a 'volume' or 'snapshot' argument.

    :raises exception.VolumeBackendAPIException: if the decorated method
        accepts neither a 'volume' nor a 'snapshot' argument.
    """
    # functools.wraps preserves the wrapped method's __name__/__doc__,
    # which introspection and tracing rely on (previously lost).
    @functools.wraps(f)
    def lvo_inner1(inst, *args, **kwargs):
        lock_tag = inst.driver_prefix
        # Map the actual call arguments onto f's signature so the volume
        # id can be located however the argument was passed.
        call_args = inspect.getcallargs(f, inst, *args, **kwargs)
        if call_args.get('volume'):
            volume_id = call_args['volume'].id
        elif call_args.get('snapshot'):
            volume_id = call_args['snapshot'].volume.id
        else:
            err_msg = _('The decorated method must accept either a volume or '
                        'a snapshot object')
            raise exception.VolumeBackendAPIException(data=err_msg)

        @utils.synchronized('%s-%s' % (lock_tag, volume_id),
                            external=False)
        def lvo_inner2():
            return f(inst, *args, **kwargs)
        return lvo_inner2()
    return lvo_inner1
class BackingFileTemplate(string.Template):
    """Custom Template for substitutions in backing files regex strings

    Changes the default delimiter from '$' to '#' in order to prevent
    clashing with the regex end of line marker '$'.
    """
    # Placeholders look like '#volname'; '$' stays literal for regexes.
    delimiter = '#'
    # Placeholder names: a lowercase letter followed by [_a-z0-9]*.
    idpattern = r'[a-z][_a-z0-9]*'
class RemoteFSDriver(driver.BaseVD):
    """Common base for drivers that work like NFS."""

    # Identifies the backend protocol; subclasses set e.g. 'nfs'.
    driver_volume_type = None
    # Prefix used to look up driver-specific config options.
    driver_prefix = 'remotefs'
    volume_backend_name = None
    vendor_name = 'Open Source'
    # Shares must look like 'address:/export'.
    SHARE_FORMAT_REGEX = r'.+:/.+'

    # We let the drivers inheriting this specify
    # whether thin provisioning is supported or not.
    _thin_provisioning_support = False
    _thick_provisioning_support = False
    def __init__(self, *args, **kwargs):
        super(RemoteFSDriver, self).__init__(*args, **kwargs)
        # Mapping of share address -> mount options, filled by
        # _load_shares_config().
        self.shares = {}
        # Shares that mounted successfully during _ensure_shares_mounted().
        self._mounted_shares = []
        self._execute_as_root = True
        self._is_voldb_empty_at_startup = kwargs.pop('is_vol_db_empty', None)
        self._supports_encryption = False
        # Default on-disk image format for newly created volumes.
        self.format = 'raw'

        if self.configuration:
            self.configuration.append_config_values(nas_opts)
            self.configuration.append_config_values(volume_opts)
    def check_for_setup_error(self):
        """Just to override parent behavior."""
        # Base remotefs drivers have nothing to verify; subclasses
        # override when they need real checks.
        pass
@volume_utils.trace
def initialize_connection(self, volume, connector):
"""Allow connection to connector and return connection info.
:param volume: volume reference
:param connector: connector reference
"""
data = {'export': volume.provider_location,
'name': volume.name}
if volume.provider_location in self.shares:
data['options'] = self.shares[volume.provider_location]
return {
'driver_volume_type': self.driver_volume_type,
'data': data,
'mount_point_base': self._get_mount_point_base()
}
def do_setup(self, context):
"""Any initialization the volume driver does while starting."""
super(RemoteFSDriver, self).do_setup(context)
# Validate the settings for our secure file options.
self.configuration.nas_secure_file_permissions = \
self.configuration.nas_secure_file_permissions.lower()
self.configuration.nas_secure_file_operations = \
self.configuration.nas_secure_file_operations.lower()
valid_secure_opts = ['auto', 'true', 'false']
secure_options = {'nas_secure_file_permissions':
self.configuration.nas_secure_file_permissions,
'nas_secure_file_operations':
self.configuration.nas_secure_file_operations}
LOG.debug('NAS config: %s', secure_options)
for opt_name, opt_value in secure_options.items():
if opt_value not in valid_secure_opts:
err_parms = {'name': opt_name, 'value': opt_value}
msg = _("NAS config '%(name)s=%(value)s' invalid. Must be "
"'auto', 'true', or 'false'") % err_parms
LOG.error(msg)
raise exception.InvalidConfigurationValue(msg)
def _get_provisioned_capacity(self):
"""Returns the provisioned capacity.
Get the sum of sizes of volumes, snapshots and any other
files on the mountpoint.
"""
provisioned_size = 0.0
for share in self.shares.keys():
mount_path = self._get_mount_point_for_share(share)
out, _ = self._execute('du', '--bytes', '-s', mount_path,
run_as_root=self._execute_as_root)
provisioned_size += int(out.split()[0])
return round(provisioned_size / units.Gi, 2)
    def _get_mount_point_base(self):
        """Returns the mount point base for the remote fs.

        This method facilitates returning mount point base
        for the specific remote fs. Override this method
        in the respective driver to return the entry to be
        used while attach/detach using brick in cinder.
        If not overridden then it returns None without
        raising exception to continue working for cases
        when not used with brick.
        """
        LOG.debug("Driver specific implementation needs to return"
                  " mount_point_base.")
        return None
@staticmethod
def _validate_state(current_state,
acceptable_states,
obj_description='volume',
invalid_exc=exception.InvalidVolume):
if current_state not in acceptable_states:
message = _('Invalid %(obj_description)s state. '
'Acceptable states for this operation: '
'%(acceptable_states)s. '
'Current %(obj_description)s state: '
'%(current_state)s.')
raise invalid_exc(
message=message %
dict(obj_description=obj_description,
acceptable_states=acceptable_states,
current_state=current_state))
@volume_utils.trace
def create_volume(self, volume):
"""Creates a volume.
:param volume: volume reference
:returns: provider_location update dict for database
"""
if volume.encryption_key_id and not self._supports_encryption:
message = _("Encryption is not yet supported.")
raise exception.VolumeDriverException(message=message)
LOG.debug('Creating volume %(vol)s', {'vol': volume.id})
self._ensure_shares_mounted()
volume.provider_location = self._find_share(volume)
LOG.info('casted to %s', volume.provider_location)
self._do_create_volume(volume)
return {'provider_location': volume.provider_location}
    def _do_create_volume(self, volume):
        """Create a volume on given remote share.

        :param volume: volume reference
        """
        volume_path = self.local_path(volume)
        volume_size = volume.size

        encrypted = volume.encryption_key_id is not None

        if encrypted:
            encryption = volume_utils.check_encryption_provider(
                self.db,
                volume,
                volume.obj_context)

            self._create_encrypted_volume_file(volume_path,
                                               volume_size,
                                               encryption,
                                               volume.obj_context)
        elif getattr(self.configuration,
                     self.driver_prefix + '_qcow2_volumes', False):
            # QCOW2 volumes are inherently sparse, so this setting
            # will override the _sparsed_volumes setting.
            self._create_qcow2_file(volume_path, volume_size)
            self.format = 'qcow2'
        elif getattr(self.configuration,
                     self.driver_prefix + '_sparsed_volumes', False):
            self._create_sparsed_file(volume_path, volume_size)
        else:
            # Fall back to a fully pre-allocated raw file.
            self._create_regular_file(volume_path, volume_size)

        self._set_rw_permissions(volume_path)
        # Record the on-disk format in admin metadata so later
        # attach/clone code can tell raw from qcow2.
        if not volume.consistencygroup_id and not volume.group_id:
            volume.admin_metadata['format'] = self.format
            # This is done here because when creating a volume from image,
            # while encountering other volume.save() method fails for
            # non-admins
            with volume.obj_as_admin():
                volume.save()
def _ensure_shares_mounted(self):
"""Look for remote shares in the flags and mount them locally."""
mounted_shares = []
self._load_shares_config(getattr(self.configuration,
self.driver_prefix +
'_shares_config'))
for share in self.shares:
try:
self._ensure_share_mounted(share)
mounted_shares.append(share)
except Exception as exc:
LOG.error('Exception during mounting %s', exc)
self._mounted_shares = mounted_shares
LOG.debug('Available shares %s', self._mounted_shares)
@volume_utils.trace
def delete_volume(self, volume):
"""Deletes a logical volume.
:param volume: volume reference
"""
LOG.debug('Deleting volume %(vol)s, provider_location: %(loc)s',
{'vol': volume.id, 'loc': volume.provider_location})
if not volume.provider_location:
LOG.warning('Volume %s does not have '
'provider_location specified, '
'skipping', volume.name)
return
self._ensure_share_mounted(volume.provider_location)
mounted_path = self.local_path(volume)
self._delete(mounted_path)
    def ensure_export(self, ctx, volume):
        """Synchronously recreates an export for a logical volume."""
        # Re-mounting the share is all a file-backed volume needs.
        self._ensure_share_mounted(volume.provider_location)
    def create_export(self, ctx, volume, connector):
        """Exports the volume.

        Can optionally return a dictionary of changes
        to the volume object to be persisted.
        """
        # File-backed volumes need no explicit export step.
        pass
    def remove_export(self, ctx, volume):
        """Removes an export for a logical volume."""
        # Nothing to tear down; see create_export().
        pass
    def delete_snapshot(self, snapshot):
        """Delete snapshot.

        Do nothing for this driver, but allow manager to handle deletion
        of snapshot in error state.
        """
        pass
    def _delete(self, path):
        """Remove ``path`` from the share (missing files are ignored)."""
        # Note(lpetrut): this method is needed in order to provide
        # interoperability with Windows as it will be overridden.
        self._execute('rm', '-f', path, run_as_root=self._execute_as_root)
def _create_sparsed_file(self, path, size):
"""Creates a sparse file of a given size in GiB."""
self._execute('truncate', '-s', '%sG' % size,
path, run_as_root=self._execute_as_root)
def _create_regular_file(self, path, size):
"""Creates a regular file of given size in GiB."""
block_size_mb = 1
block_count = size * units.Gi // (block_size_mb * units.Mi)
self._execute('dd', 'if=/dev/zero', 'of=%s' % path,
'bs=%dM' % block_size_mb,
'count=%d' % block_count,
run_as_root=self._execute_as_root)
def _create_qcow2_file(self, path, size_gb):
"""Creates a QCOW2 file of a given size in GiB."""
self._execute('qemu-img', 'create', '-f', 'qcow2',
'-o', 'preallocation=metadata',
path, str(size_gb * units.Gi),
run_as_root=self._execute_as_root)
    def _create_encrypted_volume_file(self,
                                      path,
                                      size_gb,
                                      encryption,
                                      context):
        """Create an encrypted volume.

        This works by creating an encrypted image locally,
        and then uploading it to the volume.

        :param path: destination path for the volume file
        :param size_gb: volume size in GiB
        :param encryption: dict with 'cipher', 'key_size' and
            'encryption_key_id' entries
        :param context: request context used for the key manager lookup
        """
        cipher_spec = image_utils.decode_cipher(encryption['cipher'],
                                                encryption['key_size'])

        # TODO(enriquetaso): share this code w/ the RBD driver
        # Fetch the key associated with the volume and decode the passphrase
        keymgr = key_manager.API(CONF)
        key = keymgr.get(context, encryption['encryption_key_id'])
        passphrase = binascii.hexlify(key.get_encoded()).decode('utf-8')

        # create a file
        tmp_dir = volume_utils.image_conversion_dir()

        # The passphrase is written to a NamedTemporaryFile so qemu-img
        # can read it as a 'secret' object; it is removed on close.
        with tempfile.NamedTemporaryFile(dir=tmp_dir) as tmp_key:
            # TODO(enriquetaso): encrypt w/ aes256 cipher text
            # (qemu-img feature) ?
            with open(tmp_key.name, 'w') as f:
                f.write(passphrase)

            self._execute(
                'qemu-img', 'create', '-f', 'qcow2',
                '-o',
                'encrypt.format=luks,'
                'encrypt.key-secret=sec1,'
                'encrypt.cipher-alg=%(cipher_alg)s,'
                'encrypt.cipher-mode=%(cipher_mode)s,'
                'encrypt.ivgen-alg=%(ivgen_alg)s' % cipher_spec,
                '--object', 'secret,id=sec1,format=raw,file=' + tmp_key.name,
                path, str(size_gb * units.Gi),
                run_as_root=self._execute_as_root)
def _set_rw_permissions(self, path):
"""Sets access permissions for given NFS path.
Volume file permissions are set based upon the value of
secure_file_permissions: 'true' sets secure access permissions and
'false' sets more open (insecure) access permissions.
:param path: the volume file path.
"""
if self.configuration.nas_secure_file_permissions == 'true':
permissions = '660'
LOG.debug('File path %(path)s is being set with permissions: '
'%(permissions)s',
{'path': path, 'permissions': permissions})
else:
permissions = 'ugo+rw'
LOG.warning('%(path)s is being set with open permissions: '
'%(perm)s', {'path': path, 'perm': permissions})
self._execute('chmod', permissions, path,
run_as_root=self._execute_as_root)
    def _set_rw_permissions_for_all(self, path):
        """Grant read/write access to everyone on the path.

        Note: 'chmod ugo+rw' adds the rw bits (666 for files without
        execute bits) rather than setting the mode outright.
        """
        self._execute('chmod', 'ugo+rw', path,
                      run_as_root=self._execute_as_root)
    def _set_rw_permissions_for_owner(self, path):
        """Sets read-write permissions to the owner for the path."""
        self._execute('chmod', 'u+rw', path,
                      run_as_root=self._execute_as_root)
def local_path(self, volume):
"""Get volume path (mounted locally fs path) for given volume.
:param volume: volume reference
"""
remotefs_share = volume.provider_location
return os.path.join(self._get_mount_point_for_share(remotefs_share),
volume.name)
    def copy_image_to_volume(self, context, volume, image_service, image_id):
        """Fetch the image from image_service and write it to the volume."""
        image_utils.fetch_to_raw(context,
                                 image_service,
                                 image_id,
                                 self.local_path(volume),
                                 self.configuration.volume_dd_blocksize,
                                 size=volume.size,
                                 run_as_root=self._execute_as_root)

        # NOTE (leseb): Set the virtual size of the image
        # the raw conversion overwrote the destination file
        # (which had the correct size)
        # with the fetched glance image size,
        # thus the initial 'size' parameter is not honored
        # this sets the size to the one asked in the first place by the user
        # and then verify the final virtual size
        image_utils.resize_image(self.local_path(volume), volume.size,
                                 run_as_root=self._execute_as_root)

        # Verify the resize took effect; reject the image otherwise.
        data = image_utils.qemu_img_info(self.local_path(volume),
                                         run_as_root=self._execute_as_root)
        virt_size = data.virtual_size // units.Gi
        if virt_size != volume.size:
            raise exception.ImageUnacceptable(
                image_id=image_id,
                reason=(_("Expected volume size was %d") % volume.size)
                + (_(" but size is now %d") % virt_size))
    def copy_volume_to_image(self, context, volume, image_service, image_meta):
        """Copy the volume to the specified image."""
        # Plain delegation; snapshot-aware drivers override this via
        # _copy_volume_to_image in the snap base class.
        volume_utils.upload_volume(context,
                                   image_service,
                                   image_meta,
                                   self.local_path(volume),
                                   volume,
                                   run_as_root=self._execute_as_root)
def _read_config_file(self, config_file):
# Returns list of lines in file
with open(config_file) as f:
return f.readlines()
def _load_shares_config(self, share_file=None):
self.shares = {}
if all((self.configuration.nas_host,
self.configuration.nas_share_path)):
LOG.debug('Using nas_host and nas_share_path configuration.')
nas_host = self.configuration.nas_host
nas_share_path = self.configuration.nas_share_path
share_address = '%s:%s' % (nas_host, nas_share_path)
if not re.match(self.SHARE_FORMAT_REGEX, share_address):
msg = (_("Share %s ignored due to invalid format. Must "
"be of form address:/export. Please check the "
"nas_host and nas_share_path settings."),
share_address)
raise exception.InvalidConfigurationValue(msg)
self.shares[share_address] = self.configuration.nas_mount_options
elif share_file is not None:
LOG.debug('Loading shares from %s.', share_file)
for share in self._read_config_file(share_file):
# A configuration line may be either:
# host:/vol_name
# or
# host:/vol_name -o options=123,rw --other
if not share.strip():
# Skip blank or whitespace-only lines
continue
if share.startswith('#'):
continue
share_info = share.split(' ', 1)
# results in share_info =
# [ 'address:/vol', '-o options=123,rw --other' ]
share_address = share_info[0].strip()
# Replace \040 with a space, to support paths with spaces
share_address = share_address.replace("\\040", " ")
share_opts = None
if len(share_info) > 1:
share_opts = share_info[1].strip()
if not re.match(self.SHARE_FORMAT_REGEX, share_address):
LOG.error("Share %s ignored due to invalid format. "
"Must be of form address:/export.",
share_address)
continue
self.shares[share_address] = share_opts
LOG.debug("shares loaded: %s", self.shares)
    def _get_mount_point_for_share(self, path):
        # Abstract: concrete drivers map a share address to its local
        # mount point.
        raise NotImplementedError()
    def terminate_connection(self, volume, connector, **kwargs):
        """Disallow connection from connector."""
        # Nothing to tear down for file-backed volumes.
        pass
def _update_volume_stats(self):
"""Retrieve stats info from volume group."""
data = {}
backend_name = self.configuration.safe_get('volume_backend_name')
data['volume_backend_name'] = backend_name or self.volume_backend_name
data['vendor_name'] = 'Open Source'
data['driver_version'] = self.get_version()
data['storage_protocol'] = self.driver_volume_type
self._ensure_shares_mounted()
global_capacity = 0
global_free = 0
for share in self._mounted_shares:
capacity, free, used = self._get_capacity_info(share)
global_capacity += capacity
global_free += free
data['total_capacity_gb'] = global_capacity / float(units.Gi)
data['free_capacity_gb'] = global_free / float(units.Gi)
data['reserved_percentage'] = self.configuration.reserved_percentage
data['QoS_support'] = False
self._stats = data
    def _get_capacity_info(self, share):
        # Abstract: concrete drivers report (total, free, used) bytes.
        raise NotImplementedError()
    def _find_share(self, volume):
        # Abstract: concrete drivers pick the share that will hold the
        # new volume.
        raise NotImplementedError()
    def _ensure_share_mounted(self, share):
        # Abstract: concrete drivers mount the given share locally.
        raise NotImplementedError()
def secure_file_operations_enabled(self):
"""Determine if driver is operating in Secure File Operations mode.
The Cinder Volume driver needs to query if this driver is operating
in a secure file mode; check our nas_secure_file_operations flag.
"""
if self.configuration.nas_secure_file_operations == 'true':
return True
return False
    def set_nas_security_options(self, is_new_cinder_install):
        """Determine the setting to use for Secure NAS options.

        This method must be overridden by child wishing to use secure
        NAS file operations. This base method will set the NAS security
        options to false.

        :param is_new_cinder_install: boolean for new Cinder installation
            (unused here; subclasses use it in their auto-detection).
        """
        doc_html = ("https://docs.openstack.org/cinder/latest/admin"
                    "/blockstorage-nfs-backend.html")
        # Fall back to the insecure defaults and warn the operator.
        self.configuration.nas_secure_file_operations = 'false'
        LOG.warning("The NAS file operations will be run as root: "
                    "allowing root level access at the storage backend. "
                    "This is considered an insecure NAS environment. "
                    "Please see %s for information on a secure NAS "
                    "configuration.",
                    doc_html)
        self.configuration.nas_secure_file_permissions = 'false'
        LOG.warning("The NAS file permissions mode will be 666 (allowing "
                    "other/world read & write access). This is considered "
                    "an insecure NAS environment. Please see %s for "
                    "information on a secure NFS configuration.",
                    doc_html)
    def _determine_nas_security_option_setting(self, nas_option, mount_point,
                                               is_new_cinder_install):
        """Determine NAS security option setting when 'auto' is assigned.

        This method determines the final 'true'/'false' setting of an NAS
        security option when the default value of 'auto' has been detected.
        If the nas option isn't 'auto' then its current value is used.

        :param nas_option: The NAS security option value loaded from config.
        :param mount_point: Mount where indicator file is written.
        :param is_new_cinder_install: boolean for new Cinder installation.
        :return string: 'true' or 'false' for new option setting.
        """
        if nas_option == 'auto':
            # For auto detection, we first check to see if we have been
            # through this process before by checking for the existence of
            # the Cinder secure environment indicator file.
            file_name = '.cinderSecureEnvIndicator'
            file_path = os.path.join(mount_point, file_name)
            if os.path.isfile(file_path):
                nas_option = 'true'
                LOG.info('Cinder secure environment '
                         'indicator file exists.')
            else:
                # The indicator file does not exist. If it is a new
                # installation, set to 'true' and create the indicator file.
                if is_new_cinder_install:
                    nas_option = 'true'
                    try:
                        with open(file_path, 'w') as fh:
                            fh.write('Detector file for Cinder secure '
                                     'environment usage.\n')
                            fh.write('Do not delete this file.\n')

                        # Set the permissions on our special marker file to
                        # protect from accidental removal (owner write only).
                        self._execute('chmod', '640', file_path,
                                      run_as_root=self._execute_as_root)
                        LOG.info('New Cinder secure environment indicator'
                                 ' file created at path %s.', file_path)
                    except IOError as err:
                        LOG.error('Failed to created Cinder secure '
                                  'environment indicator file: %s',
                                  err)
                        # Only permission errors demote to non-secure mode;
                        # other IO errors leave the option at 'true'.
                        if err.errno == errno.EACCES:
                            LOG.warning('Reverting to non-secure mode. Adjust '
                                        'permissions at %s to allow the '
                                        'cinder volume service write access '
                                        'to use secure mode.',
                                        mount_point)
                            nas_option = 'false'
                else:
                    # For existing installs, we default to 'false'. The
                    # admin can always set the option at the driver config.
                    nas_option = 'false'

        return nas_option
class RemoteFSSnapDriverBase(RemoteFSDriver):
    """Base class for remotefs drivers implementing qcow2 snapshots.

    Driver must implement:
      _local_volume_dir(self, volume)
    """

    # Image filename extensions accepted in backing-file chains.
    _VALID_IMAGE_EXTENSIONS = []
    # The following flag may be overridden by the concrete drivers in order
    # to avoid using temporary volume snapshots when creating volume clones,
    # when possible.

    _always_use_temp_snap_when_cloning = True
    def __init__(self, *args, **kwargs):
        # Set by concrete drivers (remotefs brick client / mount base).
        self._remotefsclient = None
        self.base = None
        # Nova client, created in do_setup().
        self._nova = None
        super(RemoteFSSnapDriverBase, self).__init__(*args, **kwargs)
    def do_setup(self, context):
        super(RemoteFSSnapDriverBase, self).do_setup(context)
        # Needed for online (attached) snapshot operations via Nova.
        self._nova = compute.API()
def snapshot_revert_use_temp_snapshot(self):
# Considering that RemoteFS based drivers use COW images
# for storing snapshots, having chains of such images,
# creating a backup snapshot when reverting one is not
# actutally helpful.
return False
def _local_volume_dir(self, volume):
share = volume.provider_location
local_dir = self._get_mount_point_for_share(share)
return local_dir
def _local_path_volume(self, volume):
path_to_disk = os.path.join(
self._local_volume_dir(volume),
volume.name)
return path_to_disk
def _get_new_snap_path(self, snapshot):
vol_path = self.local_path(snapshot.volume)
snap_path = '%s.%s' % (vol_path, snapshot.id)
return snap_path
def _local_path_volume_info(self, volume):
return '%s%s' % (self.local_path(volume), '.info')
def _read_file(self, filename):
"""This method is to make it easier to stub out code for testing.
Returns a string representing the contents of the file.
"""
with open(filename, 'r') as f:
return f.read()
def _write_info_file(self, info_path, snap_info):
if 'active' not in snap_info.keys():
msg = _("'active' must be present when writing snap_info.")
raise exception.RemoteFSException(msg)
if not (os.path.exists(info_path) or os.name == 'nt'):
# We're not managing file permissions on Windows.
# Plus, 'truncate' is not available.
self._execute('truncate', "-s0", info_path,
run_as_root=self._execute_as_root)
self._set_rw_permissions(info_path)
with open(info_path, 'w') as f:
json.dump(snap_info, f, indent=1, sort_keys=True)
    def _qemu_img_info_base(self, path, volume_name, basedir,
                            ext_bf_template=None,
                            force_share=False,
                            run_as_root=False):
        """Sanitize image_utils' qemu_img_info.

        This code expects to deal only with relative filenames.

        :param path: Path to the image file whose info is fetched
        :param volume_name: Name of the volume
        :param basedir: Path to backing files directory
        :param ext_bf_template: Alt. string.Template for allowed backing files
        :type object: BackingFileTemplate
        :param force_share: Wether to force fetching img info for images in use
        :param run_as_root: Wether to run with privileged permissions or not
        """
        run_as_root = run_as_root or self._execute_as_root
        info = image_utils.qemu_img_info(path,
                                         force_share=force_share,
                                         run_as_root=run_as_root)
        if info.image:
            info.image = os.path.basename(info.image)
        if info.backing_file:
            if self._VALID_IMAGE_EXTENSIONS:
                valid_ext = r'(\.(%s))?' % '|'.join(
                    self._VALID_IMAGE_EXTENSIONS)
            else:
                valid_ext = ''

            if ext_bf_template:
                # Driver supplied its own allowed-backing-file pattern
                # (a BackingFileTemplate with '#' placeholders).
                backing_file_template = ext_bf_template.substitute(
                    basedir=basedir, volname=volume_name, valid_ext=valid_ext
                )
                LOG.debug("Fetching qemu-img info with special "
                          "backing_file_template: %(bft)s", {
                              "bft": backing_file_template
                          })
            else:
                # Default pattern: optional share-hash directory, the
                # volume name, an optional (tmp-)snapshot-id suffix and
                # an optional image extension.
                backing_file_template = \
                    "(%(basedir)s/[0-9a-f]+/)?%" \
                    "(volname)s(.(tmp-snap-)?[0-9a-f-]+)?%(valid_ext)s$" % {
                        'basedir': basedir,
                        'volname': volume_name,
                        'valid_ext': valid_ext,
                    }
            # A backing file that does not match the expected naming
            # pattern is rejected outright.
            if not re.match(backing_file_template, info.backing_file,
                            re.IGNORECASE):
                raise exception.RemoteFSInvalidBackingFile(
                    path=path, backing_file=info.backing_file)

            info.backing_file = os.path.basename(info.backing_file)

        return info
    def _qemu_img_info(self, path, volume_name):
        # Abstract: concrete drivers call _qemu_img_info_base with their
        # own basedir/template.
        raise NotImplementedError()
    def _img_commit(self, path, passphrase_file=None, backing_file=None):
        """Commit the image at ``path`` into its backing file, then delete it.

        :param path: image whose data is merged down the chain
        :param passphrase_file: LUKS secret file; when given, the commit
            goes through --image-opts so both images can be decrypted
        :param backing_file: backing file name (needed only in the
            encrypted case, where it must be spelled out in the options)
        """
        # TODO(eharney): this is not using the correct permissions for
        # NFS snapshots
        #  It needs to run as root for volumes attached to instances, but
        #  does not when in secure mode.
        cmd = ['qemu-img', 'commit']

        if passphrase_file:
            obj = ['--object',
                   'secret,id=s0,format=raw,file=%s' % passphrase_file]

            image_opts = ['--image-opts']

            src_opts = \
                "file.filename=%(filename)s,encrypt.format=luks," \
                "encrypt.key-secret=s0,backing.file.filename=%(backing)s," \
                "backing.encrypt.key-secret=s0" % {
                    'filename': path,
                    'backing': backing_file,
                }

            # '-d' drops the committed image instead of emptying it.
            path_no_to_delete = ['-d', src_opts]

            cmd += obj + image_opts + path_no_to_delete
        else:
            cmd += ['-d', path]

        self._execute(*cmd, run_as_root=self._execute_as_root)
        self._delete(path)
    def _rebase_img(self, image, backing_file, volume_format,
                    passphrase_file=None):
        """Repoint ``image``'s backing file (unsafe rebase, '-u').

        :param image: image file to rebase
        :param backing_file: new backing file name
        :param volume_format: format of the backing file (e.g. 'qcow2')
        :param passphrase_file: LUKS secret file for encrypted images
        """
        # qemu-img create must run as root, because it reads from the
        # backing file, which will be owned by qemu:qemu if attached to an
        # instance.
        # TODO(erlon): Sanity check this.
        command = ['qemu-img', 'rebase', '-u']
        # if encrypted
        if passphrase_file:
            objectdef = "secret,id=s0,file=%s" % passphrase_file
            filename = "encrypt.key-secret=s0,"\
                       "file.filename=%(filename)s" % {'filename': image}
            command += ['--object', objectdef, '-b', backing_file,
                        '-F', volume_format, '--image-opts', filename]
        # not encrypted
        else:
            command += ['-b', backing_file, image, '-F', volume_format]

        self._execute(*command, run_as_root=self._execute_as_root)
def _read_info_file(self, info_path, empty_if_missing=False):
"""Return dict of snapshot information.
:param info_path: path to file
:param empty_if_missing: True=return empty dict if no file
"""
if not os.path.exists(info_path):
if empty_if_missing is True:
return {}
return json.loads(self._read_file(info_path))
    def _get_higher_image_path(self, snapshot):
        """Return the image directly above ``snapshot`` in the backing
        chain, or None when the snapshot's file is the active (topmost)
        image.
        """
        volume = snapshot.volume
        info_path = self._local_path_volume_info(volume)
        snap_info = self._read_info_file(info_path)

        snapshot_file = snap_info[snapshot.id]
        active_file = self.get_active_image_from_info(volume)
        active_file_path = os.path.join(self._local_volume_dir(volume),
                                        active_file)
        backing_chain = self._get_backing_chain_for_path(
            volume, active_file_path)
        # The 'higher' file is the chain entry whose backing file is
        # this snapshot's image.
        higher_file = next((os.path.basename(f['filename'])
                            for f in backing_chain
                            if utils.paths_normcase_equal(
                                f.get('backing-filename', ''),
                                snapshot_file)),
                           None)
        return higher_file
def _get_backing_chain_for_path(self, volume, path):
"""Returns list of dicts containing backing-chain information.
Includes 'filename', and 'backing-filename' for each
applicable entry.
Consider converting this to use --backing-chain and --output=json
when environment supports qemu-img 1.5.0.
:param volume: volume reference
:param path: path to image file at top of chain
"""
output = []
info = self._qemu_img_info(path, volume.name)
new_info = {}
new_info['filename'] = os.path.basename(path)
new_info['backing-filename'] = info.backing_file
output.append(new_info)
while new_info['backing-filename']:
filename = new_info['backing-filename']
path = os.path.join(self._local_volume_dir(volume), filename)
info = self._qemu_img_info(path, volume.name)
backing_filename = info.backing_file
new_info = {}
new_info['filename'] = filename
new_info['backing-filename'] = backing_filename
output.append(new_info)
return output
def _get_hash_str(self, base_str):
"""Return a string that represents hash of base_str.
Returns string in a hex format.
"""
if isinstance(base_str, six.text_type):
base_str = base_str.encode('utf-8')
return md5(base_str, usedforsecurity=False).hexdigest()
    def _get_mount_point_for_share(self, share):
        """Return mount point for share.

        :param share: example 172.18.194.100:/var/fs
        """
        # Delegates to the brick remotefs client set up by the driver.
        return self._remotefsclient.get_mount_point(share)
def _get_available_capacity(self, share):
"""Calculate available space on the share.
:param share: example 172.18.194.100:/var/fs
"""
mount_point = self._get_mount_point_for_share(share)
out, _ = self._execute('df', '--portability', '--block-size', '1',
mount_point,
run_as_root=self._execute_as_root)
out = out.splitlines()[1]
size = int(out.split()[1])
available = int(out.split()[3])
return available, size
def _get_capacity_info(self, remotefs_share):
available, size = self._get_available_capacity(remotefs_share)
return size, available, size - available
    def _get_mount_point_base(self):
        # Base directory under which this driver mounts its shares.
        return self.base
    def _copy_volume_to_image(self, context, volume, image_service,
                              image_meta, store_id=None):
        """Copy the volume to the specified image.

        :param store_id: accepted for interface compatibility; unused here.
        """
        # If snapshots exist, flatten to a temporary image, and upload it

        active_file = self.get_active_image_from_info(volume)
        active_file_path = os.path.join(self._local_volume_dir(volume),
                                        active_file)
        info = self._qemu_img_info(active_file_path, volume.name)
        backing_file = info.backing_file

        root_file_fmt = info.file_format

        tmp_params = {
            'prefix': '%s.temp_image.%s' % (volume.id, image_meta['id']),
            'suffix': '.img'
        }
        with image_utils.temporary_file(**tmp_params) as temp_path:
            if backing_file or (root_file_fmt != 'raw'):
                # Convert due to snapshots
                # or volume data not being stored in raw format
                #  (upload_volume assumes raw format input)
                image_utils.convert_image(active_file_path, temp_path, 'raw',
                                          run_as_root=self._execute_as_root)
                upload_path = temp_path
            else:
                upload_path = active_file_path

            volume_utils.upload_volume(context,
                                       image_service,
                                       image_meta,
                                       upload_path,
                                       volume,
                                       run_as_root=self._execute_as_root)
def get_active_image_from_info(self, volume):
"""Returns filename of the active image from the info file."""
info_file = self._local_path_volume_info(volume)
snap_info = self._read_info_file(info_file, empty_if_missing=True)
if not snap_info:
# No info file = no snapshots exist
vol_path = os.path.basename(self.local_path(volume))
return vol_path
return snap_info['active']
def _local_path_active_image(self, volume):
active_fname = self.get_active_image_from_info(volume)
vol_dir = self._local_volume_dir(volume)
active_fpath = os.path.join(vol_dir, active_fname)
return active_fpath
def _get_snapshot_backing_file(self, snapshot):
info_path = self._local_path_volume_info(snapshot.volume)
snap_info = self._read_info_file(info_path)
vol_dir = self._local_volume_dir(snapshot.volume)
forward_file = snap_info[snapshot.id]
forward_path = os.path.join(vol_dir, forward_file)
# Find the file which backs this file, which represents the point
# in which this snapshot was created.
img_info = self._qemu_img_info(forward_path)
return img_info.backing_file
def _snapshots_exist(self, volume):
if not volume.provider_location:
return False
active_fpath = self._local_path_active_image(volume)
base_vol_path = self.local_path(volume)
return not utils.paths_normcase_equal(active_fpath, base_vol_path)
    def _is_volume_attached(self, volume):
        """Return True if the volume is currently attached."""
        return volume.attach_status == fields.VolumeAttachStatus.ATTACHED
    def _create_cloned_volume(self, volume, src_vref, context):
        """Clone ``src_vref`` into ``volume``.

        If the source has snapshots (or the driver is configured to always
        clone through a snapshot), a temporary snapshot is created and the
        new volume is built from it; otherwise the source image file is
        copied directly and extended to the requested size.

        :param volume: destination volume object
        :param src_vref: source volume object
        :param context: request context
        :returns: model update dict containing the provider location
        """
        LOG.info('Cloning volume %(src)s to volume %(dst)s',
                 {'src': src_vref.id,
                  'dst': volume.id})
        acceptable_states = ['available', 'backing-up', 'downloading']
        self._validate_state(src_vref.status,
                             acceptable_states,
                             obj_description='source volume')
        volume_name = CONF.volume_name_template % volume.id
        # Create fake volume and snapshot objects
        vol_attrs = ['provider_location', 'size', 'id', 'name', 'status',
                     'volume_type', 'metadata', 'obj_context']
        Volume = collections.namedtuple('Volume', vol_attrs)
        volume_info = Volume(provider_location=src_vref.provider_location,
                             size=src_vref.size,
                             id=volume.id,
                             name=volume_name,
                             status=src_vref.status,
                             volume_type=src_vref.volume_type,
                             metadata=src_vref.metadata,
                             obj_context=volume.obj_context)
        if (self._always_use_temp_snap_when_cloning or
                self._snapshots_exist(src_vref)):
            kwargs = {
                'volume_id': src_vref.id,
                'user_id': context.user_id,
                'project_id': context.project_id,
                'status': fields.SnapshotStatus.CREATING,
                'progress': '0%',
                'volume_size': src_vref.size,
                'display_name': 'tmp-snap-%s' % volume.id,
                'display_description': None,
                'volume_type_id': src_vref.volume_type_id,
                'encryption_key_id': src_vref.encryption_key_id,
            }
            temp_snapshot = objects.Snapshot(context=context,
                                             **kwargs)
            temp_snapshot.create()
            self._create_snapshot(temp_snapshot)
            try:
                self._copy_volume_from_snapshot(
                    temp_snapshot,
                    volume_info,
                    volume.size,
                    src_encryption_key_id=src_vref.encryption_key_id,
                    new_encryption_key_id=volume.encryption_key_id)
                # remove temp snapshot after the cloning is done
                temp_snapshot.status = fields.SnapshotStatus.DELETING
                temp_snapshot.context = context.elevated()
                temp_snapshot.save()
            finally:
                # The temporary snapshot is always cleaned up, even when
                # the copy above fails.
                self._delete_snapshot(temp_snapshot)
                temp_snapshot.destroy()
        else:
            # No snapshots: a plain file copy of the base image suffices.
            self._copy_volume_image(self.local_path(src_vref),
                                    self.local_path(volume_info))
            self._extend_volume(volume_info, volume.size)
        if src_vref.admin_metadata and 'format' in src_vref.admin_metadata:
            volume.admin_metadata['format'] = (
                src_vref.admin_metadata['format'])
            # NOTE(review): saved under an admin context; presumably a plain
            # volume.save() fails when cloning from a bootable volume --
            # original comment was garbled, confirm intent.
            with volume.obj_as_admin():
                volume.save()
        return {'provider_location': src_vref.provider_location}
    def _copy_volume_image(self, src_path, dest_path):
        """Copy a volume image file and make the copy read/write."""
        shutil.copyfile(src_path, dest_path)
        self._set_rw_permissions(dest_path)
def _delete_stale_snapshot(self, snapshot):
info_path = self._local_path_volume_info(snapshot.volume)
snap_info = self._read_info_file(info_path)
snapshot_file = snap_info[snapshot.id]
active_file = self.get_active_image_from_info(snapshot.volume)
snapshot_path = os.path.join(
self._local_volume_dir(snapshot.volume), snapshot_file)
if utils.paths_normcase_equal(snapshot_file, active_file):
return
LOG.info('Deleting stale snapshot: %s', snapshot.id)
self._delete(snapshot_path)
del(snap_info[snapshot.id])
self._write_info_file(info_path, snap_info)
    def _delete_snapshot(self, snapshot):
        """Delete a snapshot.

        If volume status is 'available', delete snapshot here in Cinder
        using qemu-img.

        If volume status is 'in-use', calculate what qcow2 files need to
        merge, and call to Nova to perform this operation.

        :raises: InvalidVolume if status not acceptable
        :raises: RemoteFSException(msg) if operation fails
        :returns: None
        """
        LOG.debug('Deleting %(type)s snapshot %(snap)s of volume %(vol)s',
                  {'snap': snapshot.id, 'vol': snapshot.volume.id,
                   'type': ('online'
                            if self._is_volume_attached(snapshot.volume)
                            else 'offline')})
        volume_status = snapshot.volume.status
        acceptable_states = ['available', 'in-use', 'backing-up', 'deleting',
                             'downloading']
        self._validate_state(volume_status, acceptable_states)
        vol_path = self._local_volume_dir(snapshot.volume)
        volume_path = os.path.join(vol_path, snapshot.volume.name)
        # Determine the true snapshot file for this snapshot
        # based on the .info file
        info_path = self._local_path_volume_info(snapshot.volume)
        snap_info = self._read_info_file(info_path, empty_if_missing=True)
        if snapshot.id not in snap_info:
            # If snapshot info file is present, but snapshot record does not
            # exist, do not attempt to delete.
            # (This happens, for example, if snapshot_create failed due to lack
            # of permission to write to the share.)
            LOG.info('Snapshot record for %s is not present, allowing '
                     'snapshot_delete to proceed.', snapshot.id)
            return
        snapshot_file = snap_info[snapshot.id]
        LOG.debug('snapshot_file for this snap is: %s', snapshot_file)
        snapshot_path = os.path.join(
            self._local_volume_dir(snapshot.volume),
            snapshot_file)
        snapshot_path_img_info = self._qemu_img_info(
            snapshot_path,
            snapshot.volume.name)
        base_file = snapshot_path_img_info.backing_file
        if base_file is None:
            # There should always be at least the original volume
            # file as base.
            LOG.warning('No backing file found for %s, allowing '
                        'snapshot to be deleted.', snapshot_path)
            # Snapshot may be stale, so just delete it and update the
            # info file instead of blocking
            return self._delete_stale_snapshot(snapshot)
        base_path = os.path.join(vol_path, base_file)
        base_file_img_info = self._qemu_img_info(base_path,
                                                 snapshot.volume.name)
        # Find what file has this as its backing file
        active_file = self.get_active_image_from_info(snapshot.volume)
        if self._is_volume_attached(snapshot.volume):
            # Online delete: Nova has to perform the merge because the
            # instance holds the image chain open.
            context = snapshot._context
            new_base_file = base_file_img_info.backing_file
            base_id = None
            for key, value in snap_info.items():
                if utils.paths_normcase_equal(value,
                                              base_file) and key != 'active':
                    base_id = key
                    break
            if base_id is None:
                # This means we are deleting the oldest snapshot
                LOG.debug('No %(base_id)s found for %(file)s',
                          {'base_id': 'base_id', 'file': snapshot_file})
            online_delete_info = {
                'active_file': active_file,
                'snapshot_file': snapshot_file,
                'base_file': base_file,
                'base_id': base_id,
                'new_base_file': new_base_file
            }
            return self._delete_snapshot_online(context,
                                                snapshot,
                                                online_delete_info)
        encrypted = snapshot.encryption_key_id is not None
        if encrypted:
            # Fetch the volume's passphrase; it must be supplied to
            # qemu-img for every operation on a LUKS-encrypted chain.
            keymgr = key_manager.API(CONF)
            encryption_key = snapshot.encryption_key_id
            new_key = keymgr.get(snapshot.obj_context, encryption_key)
            src_passphrase = \
                binascii.hexlify(new_key.get_encoded()).decode('utf-8')
            tmp_dir = volume_utils.image_conversion_dir()
        if utils.paths_normcase_equal(snapshot_file, active_file):
            # There is no top file
            #      T0       |        T1         |
            #     base      |   snapshot_file   | None
            # (guaranteed to|  (being deleted,  |
            #    exist)     |  committed down)  |
            if encrypted:
                with tempfile.NamedTemporaryFile(prefix='luks_',
                                                 dir=tmp_dir) as src_file:
                    with open(src_file.name, 'w') as f:
                        f.write(src_passphrase)
                    self._img_commit(snapshot_path,
                                     passphrase_file=src_file.name,
                                     backing_file=volume_path)
            else:
                self._img_commit(snapshot_path)
            # Active file has changed
            snap_info['active'] = base_file
        else:
            #      T0        |      T1        |     T2         |      T3
            #     base       | snapshot_file  |  higher_file   | highest_file
            # (guaranteed to | (being deleted,| (guaranteed to |  (may exist)
            #   exist, not   | committed down)|  exist, needs  |
            #   used here)   |                |   ptr update)  |
            # This file is guaranteed to exist since we aren't operating on
            # the active file.
            higher_file = self._get_higher_image_path(snapshot)
            if higher_file is None:
                msg = _('No file found with %s as backing file.') %\
                    snapshot_file
                raise exception.RemoteFSException(msg)
            higher_id = next((i for i in snap_info
                              if utils.paths_normcase_equal(snap_info[i],
                                                            higher_file)
                              and i != 'active'),
                             None)
            if higher_id is None:
                msg = _('No snap found with %s as backing file.') %\
                    higher_file
                raise exception.RemoteFSException(msg)
            if encrypted:
                with tempfile.NamedTemporaryFile(prefix='luks_',
                                                 dir=tmp_dir) as src_file:
                    with open(src_file.name, 'w') as f:
                        f.write(src_passphrase)
                    self._img_commit(snapshot_path,
                                     passphrase_file=src_file.name,
                                     backing_file=volume_path)
                    higher_file_path = os.path.join(vol_path, higher_file)
                    base_file_fmt = base_file_img_info.file_format
                    self._rebase_img(higher_file_path, volume_path,
                                     base_file_fmt, src_file.name)
            else:
                self._img_commit(snapshot_path)
                higher_file_path = os.path.join(vol_path, higher_file)
                base_file_fmt = base_file_img_info.file_format
                self._rebase_img(higher_file_path, base_file, base_file_fmt)
        # Remove snapshot_file from info
        del(snap_info[snapshot.id])
        self._write_info_file(info_path, snap_info)
def _create_volume_from_snapshot(self, volume, snapshot):
"""Creates a volume from a snapshot.
Snapshot must not be the active snapshot. (offline)
"""
LOG.debug('Creating volume %(vol)s from snapshot %(snap)s',
{'vol': volume.id, 'snap': snapshot.id})
status = snapshot.status
acceptable_states = ['available', 'backing-up']
self._validate_state(status, acceptable_states,
obj_description='snapshot',
invalid_exc=exception.InvalidSnapshot)
self._ensure_shares_mounted()
volume.provider_location = self._find_share(volume)
self._do_create_volume(volume)
self._copy_volume_from_snapshot(snapshot,
volume,
volume.size,
snapshot.volume.encryption_key_id,
volume.encryption_key_id)
return {'provider_location': volume.provider_location}
    def _copy_volume_from_snapshot(self, snapshot, volume, volume_size,
                                   src_encryption_key_id=None,
                                   new_encryption_key_id=None):
        """Copy snapshot data into a volume. Must be provided by subclasses.

        :raises NotImplementedError: always, in this base class
        """
        raise NotImplementedError()
def _do_create_snapshot(self, snapshot, backing_filename,
new_snap_path):
"""Create a QCOW2 file backed by another file.
:param snapshot: snapshot reference
:param backing_filename: filename of file that will back the
new qcow2 file
:param new_snap_path: filename of new qcow2 file
"""
backing_path_full_path = os.path.join(
self._local_volume_dir(snapshot.volume),
backing_filename)
volume_path = os.path.join(
self._local_volume_dir(snapshot.volume),
snapshot.volume.name)
info = self._qemu_img_info(backing_path_full_path,
snapshot.volume.name)
backing_fmt = info.file_format
obj_context = snapshot.volume.obj_context
# create new qcow2 file
if snapshot.volume.encryption_key_id is None:
command = ['qemu-img', 'create', '-f', 'qcow2', '-o',
'backing_file=%s,backing_fmt=%s' %
(backing_path_full_path, backing_fmt),
new_snap_path,
"%dG" % snapshot.volume.size]
self._execute(*command, run_as_root=self._execute_as_root)
command = ['qemu-img', 'rebase', '-u',
'-b', backing_filename,
'-F', backing_fmt,
new_snap_path]
# qemu-img rebase must run as root for the same reasons as above
self._execute(*command, run_as_root=self._execute_as_root)
else:
# encrypted
keymgr = key_manager.API(CONF)
# Get key for the source volume using the context of this request.
key = keymgr.get(obj_context,
snapshot.volume.encryption_key_id)
passphrase = binascii.hexlify(key.get_encoded()).decode('utf-8')
tmp_dir = volume_utils.image_conversion_dir()
with tempfile.NamedTemporaryFile(dir=tmp_dir) as tmp_key:
with open(tmp_key.name, 'w') as f:
f.write(passphrase)
file_json_dict = {"driver": "qcow2",
"encrypt.key-secret": "s0",
"backing.encrypt.key-secret": "s0",
"backing.file.filename": volume_path,
"file": {"driver": "file",
"filename": backing_path_full_path,
}}
file_json = jsonutils.dumps(file_json_dict)
encryption = volume_utils.check_encryption_provider(
db=db,
volume=snapshot.volume,
context=obj_context)
cipher_spec = image_utils.decode_cipher(encryption['cipher'],
encryption['key_size'])
command = ('qemu-img', 'create', '-f' 'qcow2',
'-o', 'encrypt.format=luks,encrypt.key-secret=s1,'
'encrypt.cipher-alg=%(cipher_alg)s,'
'encrypt.cipher-mode=%(cipher_mode)s,'
'encrypt.ivgen-alg=%(ivgen_alg)s' % cipher_spec,
'-b', 'json:' + file_json,
'--object', 'secret,id=s0,file=' + tmp_key.name,
'--object', 'secret,id=s1,file=' + tmp_key.name,
new_snap_path)
self._execute(*command, run_as_root=self._execute_as_root)
command_path = 'encrypt.key-secret=s0,file.filename='
command = ['qemu-img', 'rebase',
'--object', 'secret,id=s0,file=' + tmp_key.name,
'--image-opts',
command_path + new_snap_path,
'-u',
'-b', backing_filename,
'-F', backing_fmt]
# qemu-img rebase must run as root for the same reasons as
# above
self._execute(*command, run_as_root=self._execute_as_root)
self._set_rw_permissions(new_snap_path)
# if in secure mode, chown new file
if self.secure_file_operations_enabled():
ref_file = backing_path_full_path
log_msg = 'Setting permissions: %(file)s -> %(user)s:%(group)s' % {
'file': ref_file, 'user': os.stat(ref_file).st_uid,
'group': os.stat(ref_file).st_gid}
LOG.debug(log_msg)
command = ['chown',
'--reference=%s' % ref_file,
new_snap_path]
self._execute(*command, run_as_root=self._execute_as_root)
    def _create_snapshot(self, snapshot):
        """Create a snapshot.

        If volume is attached, call to Nova to create snapshot, providing a
        qcow2 file. Cinder creates and deletes qcow2 files, but Nova is
        responsible for transitioning the VM between them and handling live
        transfers of data between files as required.

        If volume is detached, create locally with qemu-img. Cinder handles
        manipulation of qcow2 files.

        A file named volume-<uuid>.info is stored with the volume
        data and is a JSON table which contains a mapping between
        Cinder snapshot UUIDs and filenames, as these associations
        will change as snapshots are deleted.

        Basic snapshot operation:

        1. Initial volume file:
            volume-1234

        2. Snapshot created:
            volume-1234  <-  volume-1234.aaaa

            volume-1234.aaaa becomes the new "active" disk image.
            If the volume is not attached, this filename will be used to
            attach the volume to a VM at volume-attach time.
            If the volume is attached, the VM will switch to this file as
            part of the snapshot process.

            Note that volume-1234.aaaa represents changes after snapshot
            'aaaa' was created.  So the data for snapshot 'aaaa' is actually
            in the backing file(s) of volume-1234.aaaa.

            This file has a qcow2 header recording the fact that volume-1234
            is its backing file.  Delta changes since the snapshot was
            created are stored in this file, and the backing file
            (volume-1234) does not change.

            info file: { 'active': 'volume-1234.aaaa',
                         'aaaa':   'volume-1234.aaaa' }

        3. Second snapshot created:
            volume-1234 <- volume-1234.aaaa <- volume-1234.bbbb

            volume-1234.bbbb now becomes the "active" disk image, recording
            changes made to the volume.

            info file: { 'active': 'volume-1234.bbbb',  (* changed!)
                         'aaaa':   'volume-1234.aaaa',
                         'bbbb':   'volume-1234.bbbb' } (* added!)

        4. Snapshot deletion when volume is attached ('in-use' state):

            * When first snapshot is deleted, Cinder calls Nova for online
              snapshot deletion. Nova deletes snapshot with id "aaaa" and
              makes snapshot with id "bbbb" point to the base image.
              Snapshot with id "bbbb" is the active image.

              volume-1234 <- volume-1234.bbbb

              info file: { 'active': 'volume-1234.bbbb',
                           'bbbb':   'volume-1234.bbbb'
                         }

            * When second snapshot is deleted, Cinder calls Nova for online
              snapshot deletion. Nova deletes snapshot with id "bbbb" by
              pulling volume-1234's data into volume-1234.bbbb. This
              (logically) removes snapshot with id "bbbb" and the active
              file remains the same.

              volume-1234.bbbb

              info file: { 'active': 'volume-1234.bbbb' }

           TODO (deepakcs): Change this once Nova supports blockCommit for
                            in-use volumes.

        5. Snapshot deletion when volume is detached ('available' state):

            * When first snapshot is deleted, Cinder does the snapshot
              deletion. volume-1234.aaaa is removed from the snapshot chain.
              The data from it is merged into its parent.

              volume-1234.bbbb is rebased, having volume-1234 as its new
              parent.

              volume-1234 <- volume-1234.bbbb

              info file: { 'active': 'volume-1234.bbbb',
                           'bbbb':   'volume-1234.bbbb'
                         }

            * When second snapshot is deleted, Cinder does the snapshot
              deletion. volume-1234.aaaa is removed from the snapshot chain.
              The base image, volume-1234 becomes the active image for this
              volume again.

              volume-1234

              info file: { 'active': 'volume-1234' }  (* changed!)
        """
        LOG.debug('Creating %(type)s snapshot %(snap)s of volume %(vol)s',
                  {'snap': snapshot.id, 'vol': snapshot.volume.id,
                   'type': ('online'
                            if self._is_volume_attached(snapshot.volume)
                            else 'offline')})
        status = snapshot.volume.status
        acceptable_states = ['available', 'in-use', 'backing-up']
        if (snapshot.display_name and
                snapshot.display_name.startswith('tmp-snap-')):
            # This is an internal volume snapshot. In order to support
            # image caching, we'll allow creating/deleting such snapshots
            # while having volumes in 'downloading' state.
            acceptable_states.append('downloading')
        self._validate_state(status, acceptable_states)
        info_path = self._local_path_volume_info(snapshot.volume)
        snap_info = self._read_info_file(info_path, empty_if_missing=True)
        backing_filename = self.get_active_image_from_info(
            snapshot.volume)
        new_snap_path = self._get_new_snap_path(snapshot)
        if self._is_volume_attached(snapshot.volume):
            self._create_snapshot_online(snapshot,
                                         backing_filename,
                                         new_snap_path)
        else:
            self._do_create_snapshot(snapshot,
                                     backing_filename,
                                     new_snap_path)
        # Record the new file both as the snapshot's file and as the
        # chain's active image.
        snap_info['active'] = os.path.basename(new_snap_path)
        snap_info[snapshot.id] = os.path.basename(new_snap_path)
        self._write_info_file(info_path, snap_info)
    def _create_snapshot_online(self, snapshot, backing_filename,
                                new_snap_path):
        """Create a snapshot of an attached volume with Nova's assistance.

        The new qcow2 overlay is created locally, then Nova is asked to
        switch the running instance onto it. This method then polls the
        snapshot's DB record until Nova reports completion (progress '90%'
        while status is 'creating'), an error status appears, or the
        timeout is hit.

        :raises RemoteFSException: on Nova error or timeout
        :raises RemoteFSConcurrentRequest: if the snapshot is concurrently
            requested to be deleted
        """
        # Perform online snapshot via Nova
        self._do_create_snapshot(snapshot,
                                 backing_filename,
                                 new_snap_path)
        connection_info = {
            'type': 'qcow2',
            'new_file': os.path.basename(new_snap_path),
            'snapshot_id': snapshot.id
        }
        try:
            result = self._nova.create_volume_snapshot(
                snapshot.obj_context,
                snapshot.volume_id,
                connection_info)
            LOG.debug('nova call result: %s', result)
        except Exception:
            LOG.exception('Call to Nova to create snapshot failed')
            raise
        # Loop and wait for result
        # Nova will call Cinderclient to update the status in the database
        # An update of progress = '90%' means that Nova is done
        seconds_elapsed = 0
        increment = 1
        timeout = 600
        while True:
            s = db.snapshot_get(snapshot.obj_context, snapshot.id)
            LOG.debug('Status of snapshot %(id)s is now %(status)s',
                      {'id': snapshot['id'],
                       'status': s['status']})
            if s['status'] == fields.SnapshotStatus.CREATING:
                if s['progress'] == '90%':
                    # Nova tasks completed successfully
                    break
                time.sleep(increment)
                seconds_elapsed += increment
            elif s['status'] == fields.SnapshotStatus.ERROR:
                msg = _('Nova returned "error" status '
                        'while creating snapshot.')
                raise exception.RemoteFSException(msg)
            elif (s['status'] == fields.SnapshotStatus.DELETING or
                    s['status'] == fields.SnapshotStatus.ERROR_DELETING):
                msg = _('Snapshot %(id)s has been asked to be deleted while '
                        'waiting for it to become available. Perhaps a '
                        'concurrent request was made.') % {'id':
                                                           snapshot.id}
                raise exception.RemoteFSConcurrentRequest(msg)
            # Back off polling: every 1s for the first 10s, then 2s up to
            # 20s, 5s up to 60s, and 10s thereafter.
            if 10 < seconds_elapsed <= 20:
                increment = 2
            elif 20 < seconds_elapsed <= 60:
                increment = 5
            elif 60 < seconds_elapsed:
                increment = 10
            if seconds_elapsed > timeout:
                msg = _('Timed out while waiting for Nova update '
                        'for creation of snapshot %s.') % snapshot.id
                raise exception.RemoteFSException(msg)
    def _delete_snapshot_online(self, context, snapshot, info):
        """Delete a snapshot of an attached volume with Nova's assistance.

        Depending on whether the deleted snapshot's file is the active
        image, Nova either rebases/pulls the base into the active file or
        commits the snapshot file into its base. The stale file is then
        removed locally and the info file updated.

        :param info: dict with 'active_file', 'snapshot_file', 'base_file',
            'base_id' and 'new_base_file' keys, as built by
            _delete_snapshot
        """
        # Update info over the course of this method
        # active file never changes
        info_path = self._local_path_volume_info(snapshot.volume)
        snap_info = self._read_info_file(info_path)
        update_format = False
        if utils.paths_normcase_equal(info['active_file'],
                                      info['snapshot_file']):
            # blockRebase/Pull base into active
            # info['base'] => snapshot_file
            file_to_delete = info['base_file']
            if info['base_id'] is None:
                # Passing base=none to blockRebase ensures that
                # libvirt blanks out the qcow2 backing file pointer
                new_base = None
            else:
                new_base = info['new_base_file']
                snap_info[info['base_id']] = info['snapshot_file']
            delete_info = {'file_to_merge': new_base,
                           'merge_target_file': None,  # current
                           'type': 'qcow2',
                           'volume_id': snapshot.volume.id}
            del(snap_info[snapshot.id])
            update_format = True
        else:
            # blockCommit snapshot into base
            # info['base'] <= snapshot_file
            # delete record of snapshot
            file_to_delete = info['snapshot_file']
            delete_info = {'file_to_merge': info['snapshot_file'],
                           'merge_target_file': info['base_file'],
                           'type': 'qcow2',
                           'volume_id': snapshot.volume.id}
            del(snap_info[snapshot.id])
        self._nova_assisted_vol_snap_delete(context, snapshot, delete_info)
        if update_format:
            snapshot.volume.admin_metadata['format'] = 'qcow2'
            with snapshot.volume.obj_as_admin():
                snapshot.volume.save()
        # Write info file updated above
        self._write_info_file(info_path, snap_info)
        # Delete stale file
        path_to_delete = os.path.join(
            self._local_volume_dir(snapshot.volume), file_to_delete)
        self._delete(path_to_delete)
    def _nova_assisted_vol_snap_delete(self, context, snapshot, delete_info):
        """Ask Nova to merge snapshot files and wait for it to finish.

        Polls the snapshot's DB record until Nova reports completion
        (progress '90%' while status is 'deleting'), an unexpected status
        appears, or the timeout is hit.

        :raises RemoteFSException: on unexpected snapshot status or timeout
        """
        try:
            self._nova.delete_volume_snapshot(
                context,
                snapshot.id,
                delete_info)
        except Exception:
            LOG.exception('Call to Nova delete snapshot failed')
            raise
        # Loop and wait for result
        # Nova will call Cinderclient to update the status in the database
        # An update of progress = '90%' means that Nova is done
        seconds_elapsed = 0
        increment = 1
        timeout = 7200
        while True:
            s = db.snapshot_get(context, snapshot.id)
            if s['status'] == fields.SnapshotStatus.DELETING:
                if s['progress'] == '90%':
                    # Nova tasks completed successfully
                    break
                else:
                    LOG.debug('status of snapshot %s is still "deleting"... '
                              'waiting', snapshot.id)
                    time.sleep(increment)
                    seconds_elapsed += increment
            else:
                msg = _('Unable to delete snapshot %(id)s, '
                        'status: %(status)s.') % {'id': snapshot.id,
                                                  'status': s['status']}
                raise exception.RemoteFSException(msg)
            # Back off polling: every 1s for the first 10s, then 2s up to
            # 20s, 5s up to 60s, and 10s thereafter.
            if 10 < seconds_elapsed <= 20:
                increment = 2
            elif 20 < seconds_elapsed <= 60:
                increment = 5
            elif 60 < seconds_elapsed:
                increment = 10
            if seconds_elapsed > timeout:
                msg = _('Timed out while waiting for Nova update '
                        'for deletion of snapshot %(id)s.') %\
                    {'id': snapshot.id}
                raise exception.RemoteFSException(msg)
    def _extend_volume(self, volume, size_gb):
        """Extend the volume to size_gb. Must be provided by subclasses.

        :raises NotImplementedError: always, in this base class
        """
        raise NotImplementedError()
    def _revert_to_snapshot(self, context, volume, snapshot):
        """Revert the volume to a snapshot. Must be provided by subclasses.

        :raises NotImplementedError: always, in this base class
        """
        raise NotImplementedError()
class RemoteFSSnapDriver(RemoteFSSnapDriverBase):
    """Snapshot-capable RemoteFS driver with per-volume-id locking.

    Thin wrapper that serializes operations on the same volume id via the
    locked_volume_id_operation decorator (see its definition for the exact
    lock scope).
    """

    @locked_volume_id_operation
    def create_snapshot(self, snapshot):
        """Apply locking to the create snapshot operation."""
        return self._create_snapshot(snapshot)

    @locked_volume_id_operation
    def delete_snapshot(self, snapshot):
        """Apply locking to the delete snapshot operation."""
        return self._delete_snapshot(snapshot)

    @locked_volume_id_operation
    def create_volume_from_snapshot(self, volume, snapshot):
        """Apply locking to creating a volume from a snapshot."""
        return self._create_volume_from_snapshot(volume, snapshot)

    # TODO: should be locking on src_vref id -- bug #1852449
    @locked_volume_id_operation
    def create_cloned_volume(self, volume, src_vref):
        """Creates a clone of the specified volume."""
        return self._create_cloned_volume(volume, src_vref,
                                          src_vref.obj_context)

    @locked_volume_id_operation
    def copy_volume_to_image(self, context, volume, image_service, image_meta):
        """Copy the volume to the specified image."""
        return self._copy_volume_to_image(context, volume, image_service,
                                          image_meta)

    @locked_volume_id_operation
    def extend_volume(self, volume, size_gb):
        """Apply locking to the extend volume operation."""
        return self._extend_volume(volume, size_gb)

    @locked_volume_id_operation
    def revert_to_snapshot(self, context, volume, snapshot):
        """Revert to specified snapshot."""
        return self._revert_to_snapshot(context, volume, snapshot)
class RemoteFSSnapDriverDistributed(RemoteFSSnapDriverBase):
    """Snapshot-capable RemoteFS driver using coordination-based locks.

    Same operations as RemoteFSSnapDriver, but serialized through
    coordination.synchronized, so locking can span services sharing the
    same coordination backend.
    """

    @coordination.synchronized('{self.driver_prefix}-{snapshot.volume.id}')
    def create_snapshot(self, snapshot):
        """Apply locking to the create snapshot operation."""
        return self._create_snapshot(snapshot)

    @coordination.synchronized('{self.driver_prefix}-{snapshot.volume.id}')
    def delete_snapshot(self, snapshot):
        """Apply locking to the delete snapshot operation."""
        return self._delete_snapshot(snapshot)

    @coordination.synchronized('{self.driver_prefix}-{volume.id}')
    def create_volume_from_snapshot(self, volume, snapshot):
        """Apply locking to creating a volume from a snapshot."""
        return self._create_volume_from_snapshot(volume, snapshot)

    # lock the source volume id first
    @coordination.synchronized('{self.driver_prefix}-{src_vref.id}')
    @coordination.synchronized('{self.driver_prefix}-{volume.id}')
    def create_cloned_volume(self, volume, src_vref):
        """Creates a clone of the specified volume."""
        return self._create_cloned_volume(volume, src_vref,
                                          src_vref.obj_context)

    @coordination.synchronized('{self.driver_prefix}-{volume.id}')
    def copy_volume_to_image(self, context, volume, image_service, image_meta):
        """Copy the volume to the specified image."""
        return self._copy_volume_to_image(context, volume, image_service,
                                          image_meta)

    @coordination.synchronized('{self.driver_prefix}-{volume.id}')
    def extend_volume(self, volume, size_gb):
        """Apply locking to the extend volume operation."""
        return self._extend_volume(volume, size_gb)

    @coordination.synchronized('{self.driver_prefix}-{volume.id}')
    def revert_to_snapshot(self, context, volume, snapshot):
        """Revert to specified snapshot."""
        return self._revert_to_snapshot(context, volume, snapshot)
class RemoteFSPoolMixin(object):
    """Drivers inheriting this will report each share as a pool."""

    def _find_share(self, volume):
        """Return the share backing the pool chosen by the scheduler."""
        # The scheduler has already picked a pool for this volume; map the
        # pool name back to its share.
        pool_name = self._get_pool_name_from_volume(volume)
        return self._get_share_from_pool_name(pool_name)

    def _get_pool_name_from_volume(self, volume):
        """Extract the pool name from the volume's host field."""
        return volume_utils.extract_host(volume['host'], level='pool')

    def _get_pool_name_from_share(self, share):
        """Map a share to its pool name. Provided by pool-aware drivers."""
        raise NotImplementedError()

    def _get_share_from_pool_name(self, pool_name):
        # To be implemented by drivers using pools.
        raise NotImplementedError()

    def _update_volume_stats(self):
        """Refresh self._stats, reporting each mounted share as a pool."""
        backend_name = self.configuration.safe_get('volume_backend_name')
        data = {
            'volume_backend_name': backend_name or self.volume_backend_name,
            'vendor_name': self.vendor_name,
            'driver_version': self.get_version(),
            'storage_protocol': self.driver_volume_type,
        }

        self._ensure_shares_mounted()

        pools = []
        for share in self._mounted_shares:
            total, free, allocated = self._get_capacity_info(share)
            pools.append({
                'pool_name': self._get_pool_name_from_share(share),
                'total_capacity_gb': total / float(units.Gi),
                'free_capacity_gb': free / float(units.Gi),
                'provisioned_capacity_gb': allocated / float(units.Gi),
                'reserved_percentage':
                    self.configuration.reserved_percentage,
                'max_over_subscription_ratio':
                    self.configuration.max_over_subscription_ratio,
                'thin_provisioning_support':
                    self._thin_provisioning_support,
                'thick_provisioning_support':
                    self._thick_provisioning_support,
                'QoS_support': False,
            })

        # Capacity is reported per pool; backend-level totals stay zero.
        data['total_capacity_gb'] = 0
        data['free_capacity_gb'] = 0
        data['pools'] = pools

        self._stats = data
class RevertToSnapshotMixin(object):

    def _revert_to_snapshot(self, context, volume, snapshot):
        """Revert a volume to the given snapshot.

        The volume must not be attached, and only the latest snapshot
        can be reverted to.
        """
        self._validate_state(snapshot.volume.status,
                             ['available', 'reverting'])

        LOG.debug('Reverting volume %(vol)s to snapshot %(snap)s',
                  {'vol': snapshot.volume.id, 'snap': snapshot.id})

        info_path = self._local_path_volume_info(snapshot.volume)
        snap_info = self._read_info_file(info_path)
        snapshot_file = snap_info[snapshot.id]
        active_file = snap_info['active']

        # Only the newest snapshot may be reverted to: its file must be
        # the chain's active image.
        if not utils.paths_normcase_equal(snapshot_file, active_file):
            msg = _("Could not revert volume '%(volume_id)s' to snapshot "
                    "'%(snapshot_id)s' as it does not "
                    "appear to be the latest snapshot. Current active "
                    "image: %(active_file)s.")
            raise exception.InvalidSnapshot(
                msg % {'snapshot_id': snapshot.id,
                       'active_file': active_file,
                       'volume_id': volume.id})

        snapshot_path = os.path.join(
            self._local_volume_dir(snapshot.volume), snapshot_file)
        backing_filename = self._qemu_img_info(
            snapshot_path, volume.name).backing_file

        # We revert the volume to the latest snapshot by recreating the
        # top image from the chain, discarding all changes made since the
        # snapshot was taken. This workflow should work with most (if not
        # all) drivers inheriting this class.
        self._delete(snapshot_path)
        self._do_create_snapshot(snapshot, backing_filename, snapshot_path)
class RemoteFSManageableVolumesMixin(object):
_SUPPORTED_IMAGE_FORMATS = ['raw', 'qcow2']
_MANAGEABLE_IMAGE_RE = None
    def _get_manageable_vol_location(self, existing_ref):
        """Resolve an existing-volume reference to a mounted share location.

        :param existing_ref: dict that must contain 'source-name', the
            remote path of the image to manage
        :returns: dict with 'share', 'mountpoint', 'vol_local_path' and
            'vol_remote_path' keys
        :raises ManageExistingInvalidReference: if the reference is missing
            'source-name' or the image cannot be found on a mounted share
        """
        if 'source-name' not in existing_ref:
            reason = _('The existing volume reference '
                       'must contain "source-name".')
            raise exception.ManageExistingInvalidReference(
                existing_ref=existing_ref, reason=reason)
        vol_remote_path = os.path.normcase(
            os.path.normpath(existing_ref['source-name']))
        for mounted_share in self._mounted_shares:
            # We don't currently attempt to resolve hostnames. This could
            # be troublesome for some distributed shares, which may have
            # hostnames resolving to multiple addresses.
            norm_share = os.path.normcase(os.path.normpath(mounted_share))
            # Split the remote path on the share prefix; a match plus a
            # path-separator-prefixed remainder means the image lives on
            # this share.
            head, match, share_rel_path = vol_remote_path.partition(norm_share)
            if not (match and share_rel_path.startswith(os.path.sep)):
                continue
            mountpoint = self._get_mount_point_for_share(mounted_share)
            vol_local_path = os.path.join(mountpoint,
                                          share_rel_path.lstrip(os.path.sep))
            LOG.debug("Found mounted share referenced by %s.",
                      vol_remote_path)
            if os.path.isfile(vol_local_path):
                LOG.debug("Found volume %(path)s on share %(share)s.",
                          dict(path=vol_local_path, share=mounted_share))
                return dict(share=mounted_share,
                            mountpoint=mountpoint,
                            vol_local_path=vol_local_path,
                            vol_remote_path=vol_remote_path)
            else:
                LOG.error("Could not find volume %s on the "
                          "specified share.", vol_remote_path)
                break
        raise exception.ManageExistingInvalidReference(
            existing_ref=existing_ref, reason=_('Volume not found.'))
def _get_managed_vol_expected_path(self, volume, volume_location):
# This may be overridden by the drivers.
return os.path.join(volume_location['mountpoint'],
volume.name)
def _is_volume_manageable(self, volume_path, already_managed=False):
unmanageable_reason = None
if already_managed:
return False, _('Volume already managed.')
try:
img_info = self._qemu_img_info(volume_path, volume_name=None)
except exception.RemoteFSInvalidBackingFile:
return False, _("Backing file present.")
except Exception:
return False, _("Failed to open image.")
# We're double checking as some drivers do not validate backing
# files through '_qemu_img_info'.
if img_info.backing_file:
return False, _("Backing file present.")
if img_info.file_format not in self._SUPPORTED_IMAGE_FORMATS:
unmanageable_reason = _(
"Unsupported image format: '%s'.") % img_info.file_format
return False, unmanageable_reason
return True, None
def manage_existing(self, volume, existing_ref):
LOG.info('Managing volume %(volume_id)s with ref %(ref)s',
{'volume_id': volume.id, 'ref': existing_ref})
vol_location = self._get_manageable_vol_location(existing_ref)
vol_local_path = vol_location['vol_local_path']
manageable, unmanageable_reason = self._is_volume_manageable(
vol_local_path)
if not manageable:
raise exception.ManageExistingInvalidReference(
existing_ref=existing_ref, reason=unmanageable_reason)
expected_vol_path = self._get_managed_vol_expected_path(
volume, vol_location)
self._set_rw_permissions(vol_local_path)
# This should be the last thing we do.
if expected_vol_path != vol_local_path:
LOG.info("Renaming imported volume image %(src)s to %(dest)s",
dict(src=vol_location['vol_local_path'],
dest=expected_vol_path))
os.rename(vol_location['vol_local_path'],
expected_vol_path)
return {'provider_location': vol_location['share']}
def _get_rounded_manageable_image_size(self, image_path):
image_size = image_utils.qemu_img_info(
image_path, run_as_root=self._execute_as_root).virtual_size
return int(math.ceil(float(image_size) / units.Gi))
def manage_existing_get_size(self, volume, existing_ref):
vol_location = self._get_manageable_vol_location(existing_ref)
volume_path = vol_location['vol_local_path']
return self._get_rounded_manageable_image_size(volume_path)
    def unmanage(self, volume):
        """Stop managing ``volume`` without touching its backing image.

        Intentionally a no-op: the image file is simply left in place on
        the share when Cinder stops tracking it.
        """
        pass
def _get_manageable_volume(self, share, volume_path, managed_volume=None):
manageable, unmanageable_reason = self._is_volume_manageable(
volume_path, already_managed=managed_volume is not None)
size_gb = None
if managed_volume:
# We may not be able to query in-use images.
size_gb = managed_volume.size
else:
try:
size_gb = self._get_rounded_manageable_image_size(volume_path)
except Exception:
manageable = False
unmanageable_reason = (unmanageable_reason or
_("Failed to get size."))
mountpoint = self._get_mount_point_for_share(share)
norm_mountpoint = os.path.normcase(os.path.normpath(mountpoint))
norm_vol_path = os.path.normcase(os.path.normpath(volume_path))
ref = norm_vol_path.replace(norm_mountpoint, share).replace('\\', '/')
manageable_volume = {
'reference': {'source-name': ref},
'size': size_gb,
'safe_to_manage': manageable,
'reason_not_safe': unmanageable_reason,
'cinder_id': managed_volume.id if managed_volume else None,
'extra_info': None,
}
return manageable_volume
    def _get_share_manageable_volumes(self, share, managed_volumes):
        """Walk a mounted share and describe each candidate image.

        :param share: the share whose mountpoint will be scanned.
        :param managed_volumes: dict mapping volume names to Cinder
            volumes that are already managed.
        :returns: list of manageable-volume description dicts.
        """
        manageable_volumes = []
        mount_path = self._get_mount_point_for_share(share)
        for dir_path, dir_names, file_names in os.walk(mount_path):
            for file_name in file_names:
                file_name = os.path.normcase(file_name)
                img_path = os.path.join(dir_path, file_name)
                # In the future, we may have the regex filtering images
                # as a config option.
                if (not self._MANAGEABLE_IMAGE_RE or
                        self._MANAGEABLE_IMAGE_RE.match(file_name)):
                    # An image whose base name matches a managed volume's
                    # name is treated as already managed.
                    managed_volume = managed_volumes.get(
                        os.path.splitext(file_name)[0])
                    try:
                        manageable_volume = self._get_manageable_volume(
                            share, img_path, managed_volume)
                        manageable_volumes.append(manageable_volume)
                    except Exception as exc:
                        # Best-effort scan: log and skip images that
                        # cannot be inspected.
                        LOG.error(
                            "Failed to get manageable volume info: "
                            "'%(image_path)s'. Exception: %(exc)s.",
                            dict(image_path=img_path, exc=exc))
        return manageable_volumes
def get_manageable_volumes(self, cinder_volumes, marker, limit, offset,
sort_keys, sort_dirs):
manageable_volumes = []
managed_volumes = {vol.name: vol for vol in cinder_volumes}
for share in self._mounted_shares:
try:
manageable_volumes += self._get_share_manageable_volumes(
share, managed_volumes)
except Exception as exc:
LOG.error("Failed to get manageable volumes for "
"share %(share)s. Exception: %(exc)s.",
dict(share=share, exc=exc))
return volume_utils.paginate_entries_list(
manageable_volumes, marker, limit, offset, sort_keys, sort_dirs)
| apache-2.0 |
galtys/odoo | addons/document/test_cindex.py | 444 | 1553 | #!/usr/bin/python
import sys
import os
import glob
import time
import logging
from optparse import OptionParser
# NOTE(review): this is a Python 2 script (print statement, ``except
# Exception, e`` syntax, builtin ``reduce``, ``sys.exc_type``); it will
# not run under Python 3 as written.
logging.basicConfig(level=logging.DEBUG)
parser = OptionParser()
parser.add_option("-q", "--quiet",
                action="store_false", dest="verbose", default=True,
                help="don't print status messages to stdout")
parser.add_option("-C", "--content",
                action="store_true", dest="docontent", default=False,
                help="Disect content, rather than the file.")
parser.add_option("--delay",
                action="store_true", dest="delay", default=False,
                help="delay after the operation, to inspect child processes")
(options, args) = parser.parse_args()
import content_index, std_index
from content_index import cntIndex
# Index every file named on the command line, either by passing its raw
# content or by passing the path, then print the first few result lines.
for fname in args:
    try:
        if options.docontent:
            # Read the file ourselves and hand the raw bytes to the indexer.
            fp = open(fname,'rb')
            content = fp.read()
            fp.close()
            res = cntIndex.doIndex(content, fname, None, None, True)
        else:
            # Let the indexer open the file by path.
            res = cntIndex.doIndex(None, fname, None, fname,True)
        if options.verbose:
            for line in res[:5]:
                print line
        if options.delay:
            # Keep the process alive so child processes can be inspected.
            time.sleep(30)
    except Exception,e:
        # NOTE(review): the formatted traceback is built but never printed
        # or logged - looks unintentional; confirm.
        import traceback
        tb_s = reduce(lambda x, y: x+y, traceback.format_exception( sys.exc_type, sys.exc_value, sys.exc_traceback))
    except KeyboardInterrupt:
        print "Keyboard interrupt"
#eof
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
kantai/passe-framework-prototype | django/contrib/flatpages/tests/forms.py | 155 | 1271 | from django.conf import settings
from django.contrib.flatpages.admin import FlatpageForm
from django.test import TestCase
class FlatpageAdminFormTests(TestCase):
    """Validation tests for the flatpage admin form's URL field."""

    def setUp(self):
        # Common POST data shared by every form instantiation below.
        self.form_data = {
            'title': "A test page",
            'content': "This is a test",
            'sites': [settings.SITE_ID],
        }

    def test_flatpage_admin_form_url_validation(self):
        "The flatpage admin form correctly validates urls"
        valid_urls = [
            '/new_flatpage/',
            '/some.special~chars/',
            '/some.very_special~chars-here/',
        ]
        invalid_urls = [
            '/a space/',
            '/a % char/',
            '/a ! char/',
            '/a & char/',
            '/a ? char/',
        ]
        for url in valid_urls:
            self.assertTrue(
                FlatpageForm(data=dict(url=url, **self.form_data)).is_valid())
        for url in invalid_urls:
            self.assertFalse(
                FlatpageForm(data=dict(url=url, **self.form_data)).is_valid())
| bsd-3-clause |
insomnia-lab/calibre | src/calibre/ebooks/markdown/blockprocessors.py | 47 | 22371 | """
CORE MARKDOWN BLOCKPARSER
===========================================================================
This parser handles basic parsing of Markdown blocks. It doesn't concern itself
with inline elements such as **bold** or *italics*, but rather just catches
blocks, lists, quotes, etc.
The BlockParser is made up of a bunch of BlockProssors, each handling a
different type of block. Extensions may add/replace/remove BlockProcessors
as they need to alter how markdown blocks are parsed.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import logging
import re
from . import util
from .blockparser import BlockParser
logger = logging.getLogger('MARKDOWN')
def build_block_parser(md_instance, **kwargs):
    """ Build the default block parser used by Markdown. """
    parser = BlockParser(md_instance)
    # Registration order matters: the parser tries processors in the
    # order they are inserted into ``blockprocessors``.
    default_processors = [
        ('empty', EmptyBlockProcessor),
        ('indent', ListIndentProcessor),
        ('code', CodeBlockProcessor),
        ('hashheader', HashHeaderProcessor),
        ('setextheader', SetextHeaderProcessor),
        ('hr', HRProcessor),
        ('olist', OListProcessor),
        ('ulist', UListProcessor),
        ('quote', BlockQuoteProcessor),
        ('paragraph', ParagraphProcessor),
    ]
    for name, processor_cls in default_processors:
        parser.blockprocessors[name] = processor_cls(parser)
    return parser
class BlockProcessor:
    """ Base class for block processors.

    Each subclass must implement ``test`` and ``run``. ``test`` decides
    whether a given block should be handled by this processor; when it
    returns True, the parser invokes ``run`` on that block.
    """

    def __init__(self, parser):
        self.parser = parser
        self.tab_length = parser.markdown.tab_length

    def lastChild(self, parent):
        """ Return the last child of an etree element, or None. """
        return parent[-1] if len(parent) else None

    def detab(self, text):
        """ Remove a tab from the front of each line of the given text.

        Returns a ``(detabbed, remainder)`` pair: the indented/blank prefix
        of the block with one indent level removed, and the untouched rest.
        """
        lines = text.split('\n')
        indent = ' ' * self.tab_length
        detabbed = []
        for line in lines:
            if line.startswith(indent):
                detabbed.append(line[self.tab_length:])
            elif not line.strip():
                # Blank lines stay (as empty strings) inside the block.
                detabbed.append('')
            else:
                # First non-indented, non-blank line ends the block.
                break
        consumed = len(detabbed)
        return '\n'.join(detabbed), '\n'.join(lines[consumed:])

    def looseDetab(self, text, level=1):
        """ Remove a tab from front of lines but allowing dedented lines. """
        prefix = ' ' * (self.tab_length * level)
        dedented = [line[len(prefix):] if line.startswith(prefix) else line
                    for line in text.split('\n')]
        return '\n'.join(dedented)

    def test(self, parent, block):
        """ Return True if this processor should handle ``block``.

        Must be overridden by subclasses. ``parent`` is the etree element
        that would become the block's parent; the decision may depend on
        it (e.g. when inside a list).
        """
        pass

    def run(self, parent, blocks):
        """ Process ``blocks[0]``, mutating ``parent``/``blocks`` in place.

        Must be overridden by subclasses. Processors pop/insert entries in
        ``blocks`` and add SubElements or text to ``parent``; nothing is
        returned.
        """
        pass
class ListIndentProcessor(BlockProcessor):
    """ Process children of list items.

    Handles indented blocks that belong to a preceding list item, e.g.::

        * a list item
            process this part

            or this part

    """
    ITEM_TYPES = ['li']
    LIST_TYPES = ['ul', 'ol']

    def __init__(self, *args):
        BlockProcessor.__init__(self, *args)
        # Matches one or more leading indent units (tab_length spaces each).
        self.INDENT_RE = re.compile(r'^(([ ]{%s})+)'% self.tab_length)

    def test(self, parent, block):
        # Only indented blocks that have not already been detabbed, and
        # whose parent is a list item or whose last sibling is a list.
        return block.startswith(' '*self.tab_length) and \
            not self.parser.state.isstate('detabbed') and \
            (parent.tag in self.ITEM_TYPES or \
                (len(parent) and parent[-1] and \
                    (parent[-1].tag in self.LIST_TYPES)
                )
            )

    def run(self, parent, blocks):
        """ Detab the block and re-parse it under the proper list parent. """
        block = blocks.pop(0)
        level, sibling = self.get_level(parent, block)
        block = self.looseDetab(block, level)
        self.parser.state.set('detabbed')
        if parent.tag in self.ITEM_TYPES:
            # It's possible that this parent has a 'ul' or 'ol' child list
            # with a member. If that is the case, then that should be the
            # parent. This is intended to catch the edge case of an indented
            # list whose first member was parsed previous to this point
            # see OListProcessor
            if len(parent) and parent[-1].tag in self.LIST_TYPES:
                self.parser.parseBlocks(parent[-1], [block])
            else:
                # The parent is already a li. Just parse the child block.
                self.parser.parseBlocks(parent, [block])
        elif sibling.tag in self.ITEM_TYPES:
            # The sibling is a li. Use it as parent.
            self.parser.parseBlocks(sibling, [block])
        elif len(sibling) and sibling[-1].tag in self.ITEM_TYPES:
            # The parent is a list (``ol`` or ``ul``) which has children.
            # Assume the last child li is the parent of this block.
            if sibling[-1].text:
                # If the parent li has text, that text needs to be moved to a p
                # The p must be 'inserted' at beginning of list in the event
                # that other children already exist i.e.; a nested sublist.
                p = util.etree.Element('p')
                p.text = sibling[-1].text
                sibling[-1].text = ''
                sibling[-1].insert(0, p)
            self.parser.parseChunk(sibling[-1], block)
        else:
            self.create_item(sibling, block)
        self.parser.state.reset()

    def create_item(self, parent, block):
        """ Create a new li and parse the block with it as the parent. """
        li = util.etree.SubElement(parent, 'li')
        self.parser.parseBlocks(li, [block])

    def get_level(self, parent, block):
        """ Get level of indent based on list level. """
        # Get indent level
        m = self.INDENT_RE.match(block)
        if m:
            # NOTE(review): '/' is float division on Python 3; this code
            # originally targeted Python 2 integer division - confirm the
            # comparison below still behaves as intended on Python 3.
            indent_level = len(m.group(1))/self.tab_length
        else:
            indent_level = 0
        if self.parser.state.isstate('list'):
            # We're in a tightlist - so we already are at correct parent.
            level = 1
        else:
            # We're in a looselist - so we need to find parent.
            level = 0
        # Step through children of tree to find matching indent level.
        while indent_level > level:
            child = self.lastChild(parent)
            if child and (child.tag in self.LIST_TYPES or child.tag in self.ITEM_TYPES):
                if child.tag in self.LIST_TYPES:
                    level += 1
                parent = child
            else:
                # No more child levels. If we're short of indent_level,
                # we have a code block. So we stop here.
                break
        return level, parent
class CodeBlockProcessor(BlockProcessor):
    """ Process code blocks (blocks indented by one ``tab_length``). """

    def test(self, parent, block):
        return block.startswith(' '*self.tab_length)

    def run(self, parent, blocks):
        """ Append to an open <pre><code> sibling or start a new one. """
        sibling = self.lastChild(parent)
        block = blocks.pop(0)
        theRest = ''
        if sibling and sibling.tag == "pre" and len(sibling) \
                    and sibling[0].tag == "code":
            # The previous block was a code block. As blank lines do not start
            # new code blocks, append this block to the previous, adding back
            # linebreaks removed from the split into a list.
            code = sibling[0]
            block, theRest = self.detab(block)
            code.text = util.AtomicString('%s\n%s\n' % (code.text, block.rstrip()))
        else:
            # This is a new codeblock. Create the elements and insert text.
            pre = util.etree.SubElement(parent, 'pre')
            code = util.etree.SubElement(pre, 'code')
            block, theRest = self.detab(block)
            code.text = util.AtomicString('%s\n' % block.rstrip())
        if theRest:
            # This block contained unindented line(s) after the first indented
            # line. Insert these lines as the first block of the master blocks
            # list for future processing.
            blocks.insert(0, theRest)
class BlockQuoteProcessor(BlockProcessor):
    """ Process blockquotes (lines prefixed with ``>``). """

    RE = re.compile(r'(^|\n)[ ]{0,3}>[ ]?(.*)')

    def test(self, parent, block):
        return bool(self.RE.search(block))

    def run(self, parent, blocks):
        """ Strip quote markers and re-parse inside a <blockquote>. """
        block = blocks.pop(0)
        m = self.RE.search(block)
        if m:
            before = block[:m.start()]  # Lines before blockquote
            # Pass lines before blockquote in recursively for parsing first.
            self.parser.parseBlocks(parent, [before])
            # Remove ``> `` from beginning of each line.
            block = '\n'.join([self.clean(line) for line in
                            block[m.start():].split('\n')])
        sibling = self.lastChild(parent)
        if sibling and sibling.tag == "blockquote":
            # Previous block was a blockquote so set that as this blocks parent
            quote = sibling
        else:
            # This is a new blockquote. Create a new parent element.
            quote = util.etree.SubElement(parent, 'blockquote')
        # Recursively parse block with blockquote as parent.
        # change parser state so blockquotes embedded in lists use p tags
        self.parser.state.set('blockquote')
        self.parser.parseChunk(quote, block)
        self.parser.state.reset()

    def clean(self, line):
        """ Remove ``>`` from beginning of a line. """
        m = self.RE.match(line)
        if line.strip() == ">":
            return ""
        elif m:
            return m.group(2)
        else:
            return line
class OListProcessor(BlockProcessor):
    """ Process ordered list blocks (``1. item``). """

    TAG = 'ol'
    # Detect an item (``1. item``). ``group(1)`` contains contents of item.
    RE = re.compile(r'^[ ]{0,3}\d+\.[ ]+(.*)')
    # Detect items on secondary lines. they can be of either list type.
    CHILD_RE = re.compile(r'^[ ]{0,3}((\d+\.)|[*+-])[ ]+(.*)')
    # Detect indented (nested) items of either type
    INDENT_RE = re.compile(r'^[ ]{4,7}((\d+\.)|[*+-])[ ]+.*')
    # The integer (python string) with which the lists starts (default=1)
    # Eg: If list is intialized as)
    #   3. Item
    # The ol tag will get starts="3" attribute
    STARTSWITH = '1'
    # List of allowed sibling tags.
    SIBLING_TAGS = ['ol', 'ul']

    def test(self, parent, block):
        return bool(self.RE.match(block))

    def run(self, parent, blocks):
        """ Append the block's items to an existing or new list element. """
        # Check for multiple items in one block.
        items = self.get_items(blocks.pop(0))
        sibling = self.lastChild(parent)
        if sibling and sibling.tag in self.SIBLING_TAGS:
            # Previous block was a list item, so set that as parent
            lst = sibling
            # make sure previous item is in a p- if the item has text, then it
            # it isn't in a p
            if lst[-1].text:
                # since it's possible there are other children for this sibling,
                # we can't just SubElement the p, we need to insert it as the
                # first item
                p = util.etree.Element('p')
                p.text = lst[-1].text
                lst[-1].text = ''
                lst[-1].insert(0, p)
            # if the last item has a tail, then the tail needs to be put in a p
            # likely only when a header is not followed by a blank line
            lch = self.lastChild(lst[-1])
            if lch is not None and lch.tail:
                p = util.etree.SubElement(lst[-1], 'p')
                p.text = lch.tail.lstrip()
                lch.tail = ''
            # parse first block differently as it gets wrapped in a p.
            li = util.etree.SubElement(lst, 'li')
            self.parser.state.set('looselist')
            firstitem = items.pop(0)
            self.parser.parseBlocks(li, [firstitem])
            self.parser.state.reset()
        elif parent.tag in ['ol', 'ul']:
            # this catches the edge case of a multi-item indented list whose
            # first item is in a blank parent-list item:
            # * * subitem1
            #     * subitem2
            # see also ListIndentProcessor
            lst = parent
        else:
            # This is a new list so create parent with appropriate tag.
            lst = util.etree.SubElement(parent, self.TAG)
            # Check if a custom start integer is set
            if not self.parser.markdown.lazy_ol and self.STARTSWITH !='1':
                lst.attrib['start'] = self.STARTSWITH
        self.parser.state.set('list')
        # Loop through items in block, recursively parsing each with the
        # appropriate parent.
        for item in items:
            if item.startswith(' '*self.tab_length):
                # Item is indented. Parse with last item as parent
                self.parser.parseBlocks(lst[-1], [item])
            else:
                # New item. Create li and parse with it as parent
                li = util.etree.SubElement(lst, 'li')
                self.parser.parseBlocks(li, [item])
        self.parser.state.reset()

    def get_items(self, block):
        """ Break a block into list items. """
        items = []
        for line in block.split('\n'):
            m = self.CHILD_RE.match(line)
            if m:
                # This is a new list item
                # Check first item for the start index
                if not items and self.TAG=='ol':
                    # Detect the integer value of first list item
                    # (raw string avoids the invalid '\d' escape warning).
                    INTEGER_RE = re.compile(r'(\d+)')
                    self.STARTSWITH = INTEGER_RE.match(m.group(1)).group()
                # Append to the list
                items.append(m.group(3))
            elif self.INDENT_RE.match(line):
                # This is an indented (possibly nested) item.
                if items[-1].startswith(' '*self.tab_length):
                    # Previous item was indented. Append to that item.
                    items[-1] = '%s\n%s' % (items[-1], line)
                else:
                    items.append(line)
            else:
                # This is another line of previous item. Append to that item.
                items[-1] = '%s\n%s' % (items[-1], line)
        return items
class UListProcessor(OListProcessor):
    """ Process unordered list blocks.

    Inherits all behavior from OListProcessor; only the output tag and
    the item-detection regex (``*``, ``+`` or ``-`` bullets) differ.
    """
    TAG = 'ul'
    # Detect an item (e.g. ``* item``). ``group(1)`` contains its contents.
    RE = re.compile(r'^[ ]{0,3}[*+-][ ]+(.*)')
class HashHeaderProcessor(BlockProcessor):
    """ Process Hash Headers (e.g. ``## Heading``). """

    # Detect a header at start of any line in block
    RE = re.compile(r'(^|\n)(?P<level>#{1,6})(?P<header>.*?)#*(\n|$)')

    def test(self, parent, block):
        return bool(self.RE.search(block))

    def run(self, parent, blocks):
        """ Emit an <hN> element, re-queueing text before/after it. """
        block = blocks.pop(0)
        m = self.RE.search(block)
        if m:
            before = block[:m.start()]  # All lines before header
            after = block[m.end():]     # All lines after header
            if before:
                # As the header was not the first line of the block and the
                # lines before the header must be parsed first,
                # recursively parse this lines as a block.
                self.parser.parseBlocks(parent, [before])
            # Create header using named groups from RE
            h = util.etree.SubElement(parent, 'h%d' % len(m.group('level')))
            h.text = m.group('header').strip()
            if after:
                # Insert remaining lines as first block for future parsing.
                blocks.insert(0, after)
        else:
            # This should never happen, but just in case...
            # ``warning`` (not the deprecated ``warn`` alias), with lazy
            # %-style arguments.
            logger.warning("We've got a problem header: %r", block)
class SetextHeaderProcessor(BlockProcessor):
    """ Process Setext-style Headers (text underlined with ``=`` or ``-``). """

    # Detect Setext-style header. Must be first 2 lines of block.
    RE = re.compile(r'^.*?\n[=-]+[ ]*(\n|$)', re.MULTILINE)

    def test(self, parent, block):
        return bool(self.RE.match(block))

    def run(self, parent, blocks):
        """ Emit <h1>/<h2> from the first two lines of the block. """
        lines = blocks.pop(0).split('\n')
        # Determine level. ``=`` is 1 and ``-`` is 2.
        if lines[1].startswith('='):
            level = 1
        else:
            level = 2
        h = util.etree.SubElement(parent, 'h%d' % level)
        h.text = lines[0].strip()
        if len(lines) > 2:
            # Block contains additional lines. Add to master blocks for later.
            blocks.insert(0, '\n'.join(lines[2:]))
class HRProcessor(BlockProcessor):
    """ Process Horizontal Rules (``---``, ``___`` or ``***``). """

    RE = r'^[ ]{0,3}((-+[ ]{0,2}){3,}|(_+[ ]{0,2}){3,}|(\*+[ ]{0,2}){3,})[ ]*'
    # Detect hr on any line of a block.
    SEARCH_RE = re.compile(RE, re.MULTILINE)

    def test(self, parent, block):
        m = self.SEARCH_RE.search(block)
        # No atomic grouping in python so we simulate it here for performance.
        # The regex only matches what would be in the atomic group - the HR.
        # Then check if we are at end of block or if next char is a newline.
        if m and (m.end() == len(block) or block[m.end()] == '\n'):
            # Save match object on class instance so we can use it later.
            self.match = m
            return True
        return False

    def run(self, parent, blocks):
        """ Emit an <hr>, re-queueing any text before/after it. """
        block = blocks.pop(0)
        # Check for lines in block before hr.
        prelines = block[:self.match.start()].rstrip('\n')
        if prelines:
            # Recursively parse lines before hr so they get parsed first.
            self.parser.parseBlocks(parent, [prelines])
        # create hr
        util.etree.SubElement(parent, 'hr')
        # check for lines in block after hr.
        postlines = block[self.match.end():].lstrip('\n')
        if postlines:
            # Add lines after hr to master blocks for later parsing.
            blocks.insert(0, postlines)
class EmptyBlockProcessor(BlockProcessor):
    """ Process blocks that are empty or start with an empty line. """

    def test(self, parent, block):
        return not block or block.startswith('\n')

    def run(self, parent, blocks):
        """ Consume the leading blank line(s); if the previous sibling is an
        open code block, preserve the blank line(s) inside it. """
        block = blocks.pop(0)
        filler = '\n\n'
        if block:
            # Starts with empty line
            # Only replace a single line.
            filler = '\n'
            # Save the rest for later.
            theRest = block[1:]
            if theRest:
                # Add remaining lines to master blocks for later.
                blocks.insert(0, theRest)
        sibling = self.lastChild(parent)
        if sibling and sibling.tag == 'pre' and len(sibling) and sibling[0].tag == 'code':
            # Last block is a codeblock. Append to preserve whitespace.
            sibling[0].text = util.AtomicString('%s%s' % (sibling[0].text, filler))
class ParagraphProcessor(BlockProcessor):
    """ Process Paragraph blocks (the catch-all fallback processor). """

    def test(self, parent, block):
        # Always matches; must be registered last.
        return True

    def run(self, parent, blocks):
        block = blocks.pop(0)
        if block.strip():
            # Not a blank block. Add to parent, otherwise throw it away.
            if self.parser.state.isstate('list'):
                # The parent is a tight-list.
                #
                # Check for any children. This will likely only happen in a
                # tight-list when a header isn't followed by a blank line.
                # For example:
                #
                #     * # Header
                #     Line 2 of list item - not part of header.
                sibling = self.lastChild(parent)
                if sibling is not None:
                    # Insert after sibling.
                    if sibling.tail:
                        sibling.tail = '%s\n%s' % (sibling.tail, block)
                    else:
                        sibling.tail = '\n%s' % block
                else:
                    # Append to parent.text
                    if parent.text:
                        parent.text = '%s\n%s' % (parent.text, block)
                    else:
                        parent.text = block.lstrip()
            else:
                # Create a regular paragraph
                p = util.etree.SubElement(parent, 'p')
                p.text = block.lstrip()
| gpl-3.0 |
jmesteve/openerpseda | openerp/addons/sale_stock/__init__.py | 64 | 1123 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import sale_stock
import stock
import report
import company
import res_config
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: | agpl-3.0 |
Cal-CS-61A-Staff/ok-client | tests/sources/ok_test/load_test.py | 3 | 1874 | from client import exceptions as ex
from client.sources import ok_test
from client.sources.ok_test import models
import mock
import unittest
class LoadTest(unittest.TestCase):
    """Unit tests for ``ok_test.load``: path validation, import failures,
    deserialization errors and test naming."""

    NAME = 'valid'
    VALID_FILE = 'test/' + NAME + '.py'
    INVALID_FILE = 'invalid.ext'

    def setUp(self):
        # Pretend every path exists so loading reaches the import step.
        patcher_isfile = mock.patch('os.path.isfile')
        self.addCleanup(patcher_isfile.stop)
        self.mockIsFile = patcher_isfile.start()
        self.mockIsFile.return_value = True

        # Stub out module importing so no real test file is executed.
        patcher_load = mock.patch('client.sources.common.importing.load_module')
        self.addCleanup(patcher_load.stop)
        self.mockLoadModule = patcher_load.start()
        self.mockModule = self.mockLoadModule.return_value

        self.cmd_args = mock.Mock()

    def call_load(self, file=VALID_FILE):
        """Invoke ok_test.load with standard arguments."""
        return ok_test.load(file, '', self.cmd_args)

    def testInvalidFileType(self):
        with self.assertRaises(ex.LoadingException):
            self.call_load(file=self.INVALID_FILE)

    def testNoSuchFile(self):
        self.mockIsFile.return_value = False
        with self.assertRaises(ex.LoadingException):
            self.call_load()

    def testImportError(self):
        self.mockLoadModule.side_effect = Exception
        with self.assertRaises(ex.LoadingException):
            self.call_load()

    def testSerializeError(self):
        self.mockModule.test = {'bogus': 'test'}
        with self.assertRaises(ex.LoadingException):
            self.call_load()

    def testUsesBasenameAsName(self):
        self.mockModule.test = {
            'name': 'test',
            'points': 4.0,
            'suites': []
        }
        loaded = self.call_load()
        self.assertIsInstance(loaded, dict)
        self.assertEqual(1, len(loaded))
        self.assertIn(self.NAME, loaded)
        self.assertIsInstance(loaded[self.NAME], models.OkTest)
| apache-2.0 |
foobnix/foobnix | foobnix/gui/treeview/virtual_tree.py | 2 | 7402 | '''
Created on Sep 29, 2010
@author: ivan
'''
import logging
from gi.repository import Gtk
from foobnix.gui.state import LoadSave
from foobnix.util.mouse_utils import is_double_left_click, is_rigth_click,\
right_click_optimization_for_trees, is_empty_click
from foobnix.helpers.menu import Popup
from foobnix.helpers.dialog_entry import one_line_dialog
from foobnix.gui.model import FModel
from foobnix.gui.treeview.common_tree import CommonTreeControl
from foobnix.fc.fc import FC
from foobnix.fc.fc_cache import FCache
from foobnix.util.key_utils import KEY_DELETE, is_key
class VirtualTreeControl(CommonTreeControl, LoadSave):
    """Tree control for user-managed virtual playlists (the "Storage" tab).

    Rows can be created, renamed, deleted and rearranged via drag-and-drop;
    tree state (rows, expansion, selection) is persisted through
    FC()/FCache() on quit and restored on load.
    """
    def __init__(self, controls):
        CommonTreeControl.__init__(self, controls)
        """column config"""
        column = Gtk.TreeViewColumn(_("Storage"), self.ellipsize_render, text=self.text[0], font=self.font[0])
        column.set_resizable(True)
        self.set_headers_visible(True)
        self.append_column(column)
        self.tree_menu = Popup()
        self.configure_send_drag()
        self.configure_recive_drag()
        self.set_type_tree()

    def on_key_release(self, w, e):
        # Delete key removes the selected rows.
        if is_key(e, KEY_DELETE):
            self.delete_selected()

    def on_drag_drop_finish(self):
        # Persist the whole tree after a drag-and-drop rearrangement.
        FCache().cache_virtual_tree_beans = self.get_all_beans()
        FC().save()

    def on_button_press(self, w, e):
        """Open the playlist on double click; show a context menu on
        right click."""
        if is_empty_click(w, e):
            w.get_selection().unselect_all()
        if is_double_left_click(e):
            selected = self.get_selected_bean()
            beans = self.get_all_child_beans_by_selected()
            self.controls.notetabs._append_tab(selected.text, [selected] + beans, optimization=True)
            self.controls.play_first_file_in_playlist()
        if is_rigth_click(e):
            right_click_optimization_for_trees(w, e)
            self.tree_menu.clear()
            self.tree_menu.add_item(_("Add playlist"), "list-add", self.create_playlist, None)
            bean = self.get_selected_bean()
            if bean:
                # Menu wording differs for file rows vs. playlist rows.
                if bean.is_file:
                    self.tree_menu.add_item(_("Rename"), "accessories-text-editor", self.rename_selected, None)
                    self.tree_menu.add_item(_("Delete"), "edit-delete", self.delete_selected, None)
                else:
                    self.tree_menu.add_item(_("Rename playlist"), "accessories-text-editor", self.rename_selected, None)
                    self.tree_menu.add_item(_("Delete playlist"), "edit-delete", self.delete_selected, None)
                #menu.add_item(_("Save as"), "document-save"_AS, None, None)
                #menu.add_item(_("Open as"), "folder-open", None, None)
            self.tree_menu.show(e)

    def create_playlist(self):
        """Ask the user for a name and append a new playlist node."""
        name = one_line_dialog(_("Create new playlist"), self.controls.main_window, message_text1=_("Enter playlist name"))
        if not name:
            return
        bean = self.get_selected_bean()
        folder_bean = FModel(name)
        if bean:
            # Place the new playlist next to the selection: as a sibling of
            # a file row, or inside a selected playlist row.
            if bean.is_file:
                folder_bean.add_parent(bean.parent_level)
            else:
                folder_bean.add_parent(bean.level)
        self.append(folder_bean)

    def rename_selected(self):
        """Prompt for a new name and apply it to the selected row."""
        bean = self.get_selected_bean()
        name = one_line_dialog(_("Rename Dialog"), self.controls.main_window,
                               entry_text=bean.text, message_text1=_("Enter new name"))
        if not name:
            return
        rows = self.find_rows_by_element(self.UUID, bean.UUID)
        if rows:
            rows[0][self.text[0]] = name

    def on_load(self):
        """Restore rows, expansion and selection from the saved state."""
        self.scroll.hide()
        self.restore_rows(FCache().cache_virtual_tree_beans)
        self.restore_expand(FC().virtual_expand_paths)
        self.restore_selection(FC().virtual_selected_paths)

        # Keep the persisted state in sync with later UI changes.
        def set_expand_path(new_value):
            FC().virtual_expand_paths = new_value

        def set_selected_path(new_value):
            FC().virtual_selected_paths = new_value

        self.expand_updated(set_expand_path)
        self.selection_changed(set_selected_path)

    def on_quit(self):
        # Snapshot tree rows into the cache before shutdown.
        self.save_rows_from_tree(FCache().cache_virtual_tree_beans)

    def on_drag_data_received(self, treeview, context, x, y, selection, info, timestamp):
        """Handle rows dropped onto this tree.

        NOTE(review): rows dragged within this tree are moved together with
        their children, then the originals are removed; rows coming from a
        different tree are copied. The 'bold' font value appears to mark
        playlist header rows - confirm against CommonTreeControl.
        """
        logging.debug('Storage on_drag_data_received')
        model = self.get_model().get_model()
        drop_info = self.get_dest_row_at_pos(x, y)
        # ff - from_filter
        ff_tree = Gtk.drag_get_source_widget(context)
        ff_model, ff_paths = ff_tree.get_selection().get_selected_rows()
        treerows = [ff_model[ff_path] for ff_path in ff_paths]
        if drop_info:
            path, position = drop_info
            iter = model.get_iter(path)
            if position == Gtk.TreeViewDropPosition.INTO_OR_BEFORE or position == Gtk.TreeViewDropPosition.INTO_OR_AFTER:
                self.model[path][self.font[0]] = 'bold'
        if self == ff_tree:
            # Move within this tree: row references stay valid while rows
            # are re-inserted, so the originals can be removed afterwards.
            ff_row_refs = [Gtk.TreeRowReference.new(ff_model, ff_path) for ff_path in ff_paths]
            def add_childs(treerow, new_iter):
                # Recursively copy child rows under the newly inserted row.
                for ch_row in treerow.iterchildren():
                    niter = model.append(new_iter, [col for col in ch_row])
                    add_childs(ch_row, niter)
            for treerow, ref in zip(treerows, ff_row_refs):
                row = [col for col in treerow]
                if drop_info:
                    if position == Gtk.TreeViewDropPosition.BEFORE:
                        new_iter = model.insert_before(None, iter, row)
                    elif (position == Gtk.TreeViewDropPosition.INTO_OR_BEFORE or
                          position == Gtk.TreeViewDropPosition.INTO_OR_AFTER):
                        new_iter = model.append(iter, row)
                    else:
                        new_iter = model.insert_after(None, iter, row)
                        iter = model.iter_next(iter)
                else:
                    new_iter = model.append(None, row)
                treerow = model[ref.get_path()]  # reinitialize
                add_childs(treerow, new_iter)
            self.remove_replaced(ff_model, ff_row_refs)
        else:
            # Rows dragged in from another tree widget: plain copy.
            for treerow in treerows:
                row = [col for col in treerow]
                if drop_info:
                    if position == Gtk.TreeViewDropPosition.BEFORE:
                        new_iter = model.insert_before(None, iter, row)
                    elif (position == Gtk.TreeViewDropPosition.INTO_OR_BEFORE or
                          position == Gtk.TreeViewDropPosition.INTO_OR_AFTER):
                        new_iter = model.append(iter, row)
                    else:
                        new_iter = model.insert_after(None, iter, row)
                        iter = model.iter_next(iter)
                else:
                    new_iter = model.append(None, row)
                if len(treerows) == 1 and treerow[self.font[0]] == 'bold':
                    # A single dragged 'bold' row drags its following
                    # non-bold rows along as children (extends ``treerows``
                    # while iterating - apparently intentional).
                    while treerow.next and treerow.next[self.font[0]] != 'bold':
                        treerow = treerow.next
                        treerows.append(treerow)
                    drop_info = True
                    iter = new_iter
                    position = Gtk.TreeViewDropPosition.INTO_OR_AFTER
        self.stop_emission('drag-data-received')
| gpl-3.0 |
DukeOfHazard/crits | crits/indicators/migrate.py | 13 | 3846 | from crits.core.crits_mongoengine import EmbeddedCampaign
def migrate_indicator(self):
    """
    Migrate to the latest schema version.

    Delegates to migrate_2_to_3(), which itself runs any earlier
    migrations that are still pending.
    """
    migrate_2_to_3(self)
def migrate_2_to_3(self):
    """
    Migrate from schema 2 to 3.

    Runs the 1 -> 2 migration first if needed, then migrates the
    embedded analysis results, bumps the schema version, and persists
    the document.
    """
    if self.schema_version < 2:
        migrate_1_to_2(self)
    if self.schema_version == 2:
        # Local import - presumably avoids a circular import at module
        # load time; confirm.
        from crits.core.core_migrate import migrate_analysis_results
        migrate_analysis_results(self)
        self.schema_version = 3
        self.save()
        self.reload()
def migrate_1_to_2(self):
    """Upgrade an indicator document from schema version 1 to 2.

    Chains to the 0->1 migration first when needed, then replays the data
    stashed under ``unsupported_attrs.old_analysis`` (activity, campaign,
    confidence, impact) into the schema-2 first-class fields.

    Changes from the original: the unidiomatic ``not 'key' in c`` tests
    were replaced with ``key not in c``, and the repetitive per-key
    default handling was collapsed into data-driven loops (behavior
    unchanged).
    """
    if self.schema_version < 1:
        migrate_0_to_1(self)
    if self.schema_version != 1:
        return
    old_analysis = getattr(self.unsupported_attrs, 'old_analysis', None)
    self.activity = []
    self.campaign = []
    if old_analysis:
        # Legacy activity records: replay each through add_activity().
        if 'activity' in old_analysis:
            for a in old_analysis['activity']:
                self.add_activity(
                    analyst=a['analyst'] if 'analyst' in a else '',
                    start_date=a['start_date'] if 'start_date' in a else None,
                    end_date=a['end_date'] if 'end_date' in a else None,
                    date=a['date'] if 'date' in a else None,
                    description=a['description'] if 'description' in a else ''
                )
        # Legacy campaign records: fill in any missing keys (mutating the
        # record, as the original did), then embed.
        if 'campaign' in old_analysis:
            campaign_defaults = (('analyst', ''), ('description', ''),
                                 ('date', None), ('confidence', 'low'),
                                 ('name', ''))
            for c in old_analysis['campaign']:
                for key, default in campaign_defaults:
                    if key not in c:
                        c[key] = default
                ec = EmbeddedCampaign(
                    analyst=c['analyst'],
                    description=c['description'],
                    date=c['date'],
                    confidence=c['confidence'],
                    name=c['name']
                )
                self.add_campaign(ec)
        # Single embedded confidence rating.
        if 'confidence' in old_analysis:
            confidence = old_analysis['confidence']
            self.set_confidence(
                analyst=confidence['analyst'] if 'analyst' in confidence else '',
                rating=confidence['rating'] if 'rating' in confidence else 'unknown')
        # Single embedded impact rating.
        if 'impact' in old_analysis:
            impact = old_analysis['impact']
            self.set_impact(
                analyst=impact['analyst'] if 'analyst' in impact else '',
                rating=impact['rating'] if 'rating' in impact else 'unknown')
    self.schema_version = 2
def migrate_0_to_1(self):
    """Upgrade an indicator document from schema version 0 to 1.

    Version 1 only introduced the version bookkeeping itself, so the
    migration simply stamps the new version number when it is missing.
    """
    if self.schema_version >= 1:
        return
    self.schema_version = 1
| mit |
CRUDNS/CRUDNS | src/accounts/views.py | 1 | 2858 | from django.shortcuts import get_object_or_404
from django_rest_logger import log
from knox.auth import TokenAuthentication
from knox.models import AuthToken
from rest_framework import status
from rest_framework.authentication import BasicAuthentication
from rest_framework.decorators import api_view
from rest_framework.generics import GenericAPIView
from rest_framework.mixins import CreateModelMixin
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from accounts.models import User
from accounts.serializers import UserRegistrationSerializer, UserSerializer, UserACSerializer
from lib.utils import AtomicMixin
class UserRegisterView(AtomicMixin, CreateModelMixin, GenericAPIView):
    # Open endpoint that creates a new user account from the POSTed payload.
    serializer_class = UserRegistrationSerializer
    authentication_classes = ()  # registration must work without credentials

    def post(self, request):
        """User registration view: validate and create the user."""
        return self.create(request)
class UserLoginView(GenericAPIView):
    """Exchange HTTP Basic credentials for a Knox authentication token."""

    serializer_class = UserSerializer
    authentication_classes = (BasicAuthentication,)
    permission_classes = (IsAuthenticated,)

    def post(self, request):
        """Authenticate via basic auth; return the user plus a fresh token."""
        user = request.user
        return Response({
            'user': self.get_serializer(user).data,
            'token': AuthToken.objects.create(user),
        })
class UserConfirmEmailView(AtomicMixin, GenericAPIView):
    # Hit from the emailed activation link; no authentication required.
    serializer_class = None
    authentication_classes = ()

    def get(self, request, activation_key):
        """
        View for confirm email.
        Receive an activation key as parameter and confirm email.
        Returns 200 on success; 404 when the key is unknown or the
        confirmation step fails.
        """
        user = get_object_or_404(User, activation_key=str(activation_key))
        if user.confirm_email():
            return Response(status=status.HTTP_200_OK)
        # confirm_email() returned falsy: log and report not-found.
        log.warning(message='Email confirmation key not found.',
                    details={'http_status_code': status.HTTP_404_NOT_FOUND})
        return Response(status=status.HTTP_404_NOT_FOUND)
class UserEmailConfirmationStatusView(GenericAPIView):
    """Report whether the authenticated user's email address is confirmed."""

    serializer_class = None
    authentication_classes = (TokenAuthentication,)
    permission_classes = (IsAuthenticated,)

    def get(self, request):
        """Return ``{'status': <confirmed_email>}`` for the current user."""
        confirmed = self.request.user.confirmed_email
        return Response({'status': confirmed}, status=status.HTTP_200_OK)
class GetUserView(GenericAPIView):
    """Autocomplete-style user search by email substring."""

    authentication_classes = (TokenAuthentication,)
    permission_classes = (IsAuthenticated,)
    serializer_class = UserACSerializer
    # NOTE: the original class body contained a bare ``api_view(['GET', ])``
    # expression.  ``api_view`` is a decorator factory for *function* views,
    # so evaluating it inside a class body built a decorator and immediately
    # discarded it -- dead code, removed.
    queryset = User.objects.all()

    def get(self, request, q):
        """Return serialized users whose email contains *q* (case-insensitive)."""
        users = User.objects.filter(email__icontains=q)
        serializer = UserACSerializer(users, many=True)
        return Response(serializer.data)
| mit |
dipanjanS/text-analytics-with-python | New-Second-Edition/Ch05 - Text Classification/model_evaluation_utils.py | 2 | 9263 | # -*- coding: utf-8 -*-
"""
Created on Mon Jul 31 20:05:23 2017
@author: DIP
@Copyright: Dipanjan Sarkar
"""
from sklearn import metrics
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.preprocessing import LabelEncoder
from sklearn.base import clone
from sklearn.preprocessing import label_binarize
from scipy import interp
from sklearn.metrics import roc_curve, auc
def get_metrics(true_labels, predicted_labels):
    """Print accuracy and weighted precision/recall/F1, rounded to 4 places."""
    accuracy = metrics.accuracy_score(true_labels, predicted_labels)
    precision = metrics.precision_score(true_labels, predicted_labels,
                                        average='weighted')
    recall = metrics.recall_score(true_labels, predicted_labels,
                                  average='weighted')
    f1 = metrics.f1_score(true_labels, predicted_labels, average='weighted')
    print('Accuracy:', np.round(accuracy, 4))
    print('Precision:', np.round(precision, 4))
    print('Recall:', np.round(recall, 4))
    print('F1 Score:', np.round(f1, 4))
def train_predict_model(classifier,
                        train_features, train_labels,
                        test_features, test_labels):
    """Fit *classifier* on the training split and return test predictions.

    ``test_labels`` is accepted for call-site symmetry but is not used here.
    """
    classifier.fit(train_features, train_labels)
    return classifier.predict(test_features)
def display_confusion_matrix(true_labels, predicted_labels, classes=(1, 0)):
    """Print a confusion matrix with 'Predicted:'/'Actual:' multi-index headers.

    classes -- label values in display order; the default is now an
    immutable tuple instead of the original mutable list literal, so the
    shared default can never be mutated across calls.
    """
    total_classes = len(classes)
    level_labels = [total_classes * [0], list(range(total_classes))]
    cm = metrics.confusion_matrix(y_true=true_labels, y_pred=predicted_labels,
                                  labels=classes)
    # NOTE(review): pd.MultiIndex(labels=...) was renamed to codes= in
    # pandas >= 0.24; confirm the pinned pandas version before modernizing.
    cm_frame = pd.DataFrame(data=cm,
                            columns=pd.MultiIndex(levels=[['Predicted:'], classes],
                                                  labels=level_labels),
                            index=pd.MultiIndex(levels=[['Actual:'], classes],
                                                labels=level_labels))
    print(cm_frame)
def display_confusion_matrix_pretty(true_labels, predicted_labels, classes=(1, 0)):
    """Build and return the confusion-matrix DataFrame (no printing).

    Same layout as display_confusion_matrix(); the default for ``classes``
    is now an immutable tuple rather than a mutable list literal.
    """
    total_classes = len(classes)
    level_labels = [total_classes * [0], list(range(total_classes))]
    cm = metrics.confusion_matrix(y_true=true_labels, y_pred=predicted_labels,
                                  labels=classes)
    # NOTE(review): pd.MultiIndex(labels=...) was renamed to codes= in
    # pandas >= 0.24; confirm the pinned pandas version before modernizing.
    cm_frame = pd.DataFrame(data=cm,
                            columns=pd.MultiIndex(levels=[['Predicted:'], classes],
                                                  labels=level_labels),
                            index=pd.MultiIndex(levels=[['Actual:'], classes],
                                                labels=level_labels))
    return cm_frame
def display_classification_report(true_labels, predicted_labels, classes=(1, 0)):
    """Print sklearn's per-class precision/recall/F1 classification report.

    ``classes`` now defaults to an immutable tuple instead of a mutable
    list literal (same values, no shared-mutable-default hazard).
    """
    report = metrics.classification_report(y_true=true_labels,
                                           y_pred=predicted_labels,
                                           labels=classes)
    print(report)
def display_model_performance_metrics(true_labels, predicted_labels, classes=(1, 0)):
    """Print the full evaluation bundle: summary metrics, classification
    report, and confusion matrix.

    ``classes`` now defaults to an immutable tuple instead of a mutable
    list literal (same values, no shared-mutable-default hazard).
    """
    print('Model Performance metrics:')
    print('-'*30)
    get_metrics(true_labels=true_labels, predicted_labels=predicted_labels)
    print('\nModel Classification report:')
    print('-'*30)
    display_classification_report(true_labels=true_labels,
                                  predicted_labels=predicted_labels,
                                  classes=classes)
    print('\nPrediction Confusion Matrix:')
    print('-'*30)
    display_confusion_matrix(true_labels=true_labels,
                             predicted_labels=predicted_labels,
                             classes=classes)
def plot_model_decision_surface(clf, train_features, train_labels,
                                plot_step=0.02, cmap=plt.cm.RdYlBu,
                                markers=None, alphas=None, colors=None):
    """Plot a 2-D decision surface for a clone of *clf* fit on the data.

    train_features must have exactly two columns; a dense mesh is scored
    (probability of the second class when available, otherwise the raw
    prediction) and drawn as filled contours with the training points
    scattered on top.
    """
    if train_features.shape[1] != 2:
        # (Typo "columnns" kept: runtime string, not a comment.)
        raise ValueError("X_train should have exactly 2 columnns!")
    # Mesh bounds padded by one plot step on every side.
    x_min, x_max = train_features[:, 0].min() - plot_step, train_features[:, 0].max() + plot_step
    y_min, y_max = train_features[:, 1].min() - plot_step, train_features[:, 1].max() + plot_step
    xx, yy = np.meshgrid(np.arange(x_min, x_max, plot_step),
                         np.arange(y_min, y_max, plot_step))
    # Fit a clone so the caller's estimator is left untouched.
    clf_est = clone(clf)
    clf_est.fit(train_features,train_labels)
    if hasattr(clf_est, 'predict_proba'):
        # Probability of the second class -- assumes a binary problem;
        # TODO confirm against actual usage.
        Z = clf_est.predict_proba(np.c_[xx.ravel(), yy.ravel()])[:,1]
    else:
        Z = clf_est.predict(np.c_[xx.ravel(), yy.ravel()])
    Z = Z.reshape(xx.shape)
    cs = plt.contourf(xx, yy, Z, cmap=cmap)
    # Encode labels as 0..n-1 so classes can be drawn one at a time.
    le = LabelEncoder()
    y_enc = le.fit_transform(train_labels)
    n_classes = len(le.classes_)
    # When colors is given it is joined to a string and iterated per char.
    plot_colors = ''.join(colors) if colors else [None] * n_classes
    label_names = le.classes_
    markers = markers if markers else [None] * n_classes
    alphas = alphas if alphas else [None] * n_classes
    for i, color in zip(range(n_classes), plot_colors):
        idx = np.where(y_enc == i)
        plt.scatter(train_features[idx, 0], train_features[idx, 1], c=color,
                    label=label_names[i], cmap=cmap, edgecolors='black',
                    marker=markers[i], alpha=alphas[i])
    plt.legend()
    plt.show()
def plot_model_roc_curve(clf, features, true_labels, label_encoder=None, class_names=None):
    """Plot ROC curve(s) for a fitted classifier.

    Binary problems get a single curve; multi-class problems get one
    curve per class plus micro- and macro-averaged curves.  Class labels
    are taken from the estimator's ``classes_``, a LabelEncoder, or
    ``class_names`` -- in that order.
    """
    ## Compute ROC curve and ROC area for each class
    fpr = dict()
    tpr = dict()
    roc_auc = dict()
    if hasattr(clf, 'classes_'):
        class_labels = clf.classes_
    elif label_encoder:
        class_labels = label_encoder.classes_
    elif class_names:
        class_labels = class_names
    else:
        raise ValueError('Unable to derive prediction classes, please specify class_names!')
    n_classes = len(class_labels)
    # One-hot encode the truth so per-class curves can be computed.
    y_test = label_binarize(true_labels, classes=class_labels)
    if n_classes == 2:
        # Binary case: score is the last column of the probability matrix.
        if hasattr(clf, 'predict_proba'):
            prob = clf.predict_proba(features)
            y_score = prob[:, prob.shape[1]-1]
        elif hasattr(clf, 'decision_function'):
            prob = clf.decision_function(features)
            # NOTE(review): decision_function usually returns a 1-D array
            # for binary problems, so this 2-D indexing may fail -- confirm
            # with the estimators actually used.
            y_score = prob[:, prob.shape[1]-1]
        else:
            raise AttributeError("Estimator doesn't have a probability or confidence scoring system!")
        fpr, tpr, _ = roc_curve(y_test, y_score)
        roc_auc = auc(fpr, tpr)
        plt.plot(fpr, tpr, label='ROC curve (area = {0:0.2f})'
                                 ''.format(roc_auc),
                 linewidth=2.5)
    elif n_classes > 2:
        if hasattr(clf, 'predict_proba'):
            y_score = clf.predict_proba(features)
        elif hasattr(clf, 'decision_function'):
            y_score = clf.decision_function(features)
        else:
            raise AttributeError("Estimator doesn't have a probability or confidence scoring system!")
        # One ROC curve per class.
        for i in range(n_classes):
            fpr[i], tpr[i], _ = roc_curve(y_test[:, i], y_score[:, i])
            roc_auc[i] = auc(fpr[i], tpr[i])
        ## Compute micro-average ROC curve and ROC area
        fpr["micro"], tpr["micro"], _ = roc_curve(y_test.ravel(), y_score.ravel())
        roc_auc["micro"] = auc(fpr["micro"], tpr["micro"])
        ## Compute macro-average ROC curve and ROC area
        # First aggregate all false positive rates
        all_fpr = np.unique(np.concatenate([fpr[i] for i in range(n_classes)]))
        # Then interpolate all ROC curves at this points
        # NOTE(review): ``interp`` is ``from scipy import interp`` at module
        # top; it was removed from SciPy's top-level namespace (np.interp is
        # the drop-in replacement) -- confirm the pinned SciPy version.
        mean_tpr = np.zeros_like(all_fpr)
        for i in range(n_classes):
            mean_tpr += interp(all_fpr, fpr[i], tpr[i])
        # Finally average it and compute AUC
        mean_tpr /= n_classes
        fpr["macro"] = all_fpr
        tpr["macro"] = mean_tpr
        roc_auc["macro"] = auc(fpr["macro"], tpr["macro"])
        ## Plot ROC curves
        plt.figure(figsize=(6, 4))
        plt.plot(fpr["micro"], tpr["micro"],
                 label='micro-average ROC curve (area = {0:0.2f})'
                       ''.format(roc_auc["micro"]), linewidth=3)
        plt.plot(fpr["macro"], tpr["macro"],
                 label='macro-average ROC curve (area = {0:0.2f})'
                       ''.format(roc_auc["macro"]), linewidth=3)
        for i, label in enumerate(class_labels):
            plt.plot(fpr[i], tpr[i], label='ROC curve of class {0} (area = {1:0.2f})'
                                           ''.format(label, roc_auc[i]),
                     linewidth=2, linestyle=':')
    else:
        raise ValueError('Number of classes should be atleast 2 or more')
    plt.plot([0, 1], [0, 1], 'k--')
    plt.xlim([0.0, 1.0])
    plt.ylim([0.0, 1.05])
    plt.xlabel('False Positive Rate')
    plt.ylabel('True Positive Rate')
    plt.title('Receiver Operating Characteristic (ROC) Curve')
    plt.legend(loc="lower right")
    plt.show()
| apache-2.0 |
korandabit/dresherMachine | main.py | 1 | 4223 | #mainscript
from __future__ import print_function
from collections import Counter
import csv, ast, re, itertools, glob, sys, os, datetime
import dresher_LSA as d
import main_1inventoryMover as m
import main_2inventoryParser2 as k
import main_3min_analysis as a1
import main_4efficiency_analysis as a2
"""
2017.01.04
created /r for print to screen. stitched some functions, added upper limit to perm sizes for inventories larger than their feature sets (e.g. 28>12).
possible soln to implement/test:
run efficientTree on order=false, then make order=true for only subsets where efficient trees are produced. depends on some assumption about set1 outputs.
2016.12.31
This script streamlines all functions of KDM_ to date. Running this script requires 0_inputFiles to be populated with csv files, and that's it. It works in place by 1) choosing a subset of csv's to look at, passing that info and all extractions onto subsequent scripts, and only outputting a file at the end.
TODO:
add a sys.argv at the end of a script, so things like errors can be explored before quitting.
inventory_size 6 currently produces "Error Incomplete Trees."
TEST SOLN below (it's been coded, I think)
known issue: analysis 2 currently depends on a full inventory set being run.
solution: feed analysis 2 the same input (and perhaps function from within) runTimeVars regarding subsetting.
dependent files can be rid of cloogy read/write file content. Archive/annotate/retire them.
bring over a number of runtime vars to this file from dependent files.
"""
################
# Runtime Vars #
################
analysis = True   # toggle in order to turn off analyses
writer = False    # toggle in order to write to file.

# Vowel inventory size comes from the first CLI argument when present.
# BUG FIX: the original used a bare ``except:`` here, which would also
# have swallowed unrelated errors; narrowed to the two exceptions that
# int(sys.argv[1]) can actually raise.
try:
    vowel_inventory_size = int(sys.argv[1])
except (IndexError, ValueError):
    vowel_inventory_size = 7
# could be written to optionally take range. currently doesn't.

inventory_range = 0  # inventory range setting (0=full, 1=first, 2=first 2). This gets passed to Parser and Analysis 2.
use_random_generator = False  # toggle to turn off sampling of only a subset of features.
today = datetime.date.today()
wFilename = '{} v{}'.format(today, str(vowel_inventory_size))

#########
# Mover #
#########
mover_listofFiles = m.mover(vowel_inventory_size)  # vowel inventory size
# Currently written to return listofFiles. a dictionary exists which can return quantity, redundancy

##########
# Parser #
##########
kdm_parsed = k.runTimeVars(mover_listofFiles,  # list of files from mover;
                           vowel_inventory_size,  # vowel inventory size;
                           inventory_range=inventory_range,  # inventory range setting (0=full, 1=first, 2=first 2)
                           randomGenerator=use_random_generator,  # random % of permutations = True
                           writeFile=False)  # write to file = false
# takes in list of files from mover.
# outputs 1_KDM_parsed.txt

# (Exploratory debugging code retained from the original for reference.)
#print(kdm_parsed)
#for i in range(len(kdm_parsed[0])):
# if i==3:
# for j in range(len(kdm_parsed[0][i])):
# print(kdm_parsed[0][i][j])
# if j in range(3):
# inv = d.inventoryImport(mover_listofFiles[0])
# pArray = d.arrayBuilder(inv, inv.keys(), kdm_parsed[0][i][j], binary='n')
# print(pArray)
# discp = d.findDiscriminatingPhonemes(pArray, columnLabels=[])
# print(discp)
# print(d.efficientWrapper(kdm_parsed[0][i][j], discp))
# else:
# print(kdm_parsed[0][i])

#################
# Analysis 1 & 2#
#################
if analysis:
    analysis_out = a1.min_analysis(kdm_parsed)  # pass in list of lists object with similar elements to .txt file.
    # output is a list of these: [name,length,minVal,maxVal,bestSets.keys()]
    # analysis_out[0][4] #bestSet.keys(); set of efficient hierarchies for 1st language
    output = a2.efficiency_analysis(analysis_out, mover_listofFiles, inventory_range, vowel_inventory_size)
    # output rows: [vowel_inventory_size,curInventory[21:27],hierarchy_rn,efficiency,features,treeSpeller,curUniqueFeatures,remainders]
    # print(output[0][0])
    # NOTE(review): original indentation was lost; the write-out below is
    # nested under ``if analysis`` because wRows depends on ``output``,
    # which only exists when the analyses ran -- confirm against upstream.
    wHeader = ['#', 'Invty', 'Hierarchy_Order', 'Total', 'Feats', 'Vowels', 'Total_features', 'Remaining_Features']
    wRows = output
    if writer:
        d.writeBlock(wRows, wFilename, ext='.txt', method='a', delim='\t')  # For headers, insert 'wHeader' after wFilename
BioGRID/BioGRID-Annotation | QUICK_buildOrganisms.py | 2 | 2630 |
# Create the quick lookup organism tables.
import Config
import sys, string
import MySQLdb
import Database


def _rebuild_quick_table(cursor, quick_table, source_table, status_column):
    """Repopulate one quick_* table from the organisms that currently have
    active rows in *source_table* (checked via *status_column*).

    Extracted from three near-identical copy/pasted sections in the
    original script; behavior is unchanged except for the empty-set guard.
    """
    cursor.execute("SELECT organism_id FROM " + Config.DB_NAME + "." + source_table +
                   " WHERE " + status_column + "='active' GROUP BY organism_id")
    org_ids = set()
    for row in cursor.fetchall():
        org_ids.add(row[0])
    if not org_ids:
        # Guard added: the original would emit an invalid "IN ()" clause
        # when no organisms were active.
        return
    placeholders = ",".join(['%s'] * len(org_ids))
    cursor.execute("INSERT INTO " + Config.DB_QUICK + "." + quick_table +
                   " SELECT organism_id, entrez_taxid, organism_common_name,"
                   " organism_official_name, organism_abbreviation, organism_strain"
                   " FROM " + Config.DB_NAME + ".organisms"
                   " WHERE organism_id IN (" + placeholders + ")"
                   " AND organism_status='active'",
                   tuple(org_ids))
    Database.db.commit()


with Database.db as cursor:
    # EMPTY EXISTING DATA
    cursor.execute("TRUNCATE TABLE " + Config.DB_QUICK + ".quick_organisms")
    cursor.execute("TRUNCATE TABLE " + Config.DB_QUICK + ".quick_refseq_organisms")
    cursor.execute("TRUNCATE TABLE " + Config.DB_QUICK + ".quick_uniprot_organisms")

    # Organisms with active gene annotation.
    _rebuild_quick_table(cursor, "quick_organisms", "genes", "gene_status")
    # Organisms with active UniProt proteins.
    _rebuild_quick_table(cursor, "quick_uniprot_organisms", "uniprot", "uniprot_status")
    # Organisms with active RefSeq proteins.
    _rebuild_quick_table(cursor, "quick_refseq_organisms", "refseq", "refseq_status")

    # FINISH: record this run in the update tracker.
    cursor.execute("INSERT INTO " + Config.DB_STATS + ".update_tracker VALUES ( '0', 'QUICK_buildOrganisms', NOW( ) )")
    Database.db.commit()
    # NOTE(review): original indentation was lost; sys.exit() is assumed to
    # have been inside the connection context -- confirm against upstream.
    sys.exit()
vanpact/scipy | scipy/weave/accelerate_tools.py | 97 | 12946 | #**************************************************************************#
#* FILE ************** accelerate_tools.py ************************#
#**************************************************************************#
#* Author: Patrick Miller February 9 2002 *#
#**************************************************************************#
"""
accelerate_tools contains the interface for on-the-fly building of
C++ equivalents to Python functions.
"""
#**************************************************************************#
from __future__ import absolute_import, print_function
from types import InstanceType, XRangeType
import inspect
from hashlib import sha256
import scipy.weave as weave
from numpy.testing import assert_
from .bytecodecompiler import CXXCoder,Type_Descriptor,Function_Descriptor
def CStr(s):
    """Render *s* as a C string literal; ``None`` becomes the empty ``""``."""
    if s is None:
        return '""'
    assert_(isinstance(s, str), msg="only None and string allowed")
    # repr() of '"' + s escapes backslashes (and quotes, when repr picks
    # double-quote delimiters) for us; strip the delimiters repr adds plus
    # the prepended '"', then re-wrap in C-style double quotes.
    escaped = repr('"' + s)[2:-1]
    return '"' + escaped + '"'
##################################################################
# CLASS INSTANCE #
##################################################################
class Instance(Type_Descriptor):
    """Descriptor for old-style Python instances, passed through as PyObject*."""
    cxxtype = 'PyObject*'

    def __init__(self, prototype):
        # Example object whose attributes define the accessible members.
        self.prototype = prototype

    def check(self, s):
        return "PyInstance_Check(%s)" % s

    def inbound(self, s):
        # Instances cross the C boundary unchanged.
        return s

    def outbound(self, s):
        # Second element 0: the reference is not owned by the caller.
        return s, 0

    def get_attribute(self, name):
        proto = getattr(self.prototype, name)
        T = lookup_type(proto)
        fetch = 'tempPY = PyObject_GetAttrString(%%(rhs)s,"%s");\n' % name
        fetch += '%%(lhsType)s %%(lhs)s = %s;\n' % T.inbound('tempPY')
        return T, fetch

    def set_attribute(self, name):
        proto = getattr(self.prototype, name)
        T = lookup_type(proto)
        convert, owned = T.outbound('%(rhs)s')
        store = 'tempPY = %s;' % convert
        if not owned:
            # Borrowed reference: take our own before handing it over.
            store += ' Py_INCREF(tempPY);'
        store += ' PyObject_SetAttrString(%%(lhs)s,"%s",tempPY);' % name
        store += ' Py_DECREF(tempPY);\n'
        return T, store
##################################################################
# CLASS BASIC #
##################################################################
class Basic(Type_Descriptor):
    """Scalar type whose conversions are single C API calls; subclasses
    supply the call names via ``checker``/``inbounder``/``outbounder``."""
    owned = 1
    def check(self,s):
        return "%s(%s)" % (self.checker,s)
    def inbound(self,s):
        return "%s(%s)" % (self.inbounder,s)
    def outbound(self,s):
        return "%s(%s)" % (self.outbounder,s),self.owned

class Basic_Number(Basic):
    """Numeric scalar: literals emit verbatim and +,-,*,/ map 1:1 to C."""
    def literalizer(self,s):
        return str(s)
    def binop(self,symbol,a,b):
        assert_(symbol in ['+','-','*','/'], msg=symbol)
        return '%s %s %s' % (a,symbol,b),self
class Integer(Basic_Number):
    # Python int <-> C long.
    cxxtype = "long"
    checker = "PyInt_Check"
    inbounder = "PyInt_AsLong"
    outbounder = "PyInt_FromLong"

class Double(Basic_Number):
    # Python float <-> C double.
    cxxtype = "double"
    checker = "PyFloat_Check"
    inbounder = "PyFloat_AsDouble"
    outbounder = "PyFloat_FromDouble"

class String(Basic):
    # Python str <-> C char*.
    cxxtype = "char*"
    checker = "PyString_Check"
    inbounder = "PyString_AsString"
    outbounder = "PyString_FromString"
    def literalizer(self,s):
        return CStr(s)

# -----------------------------------------------
# Singletonize the type names: each class name is rebound to its sole
# instance, since the code generator only ever needs one of each.
# -----------------------------------------------
Integer = Integer()
Double = Double()
String = String()
import numpy as np
class Vector(Type_Descriptor):
    """Descriptor for 1-D numpy arrays handled as ``PyArrayObject*``.

    Subclasses supply ``typecode`` (numpy type number), ``cxxbase`` (the C
    element type) and ``pybase`` (descriptor for a single element).
    """
    cxxtype = 'PyArrayObject*'
    refcount = 1
    module_init_code = 'import_array();\n'
    inbounder = "(PyArrayObject*)"
    outbounder = "(PyObject*)"
    owned = 0  # Conversion is by casting!
    prerequisites = Type_Descriptor.prerequisites + \
        ['#include "numpy/arrayobject.h"']
    # Number of dimensions.  The original class body assigned ``dims = 1``
    # twice (before and after ``owned``); the redundant duplicate was removed.
    dims = 1
    def check(self,s):
        # Type, rank, and element dtype must all match.
        return "PyArray_Check(%s) && ((PyArrayObject*)%s)->nd == %d && ((PyArrayObject*)%s)->descr->type_num == %s" % (
            s,s,self.dims,s,self.typecode)
    def inbound(self,s):
        return "%s(%s)" % (self.inbounder,s)
    def outbound(self,s):
        return "%s(%s)" % (self.outbounder,s),self.owned
    def getitem(self,A,v,t):
        """Emit the C expression addressing element v of array variable A."""
        assert_(self.dims == len(v), msg='Expect dimension %d' % self.dims)
        code = '*((%s*)(%s->data' % (self.cxxbase,A)
        for i in range(self.dims):
            # assert that ''t[i]'' is an integer
            code += '+%s*%s->strides[%d]' % (v[i],A,i)
        code += '))'
        return code,self.pybase
    def setitem(self,A,v,t):
        # Writes use the same address expression as reads.
        return self.getitem(A,v,t)
class matrix(Vector):
    # 2-D specialization of Vector.
    dims = 2

class IntegerVector(Vector):
    # 1-D array of C ``int`` elements.
    typecode = 'PyArray_INT'
    cxxbase = 'int'
    pybase = Integer

class Integermatrix(matrix):
    # 2-D array of C ``int`` elements.
    typecode = 'PyArray_INT'
    cxxbase = 'int'
    pybase = Integer

class LongVector(Vector):
    # 1-D array of C ``long`` elements.
    typecode = 'PyArray_LONG'
    cxxbase = 'long'
    pybase = Integer

class Longmatrix(matrix):
    # 2-D array of C ``long`` elements.
    typecode = 'PyArray_LONG'
    cxxbase = 'long'
    pybase = Integer

class DoubleVector(Vector):
    # 1-D array of C ``double`` elements.
    typecode = 'PyArray_DOUBLE'
    cxxbase = 'double'
    pybase = Double

class Doublematrix(matrix):
    # 2-D array of C ``double`` elements.
    typecode = 'PyArray_DOUBLE'
    cxxbase = 'double'
    pybase = Double
##################################################################
# CLASS XRANGE #
##################################################################
class XRange(Type_Descriptor):
    # Descriptor mapping Python xrange objects onto a tiny C++ helper class.
    cxxtype = 'XRange'
    # Emitted verbatim into the generated C++ source ahead of the wrapper.
    # (Internal layout of this literal reproduced as found in SOURCE; the
    # original file's in-string indentation may have differed.)
    prerequisites = ['''
class XRange {
public:
XRange(long aLow, long aHigh, long aStep=1)
: low(aLow),high(aHigh),step(aStep)
{
}
XRange(long aHigh)
: low(0),high(aHigh),step(1)
{
}
long low;
long high;
long step;
};''']
# -----------------------------------------------
# Singletonize the type names
# -----------------------------------------------
IntegerVector = IntegerVector()
Integermatrix = Integermatrix()
LongVector = LongVector()
Longmatrix = Longmatrix()
DoubleVector = DoubleVector()
Doublematrix = Doublematrix()
XRange = XRange()

# Lookup table from a Python type -- or an (ndarray, rank, element-type)
# triple -- to its singleton descriptor.  Used by lookup_type().
typedefs = {
    int: Integer,
    float: Double,
    str: String,
    (np.ndarray,1,int): IntegerVector,
    (np.ndarray,2,int): Integermatrix,
    (np.ndarray,1,np.long): LongVector,
    (np.ndarray,2,np.long): Longmatrix,
    (np.ndarray,1,float): DoubleVector,
    (np.ndarray,2,float): Doublematrix,
    XRangeType: XRange,
}
import math
# Known Python callables mapped to their inline-C equivalents, keyed by
# (callable, argument-type-signature) tuples and looked up by
# Python2CXX.function_by_signature().
functiondefs = {
    (len,(String,)):
        Function_Descriptor(code='strlen(%s)',return_type=Integer),
    (len,(LongVector,)):
        Function_Descriptor(code='PyArray_Size((PyObject*)%s)',return_type=Integer),
    (float,(Integer,)):
        Function_Descriptor(code='(double)(%s)',return_type=Double),
    (range,(Integer,Integer)):
        Function_Descriptor(code='XRange(%s)',return_type=XRange),
    # BUG FIX: the original key was ``(range,(Integer))`` -- ``(Integer)``
    # is just Integer, not a 1-tuple, so a one-argument range() signature
    # (always built as a tuple) could never match this entry.
    (range,(Integer,)):
        Function_Descriptor(code='XRange(%s)',return_type=XRange),
    (math.sin,(Double,)):
        Function_Descriptor(code='sin(%s)',return_type=Double),
    (math.cos,(Double,)):
        Function_Descriptor(code='cos(%s)',return_type=Double),
    (math.sqrt,(Double,)):
        Function_Descriptor(code='sqrt(%s)',return_type=Double),
}
##################################################################
# FUNCTION LOOKUP_TYPE #
##################################################################
def lookup_type(x):
    """Return the Type_Descriptor singleton describing the runtime value *x*.

    Raises NotImplementedError for values with no registered descriptor.
    """
    T = type(x)
    try:
        return typedefs[T]
    except KeyError:  # was a bare ``except:``, which hid unrelated errors
        if isinstance(x, np.ndarray):
            # BUG FIX: the original tested ``isinstance(T, np.ndarray)``,
            # which is always False because T is a *type*, so arrays could
            # never resolve through this branch.
            # NOTE(review): the (ndarray, rank, element-type) keys in
            # ``typedefs`` use Python scalar types while ``x.dtype.char``
            # is a letter code -- confirm the intended key scheme.
            return typedefs[(np.ndarray, len(x.shape), x.dtype.char)]
        elif issubclass(T, InstanceType):
            # Old-style (Python 2) class instance: wrap its prototype.
            return Instance(x)
        else:
            raise NotImplementedError(T)
##################################################################
# class ACCELERATE #
##################################################################
class accelerate(object):
    """Decorator-style wrapper that JIT-compiles a Python function to C++.

    Each distinct argument-type signature gets its own compiled extension
    function, cached per signature in ``__call_map``.  (Python 2 code:
    uses ``func_code`` / old-style weave machinery.)
    """

    def __init__(self, function, *args, **kw):
        assert_(inspect.isfunction(function))
        self.function = function
        self.module = inspect.getmodule(function)
        if self.module is None:
            # Interactively-defined functions have no module; fall back to
            # __main__ so the generated extension module has a home.
            import __main__
            self.module = __main__
        self.__call_map = {}

    def __cache(self,*args):
        # Placeholder "fast path": always raises so __call__ falls through
        # to type dispatch until a compiled function is bound over it.
        raise TypeError

    def __call__(self,*args):
        try:
            return self.__cache(*args)
        except TypeError:
            # Figure out type info -- Do as tuple so its hashable
            signature = tuple(map(lookup_type,args))
            # If we know the function, call it
            try:
                fast = self.__call_map[signature]
            except:
                fast = self.singleton(signature)
                self.__cache = fast
                self.__call_map[signature] = fast
            return fast(*args)

    def signature(self,*args):
        """Return the compiled function for these arguments without calling it."""
        # Figure out type info -- Do as tuple so its hashable
        signature = tuple(map(lookup_type,args))
        return self.singleton(signature)

    def singleton(self,signature):
        """Fetch (or build and import) the unique compiled function for *signature*."""
        identifier = self.identifier(signature)
        # Generate a new function, then call it
        f = self.function
        # See if we have an accelerated version of module
        try:
            print('lookup',self.module.__name__+'_weave')
            accelerated_module = __import__(self.module.__name__+'_weave')
            print('have accelerated',self.module.__name__+'_weave')
            fast = getattr(accelerated_module,identifier)
            return fast
        except ImportError:
            accelerated_module = None
        except AttributeError:
            # Module exists but lacks this signature; fall through and build.
            pass
        P = self.accelerate(signature,identifier)
        E = weave.ext_tools.ext_module(self.module.__name__+'_weave')
        E.add_function(P)
        E.generate_file()
        weave.build_tools.build_extension(self.module.__name__+'_weave.cpp',verbose=2)
        if accelerated_module:
            # Rebuilding an already-imported extension would need a module
            # reload, which was never implemented.
            raise NotImplementedError('Reload')
        else:
            accelerated_module = __import__(self.module.__name__+'_weave')
        fast = getattr(accelerated_module,identifier)
        return fast

    def identifier(self,signature):
        """Deterministic, signature-specific symbol name for the C function."""
        # Build a (truncated, see gh-3216) SHA-256 checksum
        f = self.function
        co = f.func_code  # Python 2 attribute (``__code__`` in Python 3)
        identifier = str(signature) + \
            str(co.co_argcount) + \
            str(co.co_consts) + \
            str(co.co_varnames) + \
            co.co_code
        return 'F' + sha256(identifier).hexdigest()[:32]

    def accelerate(self,signature,identifier):
        """Build the coder object that emits the C++ wrapper for *signature*."""
        P = Python2CXX(self.function,signature,name=identifier)
        return P

    def code(self,*args):
        """Return the generated C++ source for these sample arguments."""
        if len(args) != self.function.func_code.co_argcount:
            raise TypeError('%s() takes exactly %d arguments (%d given)' %
                            (self.function.__name__,
                             self.function.func_code.co_argcount,
                             len(args)))
        signature = tuple(map(lookup_type,args))
        ident = self.function.__name__
        return self.accelerate(signature,ident).function_code()
##################################################################
# CLASS PYTHON2CXX #
##################################################################
class Python2CXX(CXXCoder):
    """CXXCoder specialization: turns one Python function plus a concrete
    type signature into a C extension wrapper."""

    def typedef_by_value(self,v):
        # Record the descriptor for *v* so its init code is emitted once.
        T = lookup_type(v)
        if T not in self.used:
            self.used.append(T)
        return T

    def function_by_signature(self,signature):
        # Look up the inline-C translation for a known callable signature.
        descriptor = functiondefs[signature]
        if descriptor.return_type not in self.used:
            self.used.append(descriptor.return_type)
        return descriptor

    def __init__(self,f,signature,name=None):
        # Make sure function is a function
        assert_(inspect.isfunction(f))
        # and check the input type signature
        # (``reduce`` is the Python 2 builtin here.)
        assert_(reduce(lambda x,y: x and y,
                       map(lambda x: isinstance(x,Type_Descriptor),
                           signature),
                       1), msg='%s not all type objects' % signature)
        self.arg_specs = []
        self.customize = weave.base_info.custom_info()
        CXXCoder.__init__(self,f,signature,name)
        return

    def function_code(self):
        """Return the generated wrapper code, collecting module init code
        from every type descriptor that was used along the way."""
        code = self.wrapped_code()
        for T in self.used:
            if T is not None and T.module_init_code:
                self.customize.add_module_init_code(T.module_init_code)
        return code

    def python_function_definition_code(self):
        """Return this wrapper's PyMethodDef table entry."""
        return '{ "%s", wrapper_%s, METH_VARARGS, %s },\n' % (
            self.name,
            self.name,
            CStr(self.function.__doc__))
| bsd-3-clause |
smarkwell/asuswrt-merlin | release/src/router/samba-3.0.25b/source/stf/smbcontrol.py | 137 | 7998 | #!/usr/bin/python
#
# Test for smbcontrol command line argument handling.
#
import comfychair
class NoArgs(comfychair.TestCase):
    """Test no arguments produces usage message."""
    def runtest(self):
        # Exit status 1 and a usage banner expected.
        out = self.runcmd("smbcontrol", expectedResult = 1)
        self.assert_re_match("Usage: smbcontrol", out[1])

class OneArg(comfychair.TestCase):
    """Test single argument produces usage message."""
    def runtest(self):
        out = self.runcmd("smbcontrol foo", expectedResult = 1)
        self.assert_re_match("Usage: smbcontrol", out[1])

class SmbdDest(comfychair.TestCase):
    """Test the broadcast destination 'smbd'."""
    def runtest(self):
        out = self.runcmd("smbcontrol smbd noop")

class NmbdDest(comfychair.TestCase):
    """Test the destination 'nmbd'."""
    def runtest(self):
        # We need a way to start/stop/whatever nmbd
        # (Python 2 raise syntax, kept: this is a Python 2 test file.)
        raise comfychair.NotRunError, "not implemented"

class PidDest(comfychair.TestCase):
    """Test a pid number destination'."""
    def runtest(self):
        out = self.runcmd("smbcontrol 1234 noop")

class SelfDest(comfychair.TestCase):
    """Test the destination 'self'."""
    def runtest(self):
        out = self.runcmd("smbcontrol self noop")

class WinbinddDest(comfychair.TestCase):
    """Test the destination 'winbindd'."""
    def runtest(self):
        # We need a way to start/stop/whatever winbindd
        raise comfychair.NotRunError, "not implemented"

class BadDest(comfychair.TestCase):
    """Test a bad destination."""
    def runtest(self):
        out = self.runcmd("smbcontrol foo noop", expectedResult = 1)

class BadCmd(comfychair.TestCase):
    """Test a bad command."""
    def runtest(self):
        out = self.runcmd("smbcontrol self spottyfoot", expectedResult = 1)
        self.assert_re_match("smbcontrol: unknown command", out[1]);
class NoArgCmdTest(comfychair.TestCase):
    """A test class that tests a command with no argument.

    Subclasses set ``self.cmd`` in setup(); the bare command must succeed
    and the same command with a stray argument must fail.
    """
    def runtest(self):
        self.require_root()
        out = self.runcmd("smbcontrol self %s" % self.cmd)
        out = self.runcmd("smbcontrol self %s spottyfoot" % self.cmd,
                          expectedResult = 1)

class ForceElection(NoArgCmdTest):
    """Test a force-election message."""
    def setup(self):
        self.cmd = "force-election"

class SamSync(NoArgCmdTest):
    """Test a samsync message."""
    def setup(self):
        self.cmd = "samsync"

class SamRepl(NoArgCmdTest):
    """Test a samrepl message."""
    def setup(self):
        self.cmd = "samrepl"

class DmallocChanged(NoArgCmdTest):
    """Test a dmalloc-changed message."""
    def setup(self):
        self.cmd = "dmalloc-log-changed"

class DmallocMark(NoArgCmdTest):
    """Test a dmalloc-mark message."""
    def setup(self):
        self.cmd = "dmalloc-mark"

class Shutdown(NoArgCmdTest):
    """Test a shutdown message."""
    def setup(self):
        self.cmd = "shutdown"

class Ping(NoArgCmdTest):
    """Test a ping message."""
    def setup(self):
        self.cmd = "ping"

class Debuglevel(NoArgCmdTest):
    """Test a debuglevel message."""
    def setup(self):
        self.cmd = "debuglevel"
class OneArgCmdTest(comfychair.TestCase):
"""A test class that tests a command with one argument."""
def runtest(self):
self.require_root()
out = self.runcmd("smbcontrol self %s spottyfoot" % self.cmd)
out = self.runcmd("smbcontrol self %s" % self.cmd, expectedResult = 1)
class DrvUpgrade(OneArgCmdTest):
    """Exercise the 'drvupgrade' message."""
    def setup(self):
        # Command name consumed by the shared OneArgCmdTest.runtest().
        self.cmd = "drvupgrade"
class CloseShare(OneArgCmdTest):
    """Exercise the 'close-share' message."""
    def setup(self):
        # Command name consumed by the shared OneArgCmdTest.runtest().
        self.cmd = "close-share"
class Debug(OneArgCmdTest):
    """Exercise the 'debug' message."""
    def setup(self):
        # Command name consumed by the shared OneArgCmdTest.runtest().
        self.cmd = "debug"
class PrintNotify(comfychair.TestCase):
    """Test print notification commands.

    Each printnotify subcommand is probed with missing, invalid and
    valid argument combinations; bad invocations must exit non-zero
    and print a usage/error message on stderr.
    """
    def runtest(self):
        # No subcommand -> error explaining one is required
        out = self.runcmd("smbcontrol self printnotify", expectedResult = 1)
        self.assert_re_match("Must specify subcommand", out[1]);
        # Invalid subcommand name
        out = self.runcmd("smbcontrol self printnotify spottyfoot",
                          expectedResult = 1)
        self.assert_re_match("Invalid subcommand", out[1]);
        # Queue commands: need a queue name; bare invocation shows usage
        for cmd in ["queuepause", "queueresume"]:
            out = self.runcmd("smbcontrol self printnotify %s" % cmd,
                              expectedResult = 1)
            self.assert_re_match("Usage:", out[1])
            out = self.runcmd("smbcontrol self printnotify %s spottyfoot"
                              % cmd)
        # Job commands: need a queue name AND a job id
        for cmd in ["jobpause", "jobresume", "jobdelete"]:
            out = self.runcmd("smbcontrol self printnotify %s" % cmd,
                              expectedResult = 1)
            self.assert_re_match("Usage:", out[1])
            out = self.runcmd("smbcontrol self printnotify %s spottyfoot"
                              % cmd, expectedResult = 1)
            self.assert_re_match("Usage:", out[1])
            out = self.runcmd("smbcontrol self printnotify %s spottyfoot 123"
                              % cmd)
        # Printer properties: need printer name, attribute and value
        out = self.runcmd("smbcontrol self printnotify printer",
                          expectedResult = 1)
        self.assert_re_match("Usage", out[1])
        out = self.runcmd("smbcontrol self printnotify printer spottyfoot",
                          expectedResult = 1)
        self.assert_re_match("Usage", out[1])
        for cmd in ["comment", "port", "driver"]:
            out = self.runcmd("smbcontrol self printnotify printer spottyfoot "
                              "%s" % cmd, expectedResult = 1)
            self.assert_re_match("Usage", out[1])
            out = self.runcmd("smbcontrol self printnotify printer spottyfoot "
                              "%s value" % cmd)
class Profile(comfychair.TestCase):
    """Exercise setting the profiling level."""
    def runtest(self):
        self.require_root()
        # Missing level -> usage message.
        output = self.runcmd("smbcontrol self profile", expectedResult=1)
        self.assert_re_match("Usage", output[1])
        # Unrecognised level -> error message.
        output = self.runcmd("smbcontrol self profile spottyfoot",
                             expectedResult=1)
        self.assert_re_match("Unknown", output[1])
        # Every valid level must be accepted.
        for level in ["off", "count", "on", "flush"]:
            self.runcmd("smbcontrol self profile %s" % level)
class ProfileLevel(comfychair.TestCase):
    """Exercise querying the current profiling level."""
    def runtest(self):
        self.require_root()
        # A stray argument is rejected with a usage message.
        output = self.runcmd("smbcontrol self profilelevel spottyfoot",
                             expectedResult=1)
        self.assert_re_match("Usage", output[1])
        # A bare query must succeed.
        self.runcmd("smbcontrol self profilelevel")
class TimeoutArg(comfychair.TestCase):
    """Exercise the --timeout command line option."""
    def runtest(self):
        # A numeric timeout is accepted; a non-numeric one is rejected.
        self.runcmd("smbcontrol --timeout 5 self noop")
        self.runcmd("smbcontrol --timeout spottyfoot self noop",
                    expectedResult=1)
class ConfigFileArg(comfychair.TestCase):
    """Exercise the --configfile command line option."""
    def runtest(self):
        # An empty configuration file (/dev/null) is still valid.
        self.runcmd("smbcontrol --configfile /dev/null self noop")
class BogusArg(comfychair.TestCase):
    """Exercise rejection of an unknown command line option."""
    def runtest(self):
        # Unknown options must make the tool exit non-zero.
        self.runcmd("smbcontrol --bogus self noop", expectedResult=1)
# Full suite handed to the comfychair runner.  NOTE: several entries
# (NoArgs, OneArg, SmbdDest, NmbdDest, PidDest, SelfDest) are defined
# earlier in this file, before this excerpt.
tests = [NoArgs, OneArg, SmbdDest, NmbdDest, WinbinddDest, PidDest,
         SelfDest, BadDest, BadCmd, Debug, ForceElection, SamSync,
         SamRepl, DmallocMark, DmallocChanged, Shutdown, DrvUpgrade,
         CloseShare, Ping, Debuglevel, PrintNotify, Profile, ProfileLevel,
         TimeoutArg, ConfigFileArg, BogusArg]

# Handle execution of this file as a main program
if __name__ == '__main__':
    comfychair.main(tests)
| gpl-2.0 |
yasn77/whitepy | whitepy/lexer.py | 1 | 2383 | from .lexerconstants import CHAR_MAP, HAS_ARGS, IMP_CONST, \
STACK_MANIPULATION_CONST, ARITHMETIC_CONST, HEAP_ACCESS_CONST, \
FLOW_CONTROL_CONST, IO_CONST, NUM_CONST
from .ws_token import Tokeniser
class IntError(ValueError):
    """Raised when an invalid integer literal is encountered."""
class Lexer(object):
    """Split a whitespace-language source line into instruction tokens.

    Tokens are accumulated in ``self.tokens`` as a list of groups, one
    group per instruction (IMP token, command token and, when the
    command takes one, an integer/label argument token).
    """

    def __init__(self, line, debug=False):
        # Keep only the significant (whitespace) characters of the input.
        self.line = self._strip_non_ws(line)
        self.pos = 0
        # The last entry of self.tokens is the group currently being built.
        self.tokens = [[]]
        self.debug = debug

    def _strip_non_ws(self, line):
        """Return *line* with every character not in CHAR_MAP removed."""
        return ''.join([i for i in line if i in CHAR_MAP.values()])

    def _get_const(self, token_type):
        """Return the scanner constant table for *token_type*.

        Replaces the previous if/elif chain with a mapping lookup.
        Returns None for unrecognised token types (same as before).
        """
        const_map = {
            'STACK_MANIPULATION': STACK_MANIPULATION_CONST,
            'ARITHMETIC': ARITHMETIC_CONST,
            'HEAP_ACCESS': HEAP_ACCESS_CONST,
            'FLOW_CONTROL': FLOW_CONTROL_CONST,
            'IO': IO_CONST,
            'NUM': NUM_CONST,
        }
        return const_map.get(token_type)

    def _get_int(self, t):
        """Scan the argument token that follows command *t*.

        PUSH takes a signed integer; all other argument-taking commands
        take a label.
        """
        token = Tokeniser(debug=self.debug)
        const = 'SIGNED_INT' if t == 'PUSH' else 'LABEL'
        token.scan(self.line, self.pos, const)
        return token

    def _get_token(self, const):
        """Scan the next token at the current position using table *const*."""
        token = Tokeniser(debug=self.debug)
        token.scan(self.line, self.pos, const)
        return token

    def get_all_tokens(self):
        """Consume the whole line, filling ``self.tokens`` with groups."""
        while self.pos < len(self.line):
            req_tokens = 2
            # The first token of a group is an IMP; the following command
            # token is scanned with the table selected by that IMP type.
            const = IMP_CONST if len(self.tokens[-1]) == 0 else \
                self._get_const(self.tokens[-1][0].type)
            token = self._get_token(const)
            self.pos = self.pos + len(token.value)
            self.tokens[-1].append(token)
            if token.type in HAS_ARGS:
                self.tokens[-1].append(self._get_int(token.type))
                # Add additional one to cater for lf at the end of int
                self.pos = self.pos + len(self.tokens[-1][-1].value) + 1
                # Increment the needed number of tokens for this group
                req_tokens += 1
            if len(self.tokens[-1]) == req_tokens:
                self.tokens.append([])
        # Remove the empty token list left at the end
        del self.tokens[-1]
| apache-2.0 |
Denisolt/IEEE-NYIT-MA | local/lib/python2.7/site-packages/unidecode/x081.py | 252 | 4673 | data = (
'Cheng ', # 0x00
'Tiao ', # 0x01
'Zhi ', # 0x02
'Cui ', # 0x03
'Mei ', # 0x04
'Xie ', # 0x05
'Cui ', # 0x06
'Xie ', # 0x07
'Mo ', # 0x08
'Mai ', # 0x09
'Ji ', # 0x0a
'Obiyaakasu ', # 0x0b
'[?] ', # 0x0c
'Kuai ', # 0x0d
'Sa ', # 0x0e
'Zang ', # 0x0f
'Qi ', # 0x10
'Nao ', # 0x11
'Mi ', # 0x12
'Nong ', # 0x13
'Luan ', # 0x14
'Wan ', # 0x15
'Bo ', # 0x16
'Wen ', # 0x17
'Guan ', # 0x18
'Qiu ', # 0x19
'Jiao ', # 0x1a
'Jing ', # 0x1b
'Rou ', # 0x1c
'Heng ', # 0x1d
'Cuo ', # 0x1e
'Lie ', # 0x1f
'Shan ', # 0x20
'Ting ', # 0x21
'Mei ', # 0x22
'Chun ', # 0x23
'Shen ', # 0x24
'Xie ', # 0x25
'De ', # 0x26
'Zui ', # 0x27
'Cu ', # 0x28
'Xiu ', # 0x29
'Xin ', # 0x2a
'Tuo ', # 0x2b
'Pao ', # 0x2c
'Cheng ', # 0x2d
'Nei ', # 0x2e
'Fu ', # 0x2f
'Dou ', # 0x30
'Tuo ', # 0x31
'Niao ', # 0x32
'Noy ', # 0x33
'Pi ', # 0x34
'Gu ', # 0x35
'Gua ', # 0x36
'Li ', # 0x37
'Lian ', # 0x38
'Zhang ', # 0x39
'Cui ', # 0x3a
'Jie ', # 0x3b
'Liang ', # 0x3c
'Zhou ', # 0x3d
'Pi ', # 0x3e
'Biao ', # 0x3f
'Lun ', # 0x40
'Pian ', # 0x41
'Guo ', # 0x42
'Kui ', # 0x43
'Chui ', # 0x44
'Dan ', # 0x45
'Tian ', # 0x46
'Nei ', # 0x47
'Jing ', # 0x48
'Jie ', # 0x49
'La ', # 0x4a
'Yi ', # 0x4b
'An ', # 0x4c
'Ren ', # 0x4d
'Shen ', # 0x4e
'Chuo ', # 0x4f
'Fu ', # 0x50
'Fu ', # 0x51
'Ju ', # 0x52
'Fei ', # 0x53
'Qiang ', # 0x54
'Wan ', # 0x55
'Dong ', # 0x56
'Pi ', # 0x57
'Guo ', # 0x58
'Zong ', # 0x59
'Ding ', # 0x5a
'Wu ', # 0x5b
'Mei ', # 0x5c
'Ruan ', # 0x5d
'Zhuan ', # 0x5e
'Zhi ', # 0x5f
'Cou ', # 0x60
'Gua ', # 0x61
'Ou ', # 0x62
'Di ', # 0x63
'An ', # 0x64
'Xing ', # 0x65
'Nao ', # 0x66
'Yu ', # 0x67
'Chuan ', # 0x68
'Nan ', # 0x69
'Yun ', # 0x6a
'Zhong ', # 0x6b
'Rou ', # 0x6c
'E ', # 0x6d
'Sai ', # 0x6e
'Tu ', # 0x6f
'Yao ', # 0x70
'Jian ', # 0x71
'Wei ', # 0x72
'Jiao ', # 0x73
'Yu ', # 0x74
'Jia ', # 0x75
'Duan ', # 0x76
'Bi ', # 0x77
'Chang ', # 0x78
'Fu ', # 0x79
'Xian ', # 0x7a
'Ni ', # 0x7b
'Mian ', # 0x7c
'Wa ', # 0x7d
'Teng ', # 0x7e
'Tui ', # 0x7f
'Bang ', # 0x80
'Qian ', # 0x81
'Lu ', # 0x82
'Wa ', # 0x83
'Sou ', # 0x84
'Tang ', # 0x85
'Su ', # 0x86
'Zhui ', # 0x87
'Ge ', # 0x88
'Yi ', # 0x89
'Bo ', # 0x8a
'Liao ', # 0x8b
'Ji ', # 0x8c
'Pi ', # 0x8d
'Xie ', # 0x8e
'Gao ', # 0x8f
'Lu ', # 0x90
'Bin ', # 0x91
'Ou ', # 0x92
'Chang ', # 0x93
'Lu ', # 0x94
'Guo ', # 0x95
'Pang ', # 0x96
'Chuai ', # 0x97
'Piao ', # 0x98
'Jiang ', # 0x99
'Fu ', # 0x9a
'Tang ', # 0x9b
'Mo ', # 0x9c
'Xi ', # 0x9d
'Zhuan ', # 0x9e
'Lu ', # 0x9f
'Jiao ', # 0xa0
'Ying ', # 0xa1
'Lu ', # 0xa2
'Zhi ', # 0xa3
'Tara ', # 0xa4
'Chun ', # 0xa5
'Lian ', # 0xa6
'Tong ', # 0xa7
'Peng ', # 0xa8
'Ni ', # 0xa9
'Zha ', # 0xaa
'Liao ', # 0xab
'Cui ', # 0xac
'Gui ', # 0xad
'Xiao ', # 0xae
'Teng ', # 0xaf
'Fan ', # 0xb0
'Zhi ', # 0xb1
'Jiao ', # 0xb2
'Shan ', # 0xb3
'Wu ', # 0xb4
'Cui ', # 0xb5
'Run ', # 0xb6
'Xiang ', # 0xb7
'Sui ', # 0xb8
'Fen ', # 0xb9
'Ying ', # 0xba
'Tan ', # 0xbb
'Zhua ', # 0xbc
'Dan ', # 0xbd
'Kuai ', # 0xbe
'Nong ', # 0xbf
'Tun ', # 0xc0
'Lian ', # 0xc1
'Bi ', # 0xc2
'Yong ', # 0xc3
'Jue ', # 0xc4
'Chu ', # 0xc5
'Yi ', # 0xc6
'Juan ', # 0xc7
'La ', # 0xc8
'Lian ', # 0xc9
'Sao ', # 0xca
'Tun ', # 0xcb
'Gu ', # 0xcc
'Qi ', # 0xcd
'Cui ', # 0xce
'Bin ', # 0xcf
'Xun ', # 0xd0
'Ru ', # 0xd1
'Huo ', # 0xd2
'Zang ', # 0xd3
'Xian ', # 0xd4
'Biao ', # 0xd5
'Xing ', # 0xd6
'Kuan ', # 0xd7
'La ', # 0xd8
'Yan ', # 0xd9
'Lu ', # 0xda
'Huo ', # 0xdb
'Zang ', # 0xdc
'Luo ', # 0xdd
'Qu ', # 0xde
'Zang ', # 0xdf
'Luan ', # 0xe0
'Ni ', # 0xe1
'Zang ', # 0xe2
'Chen ', # 0xe3
'Qian ', # 0xe4
'Wo ', # 0xe5
'Guang ', # 0xe6
'Zang ', # 0xe7
'Lin ', # 0xe8
'Guang ', # 0xe9
'Zi ', # 0xea
'Jiao ', # 0xeb
'Nie ', # 0xec
'Chou ', # 0xed
'Ji ', # 0xee
'Gao ', # 0xef
'Chou ', # 0xf0
'Mian ', # 0xf1
'Nie ', # 0xf2
'Zhi ', # 0xf3
'Zhi ', # 0xf4
'Ge ', # 0xf5
'Jian ', # 0xf6
'Die ', # 0xf7
'Zhi ', # 0xf8
'Xiu ', # 0xf9
'Tai ', # 0xfa
'Zhen ', # 0xfb
'Jiu ', # 0xfc
'Xian ', # 0xfd
'Yu ', # 0xfe
'Cha ', # 0xff
)
| gpl-3.0 |
gpocentek/python-gitlab | gitlab/client.py | 1 | 35035 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2013-2017 Gauvain Pocentek <gauvain@pocentek.net>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Wrapper for the GitLab API."""
import time
from typing import cast, Any, Dict, List, Optional, Tuple, TYPE_CHECKING, Union
import requests
import requests.utils
import gitlab.config
import gitlab.const
import gitlab.exceptions
from gitlab import utils
from requests_toolbelt.multipart.encoder import MultipartEncoder # type: ignore
REDIRECT_MSG = (
"python-gitlab detected an http to https redirection. You "
"must update your GitLab URL to use https:// to avoid issues."
)
class Gitlab(object):
"""Represents a GitLab server connection.
Args:
url (str): The URL of the GitLab server.
private_token (str): The user private token
oauth_token (str): An oauth token
job_token (str): A CI job token
ssl_verify (bool|str): Whether SSL certificates should be validated. If
the value is a string, it is the path to a CA file used for
certificate validation.
timeout (float): Timeout to use for requests to the GitLab server.
http_username (str): Username for HTTP authentication
http_password (str): Password for HTTP authentication
api_version (str): Gitlab API version to use (support for 4 only)
pagination (str): Can be set to 'keyset' to use keyset pagination
order_by (str): Set order_by globally
user_agent (str): A custom user agent to use for making HTTP requests.
"""
def __init__(
self,
url: str,
private_token: Optional[str] = None,
oauth_token: Optional[str] = None,
job_token: Optional[str] = None,
ssl_verify: Union[bool, str] = True,
http_username: Optional[str] = None,
http_password: Optional[str] = None,
timeout: Optional[float] = None,
api_version: str = "4",
session: Optional[requests.Session] = None,
per_page: Optional[int] = None,
pagination: Optional[str] = None,
order_by: Optional[str] = None,
user_agent: str = gitlab.const.USER_AGENT,
) -> None:
self._api_version = str(api_version)
self._server_version: Optional[str] = None
self._server_revision: Optional[str] = None
self._base_url = url.rstrip("/")
self._url = "%s/api/v%s" % (self._base_url, api_version)
#: Timeout to use for requests to gitlab server
self.timeout = timeout
#: Headers that will be used in request to GitLab
self.headers = {"User-Agent": user_agent}
#: Whether SSL certificates should be validated
self.ssl_verify = ssl_verify
self.private_token = private_token
self.http_username = http_username
self.http_password = http_password
self.oauth_token = oauth_token
self.job_token = job_token
self._set_auth_info()
#: Create a session object for requests
self.session = session or requests.Session()
self.per_page = per_page
self.pagination = pagination
self.order_by = order_by
# We only support v4 API at this time
if self._api_version not in ("4",):
raise ModuleNotFoundError(name="gitlab.v%s.objects" % self._api_version)
# NOTE: We must delay import of gitlab.v4.objects until now or
# otherwise it will cause circular import errors
import gitlab.v4.objects
objects = gitlab.v4.objects
self._objects = objects
self.broadcastmessages = objects.BroadcastMessageManager(self)
self.deploykeys = objects.DeployKeyManager(self)
self.deploytokens = objects.DeployTokenManager(self)
self.geonodes = objects.GeoNodeManager(self)
self.gitlabciymls = objects.GitlabciymlManager(self)
self.gitignores = objects.GitignoreManager(self)
self.groups = objects.GroupManager(self)
self.hooks = objects.HookManager(self)
self.issues = objects.IssueManager(self)
self.ldapgroups = objects.LDAPGroupManager(self)
self.licenses = objects.LicenseManager(self)
self.namespaces = objects.NamespaceManager(self)
self.mergerequests = objects.MergeRequestManager(self)
self.notificationsettings = objects.NotificationSettingsManager(self)
self.projects = objects.ProjectManager(self)
self.runners = objects.RunnerManager(self)
self.settings = objects.ApplicationSettingsManager(self)
self.appearance = objects.ApplicationAppearanceManager(self)
self.sidekiq = objects.SidekiqManager(self)
self.snippets = objects.SnippetManager(self)
self.users = objects.UserManager(self)
self.todos = objects.TodoManager(self)
self.dockerfiles = objects.DockerfileManager(self)
self.events = objects.EventManager(self)
self.audit_events = objects.AuditEventManager(self)
self.features = objects.FeatureManager(self)
self.pagesdomains = objects.PagesDomainManager(self)
self.user_activities = objects.UserActivitiesManager(self)
self.applications = objects.ApplicationManager(self)
self.variables = objects.VariableManager(self)
self.personal_access_tokens = objects.PersonalAccessTokenManager(self)
    def __enter__(self) -> "Gitlab":
        # Support "with Gitlab(...) as gl:" usage; the paired __exit__
        # closes the underlying requests session.
        return self
    def __exit__(self, *args: Any) -> None:
        # Close the underlying requests session when leaving the context.
        self.session.close()
    def __getstate__(self) -> Dict[str, Any]:
        # Drop the module reference (modules cannot be pickled); it is
        # re-imported by __setstate__ when the object is restored.
        state = self.__dict__.copy()
        state.pop("_objects")
        return state
    def __setstate__(self, state: Dict[str, Any]) -> None:
        # Restore the attributes removed by __getstate__, then re-import
        # the objects module that was dropped before pickling.
        self.__dict__.update(state)
        # We only support v4 API at this time
        if self._api_version not in ("4",):
            raise ModuleNotFoundError(name="gitlab.v%s.objects" % self._api_version)
        # NOTE: We must delay import of gitlab.v4.objects until now or
        # otherwise it will cause circular import errors
        import gitlab.v4.objects

        self._objects = gitlab.v4.objects
    @property
    def url(self) -> str:
        """The user-provided server URL (without the /api/v4 suffix)."""
        return self._base_url
    @property
    def api_url(self) -> str:
        """The computed API base URL (server URL + /api/v<version>)."""
        return self._url
    @property
    def api_version(self) -> str:
        """The API version used, as a string (only "4" is supported)."""
        return self._api_version
@classmethod
def from_config(
cls, gitlab_id: Optional[str] = None, config_files: Optional[List[str]] = None
) -> "Gitlab":
"""Create a Gitlab connection from configuration files.
Args:
gitlab_id (str): ID of the configuration section.
config_files list[str]: List of paths to configuration files.
Returns:
(gitlab.Gitlab): A Gitlab connection.
Raises:
gitlab.config.GitlabDataError: If the configuration is not correct.
"""
config = gitlab.config.GitlabConfigParser(
gitlab_id=gitlab_id, config_files=config_files
)
return cls(
config.url,
private_token=config.private_token,
oauth_token=config.oauth_token,
job_token=config.job_token,
ssl_verify=config.ssl_verify,
timeout=config.timeout,
http_username=config.http_username,
http_password=config.http_password,
api_version=config.api_version,
per_page=config.per_page,
pagination=config.pagination,
order_by=config.order_by,
user_agent=config.user_agent,
)
    def auth(self) -> None:
        """Performs an authentication using private token.

        The `user` attribute will hold a `gitlab.objects.CurrentUser` object on
        success.

        Raises whatever the underlying /user API call raises (e.g. an
        authentication error for an invalid token).
        """
        self.user = self._objects.CurrentUserManager(self).get()
    def version(self) -> Tuple[str, str]:
        """Returns the version and revision of the gitlab server.

        Note that self.version and self.revision will be set on the gitlab
        object.

        Returns:
            tuple (str, str): The server version and server revision.
                              ('unknown', 'unknown') if the server doesn't
                              perform as expected.
        """
        # Cached after the first successful (or failed) lookup.
        if self._server_version is None:
            try:
                data = self.http_get("/version")
                if isinstance(data, dict):
                    self._server_version = data["version"]
                    self._server_revision = data["revision"]
                else:
                    self._server_version = "unknown"
                    self._server_revision = "unknown"
            except Exception:
                # Deliberately broad: version info is best-effort, so any
                # failure degrades to 'unknown' instead of raising.
                self._server_version = "unknown"
                self._server_revision = "unknown"

        return cast(str, self._server_version), cast(str, self._server_revision)
@gitlab.exceptions.on_http_error(gitlab.exceptions.GitlabVerifyError)
def lint(self, content: str, **kwargs: Any) -> Tuple[bool, List[str]]:
"""Validate a gitlab CI configuration.
Args:
content (txt): The .gitlab-ci.yml content
**kwargs: Extra options to send to the server (e.g. sudo)
Raises:
GitlabAuthenticationError: If authentication is not correct
GitlabVerifyError: If the validation could not be done
Returns:
tuple: (True, []) if the file is valid, (False, errors(list))
otherwise
"""
post_data = {"content": content}
data = self.http_post("/ci/lint", post_data=post_data, **kwargs)
if TYPE_CHECKING:
assert not isinstance(data, requests.Response)
return (data["status"] == "valid", data["errors"])
@gitlab.exceptions.on_http_error(gitlab.exceptions.GitlabMarkdownError)
def markdown(
self, text: str, gfm: bool = False, project: Optional[str] = None, **kwargs: Any
) -> str:
"""Render an arbitrary Markdown document.
Args:
text (str): The markdown text to render
gfm (bool): Render text using GitLab Flavored Markdown. Default is
False
project (str): Full path of a project used a context when `gfm` is
True
**kwargs: Extra options to send to the server (e.g. sudo)
Raises:
GitlabAuthenticationError: If authentication is not correct
GitlabMarkdownError: If the server cannot perform the request
Returns:
str: The HTML rendering of the markdown text.
"""
post_data = {"text": text, "gfm": gfm}
if project is not None:
post_data["project"] = project
data = self.http_post("/markdown", post_data=post_data, **kwargs)
if TYPE_CHECKING:
assert not isinstance(data, requests.Response)
return data["html"]
    @gitlab.exceptions.on_http_error(gitlab.exceptions.GitlabLicenseError)
    def get_license(self, **kwargs: Any) -> Dict[str, Any]:
        """Retrieve information about the current license.

        Args:
            **kwargs: Extra options to send to the server (e.g. sudo)

        Raises:
            GitlabAuthenticationError: If authentication is not correct
            GitlabGetError: If the server cannot perform the request

        Returns:
            dict: The current license information
        """
        result = self.http_get("/license", **kwargs)
        if isinstance(result, dict):
            return result
        # NOTE(review): a non-dict response (e.g. streamed/raw) is silently
        # collapsed to an empty dict; callers cannot distinguish "no
        # license" from an unexpected response shape.
        return {}
@gitlab.exceptions.on_http_error(gitlab.exceptions.GitlabLicenseError)
def set_license(self, license: str, **kwargs: Any) -> Dict[str, Any]:
"""Add a new license.
Args:
license (str): The license string
**kwargs: Extra options to send to the server (e.g. sudo)
Raises:
GitlabAuthenticationError: If authentication is not correct
GitlabPostError: If the server cannot perform the request
Returns:
dict: The new license information
"""
data = {"license": license}
result = self.http_post("/license", post_data=data, **kwargs)
if TYPE_CHECKING:
assert not isinstance(result, requests.Response)
return result
    def _set_auth_info(self) -> None:
        """Validate the configured credentials and install auth headers.

        At most one of private_token, oauth_token or job_token may be set,
        HTTP basic auth requires both username and password, and basic
        auth cannot be combined with OAuth.

        Raises:
            ValueError: If the credential combination is invalid.
        """
        tokens = [
            token
            for token in [self.private_token, self.oauth_token, self.job_token]
            if token
        ]
        if len(tokens) > 1:
            raise ValueError(
                "Only one of private_token, oauth_token or job_token should "
                "be defined"
            )
        if (self.http_username and not self.http_password) or (
            not self.http_username and self.http_password
        ):
            raise ValueError(
                "Both http_username and http_password should " "be defined"
            )
        if self.oauth_token and self.http_username:
            raise ValueError(
                "Only one of oauth authentication or http "
                "authentication should be defined"
            )

        self._http_auth = None
        # Each branch below also removes the other credential headers so a
        # stale token never leaks into subsequent requests.
        if self.private_token:
            self.headers.pop("Authorization", None)
            self.headers["PRIVATE-TOKEN"] = self.private_token
            self.headers.pop("JOB-TOKEN", None)

        if self.oauth_token:
            self.headers["Authorization"] = "Bearer %s" % self.oauth_token
            self.headers.pop("PRIVATE-TOKEN", None)
            self.headers.pop("JOB-TOKEN", None)

        if self.job_token:
            self.headers.pop("Authorization", None)
            self.headers.pop("PRIVATE-TOKEN", None)
            self.headers["JOB-TOKEN"] = self.job_token

        if self.http_username:
            self._http_auth = requests.auth.HTTPBasicAuth(
                self.http_username, self.http_password
            )
    def enable_debug(self) -> None:
        """Turn on verbose wire-level HTTP logging.

        Enables http.client debug output and sets the urllib3 logger used
        by requests to DEBUG.  This mutates process-wide logging state, so
        it is intended for troubleshooting only.
        """
        import logging
        from http.client import HTTPConnection  # noqa

        HTTPConnection.debuglevel = 1  # type: ignore
        logging.basicConfig()
        logging.getLogger().setLevel(logging.DEBUG)
        requests_log = logging.getLogger("requests.packages.urllib3")
        requests_log.setLevel(logging.DEBUG)
        requests_log.propagate = True
def _create_headers(self, content_type: Optional[str] = None) -> Dict[str, Any]:
request_headers = self.headers.copy()
if content_type is not None:
request_headers["Content-type"] = content_type
return request_headers
def _get_session_opts(self, content_type: str) -> Dict[str, Any]:
return {
"headers": self._create_headers(content_type),
"auth": self._http_auth,
"timeout": self.timeout,
"verify": self.ssl_verify,
}
def _build_url(self, path: str) -> str:
"""Returns the full url from path.
If path is already a url, return it unchanged. If it's a path, append
it to the stored url.
Returns:
str: The full URL
"""
if path.startswith("http://") or path.startswith("https://"):
return path
else:
return "%s%s" % (self._url, path)
    def _check_redirects(self, result: requests.Response) -> None:
        """Raise RedirectError if an http->https redirect was followed.

        Inspects ``result.history`` (populated by requests for every
        followed redirect) and only complains when the configured base
        URL is plain http.
        """
        # Check the requests history to detect http to https redirections.
        # If the initial verb is POST, the next request will use a GET request,
        # leading to an unwanted behaviour.
        # If the initial verb is PUT, the data will not be send with the next
        # request.
        # If we detect a redirection to https with a POST or a PUT request, we
        # raise an exception with a useful error message.
        if result.history and self._base_url.startswith("http:"):
            for item in result.history:
                if item.status_code not in (301, 302):
                    continue
                # GET methods can be redirected without issue
                if item.request.method == "GET":
                    continue
                # Did we end-up with an https:// URL?
                location = item.headers.get("Location", None)
                if location and location.startswith("https://"):
                    raise gitlab.exceptions.RedirectError(REDIRECT_MSG)
def http_request(
self,
verb: str,
path: str,
query_data: Optional[Dict[str, Any]] = None,
post_data: Optional[Dict[str, Any]] = None,
streamed: bool = False,
files: Optional[Dict[str, Any]] = None,
timeout: Optional[float] = None,
obey_rate_limit: bool = True,
retry_transient_errors: bool = False,
max_retries: int = 10,
**kwargs: Any,
) -> requests.Response:
"""Make an HTTP request to the Gitlab server.
Args:
verb (str): The HTTP method to call ('get', 'post', 'put',
'delete')
path (str): Path or full URL to query ('/projects' or
'http://whatever/v4/api/projecs')
query_data (dict): Data to send as query parameters
post_data (dict): Data to send in the body (will be converted to
json)
streamed (bool): Whether the data should be streamed
files (dict): The files to send to the server
timeout (float): The timeout, in seconds, for the request
obey_rate_limit (bool): Whether to obey 429 Too Many Request
responses. Defaults to True.
retry_transient_errors (bool): Whether to retry after 500, 502,
503, or 504 responses. Defaults
to False.
max_retries (int): Max retries after 429 or transient errors,
set to -1 to retry forever. Defaults to 10.
**kwargs: Extra options to send to the server (e.g. sudo)
Returns:
A requests result object.
Raises:
GitlabHttpError: When the return code is not 2xx
"""
query_data = query_data or {}
url = self._build_url(path)
params: Dict[str, Any] = {}
utils.copy_dict(params, query_data)
# Deal with kwargs: by default a user uses kwargs to send data to the
# gitlab server, but this generates problems (python keyword conflicts
# and python-gitlab/gitlab conflicts).
# So we provide a `query_parameters` key: if it's there we use its dict
# value as arguments for the gitlab server, and ignore the other
# arguments, except pagination ones (per_page and page)
if "query_parameters" in kwargs:
utils.copy_dict(params, kwargs["query_parameters"])
for arg in ("per_page", "page"):
if arg in kwargs:
params[arg] = kwargs[arg]
else:
utils.copy_dict(params, kwargs)
opts = self._get_session_opts(content_type="application/json")
verify = opts.pop("verify")
opts_timeout = opts.pop("timeout")
# If timeout was passed into kwargs, allow it to override the default
if timeout is None:
timeout = opts_timeout
# We need to deal with json vs. data when uploading files
if files:
json = None
if post_data is None:
post_data = {}
else:
# booleans does not exists for data (neither for MultipartEncoder):
# cast to string int to avoid: 'bool' object has no attribute 'encode'
for k, v in post_data.items():
if isinstance(v, bool):
post_data[k] = str(int(v))
post_data["file"] = files.get("file")
post_data["avatar"] = files.get("avatar")
data = MultipartEncoder(post_data)
opts["headers"]["Content-type"] = data.content_type
else:
json = post_data
data = None
# Requests assumes that `.` should not be encoded as %2E and will make
# changes to urls using this encoding. Using a prepped request we can
# get the desired behavior.
# The Requests behavior is right but it seems that web servers don't
# always agree with this decision (this is the case with a default
# gitlab installation)
req = requests.Request(verb, url, json=json, data=data, params=params, **opts)
prepped = self.session.prepare_request(req)
prepped.url = utils.sanitized_url(prepped.url)
settings = self.session.merge_environment_settings(
prepped.url, {}, streamed, verify, None
)
cur_retries = 0
while True:
result = self.session.send(prepped, timeout=timeout, **settings)
self._check_redirects(result)
if 200 <= result.status_code < 300:
return result
if (429 == result.status_code and obey_rate_limit) or (
result.status_code in [500, 502, 503, 504] and retry_transient_errors
):
if max_retries == -1 or cur_retries < max_retries:
wait_time = 2 ** cur_retries * 0.1
if "Retry-After" in result.headers:
wait_time = int(result.headers["Retry-After"])
cur_retries += 1
time.sleep(wait_time)
continue
error_message = result.content
try:
error_json = result.json()
for k in ("message", "error"):
if k in error_json:
error_message = error_json[k]
except (KeyError, ValueError, TypeError):
pass
if result.status_code == 401:
raise gitlab.exceptions.GitlabAuthenticationError(
response_code=result.status_code,
error_message=error_message,
response_body=result.content,
)
raise gitlab.exceptions.GitlabHttpError(
response_code=result.status_code,
error_message=error_message,
response_body=result.content,
)
def http_get(
self,
path: str,
query_data: Optional[Dict[str, Any]] = None,
streamed: bool = False,
raw: bool = False,
**kwargs: Any,
) -> Union[Dict[str, Any], requests.Response]:
"""Make a GET request to the Gitlab server.
Args:
path (str): Path or full URL to query ('/projects' or
'http://whatever/v4/api/projecs')
query_data (dict): Data to send as query parameters
streamed (bool): Whether the data should be streamed
raw (bool): If True do not try to parse the output as json
**kwargs: Extra options to send to the server (e.g. sudo)
Returns:
A requests result object is streamed is True or the content type is
not json.
The parsed json data otherwise.
Raises:
GitlabHttpError: When the return code is not 2xx
GitlabParsingError: If the json data could not be parsed
"""
query_data = query_data or {}
result = self.http_request(
"get", path, query_data=query_data, streamed=streamed, **kwargs
)
if (
result.headers["Content-Type"] == "application/json"
and not streamed
and not raw
):
try:
return result.json()
except Exception as e:
raise gitlab.exceptions.GitlabParsingError(
error_message="Failed to parse the server message"
) from e
else:
return result
def http_list(
self,
path: str,
query_data: Optional[Dict[str, Any]] = None,
as_list: Optional[bool] = None,
**kwargs: Any,
) -> Union["GitlabList", List[Dict[str, Any]]]:
"""Make a GET request to the Gitlab server for list-oriented queries.
Args:
path (str): Path or full URL to query ('/projects' or
'http://whatever/v4/api/projects')
query_data (dict): Data to send as query parameters
**kwargs: Extra options to send to the server (e.g. sudo, page,
per_page)
Returns:
list: A list of the objects returned by the server. If `as_list` is
False and no pagination-related arguments (`page`, `per_page`,
`all`) are defined then a GitlabList object (generator) is returned
instead. This object will make API calls when needed to fetch the
next items from the server.
Raises:
GitlabHttpError: When the return code is not 2xx
GitlabParsingError: If the json data could not be parsed
"""
query_data = query_data or {}
# In case we want to change the default behavior at some point
as_list = True if as_list is None else as_list
get_all = kwargs.pop("all", False)
url = self._build_url(path)
page = kwargs.get("page")
if get_all is True and as_list is True:
return list(GitlabList(self, url, query_data, **kwargs))
if page or as_list is True:
# pagination requested, we return a list
return list(GitlabList(self, url, query_data, get_next=False, **kwargs))
# No pagination, generator requested
return GitlabList(self, url, query_data, **kwargs)
def http_post(
self,
path: str,
query_data: Optional[Dict[str, Any]] = None,
post_data: Optional[Dict[str, Any]] = None,
files: Optional[Dict[str, Any]] = None,
**kwargs: Any,
) -> Union[Dict[str, Any], requests.Response]:
"""Make a POST request to the Gitlab server.
Args:
path (str): Path or full URL to query ('/projects' or
'http://whatever/v4/api/projecs')
query_data (dict): Data to send as query parameters
post_data (dict): Data to send in the body (will be converted to
json)
files (dict): The files to send to the server
**kwargs: Extra options to send to the server (e.g. sudo)
Returns:
The parsed json returned by the server if json is return, else the
raw content
Raises:
GitlabHttpError: When the return code is not 2xx
GitlabParsingError: If the json data could not be parsed
"""
query_data = query_data or {}
post_data = post_data or {}
result = self.http_request(
"post",
path,
query_data=query_data,
post_data=post_data,
files=files,
**kwargs,
)
try:
if result.headers.get("Content-Type", None) == "application/json":
return result.json()
except Exception as e:
raise gitlab.exceptions.GitlabParsingError(
error_message="Failed to parse the server message"
) from e
return result
def http_put(
self,
path: str,
query_data: Optional[Dict[str, Any]] = None,
post_data: Optional[Dict[str, Any]] = None,
files: Optional[Dict[str, Any]] = None,
**kwargs: Any,
) -> Union[Dict[str, Any], requests.Response]:
"""Make a PUT request to the Gitlab server.
Args:
path (str): Path or full URL to query ('/projects' or
'http://whatever/v4/api/projecs')
query_data (dict): Data to send as query parameters
post_data (dict): Data to send in the body (will be converted to
json)
files (dict): The files to send to the server
**kwargs: Extra options to send to the server (e.g. sudo)
Returns:
The parsed json returned by the server.
Raises:
GitlabHttpError: When the return code is not 2xx
GitlabParsingError: If the json data could not be parsed
"""
query_data = query_data or {}
post_data = post_data or {}
result = self.http_request(
"put",
path,
query_data=query_data,
post_data=post_data,
files=files,
**kwargs,
)
try:
return result.json()
except Exception as e:
raise gitlab.exceptions.GitlabParsingError(
error_message="Failed to parse the server message"
) from e
def http_delete(self, path: str, **kwargs: Any) -> requests.Response:
"""Make a PUT request to the Gitlab server.
Args:
path (str): Path or full URL to query ('/projects' or
'http://whatever/v4/api/projecs')
**kwargs: Extra options to send to the server (e.g. sudo)
Returns:
The requests object.
Raises:
GitlabHttpError: When the return code is not 2xx
"""
return self.http_request("delete", path, **kwargs)
@gitlab.exceptions.on_http_error(gitlab.exceptions.GitlabSearchError)
def search(
self, scope: str, search: str, **kwargs: Any
) -> Union["GitlabList", List[Dict[str, Any]]]:
"""Search GitLab resources matching the provided string.'
Args:
scope (str): Scope of the search
search (str): Search string
**kwargs: Extra options to send to the server (e.g. sudo)
Raises:
GitlabAuthenticationError: If authentication is not correct
GitlabSearchError: If the server failed to perform the request
Returns:
GitlabList: A list of dicts describing the resources found.
"""
data = {"scope": scope, "search": search}
return self.http_list("/search", query_data=data, **kwargs)
class GitlabList(object):
"""Generator representing a list of remote objects.
The object handles the links returned by a query to the API, and will call
the API again when needed.
"""
def __init__(
self,
gl: Gitlab,
url: str,
query_data: Dict[str, Any],
get_next: bool = True,
**kwargs: Any,
) -> None:
self._gl = gl
# Preserve kwargs for subsequent queries
self._kwargs = kwargs.copy()
self._query(url, query_data, **self._kwargs)
self._get_next = get_next
# Remove query_parameters from kwargs, which are saved via the `next` URL
self._kwargs.pop("query_parameters", None)
def _query(
self, url: str, query_data: Optional[Dict[str, Any]] = None, **kwargs: Any
) -> None:
query_data = query_data or {}
result = self._gl.http_request("get", url, query_data=query_data, **kwargs)
try:
links = result.links
if links:
next_url = links["next"]["url"]
else:
next_url = requests.utils.parse_header_links(result.headers["links"])[
0
]["url"]
self._next_url = next_url
except KeyError:
self._next_url = None
self._current_page: Optional[Union[str, int]] = result.headers.get("X-Page")
self._prev_page: Optional[Union[str, int]] = result.headers.get("X-Prev-Page")
self._next_page: Optional[Union[str, int]] = result.headers.get("X-Next-Page")
self._per_page: Optional[Union[str, int]] = result.headers.get("X-Per-Page")
self._total_pages: Optional[Union[str, int]] = result.headers.get(
"X-Total-Pages"
)
self._total: Optional[Union[str, int]] = result.headers.get("X-Total")
try:
self._data: List[Dict[str, Any]] = result.json()
except Exception as e:
raise gitlab.exceptions.GitlabParsingError(
error_message="Failed to parse the server message"
) from e
self._current = 0
@property
def current_page(self) -> int:
"""The current page number."""
if TYPE_CHECKING:
assert self._current_page is not None
return int(self._current_page)
@property
def prev_page(self) -> Optional[int]:
"""The previous page number.
If None, the current page is the first.
"""
return int(self._prev_page) if self._prev_page else None
@property
def next_page(self) -> Optional[int]:
"""The next page number.
If None, the current page is the last.
"""
return int(self._next_page) if self._next_page else None
@property
def per_page(self) -> int:
"""The number of items per page."""
if TYPE_CHECKING:
assert self._per_page is not None
return int(self._per_page)
@property
def total_pages(self) -> int:
"""The total number of pages."""
if TYPE_CHECKING:
assert self._total_pages is not None
return int(self._total_pages)
@property
def total(self) -> int:
"""The total number of items."""
if TYPE_CHECKING:
assert self._total is not None
return int(self._total)
def __iter__(self) -> "GitlabList":
return self
def __len__(self) -> int:
if self._total is None:
return 0
return int(self._total)
def __next__(self) -> Dict[str, Any]:
return self.next()
def next(self) -> Dict[str, Any]:
try:
item = self._data[self._current]
self._current += 1
return item
except IndexError:
pass
if self._next_url and self._get_next is True:
self._query(self._next_url, **self._kwargs)
return self.next()
raise StopIteration
| lgpl-3.0 |
UniversalMasterEgg8679/ansible | lib/ansible/module_utils/docker_common.py | 21 | 18341 | #
# Copyright 2016 Red Hat | Ansible
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import os
import re
import json
import sys
import copy
from distutils.version import LooseVersion
from ansible.module_utils.basic import AnsibleModule, BOOLEANS_TRUE, BOOLEANS_FALSE
from ansible.module_utils.six.moves.urllib.parse import urlparse
HAS_DOCKER_PY = True
HAS_DOCKER_PY_2 = False
HAS_DOCKER_ERROR = None
try:
from requests.exceptions import SSLError
from docker import __version__ as docker_version
from docker.errors import APIError, TLSParameterError, NotFound
from docker.tls import TLSConfig
from docker.constants import DEFAULT_TIMEOUT_SECONDS, DEFAULT_DOCKER_API_VERSION
from docker import auth
if LooseVersion(docker_version) >= LooseVersion('2.0.0'):
HAS_DOCKER_PY_2 = True
from docker import APIClient as Client
from docker.types import Ulimit, LogConfig
else:
from docker import Client
from docker.utils.types import Ulimit, LogConfig
except ImportError as exc:
HAS_DOCKER_ERROR = str(exc)
HAS_DOCKER_PY = False
DEFAULT_DOCKER_HOST = 'unix://var/run/docker.sock'
DEFAULT_TLS = False
DEFAULT_TLS_VERIFY = False
MIN_DOCKER_VERSION = "1.7.0"
DOCKER_COMMON_ARGS = dict(
docker_host=dict(type='str', aliases=['docker_url']),
tls_hostname=dict(type='str'),
api_version=dict(type='str', aliases=['docker_api_version']),
timeout=dict(type='int'),
cacert_path=dict(type='str', aliases=['tls_ca_cert']),
cert_path=dict(type='str', aliases=['tls_client_cert']),
key_path=dict(type='str', aliases=['tls_client_key']),
ssl_version=dict(type='str'),
tls=dict(type='bool'),
tls_verify=dict(type='bool'),
debug=dict(type='bool', default=False),
filter_logger=dict(type='bool', default=False),
)
DOCKER_MUTUALLY_EXCLUSIVE = [
['tls', 'tls_verify']
]
DOCKER_REQUIRED_TOGETHER = [
['cert_path', 'key_path']
]
DEFAULT_DOCKER_REGISTRY = 'https://index.docker.io/v1/'
EMAIL_REGEX = '[^@]+@[^@]+\.[^@]+'
BYTE_SUFFIXES = ['B', 'KB', 'MB', 'GB', 'TB', 'PB']
if not HAS_DOCKER_PY:
# No docker-py. Create a place holder client to allow
# instantiation of AnsibleModule and proper error handing
class Client(object):
def __init__(self, **kwargs):
pass
class DockerBaseClass(object):
def __init__(self):
self.debug = False
def log(self, msg, pretty_print=False):
pass
# if self.debug:
# log_file = open('docker.log', 'a')
# if pretty_print:
# log_file.write(json.dumps(msg, sort_keys=True, indent=4, separators=(',', ': ')))
# log_file.write(u'\n')
# else:
# log_file.write(msg + u'\n')
class AnsibleDockerClient(Client):
def __init__(self, argument_spec=None, supports_check_mode=False, mutually_exclusive=None,
required_together=None, required_if=None):
merged_arg_spec = dict()
merged_arg_spec.update(DOCKER_COMMON_ARGS)
if argument_spec:
merged_arg_spec.update(argument_spec)
self.arg_spec = merged_arg_spec
mutually_exclusive_params = []
mutually_exclusive_params += DOCKER_MUTUALLY_EXCLUSIVE
if mutually_exclusive:
mutually_exclusive_params += mutually_exclusive
required_together_params = []
required_together_params += DOCKER_REQUIRED_TOGETHER
if required_together:
required_together_params += required_together
self.module = AnsibleModule(
argument_spec=merged_arg_spec,
supports_check_mode=supports_check_mode,
mutually_exclusive=mutually_exclusive_params,
required_together=required_together_params,
required_if=required_if)
if not HAS_DOCKER_PY:
self.fail("Failed to import docker-py - %s. Try `pip install docker-py`" % HAS_DOCKER_ERROR)
if LooseVersion(docker_version) < LooseVersion(MIN_DOCKER_VERSION):
self.fail("Error: docker-py version is %s. Minimum version required is %s." % (docker_version,
MIN_DOCKER_VERSION))
self.debug = self.module.params.get('debug')
self.check_mode = self.module.check_mode
self._connect_params = self._get_connect_params()
try:
super(AnsibleDockerClient, self).__init__(**self._connect_params)
except APIError as exc:
self.fail("Docker API error: %s" % exc)
except Exception as exc:
self.fail("Error connecting: %s" % exc)
def log(self, msg, pretty_print=False):
pass
# if self.debug:
# log_file = open('docker.log', 'a')
# if pretty_print:
# log_file.write(json.dumps(msg, sort_keys=True, indent=4, separators=(',', ': ')))
# log_file.write(u'\n')
# else:
# log_file.write(msg + u'\n')
def fail(self, msg):
self.module.fail_json(msg=msg)
@staticmethod
def _get_value(param_name, param_value, env_variable, default_value):
if param_value is not None:
# take module parameter value
if param_value in BOOLEANS_TRUE:
return True
if param_value in BOOLEANS_FALSE:
return False
return param_value
if env_variable is not None:
env_value = os.environ.get(env_variable)
if env_value is not None:
# take the env variable value
if param_name == 'cert_path':
return os.path.join(env_value, 'cert.pem')
if param_name == 'cacert_path':
return os.path.join(env_value, 'ca.pem')
if param_name == 'key_path':
return os.path.join(env_value, 'key.pem')
if env_value in BOOLEANS_TRUE:
return True
if env_value in BOOLEANS_FALSE:
return False
return env_value
# take the default
return default_value
@property
def auth_params(self):
# Get authentication credentials.
# Precedence: module parameters-> environment variables-> defaults.
self.log('Getting credentials')
params = dict()
for key in DOCKER_COMMON_ARGS:
params[key] = self.module.params.get(key)
if self.module.params.get('use_tls'):
# support use_tls option in docker_image.py. This will be deprecated.
use_tls = self.module.params.get('use_tls')
if use_tls == 'encrypt':
params['tls'] = True
if use_tls == 'verify':
params['tls_verify'] = True
result = dict(
docker_host=self._get_value('docker_host', params['docker_host'], 'DOCKER_HOST',
DEFAULT_DOCKER_HOST),
tls_hostname=self._get_value('tls_hostname', params['tls_hostname'],
'DOCKER_TLS_HOSTNAME', 'localhost'),
api_version=self._get_value('api_version', params['api_version'], 'DOCKER_API_VERSION',
'auto'),
cacert_path=self._get_value('cacert_path', params['cacert_path'], 'DOCKER_CERT_PATH', None),
cert_path=self._get_value('cert_path', params['cert_path'], 'DOCKER_CERT_PATH', None),
key_path=self._get_value('key_path', params['key_path'], 'DOCKER_CERT_PATH', None),
ssl_version=self._get_value('ssl_version', params['ssl_version'], 'DOCKER_SSL_VERSION', None),
tls=self._get_value('tls', params['tls'], 'DOCKER_TLS', DEFAULT_TLS),
tls_verify=self._get_value('tls_verfy', params['tls_verify'], 'DOCKER_TLS_VERIFY',
DEFAULT_TLS_VERIFY),
timeout=self._get_value('timeout', params['timeout'], 'DOCKER_TIMEOUT',
DEFAULT_TIMEOUT_SECONDS),
)
if result['tls_hostname'] is None:
# get default machine name from the url
parsed_url = urlparse(result['docker_host'])
if ':' in parsed_url.netloc:
result['tls_hostname'] = parsed_url.netloc[:parsed_url.netloc.rindex(':')]
else:
result['tls_hostname'] = parsed_url
return result
def _get_tls_config(self, **kwargs):
self.log("get_tls_config:")
for key in kwargs:
self.log(" %s: %s" % (key, kwargs[key]))
try:
tls_config = TLSConfig(**kwargs)
return tls_config
except TLSParameterError as exc:
self.fail("TLS config error: %s" % exc)
def _get_connect_params(self):
auth = self.auth_params
self.log("connection params:")
for key in auth:
self.log(" %s: %s" % (key, auth[key]))
if auth['tls'] or auth['tls_verify']:
auth['docker_host'] = auth['docker_host'].replace('tcp://', 'https://')
if auth['tls'] and auth['cert_path'] and auth['key_path']:
# TLS with certs and no host verification
tls_config = self._get_tls_config(client_cert=(auth['cert_path'], auth['key_path']),
verify=False,
ssl_version=auth['ssl_version'])
return dict(base_url=auth['docker_host'],
tls=tls_config,
version=auth['api_version'],
timeout=auth['timeout'])
if auth['tls']:
# TLS with no certs and not host verification
tls_config = self._get_tls_config(verify=False,
ssl_version=auth['ssl_version'])
return dict(base_url=auth['docker_host'],
tls=tls_config,
version=auth['api_version'],
timeout=auth['timeout'])
if auth['tls_verify'] and auth['cert_path'] and auth['key_path']:
# TLS with certs and host verification
if auth['cacert_path']:
tls_config = self._get_tls_config(client_cert=(auth['cert_path'], auth['key_path']),
ca_cert=auth['cacert_path'],
verify=True,
assert_hostname=auth['tls_hostname'],
ssl_version=auth['ssl_version'])
else:
tls_config = self._get_tls_config(client_cert=(auth['cert_path'], auth['key_path']),
verify=True,
assert_hostname=auth['tls_hostname'],
ssl_version=auth['ssl_version'])
return dict(base_url=auth['docker_host'],
tls=tls_config,
version=auth['api_version'],
timeout=auth['timeout'])
if auth['tls_verify'] and auth['cacert_path']:
# TLS with cacert only
tls_config = self._get_tls_config(ca_cert=auth['cacert_path'],
assert_hostname=auth['tls_hostname'],
verify=True,
ssl_version=auth['ssl_version'])
return dict(base_url=auth['docker_host'],
tls=tls_config,
version=auth['api_version'],
timeout=auth['timeout'])
if auth['tls_verify']:
# TLS with verify and no certs
tls_config = self._get_tls_config(verify=True,
assert_hostname=auth['tls_hostname'],
ssl_version=auth['ssl_version'])
return dict(base_url=auth['docker_host'],
tls=tls_config,
version=auth['api_version'],
timeout=auth['timeout'])
# No TLS
return dict(base_url=auth['docker_host'],
version=auth['api_version'],
timeout=auth['timeout'])
def _handle_ssl_error(self, error):
match = re.match(r"hostname.*doesn\'t match (\'.*\')", str(error))
if match:
msg = "You asked for verification that Docker host name matches %s. The actual hostname is %s. " \
"Most likely you need to set DOCKER_TLS_HOSTNAME or pass tls_hostname with a value of %s. " \
"You may also use TLS without verification by setting the tls parameter to true." \
% (self.auth_params['tls_hostname'], match.group(1), match.group(1))
self.fail(msg)
self.fail("SSL Exception: %s" % (error))
def get_container(self, name=None):
'''
Lookup a container and return the inspection results.
'''
if name is None:
return None
search_name = name
if not name.startswith('/'):
search_name = '/' + name
result = None
try:
for container in self.containers(all=True):
self.log("testing container: %s" % (container['Names']))
if isinstance(container['Names'], list) and search_name in container['Names']:
result = container
break
if container['Id'].startswith(name):
result = container
break
if container['Id'] == name:
result = container
break
except SSLError as exc:
self._handle_ssl_error(exc)
except Exception as exc:
self.fail("Error retrieving container list: %s" % exc)
if result is not None:
try:
self.log("Inspecting container Id %s" % result['Id'])
result = self.inspect_container(container=result['Id'])
self.log("Completed container inspection")
except Exception as exc:
self.fail("Error inspecting container: %s" % exc)
return result
def find_image(self, name, tag):
'''
Lookup an image and return the inspection results.
'''
if not name:
return None
self.log("Find image %s:%s" % (name, tag))
images = self._image_lookup(name, tag)
if len(images) == 0:
# In API <= 1.20 seeing 'docker.io/<name>' as the name of images pulled from docker hub
registry, repo_name = auth.resolve_repository_name(name)
if registry == 'docker.io':
# the name does not contain a registry, so let's see if docker.io works
lookup = "docker.io/%s" % name
self.log("Check for docker.io image: %s" % lookup)
images = self._image_lookup(lookup, tag)
if len(images) > 1:
self.fail("Registry returned more than one result for %s:%s" % (name, tag))
if len(images) == 1:
try:
inspection = self.inspect_image(images[0]['Id'])
except Exception as exc:
self.fail("Error inspecting image %s:%s - %s" % (name, tag, str(exc)))
return inspection
self.log("Image %s:%s not found." % (name, tag))
return None
def _image_lookup(self, name, tag):
'''
Including a tag in the name parameter sent to the docker-py images method does not
work consistently. Instead, get the result set for name and manually check if the tag
exists.
'''
try:
response = self.images(name=name)
except Exception as exc:
self.fail("Error searching for image %s - %s" % (name, str(exc)))
images = response
if tag:
lookup = "%s:%s" % (name, tag)
images = []
for image in response:
tags = image.get('RepoTags')
if tags and lookup in tags:
images = [image]
break
return images
def pull_image(self, name, tag="latest"):
'''
Pull an image
'''
self.log("Pulling image %s:%s" % (name, tag))
alreadyToLatest = False
try:
for line in self.pull(name, tag=tag, stream=True, decode=True):
self.log(line, pretty_print=True)
if line.get('status'):
if line.get('status').startswith('Status: Image is up to date for'):
alreadyToLatest = True
if line.get('error'):
if line.get('errorDetail'):
error_detail = line.get('errorDetail')
self.fail("Error pulling %s - code: %s message: %s" % (name,
error_detail.get('code'),
error_detail.get('message')))
else:
self.fail("Error pulling %s - %s" % (name, line.get('error')))
except Exception as exc:
self.fail("Error pulling image %s:%s - %s" % (name, tag, str(exc)))
return self.find_image(name, tag), alreadyToLatest
| gpl-3.0 |
tjanez/ansible | lib/ansible/modules/cloud/softlayer/sl_vm.py | 14 | 11641 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: sl_vm
short_description: create or cancel a virtual instance in SoftLayer
description:
- Creates or cancels SoftLayer instances. When created, optionally waits for it to be 'running'.
version_added: "2.1"
options:
instance_id:
description:
- Instance Id of the virtual instance to perform action option
required: false
default: null
hostname:
description:
- Hostname to be provided to a virtual instance
required: false
default: null
domain:
description:
- Domain name to be provided to a virtual instance
required: false
default: null
datacenter:
description:
- Datacenter for the virtual instance to be deployed
required: false
default: null
tags:
description:
- Tag or list of tags to be provided to a virtual instance
required: false
default: null
hourly:
description:
- Flag to determine if the instance should be hourly billed
required: false
default: true
private:
description:
- Flag to determine if the instance should be private only
required: false
default: false
dedicated:
description:
- Falg to determine if the instance should be deployed in dedicated space
required: false
default: false
local_disk:
description:
- Flag to determine if local disk should be used for the new instance
required: false
default: true
cpus:
description:
- Count of cpus to be assigned to new virtual instance
required: true
default: null
memory:
description:
- Amount of memory to be assigned to new virtual instance
required: true
default: null
disks:
description:
- List of disk sizes to be assigned to new virtual instance
required: true
default: [25]
os_code:
description:
- OS Code to be used for new virtual instance
required: false
default: null
image_id:
description:
- Image Template to be used for new virtual instance
required: false
default: null
nic_speed:
description:
- NIC Speed to be assigned to new virtual instance
required: false
default: 10
public_vlan:
description:
- VLAN by its Id to be assigned to the public NIC
required: false
default: null
private_vlan:
description:
- VLAN by its Id to be assigned to the private NIC
required: false
default: null
ssh_keys:
description:
- List of ssh keys by their Id to be assigned to a virtual instance
required: false
default: null
post_uri:
description:
- URL of a post provisioning script to be loaded and executed on virtual instance
required: false
default: null
state:
description:
- Create, or cancel a virtual instance. Specify "present" for create, "absent" to cancel.
required: false
default: 'present'
wait:
description:
- Flag used to wait for active status before returning
required: false
default: true
wait_timeout:
description:
- time in seconds before wait returns
required: false
default: 600
requirements:
- "python >= 2.6"
- "softlayer >= 4.1.1"
author: "Matt Colton (@mcltn)"
'''
EXAMPLES = '''
- name: Build instance
hosts: localhost
gather_facts: False
tasks:
- name: Build instance request
sl_vm:
hostname: instance-1
domain: anydomain.com
datacenter: dal09
tags: ansible-module-test
hourly: True
private: False
dedicated: False
local_disk: True
cpus: 1
memory: 1024
disks: [25]
os_code: UBUNTU_LATEST
wait: False
- name: Build additional instances
hosts: localhost
gather_facts: False
tasks:
- name: Build instances request
sl_vm:
hostname: "{{ item.hostname }}"
domain: "{{ item.domain }}"
datacenter: "{{ item.datacenter }}"
tags: "{{ item.tags }}"
hourly: "{{ item.hourly }}"
private: "{{ item.private }}"
dedicated: "{{ item.dedicated }}"
local_disk: "{{ item.local_disk }}"
cpus: "{{ item.cpus }}"
memory: "{{ item.memory }}"
disks: "{{ item.disks }}"
os_code: "{{ item.os_code }}"
ssh_keys: "{{ item.ssh_keys }}"
wait: "{{ item.wait }}"
with_items:
- hostname: instance-2
domain: anydomain.com
datacenter: dal09
tags:
- ansible-module-test
- ansible-module-test-slaves
hourly: True
private: False
dedicated: False
local_disk: True
cpus: 1
memory: 1024
disks:
- 25
- 100
os_code: UBUNTU_LATEST
ssh_keys: []
wait: True
- hostname: instance-3
domain: anydomain.com
datacenter: dal09
tags:
- ansible-module-test
- ansible-module-test-slaves
hourly: True
private: False
dedicated: False
local_disk: True
cpus: 1
memory: 1024
disks:
- 25
- 100
os_code: UBUNTU_LATEST
ssh_keys: []
wait: True
- name: Cancel instances
hosts: localhost
gather_facts: False
tasks:
- name: Cancel by tag
sl_vm:
state: absent
tags: ansible-module-test
'''
# TODO: Disabled RETURN as it is breaking the build for docs. Needs to be fixed.
RETURN = '''# '''
import time
#TODO: get this info from API
STATES = ['present', 'absent']
DATACENTERS = ['ams01','ams03','che01','dal01','dal05','dal06','dal09','dal10','fra02','hkg02','hou02','lon02','mel01','mex01','mil01','mon01','osl01','par01','sjc01','sjc03','sao01','sea01','sng01','syd01','tok02','tor01','wdc01','wdc04']
CPU_SIZES = [1,2,4,8,16,32,56]
MEMORY_SIZES = [1024,2048,4096,6144,8192,12288,16384,32768,49152,65536,131072,247808]
INITIALDISK_SIZES = [25,100]
LOCALDISK_SIZES = [25,100,150,200,300]
SANDISK_SIZES = [10,20,25,30,40,50,75,100,125,150,175,200,250,300,350,400,500,750,1000,1500,2000]
NIC_SPEEDS = [10,100,1000]
try:
import SoftLayer
from SoftLayer import VSManager
HAS_SL = True
vsManager = VSManager(SoftLayer.create_client_from_env())
except ImportError:
HAS_SL = False
def create_virtual_instance(module):
instances = vsManager.list_instances(
hostname = module.params.get('hostname'),
domain = module.params.get('domain'),
datacenter = module.params.get('datacenter')
)
if instances:
return False, None
# Check if OS or Image Template is provided (Can't be both, defaults to OS)
if (module.params.get('os_code') is not None and module.params.get('os_code') != ''):
module.params['image_id'] = ''
elif (module.params.get('image_id') is not None and module.params.get('image_id') != ''):
module.params['os_code'] = ''
module.params['disks'] = [] # Blank out disks since it will use the template
else:
return False, None
tags = module.params.get('tags')
if isinstance(tags, list):
tags = ','.join(map(str, module.params.get('tags')))
instance = vsManager.create_instance(
hostname = module.params.get('hostname'),
domain = module.params.get('domain'),
cpus = module.params.get('cpus'),
memory = module.params.get('memory'),
hourly = module.params.get('hourly'),
datacenter = module.params.get('datacenter'),
os_code = module.params.get('os_code'),
image_id = module.params.get('image_id'),
local_disk = module.params.get('local_disk'),
disks = module.params.get('disks'),
ssh_keys = module.params.get('ssh_keys'),
nic_speed = module.params.get('nic_speed'),
private = module.params.get('private'),
public_vlan = module.params.get('public_vlan'),
private_vlan = module.params.get('private_vlan'),
dedicated = module.params.get('dedicated'),
post_uri = module.params.get('post_uri'),
tags = tags)
if instance is not None and instance['id'] > 0:
return True, instance
else:
return False, None
def wait_for_instance(module,id):
instance = None
completed = False
wait_timeout = time.time() + module.params.get('wait_time')
while not completed and wait_timeout > time.time():
try:
completed = vsManager.wait_for_ready(id, 10, 2)
if completed:
instance = vsManager.get_instance(id)
except:
completed = False
return completed, instance
def cancel_instance(module):
canceled = True
if module.params.get('instance_id') is None and (module.params.get('tags') or module.params.get('hostname') or module.params.get('domain')):
tags = module.params.get('tags')
if isinstance(tags, basestring):
tags = [module.params.get('tags')]
instances = vsManager.list_instances(tags = tags, hostname = module.params.get('hostname'), domain = module.params.get('domain'))
for instance in instances:
try:
vsManager.cancel_instance(instance['id'])
except:
canceled = False
elif module.params.get('instance_id') and module.params.get('instance_id') != 0:
try:
vsManager.cancel_instance(instance['id'])
except:
canceled = False
else:
return False, None
return canceled, None
def main():
module = AnsibleModule(
argument_spec=dict(
instance_id=dict(),
hostname=dict(),
domain=dict(),
datacenter=dict(choices=DATACENTERS),
tags=dict(),
hourly=dict(type='bool', default=True),
private=dict(type='bool', default=False),
dedicated=dict(type='bool', default=False),
local_disk=dict(type='bool', default=True),
cpus=dict(type='int', choices=CPU_SIZES),
memory=dict(type='int', choices=MEMORY_SIZES),
disks=dict(type='list', default=[25]),
os_code=dict(),
image_id=dict(),
nic_speed=dict(type='int', choices=NIC_SPEEDS),
public_vlan=dict(),
private_vlan=dict(),
ssh_keys=dict(type='list', default=[]),
post_uri=dict(),
state=dict(default='present', choices=STATES),
wait=dict(type='bool', default=True),
wait_time=dict(type='int', default=600)
)
)
if not HAS_SL:
module.fail_json(msg='softlayer python library required for this module')
if module.params.get('state') == 'absent':
(changed, instance) = cancel_instance(module)
elif module.params.get('state') == 'present':
(changed, instance) = create_virtual_instance(module)
if module.params.get('wait') == True and instance:
(changed, instance) = wait_for_instance(module, instance['id'])
module.exit_json(changed=changed, instance=json.loads(json.dumps(instance, default=lambda o: o.__dict__)))
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
| gpl-3.0 |
MycChiu/tensorflow | tensorflow/python/framework/file_system_test.py | 36 | 1896 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Tests for functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import load_library
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.ops import io_ops
from tensorflow.python.platform import resource_loader
from tensorflow.python.platform import test
from tensorflow.python.util import compat
class FileSystemTest(test.TestCase):
def setUp(self):
file_system_library = os.path.join(resource_loader.get_data_files_path(),
"test_file_system.so")
load_library.load_file_system_library(file_system_library)
def testBasic(self):
with self.test_session() as sess:
reader = io_ops.WholeFileReader("test_reader")
queue = data_flow_ops.FIFOQueue(99, [dtypes.string], shapes=())
queue.enqueue_many([["test://foo"]]).run()
queue.close().run()
key, value = sess.run(reader.read(queue))
self.assertEqual(key, compat.as_bytes("test://foo"))
self.assertEqual(value, compat.as_bytes("AAAAAAAAAA"))
if __name__ == "__main__":
test.main()
| apache-2.0 |
freedomtan/tensorflow | tensorflow/python/keras/engine/deferred_sequential_test.py | 4 | 8673 | # Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests specific to deferred-build `Sequential` models."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import unittest
import numpy as np
from tensorflow.python import keras
from tensorflow.python.compat import v2_compat
from tensorflow.python.keras import keras_parameterized
from tensorflow.python.keras import testing_utils
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test
try:
import h5py # pylint:disable=g-import-not-at-top
except ImportError:
h5py = None
class TestDeferredSequential(keras_parameterized.TestCase):
  """Tests for `Sequential` models built in deferred mode.

  "Deferred" means the model is created without an input shape; the weights
  and the underlying graph network are only created on first `__call__`,
  `build`, or `fit`.
  """

  @keras_parameterized.run_all_keras_modes(always_skip_v1=True)
  def test_build_behavior(self):
    """Graph network creation via __call__, build(), and compile/fit."""
    # Test graph network creation after __call__
    model = get_model()
    model(np.random.random((2, 6)))
    self.assertLen(model.weights, 4)
    self.assertTrue(model._is_graph_network)
    self.assertLen(model.inputs, 1)
    self.assertLen(model.outputs, 1)
    self.assertEqual(model.inputs[0].shape.as_list(), [2, 6])
    self.assertEqual(model.outputs[0].shape.as_list(), [2, 2])

    # Test effect of new __call__ with a different shape: the batch
    # dimension is relaxed to None.
    model(np.random.random((3, 6)))
    self.assertLen(model.inputs, 1)
    self.assertLen(model.outputs, 1)
    self.assertEqual(model.inputs[0].shape.as_list(), [None, 6])
    self.assertEqual(model.outputs[0].shape.as_list(), [None, 2])
    model(np.random.random((4, 6)))
    self.assertLen(model.inputs, 1)
    self.assertLen(model.outputs, 1)
    self.assertEqual(model.inputs[0].shape.as_list(), [None, 6])
    self.assertEqual(model.outputs[0].shape.as_list(), [None, 2])

    # Test graph network creation after build
    model = get_model()
    model.build((None, 6))
    self.assertLen(model.weights, 4)
    self.assertTrue(model._is_graph_network)
    self.assertLen(model.inputs, 1)
    self.assertLen(model.outputs, 1)
    self.assertEqual(model.inputs[0].shape.as_list(), [None, 6])
    self.assertEqual(model.outputs[0].shape.as_list(), [None, 2])

    # Test graph network creation after compile/fit
    model = get_model()
    model.compile(
        loss='mse',
        optimizer='rmsprop',
        metrics=[keras.metrics.CategoricalAccuracy()],
        run_eagerly=testing_utils.should_run_eagerly())
    model.fit(np.zeros((2, 6)), np.zeros((2, 2)))
    self.assertLen(model.weights, 4)
    self.assertTrue(model._is_graph_network)
    self.assertLen(model.inputs, 1)
    self.assertLen(model.outputs, 1)
    # Inconsistency here: with eager `fit`, the model is built with shape
    # (2, 6), but with graph function `fit`, it is built with shape `(None, 6)`.
    # This is likely due to our assumption "the batch size should be dynamic"
    # at the level of `Model`. TODO(fchollet): investigate and resolve.
    self.assertEqual(model.inputs[0].shape.as_list()[-1], 6)
    self.assertEqual(model.outputs[0].shape.as_list()[-1], 2)

  @keras_parameterized.run_all_keras_modes(always_skip_v1=True)
  def test_add_and_pop(self):
    """pop() and add() keep the model built and the graph network intact."""
    model = get_model()
    model.build((None, 6))
    self.assertTrue(model.built)
    self.assertTrue(model._is_graph_network)
    self.assertLen(model.layers, 3)
    self.assertLen(model.weights, 4)
    model.pop()
    self.assertTrue(model.built)
    self.assertTrue(model._is_graph_network)
    self.assertLen(model.layers, 2)
    self.assertLen(model.weights, 2)
    model.add(keras.layers.Dense(2))
    self.assertTrue(model.built)
    self.assertTrue(model._is_graph_network)
    self.assertLen(model.layers, 3)
    self.assertLen(model.weights, 4)

  @keras_parameterized.run_all_keras_modes(always_skip_v1=True)
  def test_feature_extraction(self):
    # This tests layer connectivity reset when rebuilding
    model = get_model()
    model(np.random.random((3, 6)))  # First build
    model(np.random.random((4, 6)))  # Triggers a rebuild
    # Classic feature extractor pattern
    extractor = keras.Model(inputs=model.inputs,
                            outputs=[layer.output for layer in model.layers])
    # Check that inputs and outputs are connected
    _ = extractor(np.random.random((4, 6)))

  @keras_parameterized.run_all_keras_modes(always_skip_v1=True)
  def test_saving_savedmodel(self):
    """Round-trips the model through the SavedModel format."""
    model = get_model()
    model(np.random.random((3, 6)))  # Build model

    path = os.path.join(self.get_temp_dir(), 'model_path')
    model.save(path)
    new_model = keras.models.load_model(path)
    model_layers = model._flatten_layers(include_self=True, recursive=False)
    new_model_layers = new_model._flatten_layers(
        include_self=True, recursive=False)
    for layer1, layer2 in zip(model_layers, new_model_layers):
      self.assertEqual(layer1.name, layer2.name)
      for w1, w2 in zip(layer1.weights, layer2.weights):
        self.assertAllClose(w1, w2)

  @unittest.skipIf(h5py is None, 'Test requires h5py')
  @keras_parameterized.run_all_keras_modes(always_skip_v1=True)
  def test_saving_h5(self):
    """Round-trips the model through the HDF5 format."""
    model = get_model()
    model(np.random.random((3, 6)))  # Build model

    # Fix: `path` was previously assigned twice with the same value (once
    # before building the model and once after); a single assignment suffices.
    path = os.path.join(self.get_temp_dir(), 'model_path.h5')
    model.save(path)
    new_model = keras.models.load_model(path)
    model_layers = model._flatten_layers(include_self=True, recursive=False)
    new_model_layers = new_model._flatten_layers(
        include_self=True, recursive=False)
    for layer1, layer2 in zip(model_layers, new_model_layers):
      self.assertEqual(layer1.name, layer2.name)
      for w1, w2 in zip(layer1.weights, layer2.weights):
        self.assertAllClose(w1, w2)

  @keras_parameterized.run_all_keras_modes
  def test_shared_layer(self):
    # This tests that preexisting layer connectivity is preserved
    # when auto-building graph networks
    shared_layer = keras.layers.Dense(2)
    m1 = keras.Sequential([shared_layer])
    m1(np.random.random((3, 6)))
    m2 = keras.Sequential([shared_layer])
    m2(np.random.random((3, 6)))
    # Nesting case
    shared_layer = keras.layers.Dense(2)
    m1 = keras.Sequential([shared_layer])
    m2 = keras.Sequential([shared_layer, m1])
    m2(np.random.random((3, 2)))

  @keras_parameterized.run_all_keras_modes
  def test_loss_layer(self):
    """Losses added via add_loss survive deferred builds and rebuilds."""

    class LossLayer(keras.layers.Layer):
      # Identity layer that contributes sum(inputs) as an activity loss.

      def call(self, inputs):
        self.add_loss(math_ops.reduce_sum(inputs))
        return inputs

    # Test loss layer alone
    model = keras.Sequential([LossLayer()])
    model.compile('rmsprop', run_eagerly=testing_utils.should_run_eagerly())
    loss = model.train_on_batch(np.ones((2, 2)))
    self.assertAllClose(loss, 4.)
    model(np.random.random((4, 2)))  # Triggers a rebuild
    loss = model.train_on_batch(np.ones((1, 2)))
    self.assertAllClose(loss, 2.)

    # Test loss layer combined with another layer
    model = keras.Sequential([
        keras.layers.Dense(1, kernel_initializer='ones'),
        LossLayer()])
    model.compile('rmsprop', run_eagerly=testing_utils.should_run_eagerly())
    loss = model.train_on_batch(np.ones((2, 2)))
    self.assertAllClose(loss, 4.)
    model(np.random.random((4, 2)))  # Triggers a rebuild
    loss = model.train_on_batch(np.ones((1, 2)))
    self.assertLess(loss, 2.)

    # Test loss layer combined with external loss
    model = keras.Sequential([
        keras.layers.Dense(1, kernel_initializer='ones'),
        LossLayer()])
    model.compile('rmsprop', 'mse',
                  run_eagerly=testing_utils.should_run_eagerly())
    loss = model.train_on_batch(np.ones((2, 2)), np.ones((2, 2)))
    model(np.random.random((4, 2)))  # Triggers a rebuild
    loss = model.train_on_batch(np.ones((1, 2)), np.ones((1, 2)))
def get_model():
  """Return a fresh deferred-build three-layer `Sequential` model."""
  layers = [
      keras.layers.Dense(2, name='first_layer'),
      keras.layers.Dropout(0.3, name='dp'),
      keras.layers.Dense(2, name='last_layer'),
  ]
  model = keras.models.Sequential()
  for layer in layers:
    model.add(layer)
  return model
if __name__ == '__main__':
  # These tests target the TF2 code paths, so force v2 behavior first.
  v2_compat.enable_v2_behavior()
  test.main()
| apache-2.0 |
kxepal/simpleubjson | simpleubjson/tools/inspect.py | 4 | 4487 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2011-2014 Alexander Shorin
# All rights reserved.
#
# This software is licensed as described in the file LICENSE, which
# you should have received as part of this distribution.
#
import sys
import simpleubjson
from ..draft8 import Draft8Decoder
from ..draft9 import Draft9Decoder
from ..exceptions import EarlyEndOfStreamError
def pprint(data, output=sys.stdout, allow_noop=True,
           indent=' ' * 4, max_level=None, spec='draft-9'):
    """Pretty prints ubjson data using the handy [ ]-notation to represent it in
    readable form. Example::

        [{]
            [S] [i] [2] [id]
            [I] [1234567890]
            [S] [i] [4] [name]
            [S] [i] [3] [bob]
        [}]

    :param data: `.read([size])`-able object or source string with ubjson data.
    :param output: `.write([data])`-able object.
    :param allow_noop: Allow emit :const:`~simpleubjson.NOOP` or not.
    :param indent: Indention string.
    :param max_level: Max level of inspection nested containers. By default
                      there is no limit, but you may hit system recursion limit.
    :param spec: UBJSON specification. Supported Draft-8 and Draft-9
                 specifications by ``draft-8`` or ``draft-9`` keys.
    :type spec: str
    """
    def maybe_write(data, level):
        # Emit `data` indented by nesting depth, unless the depth exceeds
        # the requested max_level (in which case the line is suppressed).
        if max_level is None or level <= max_level:
            output.write('%s' % (indent * level))
            output.write(data)
            output.flush()

    def inspect_draft8(decoder, level, container_size):
        # Draft-8 walk: containers carry an up-front element count where 255
        # means "unsized" (terminated by an 'E' marker); nested objects are
        # handled by recursion.
        while 1:
            try:
                tag, length, value = decoder.next_tlv()
                utag = tag.decode()
            except EarlyEndOfStreamError:
                break
            # standalone markers
            if length is None and value is None:
                if utag == 'E':
                    # 'E' closes the current container, so dedent it.
                    maybe_write('[%s]\n' % (utag,), level - 1)
                    return
                else:
                    maybe_write('[%s]\n' % (utag,), level)
            # sized containers
            elif length is not None and value is None:
                maybe_write('[%s] [%s]\n' % (utag, length), level)
                if utag in 'oO':
                    # Objects hold key/value pairs, so they contain twice as
                    # many items — except unsized (255) containers.
                    length = length == 255 and length or length * 2
                    inspect_draft8(decoder, level + 1, length)
            # plane values
            elif length is None and value is not None:
                value = decoder.dispatch[tag](decoder, tag, length, value)
                maybe_write('[%s] [%s]\n' % (utag, value), level)
            # sized values
            else:
                value = decoder.dispatch[tag](decoder, tag, length, value)
                maybe_write('[%s] [%s] [%s]\n' % (utag, length, value), level)
            # Count down elements of a sized container; 255 flags an
            # unsized container, which is only ended by its 'E' marker.
            if container_size != 255:
                container_size -= 1
                if not container_size:
                    return

    def inspect_draft9(decoder, level, *args):
        # Draft-9 walk: containers are delimited by explicit open/close
        # markers, so indentation simply follows those markers.
        while 1:
            try:
                tag, length, value = decoder.next_tlv()
                utag = tag.decode()
            except EarlyEndOfStreamError:
                break
            # standalone markers
            if length is None and value is None:
                if utag in ']}':
                    level -= 1
                maybe_write('[%s]\n' % (utag,), level)
                if utag in '{[':
                    level += 1
            # plane values
            elif length is None and value is not None:
                value = decoder.dispatch[tag](decoder, tag, length, value)
                maybe_write('[%s] [%s]\n' % (utag, value), level)
            # sized values
            else:
                value = decoder.dispatch[tag](decoder, tag, length, value)
                pattern = '[%s] [%s] [%s] [%s]\n'
                # very dirty hack to show size as marker and value:
                # re-encode the length and decode it again to recover the
                # numeric tag that was used for it on the wire.
                _decoder = Draft9Decoder(simpleubjson.encode(length, spec=spec))
                tlv = _decoder.next_tlv()
                args = tuple([utag, tlv[0].decode(), tlv[2], value])
                maybe_write(pattern % args, level)

    # Select decoder/walker by requested specification.
    if spec.lower() in ['draft8', 'draft-8']:
        decoder = Draft8Decoder(data, allow_noop)
        inspect = inspect_draft8
    elif spec.lower() in ['draft9', 'draft-9']:
        decoder = Draft9Decoder(data, allow_noop)
        inspect = inspect_draft9
    else:
        raise ValueError('Unknown or unsupported specification %s' % spec)
    inspect(decoder, 0, 255)
| bsd-2-clause |
josecanciani/pyparser | lang/php/grammar/pclass.py | 1 | 2429 |
from pyparser.grammar.pclass import Class as BaseClass, Extractor as BaseExtractor
from pyparser.grammar.exception import InvalidSyntax
from pyparser.lang.php.grammar.method import Extractor as MethodExtractor
from pyparser.lang.php.grammar.classprop import Extractor as PropExtractor
# Tokens that may start a PHP class declaration line.
keywords = ['abstract', 'class']
class Class(BaseClass):
    """A single PHP class parsed out of a source file."""

    def getName(self):
        """Return the class name from the declaration line.

        Raises InvalidSyntax when no `class` keyword is present.
        """
        line = self.getFirstLine()
        take_next = False
        for token in line.lstrip().split(' '):
            if take_next:
                # Strip a possible trailing '(' fragment from the name.
                return token.strip().split('(')[0]
            if token.strip() == 'class':
                take_next = True
        raise InvalidSyntax('Could not find class name in line: ' + line)

    def isAbstract(self):
        """Whether the declaration carries the `abstract` modifier."""
        tokens = [word.strip() for word in self.getFirstLine().lstrip().split(' ')]
        return 'abstract' in tokens

    def getExtendsFromName(self):
        """Return the parent class name, or None when the class extends nothing."""
        grab_next = False
        for word in self.getFirstLine().lstrip().split(' '):
            if grab_next:
                return word.strip()
            if word == 'extends':
                grab_next = True
        return None

    def getMethods(self):
        """Extract the methods defined inside this class."""
        return MethodExtractor(self).getMethods()

    def getProperties(self):
        """Extract the properties defined inside this class."""
        return PropExtractor(self).getProperties()

    def getExtension(self):
        """File extension handled by this grammar."""
        return 'php'
class Extractor(BaseExtractor):
    """Finds PHP class definitions inside a blob of source code."""

    def createClass(self, code, startLineNumber):
        """Factory hook: build a PHP `Class` for the matched source span."""
        return Class(code, self.parent, startLineNumber)

    def getClasses(self, code):
        """Scan `code` line by line and return a list of `Class` objects.

        A class starts on a line whose first word is one of `keywords` and
        ends on the first line whose leading non-blank character is '}' at
        the same column where the declaration keyword started.
        """
        findClosure = None  # column of the opening keyword; None while outside a class
        classCode = ''
        classes = []
        lineNumber = -1
        for line in code.splitlines(True):
            lineNumber += 1
            if findClosure is None:  # fixed: compare to None with `is`, not `==`
                for keyword in keywords:
                    if line.lstrip().startswith(keyword):
                        findClosure = line.find(keyword)
                        classCode = line
                        startLineNumber = lineNumber
                        break  # declaration found; no need to try other keywords
            else:
                classCode = classCode + line
                strippedLine = line.lstrip()
                # The closing brace must sit at the same column as the keyword
                # that opened the class.
                if len(strippedLine) and strippedLine[0] == '}' and line.find('}') == findClosure:
                    classes.append(self.createClass(classCode, startLineNumber))
                    findClosure = None
        return classes
| gpl-3.0 |
jinzo27/infoGrabr | lib/cpp/scons/scons-local-2.0.0.final.0/SCons/Conftest.py | 118 | 27708 | """SCons.Conftest
Autoconf-like configuration support; low level implementation of tests.
"""
#
# Copyright (c) 2003 Stichting NLnet Labs
# Copyright (c) 2001, 2002, 2003 Steven Knight
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
#
# The purpose of this module is to define how a check is to be performed.
# Use one of the Check...() functions below.
#
#
# A context class is used that defines functions for carrying out the tests,
# logging and messages. The following methods and members must be present:
#
# context.Display(msg) Function called to print messages that are normally
# displayed for the user. Newlines are explicitly used.
# The text should also be written to the logfile!
#
# context.Log(msg) Function called to write to a log file.
#
# context.BuildProg(text, ext)
# Function called to build a program, using "ext" for the
# file extention. Must return an empty string for
# success, an error message for failure.
# For reliable test results building should be done just
# like an actual program would be build, using the same
# command and arguments (including configure results so
# far).
#
# context.CompileProg(text, ext)
# Function called to compile a program, using "ext" for
# the file extention. Must return an empty string for
# success, an error message for failure.
# For reliable test results compiling should be done just
# like an actual source file would be compiled, using the
# same command and arguments (including configure results
# so far).
#
# context.AppendLIBS(lib_name_list)
# Append "lib_name_list" to the value of LIBS.
# "lib_namelist" is a list of strings.
# Return the value of LIBS before changing it (any type
# can be used, it is passed to SetLIBS() later.)
#
# context.PrependLIBS(lib_name_list)
# Prepend "lib_name_list" to the value of LIBS.
# "lib_namelist" is a list of strings.
# Return the value of LIBS before changing it (any type
# can be used, it is passed to SetLIBS() later.)
#
# context.SetLIBS(value)
# Set LIBS to "value". The type of "value" is what
# AppendLIBS() returned.
# Return the value of LIBS before changing it (any type
# can be used, it is passed to SetLIBS() later.)
#
# context.headerfilename
# Name of file to append configure results to, usually
# "confdefs.h".
# The file must not exist or be empty when starting.
# Empty or None to skip this (some tests will not work!).
#
# context.config_h (may be missing). If present, must be a string, which
# will be filled with the contents of a config_h file.
#
# context.vardict Dictionary holding variables used for the tests and
# stores results from the tests, used for the build
# commands.
# Normally contains "CC", "LIBS", "CPPFLAGS", etc.
#
# context.havedict Dictionary holding results from the tests that are to
# be used inside a program.
# Names often start with "HAVE_". These are zero
# (feature not present) or one (feature present). Other
# variables may have any value, e.g., "PERLVERSION" can
# be a number and "SYSTEMNAME" a string.
#
import re
from types import IntType
#
# PUBLIC VARIABLES
#
LogInputFiles = 1    # Set that to log the input files in case of a failed test
LogErrorMessages = 1 # Set that to log Conftest-generated error messages
#
# PUBLIC FUNCTIONS
#
# Generic remarks:
# - When a language is specified which is not supported the test fails. The
# message is a bit different, because not all the arguments for the normal
# message are available yet (chicken-egg problem).
def CheckBuilder(context, text = None, language = None):
    """
    Configure check to see if the compiler works.
    Note that this uses the current value of compiler and linker flags, make
    sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly.
    "language" should be "C" or "C++" and is used to select the compiler.
    Default is "C".
    "text" may be used to specify the code to be build.
    Returns an empty string for success, an error message for failure.
    """
    lang, suffix, msg = _lang2suffix(language)
    if msg:
        # Unsupported language: report and return the error message.
        context.Display("%s\n" % msg)
        return msg

    if not text:
        # Minimal program: just an empty main().
        text = """
int main() {
    return 0;
}
"""

    context.Display("Checking if building a %s file works... " % lang)
    ret = context.BuildProg(text, suffix)
    _YesNoResult(context, ret, None, text)
    return ret
def CheckCC(context):
    """
    Configure check for a working C compiler.

    This checks whether the C compiler, as defined in the $CC construction
    variable, can compile a C source file. It uses the current $CCCOM value
    too, so that it can test against non working flags.
    """
    context.Display("Checking whether the C compiler works")
    # The smallest complete C program: an empty main().
    source = """
int main()
{
    return 0;
}
"""
    status = _check_empty_program(context, 'CC', source, 'C')
    _YesNoResult(context, status, None, source)
    return status
def CheckSHCC(context):
    """
    Configure check for a working shared C compiler.

    This checks whether the C compiler, as defined in the $SHCC construction
    variable, can compile a C source file. It uses the current $SHCCCOM value
    too, so that it can test against non working flags.
    """
    context.Display("Checking whether the (shared) C compiler works")
    # A shared object needs no main() entry point, so compile a plain function.
    source = """
int foo()
{
    return 0;
}
"""
    status = _check_empty_program(context, 'SHCC', source, 'C', use_shared = True)
    _YesNoResult(context, status, None, source)
    return status
def CheckCXX(context):
    """
    Configure check for a working CXX compiler.

    This checks whether the CXX compiler, as defined in the $CXX construction
    variable, can compile a CXX source file. It uses the current $CXXCOM value
    too, so that it can test against non working flags.
    """
    context.Display("Checking whether the C++ compiler works")
    # The smallest complete program: an empty main().
    source = """
int main()
{
    return 0;
}
"""
    status = _check_empty_program(context, 'CXX', source, 'C++')
    _YesNoResult(context, status, None, source)
    return status
def CheckSHCXX(context):
    """
    Configure check for a working shared CXX compiler.

    This checks whether the CXX compiler, as defined in the $SHCXX construction
    variable, can compile a CXX source file. It uses the current $SHCXXCOM value
    too, so that it can test against non working flags.
    """
    context.Display("Checking whether the (shared) C++ compiler works")
    source = """
int main()
{
    return 0;
}
"""
    status = _check_empty_program(context, 'SHCXX', source, 'C++', use_shared = True)
    _YesNoResult(context, status, None, source)
    return status
def _check_empty_program(context, comp, text, language, use_shared = False):
    """Compile the minimal program `text` with the compiler named by the
    construction variable `comp`. Return 0 on success, 1 otherwise."""
    # Fail fast when the compiler construction variable is not set or empty.
    missing = comp not in context.env or not context.env[comp]
    if missing:
        return 1

    _, suffix, msg = _lang2suffix(language)
    if msg:
        # Unknown language: cannot even pick a file suffix.
        return 1

    if use_shared:
        build = context.CompileSharedObject
    else:
        build = context.CompileProg
    return build(text, suffix)
def CheckFunc(context, function_name, header = None, language = None):
    """
    Configure check for a function "function_name".
    "language" should be "C" or "C++" and is used to select the compiler.
    Default is "C".
    Optional "header" can be defined to define a function prototype, include a
    header file or anything else that comes before main().
    Sets HAVE_function_name in context.havedict according to the result.
    Note that this uses the current value of compiler and linker flags, make
    sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly.
    Returns an empty string for success, an error message for failure.
    """
    # Remarks from autoconf:
    # - Don't include <ctype.h> because on OSF/1 3.0 it includes <sys/types.h>
    #   which includes <sys/select.h> which contains a prototype for select.
    #   Similarly for bzero.
    # - assert.h is included to define __stub macros and hopefully few
    #   prototypes, which can conflict with char $1(); below.
    # - Override any gcc2 internal prototype to avoid an error.
    # - We use char for the function declaration because int might match the
    #   return type of a gcc2 builtin and then its argument prototype would
    #   still apply.
    # - The GNU C library defines this for functions which it implements to
    #   always fail with ENOSYS. Some functions are actually named something
    #   starting with __ and the normal name is an alias.
    if context.headerfilename:
        includetext = '#include "%s"' % context.headerfilename
    else:
        includetext = ''
    if not header:
        # Default prototype; char return type dodges gcc2 builtins (see above).
        header = """
#ifdef __cplusplus
extern "C"
#endif
char %s();""" % function_name

    lang, suffix, msg = _lang2suffix(language)
    if msg:
        context.Display("Cannot check for %s(): %s\n" % (function_name, msg))
        return msg

    text = """
%(include)s
#include <assert.h>
%(hdr)s

int main() {
#if defined (__stub_%(name)s) || defined (__stub___%(name)s)
fail fail fail
#else
%(name)s();
#endif

return 0;
}
""" % { 'name': function_name,
        'include': includetext,
        'hdr': header }

    context.Display("Checking for %s function %s()... " % (lang, function_name))
    ret = context.BuildProg(text, suffix)
    _YesNoResult(context, ret, "HAVE_" + function_name, text,
                 "Define to 1 if the system has the function `%s'." %\
                 function_name)
    return ret
def CheckHeader(context, header_name, header = None, language = None,
                include_quotes = None):
    """
    Configure check for a C or C++ header file "header_name".
    Optional "header" can be defined to do something before including the
    header file (unusual, supported for consistency).
    "language" should be "C" or "C++" and is used to select the compiler.
    Default is "C".
    Sets HAVE_header_name in context.havedict according to the result.
    Note that this uses the current value of compiler and linker flags, make
    sure $CFLAGS and $CPPFLAGS are set correctly.
    Returns an empty string for success, an error message for failure.
    """
    # Why compile the program instead of just running the preprocessor?
    # It is possible that the header file exists, but actually using it may
    # fail (e.g., because it depends on other header files). Thus this test is
    # more strict. It may require using the "header" argument.
    #
    # Use <> by default, because the check is normally used for system header
    # files. SCons passes '""' to overrule this.

    # Include "confdefs.h" first, so that the header can use HAVE_HEADER_H.
    if context.headerfilename:
        includetext = '#include "%s"\n' % context.headerfilename
    else:
        includetext = ''
    if not header:
        header = ""

    lang, suffix, msg = _lang2suffix(language)
    if msg:
        context.Display("Cannot check for header file %s: %s\n"
                        % (header_name, msg))
        return msg

    if not include_quotes:
        include_quotes = "<>"

    text = "%s%s\n#include %s%s%s\n\n" % (includetext, header,
                                          include_quotes[0], header_name, include_quotes[1])

    context.Display("Checking for %s header file %s... " % (lang, header_name))
    ret = context.CompileProg(text, suffix)
    _YesNoResult(context, ret, "HAVE_" + header_name, text,
                 "Define to 1 if you have the <%s> header file." % header_name)
    return ret
def CheckType(context, type_name, fallback = None,
              header = None, language = None):
    """
    Configure check for a C or C++ type "type_name".
    Optional "header" can be defined to include a header file.
    "language" should be "C" or "C++" and is used to select the compiler.
    Default is "C".
    Optional "fallback" is a type name; when the check fails it is written to
    the config header as "typedef fallback type_name;".
    Sets HAVE_type_name in context.havedict according to the result.
    Note that this uses the current value of compiler and linker flags, make
    sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly.
    Returns an empty string for success, an error message for failure.
    """
    # Include "confdefs.h" first, so that the header can use HAVE_HEADER_H.
    if context.headerfilename:
        includetext = '#include "%s"' % context.headerfilename
    else:
        includetext = ''
    if not header:
        header = ""

    lang, suffix, msg = _lang2suffix(language)
    if msg:
        context.Display("Cannot check for %s type: %s\n" % (type_name, msg))
        return msg

    # Remarks from autoconf about this test:
    # - Grepping for the type in include files is not reliable (grep isn't
    #   portable anyway).
    # - Using "TYPE my_var;" doesn't work for const qualified types in C++.
    #   Adding an initializer is not valid for some C++ classes.
    # - Using the type as parameter to a function either fails for K&$ C or for
    #   C++.
    # - Using "TYPE *my_var;" is valid in C for some types that are not
    #   declared (struct something).
    # - Using "sizeof(TYPE)" is valid when TYPE is actually a variable.
    # - Using the previous two together works reliably.
    text = """
%(include)s
%(header)s

int main() {
  if ((%(name)s *) 0)
    return 0;
  if (sizeof (%(name)s))
    return 0;
}
""" % { 'include': includetext,
        'header': header,
        'name': type_name }

    context.Display("Checking for %s type %s... " % (lang, type_name))
    ret = context.BuildProg(text, suffix)
    _YesNoResult(context, ret, "HAVE_" + type_name, text,
                 "Define to 1 if the system has the type `%s'." % type_name)
    if ret and fallback and context.headerfilename:
        # Type missing: write the fallback typedef into the config header.
        f = open(context.headerfilename, "a")
        f.write("typedef %s %s;\n" % (fallback, type_name))
        f.close()

    return ret
def CheckTypeSize(context, type_name, header = None, language = None, expect = None):
    """This check can be used to get the size of a given type, or to check whether
    the type is of expected size.

    Arguments:
        - type : str
            the type to check
        - includes : sequence
            list of headers to include in the test code before testing the type
        - language : str
            'C' or 'C++'
        - expect : int
            if given, will test whether the type has the given number of bytes.
            If not given, will automatically find the size.

        Returns:
            status : int
                0 if the check failed, or the found size of the type if the check succeeded."""
    # Include "confdefs.h" first, so that the header can use HAVE_HEADER_H.
    if context.headerfilename:
        includetext = '#include "%s"' % context.headerfilename
    else:
        includetext = ''

    if not header:
        header = ""

    lang, suffix, msg = _lang2suffix(language)
    if msg:
        context.Display("Cannot check for %s type: %s\n" % (type_name, msg))
        return msg

    src = includetext + header
    if not expect is None:
        # Only check if the given size is the right one
        context.Display('Checking %s is %d bytes... ' % (type_name, expect))

        # test code taken from autoconf: this is a pretty clever hack to find that
        # a type is of a given size using only compilation. This speeds things up
        # quite a bit compared to straightforward code using TryRun
        src = src + r"""
typedef %s scons_check_type;

int main()
{
    static int test_array[1 - 2 * !(((long int) (sizeof(scons_check_type))) == %d)];
    test_array[0] = 0;

    return 0;
}
"""

        st = context.CompileProg(src % (type_name, expect), suffix)
        if not st:
            context.Display("yes\n")
            _Have(context, "SIZEOF_%s" % type_name, expect,
                  "The size of `%s', as computed by sizeof." % type_name)
            return expect
        else:
            context.Display("no\n")
            _LogFailed(context, src, st)
            return 0
    else:
        # No expected size given: discover it by running a probe program
        # that prints sizeof(type).
        context.Message('Checking size of %s ... ' % type_name)

        # We have to be careful with the program we wish to test here since
        # compilation will be attempted using the current environment's flags.
        # So make sure that the program will compile without any warning. For
        # example using: 'int main(int argc, char** argv)' will fail with the
        # '-Wall -Werror' flags since the variables argc and argv would not be
        # used in the program...
        #
        src = src + """
#include <stdlib.h>
#include <stdio.h>
int main() {
    printf("%d", (int)sizeof(""" + type_name + """));
    return 0;
}
"""
        st, out = context.RunProg(src, suffix)
        try:
            size = int(out)
        except ValueError:
            # If cannot convert output of test prog to an integer (the size),
            # something went wrong, so just fail
            st = 1
            size = 0

        if not st:
            context.Display("yes\n")
            _Have(context, "SIZEOF_%s" % type_name, size,
                  "The size of `%s', as computed by sizeof." % type_name)
            return size
        else:
            context.Display("no\n")
            _LogFailed(context, src, st)
            return 0

    return 0
def CheckDeclaration(context, symbol, includes = None, language = None):
    """Checks whether symbol is declared.

    Use the same test as autoconf, that is test whether the symbol is defined
    as a macro or can be used as an r-value.

    Arguments:
        symbol : str
            the symbol to check
        includes : str
            Optional "header" can be defined to include a header file.
        language : str
            only C and C++ supported.

    Returns:
        status : bool
            True if the check failed, False if succeeded."""
    # Include "confdefs.h" first, so that the header can use HAVE_HEADER_H.
    if context.headerfilename:
        includetext = '#include "%s"' % context.headerfilename
    else:
        includetext = ''
    if not includes:
        includes = ""

    lang, suffix, msg = _lang2suffix(language)
    if msg:
        # BUG FIX: this message previously interpolated the undefined name
        # `type_name` (copied from CheckType), which raised a NameError for
        # unsupported languages instead of reporting the problem. Use the
        # actual `symbol` argument.
        context.Display("Cannot check for declaration %s: %s\n" % (symbol, msg))
        return msg

    src = includetext + includes
    context.Display('Checking whether %s is declared... ' % symbol)

    # Either the symbol is a macro (then the #ifndef branch is skipped) or it
    # must be usable as an r-value for the probe to compile.
    src = src + r"""
int main()
{
#ifndef %s
    (void) %s;
#endif
    ;
    return 0;
}
""" % (symbol, symbol)

    st = context.CompileProg(src, suffix)
    _YesNoResult(context, st, "HAVE_DECL_" + symbol, src,
                 "Set to 1 if %s is defined." % symbol)
    return st
def CheckLib(context, libs, func_name = None, header = None,
             extra_libs = None, call = None, language = None, autoadd = 1,
             append = True):
    """
    Configure check for a C or C++ libraries "libs". Searches through
    the list of libraries, until one is found where the test succeeds.
    Tests if "func_name" or "call" exists in the library. Note: if it exists
    in another library the test succeeds anyway!
    Optional "header" can be defined to include a header file. If not given a
    default prototype for "func_name" is added.
    Optional "extra_libs" is a list of library names to be added after
    "lib_name" in the build command. To be used for libraries that "lib_name"
    depends on.
    Optional "call" replaces the call to "func_name" in the test code. It must
    consist of complete C statements, including a trailing ";".
    Both "func_name" and "call" arguments are optional, and in that case, just
    linking against the libs is tested.
    "language" should be "C" or "C++" and is used to select the compiler.
    Default is "C".
    "autoadd" (true by default) keeps the first working library in $LIBS;
    "append" selects whether it is appended or prepended there.
    Note that this uses the current value of compiler and linker flags, make
    sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly.
    Returns an empty string for success, an error message for failure.
    """
    # Include "confdefs.h" first, so that the header can use HAVE_HEADER_H.
    if context.headerfilename:
        includetext = '#include "%s"' % context.headerfilename
    else:
        includetext = ''
    if not header:
        header = ""

    text = """
%s
%s""" % (includetext, header)

    # Add a function declaration if needed.
    if func_name and func_name != "main":
        if not header:
            text = text + """
#ifdef __cplusplus
extern "C"
#endif
char %s();
""" % func_name

    # The actual test code.
    # NOTE(review): when both func_name and call are None this builds the
    # literal statement "None();" — looks unintended; verify against callers.
    if not call:
        call = "%s();" % func_name

    # if no function to test, leave main() blank
    text = text + """
int
main() {
  %s
return 0;
}
""" % (call or "")

    # Shorten multi-line / trailing-';' calls for the progress message.
    if call:
        i = call.find("\n")
        if i > 0:
            calltext = call[:i] + ".."
        elif call[-1] == ';':
            calltext = call[:-1]
        else:
            calltext = call

    for lib_name in libs:

        lang, suffix, msg = _lang2suffix(language)
        if msg:
            context.Display("Cannot check for library %s: %s\n" % (lib_name, msg))
            return msg

        # if a function was specified to run in main(), say it
        if call:
            context.Display("Checking for %s in %s library %s... "
                            % (calltext, lang, lib_name))
        # otherwise, just say the name of library and language
        else:
            context.Display("Checking for %s library %s... "
                            % (lang, lib_name))

        if lib_name:
            l = [ lib_name ]
            if extra_libs:
                l.extend(extra_libs)
            if append:
                oldLIBS = context.AppendLIBS(l)
            else:
                oldLIBS = context.PrependLIBS(l)
            sym = "HAVE_LIB" + lib_name
        else:
            # Empty entry: test linking with the current $LIBS unchanged.
            oldLIBS = -1
            sym = None

        ret = context.BuildProg(text, suffix)

        _YesNoResult(context, ret, sym, text,
                     "Define to 1 if you have the `%s' library." % lib_name)
        # Restore $LIBS when the test failed or autoadd is disabled.
        if oldLIBS != -1 and (ret or not autoadd):
            context.SetLIBS(oldLIBS)

        if not ret:
            return ret

    return ret
#
# END OF PUBLIC FUNCTIONS
#
def _YesNoResult(context, ret, key, text, comment = None):
    """
    Report the outcome of a configure-style check as "yes" or "no".

    "ret" is the check's return value: empty/falsy on success, an error
    message on failure.  "key" names the symbol to record (HAVE_foo); when
    it is falsy no symbol is recorded.  "text" is the source of the test
    program, logged when the check failed.  "comment" is an optional C
    comment placed above the generated #define (wrapped in /* */
    automatically).
    """
    # Record the result first so the header/config text is updated even for
    # failed checks (autoconf-style "/* #undef ... */" entries).
    if key:
        _Have(context, key, not ret, comment)
    if not ret:
        context.Display("yes\n")
        return
    context.Display("no\n")
    _LogFailed(context, text, ret)
def _Have(context, key, have, comment = None):
"""
Store result of a test in context.havedict and context.headerfilename.
"key" is a "HAVE_abc" name. It is turned into all CAPITALS and non-
alphanumerics are replaced by an underscore.
The value of "have" can be:
1 - Feature is defined, add "#define key".
0 - Feature is not defined, add "/* #undef key */".
Adding "undef" is what autoconf does. Not useful for the
compiler, but it shows that the test was done.
number - Feature is defined to this number "#define key have".
Doesn't work for 0 or 1, use a string then.
string - Feature is defined to this string "#define key have".
Give "have" as is should appear in the header file, include quotes
when desired and escape special characters!
"""
key_up = key.upper()
key_up = re.sub('[^A-Z0-9_]', '_', key_up)
context.havedict[key_up] = have
if have == 1:
line = "#define %s 1\n" % key_up
elif have == 0:
line = "/* #undef %s */\n" % key_up
elif isinstance(have, IntType):
line = "#define %s %d\n" % (key_up, have)
else:
line = "#define %s %s\n" % (key_up, str(have))
if comment is not None:
lines = "\n/* %s */\n" % comment + line
else:
lines = "\n" + line
if context.headerfilename:
f = open(context.headerfilename, "a")
f.write(lines)
f.close()
elif hasattr(context,'config_h'):
context.config_h = context.config_h + lines
def _LogFailed(context, text, msg):
    """
    Record a failed test program in the configure log.

    The program source is written with line numbers prepended so that
    compiler error messages (which reference line numbers) can be matched
    up with the code.
    """
    if LogInputFiles:
        context.Log("Failed program was:\n")
        src_lines = text.split('\n')
        # Drop the trailing empty element produced by a final newline.
        if src_lines and src_lines[-1] == '':
            src_lines = src_lines[:-1]
        for lineno, src_line in enumerate(src_lines, 1):
            context.Log("%d: %s\n" % (lineno, src_line))
    if LogErrorMessages:
        context.Log("Error message: %s\n" % msg)
def _lang2suffix(lang):
"""
Convert a language name to a suffix.
When "lang" is empty or None C is assumed.
Returns a tuple (lang, suffix, None) when it works.
For an unrecognized language returns (None, None, msg).
Where:
lang = the unified language name
suffix = the suffix, including the leading dot
msg = an error message
"""
if not lang or lang in ["C", "c"]:
return ("C", ".c", None)
if lang in ["c++", "C++", "cpp", "CXX", "cxx"]:
return ("C++", ".cpp", None)
return None, None, "Unsupported language: %s" % lang
# vim: set sw=4 et sts=4 tw=79 fo+=l:
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| mit |
ujjvala-addsol/addsol_hr | openerp/addons/purchase/company.py | 383 | 1576 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv,fields
class company(osv.osv):
    """Add a purchase lead-time setting to res.company."""
    _inherit = 'res.company'
    _columns = {
        'po_lead': fields.float(
            'Purchase Lead Time', required=True,
            # BUG FIX: the adjacent string literals were concatenated without
            # separating spaces, so the rendered help text read
            # "...the systemgenerates ... products,they will...".  Each
            # fragment now ends with an explicit trailing space.
            help="Margin of error for supplier lead times. When the system "
                 "generates Purchase Orders for procuring products, "
                 "they will be scheduled that many days earlier "
                 "to cope with unexpected supplier delays."),
    }
    _defaults = {
        # Default safety margin of one day.
        'po_lead': lambda *a: 1.0,
    }
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
bhilburn/gnuradio | gr-digital/python/digital/qa_clock_recovery_mm.py | 45 | 5378 | #!/usr/bin/env python
#
# Copyright 2011-2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
import random
import cmath
from gnuradio import gr, gr_unittest, digital, blocks
class test_clock_recovery_mm(gr_unittest.TestCase):
    """QA for the Mueller & Muller clock recovery blocks (cc and ff).

    Each test drives a known waveform through a recovery block and checks
    that the tail of the output has converged to the expected symbol
    values.  Only the last samples are compared because the timing loop
    needs some run-in time to settle.
    """

    def setUp(self):
        self.tb = gr.top_block()

    def tearDown(self):
        self.tb = None

    def _run_and_compare_tail(self, src, snk, expected_result, ncmp):
        """Connect src -> self.test -> snk, run the flowgraph, and return
        the last ncmp samples of (expected_result, actual_output).

        Factors out the run/trim boilerplate previously duplicated across
        all four tests."""
        self.tb.connect(src, self.test, snk)
        self.tb.run()
        dst_data = snk.data()
        return (expected_result[len(expected_result) - ncmp:],
                dst_data[len(dst_data) - ncmp:])

    def test01(self):
        # Test complex/complex version with a constant input
        omega = 2
        gain_omega = 0.001
        mu = 0.5
        gain_mu = 0.01
        omega_rel_lim = 0.001
        self.test = digital.clock_recovery_mm_cc(omega, gain_omega,
                                                 mu, gain_mu,
                                                 omega_rel_lim)
        data = 100*[complex(1, 1),]
        self.src = blocks.vector_source_c(data, False)
        self.snk = blocks.vector_sink_c()
        # doesn't quite get to 1.0
        expected_result = 100*[complex(0.99972, 0.99972)]
        expected, actual = self._run_and_compare_tail(self.src, self.snk,
                                                      expected_result, 30)
        self.assertComplexTuplesAlmostEqual(expected, actual, 5)

    def test02(self):
        # Test float/float version with a constant input
        omega = 2
        gain_omega = 0.01
        mu = 0.5
        gain_mu = 0.01
        omega_rel_lim = 0.001
        self.test = digital.clock_recovery_mm_ff(omega, gain_omega,
                                                 mu, gain_mu,
                                                 omega_rel_lim)
        data = 100*[1,]
        self.src = blocks.vector_source_f(data, False)
        self.snk = blocks.vector_sink_f()
        # doesn't quite get to 1.0
        expected_result = 100*[0.9997, ]
        expected, actual = self._run_and_compare_tail(self.src, self.snk,
                                                      expected_result, 30)
        self.assertFloatTuplesAlmostEqual(expected, actual, 4)

    def test03(self):
        # Test complex/complex version with varying input
        omega = 2
        gain_omega = 0.01
        mu = 0.25
        gain_mu = 0.01
        omega_rel_lim = 0.0001
        self.test = digital.clock_recovery_mm_cc(omega, gain_omega,
                                                 mu, gain_mu,
                                                 omega_rel_lim)
        data = 1000*[complex(1, 1), complex(1, 1), complex(-1, -1), complex(-1, -1)]
        self.src = blocks.vector_source_c(data, False)
        self.snk = blocks.vector_sink_c()
        expected_result = 1000*[complex(-1.2, -1.2), complex(1.2, 1.2)]
        expected, actual = self._run_and_compare_tail(self.src, self.snk,
                                                      expected_result, 100)
        self.assertComplexTuplesAlmostEqual(expected, actual, 1)

    def test04(self):
        # Test float/float version with varying input
        omega = 2
        gain_omega = 0.01
        mu = 0.25
        gain_mu = 0.1
        omega_rel_lim = 0.001
        self.test = digital.clock_recovery_mm_ff(omega, gain_omega,
                                                 mu, gain_mu,
                                                 omega_rel_lim)
        data = 1000*[1, 1, -1, -1]
        self.src = blocks.vector_source_f(data, False)
        self.snk = blocks.vector_sink_f()
        expected_result = 1000*[-1.2, 1.2]
        expected, actual = self._run_and_compare_tail(self.src, self.snk,
                                                      expected_result, 100)
        self.assertFloatTuplesAlmostEqual(expected, actual, 1)
# Run the QA suite directly, writing results to an XML report file.
if __name__ == '__main__':
    gr_unittest.run(test_clock_recovery_mm, "test_clock_recovery_mm.xml")
| gpl-3.0 |
Fokko/incubator-airflow | airflow/example_dags/example_branch_python_dop_operator_3.py | 1 | 2116 | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Example DAG demonstrating the usage of BranchPythonOperator with depends_on_past=True, where tasks may be run
or skipped on alternating runs.
"""
import airflow
from airflow.models import DAG
from airflow.operators.dummy_operator import DummyOperator
from airflow.operators.python_operator import BranchPythonOperator
# Default task arguments.  depends_on_past=True means a task instance only
# runs when its previous run succeeded or was skipped, which is what makes
# the alternating run/skip pattern of this example visible.
args = {
    'owner': 'Airflow',
    'start_date': airflow.utils.dates.days_ago(2),
    'depends_on_past': True,
}
# Schedule every minute so even and odd execution minutes alternate.
dag = DAG(
    dag_id='example_branch_dop_operator_v3',
    schedule_interval='*/1 * * * *',
    default_args=args,
)
def should_run(**kwargs):
    """
    Pick which dummy task to run based on the execution date's minute.

    :param dict kwargs: Context
    :return: Id of the task to run ("dummy_task_1" for even minutes,
        "dummy_task_2" for odd ones)
    :rtype: str
    """
    execution_date = kwargs['execution_date']
    minute = execution_date.minute
    print('------------- exec dttm = {} and minute = {}'.format(execution_date, minute))
    return "dummy_task_1" if minute % 2 == 0 else "dummy_task_2"
# Branching task: should_run returns the id of the downstream task to run.
cond = BranchPythonOperator(
    task_id='condition',
    python_callable=should_run,
    dag=dag,
)
# The two possible branches; the one not chosen is skipped for that run.
dummy_task_1 = DummyOperator(task_id='dummy_task_1', dag=dag)
dummy_task_2 = DummyOperator(task_id='dummy_task_2', dag=dag)
# Both dummy tasks are downstream of the branch operator.
cond >> [dummy_task_1, dummy_task_2]
| apache-2.0 |
iivic/BoiseStateX | common/lib/capa/capa/safe_exec/lazymod.py | 193 | 1200 | """A module proxy for delayed importing of modules.
From http://barnesc.blogspot.com/2006/06/automatic-python-imports-with-autoimp.html,
in the public domain.
"""
import sys
class LazyModule(object):
    """A module proxy that defers the real import until first attribute use."""

    def __init__(self, modname):
        # Write through __dict__ so __getattr__ is never triggered here.
        self.__dict__['__name__'] = modname
        self._set_mod(None)

    def _set_mod(self, mod):
        if mod is not None:
            # Adopt the real module's namespace so later attribute access
            # hits it directly, without going through __getattr__.
            self.__dict__ = mod.__dict__
        self.__dict__['_lazymod_mod'] = mod

    def _load_mod(self):
        __import__(self.__name__)
        self._set_mod(sys.modules[self.__name__])

    def __getattr__(self, name):
        # First access: perform the deferred import.
        if self.__dict__['_lazymod_mod'] is None:
            self._load_mod()
        mod = self.__dict__['_lazymod_mod']
        if hasattr(mod, name):
            return getattr(mod, name)
        # Not a plain attribute -- try importing it as a submodule.
        subname = '%s.%s' % (self.__name__, name)
        try:
            __import__(subname)
            getattr(mod, name)
        except ImportError:
            raise AttributeError("'module' object has no attribute %r" % name)
        proxy = LazyModule(subname)
        self.__dict__[name] = proxy
        return proxy
| agpl-3.0 |
AndrewLvov/django-registration | registration/forms.py | 38 | 4537 | """
Forms and validation code for user registration.
Note that all of these forms assume Django's bundle default ``User``
model; since it's not possible for a form to anticipate in advance the
needs of custom user models, you will need to write your own forms if
you're using a custom model.
"""
from django.contrib.auth.models import User
from django import forms
from django.utils.translation import ugettext_lazy as _
class RegistrationForm(forms.Form):
    """
    Form for registering a new user account.

    Validates that the requested username is not already in use, and
    requires the password to be entered twice to catch typos.

    Subclasses should feel free to add any additional validation they
    need, but should avoid defining a ``save()`` method -- the actual
    saving of collected user data is delegated to the active
    registration backend.
    """
    required_css_class = 'required'

    username = forms.RegexField(regex=r'^[\w.@+-]+$',
                                max_length=30,
                                label=_("Username"),
                                error_messages={'invalid': _("This value may contain only letters, numbers and @/./+/-/_ characters.")})
    email = forms.EmailField(label=_("E-mail"))
    password1 = forms.CharField(widget=forms.PasswordInput,
                                label=_("Password"))
    password2 = forms.CharField(widget=forms.PasswordInput,
                                label=_("Password (again)"))

    def clean_username(self):
        """
        Validate that the username is not already taken.  The lookup is
        case-insensitive so "Bob" and "bob" cannot coexist.
        """
        username = self.cleaned_data['username']
        if User.objects.filter(username__iexact=username).exists():
            raise forms.ValidationError(_("A user with that username already exists."))
        return username

    def clean(self):
        """
        Verify that the two password fields match.  An error raised here
        lands in ``non_field_errors()`` because it involves more than one
        field.
        """
        data = self.cleaned_data
        if 'password1' in data and 'password2' in data and data['password1'] != data['password2']:
            raise forms.ValidationError(_("The two password fields didn't match."))
        return data
class RegistrationFormTermsOfService(RegistrationForm):
    """
    Subclass of ``RegistrationForm`` which adds a required checkbox
    for agreeing to a site's Terms of Service.
    """
    # The checkbox must be ticked for the form to validate; the 'required'
    # error message is shown when it is left unchecked.
    tos = forms.BooleanField(widget=forms.CheckboxInput,
                             label=_(u'I have read and agree to the Terms of Service'),
                             error_messages={'required': _("You must agree to the terms to register")})
class RegistrationFormUniqueEmail(RegistrationForm):
    """
    Subclass of ``RegistrationForm`` which enforces uniqueness of
    email addresses.
    """
    def clean_email(self):
        """
        Validate that the supplied email address is unique for the
        site (case-insensitive lookup).
        """
        # Use exists() for an efficient existence check instead of relying
        # on queryset truthiness, consistent with clean_username() on the
        # parent form.
        if User.objects.filter(email__iexact=self.cleaned_data['email']).exists():
            raise forms.ValidationError(_("This email address is already in use. Please supply a different email address."))
        return self.cleaned_data['email']
class RegistrationFormNoFreeEmail(RegistrationForm):
    """
    Subclass of ``RegistrationForm`` which disallows registration with
    email addresses from popular free webmail services; moderately
    useful for preventing automated spam registrations.

    To change the list of banned domains, subclass this form and
    override the attribute ``bad_domains``.
    """
    # Entries must be lowercase; the comparison below lower-cases the
    # submitted domain before checking.
    bad_domains = ['aim.com', 'aol.com', 'email.com', 'gmail.com',
                   'googlemail.com', 'hotmail.com', 'hushmail.com',
                   'msn.com', 'mail.ru', 'mailinator.com', 'live.com',
                   'yahoo.com']

    def clean_email(self):
        """
        Check the supplied email address against a list of known free
        webmail domains.
        """
        # BUG FIX: lower-case the domain so "user@GMAIL.COM" cannot bypass
        # the check -- mail domains are case-insensitive.
        email_domain = self.cleaned_data['email'].split('@')[1].lower()
        if email_domain in self.bad_domains:
            raise forms.ValidationError(_("Registration using free email addresses is prohibited. Please supply a different email address."))
        return self.cleaned_data['email']
| bsd-3-clause |
ValentineBlacker/TopDownMap | src/sprite.py | 2 | 9275 | '''
Created on Apr 7, 2012
@author: Demicow
'''
import pygame
#Tie circle collion to self. leftside etc
class customSprite(pygame.sprite.DirtySprite):
    """Animated, collidable top-down sprite.

    The sprite either follows the camera (when it is the scene's camera
    focus, in which case the map scrolls and the sprite stays centred) or
    is moved in the opposite direction of map scrolling so it appears
    fixed to the map.  Collision sides (leftside/rightside/topside/
    bottomside) are computed each frame for use by the input handling code.
    """
    def __init__(self, scene, image, location):
        #init the sprite!
        pygame.sprite.DirtySprite.__init__(self)
        #info about the screen
        self.screen = scene.screen
        self.map = scene.mainmap
        self.screen_center = scene.screen_center
        self.field_length = scene.field_length
        self.field_height = scene.field_height
        #load image
        self.imagemaster = pygame.image.load(image).convert_alpha()
        # Sprite sheet cell size: assumes a 3x3 grid of 20x20 frames
        # (rows: front, back, side) -- TODO confirm against the assets.
        self.imagesize = (20,20)
        self.load_images()
        self.image = self.imagestill
        self.currentimage = self.imagefront
        self.rect = self.image.get_rect()
        self.radius = 10
        self.movement = 4
        #use same movement speed as map
        self.speed = scene.mainmap.scroll_speed
        #variables for animation
        self.pause = 0
        self.delay = 11
        self.frame = 0
        #initial position
        self.x, self.y = location
        #holds info about location for camera toggling purposes
        self.previouscenter = ((self.x, self.y), (scene.mainmap.camera.x, scene.mainmap.camera.y))
        #init variables to be used later
        self.animation_on = False
        self.collidedblock = None
        self.movingleft = False
        self.rightside = self.leftside = self.topside = self.bottomside = False
        self.rect.x = self.x
        self.rect.y = self.y
    def load_images(self):
        """Slice the master sprite sheet into the still image and the
        three animation frame lists (front, back, side views)."""
        # load the image if the sprite is still
        self.imagestill = pygame.Surface(self.imagesize, pygame.SRCALPHA)
        self.imagestill.blit(self.imagemaster, (0, 0), ((0,0), self.imagesize))
        #make a list of frames for the sprite viewed from the front
        self.imagefront= []
        offsetfront = []
        for i in range(3):
            offsetfront.append((self.imagesize[0]*i,0*self.imagesize[1]))
        for i in range(0,3):
            tmpimg = pygame.Surface(self.imagesize, pygame.SRCALPHA)
            tmpimg.blit(self.imagemaster, (0, 0), (offsetfront[i], self.imagesize))
            self.imagefront.append(tmpimg)
        #make a list of frames for the sprite viewed from the back
        self.imageback= []
        offsetback = []
        for i in range(3):
            offsetback.append((self.imagesize[0]*i,1*self.imagesize[1]))
        for i in range(0,3):
            tmpimg = pygame.Surface(self.imagesize, pygame.SRCALPHA)
            tmpimg.blit(self.imagemaster, (0, 0), (offsetback[i], self.imagesize))
            self.imageback.append(tmpimg)
        #make a list of frames for the sprite viewed from the side
        self.imageside= []
        offsetside = []
        for i in range(3):
            offsetside.append((self.imagesize[0]*i,2*self.imagesize[1]))
        for i in range(0,3):
            tmpimg = pygame.Surface(self.imagesize, pygame.SRCALPHA)
            tmpimg.blit(self.imagemaster, (0, 0), (offsetside[i], self.imagesize))
            self.imageside.append(tmpimg)
    def animation(self):
        """Advance the walking animation when enabled; otherwise show the
        middle (standing) frame of the current view."""
        #flip through the frames of animation
        if self.animation_on == True:
            # NOTE(review): this local delay of 10 shadows self.delay (11)
            # set in __init__ -- confirm which value is intended.
            delay = 10
            self.pause += 1
            if self.pause >= delay:
                self.pause = 0
                self.frame += 1
                if self.frame >= len(self.currentimage):
                    self.frame = 0
                self.image = self.currentimage[self.frame]
        else: self.image = self.currentimage[1]
    def scroll_with_map_horizontal(self, scene):
        """Select the side-view frames and facing when the map scrolls
        horizontally (sprite stays centred on screen)."""
        if scene.mainmap.scrolling_right == True:
            self.movingleft = False
            self.currentimage = self.imageside
        elif scene.mainmap.scrolling_left == True:
            self.currentimage = self.imageside
            self.movingleft = True
    def scroll_with_map_vertical(self,scene):
        """Select back/front view frames when the map scrolls vertically."""
        if scene.mainmap.scrolling_up == True:
            self.currentimage = self.imageback
        elif scene.mainmap.scrolling_down == True:
            self.currentimage = self.imagefront
    def update(self,scene):
        """Per-frame update: animate, resolve block collisions, apply
        camera-follow or map-relative movement, then draw the sprite."""
        self.animation()
        # Determines which blocks the sprite is colliding with. Had to tweak it a bit
        #to get it to recognize multiple blocks
        collidelist = self.rect.collidelistall(scene.mainmap.collisionblocks)
        blocklist =[scene.mainmap.collisionblocks[b] for b in collidelist]
        if len(blocklist) > 0:
            for block in blocklist:
                self.check_block(block)
        else: self.rightside = self.leftside = self.topside = self.bottomside = False
        if scene.camera_focus is self:
            self.check_bounds()
            if self.movingleft == True:
                self.flip()
            else: pass
            self.rect.x = self.x
            self.rect.y = self.y
            # While the map scrolls, pin the sprite to the screen centre.
            if scene.mainmap.scrolling_mode_horizontal == True:
                self.scroll_with_map_horizontal(scene)
                self.x = scene.field_length/2
            else: pass
            if scene.mainmap.scrolling_mode_vertical == True:
                self.scroll_with_map_vertical(scene)
                self.y = scene.field_height/2
            else: pass
        else:
            #if camera is not focused on object
            #DO NOT CHANGE. Changing this will not help you.
            # Move opposite to the scroll direction so the sprite appears
            # fixed to the map (unless the map has hit its edge).
            if scene.mainmap.scrolling_up == True and scene.mainmap.at_top == False:
                self.y += self.speed
            elif scene.mainmap.scrolling_down == True and scene.mainmap.at_bottom == False:
                self.y -= self.speed
            elif scene.mainmap.scrolling_right == True and scene.mainmap.at_right_side == False:
                self.x -= self.speed
            elif scene.mainmap.scrolling_left == True and scene.mainmap.at_left_side== False:
                self.x += self.speed
            else: pass
            self.rect.x = self.x
            self.rect.y = self.y
        self.screen.blit(self.image, (self.rect.centerx, self.rect.centery), special_flags= 0)
    def check_bounds(self):
        """Set the collision-side flags when the sprite touches a map edge,
        which blocks further movement in that direction."""
        #keeps ya from walkin' off the edge
        mapleft = 0
        mapright = self.map.rightedge
        maptop = 0
        mapbottom = self.map.bottomedge
        if self.rect.left < mapleft:
            self.leftside = True
        if self.rect.top < maptop:
            self.topside = True
        if self.rect.right > mapright:
            self.rightside = True
        if self.rect.bottom > mapbottom:
            self.bottomside = True
    def flip(self):
        """ flips sprite horizontally"""
        self.image = pygame.transform.flip(self.image, True, False)
    def check_block(self, block):
        """
        This ended up getting a little complex. This code determines which side of a block the sprite is striking. The info is used by the
        do_input module.
        """
        # Tolerance window: a side only registers when the sprite's midpoint
        # is within (sprite width - 2) pixels of the block's facing midpoint.
        offset = self.imagesize[0] -2
        #LEFT side of sprite
        if self.rect.left <= block.right and self.rect.right > block.centerx:
            if self.rect.midleft[1] - block.midright[1] < offset and self.rect.midleft[1] - block.midright[1] > -offset:
                self.leftside = True
        #RIGHT side of sprite
        if self.rect.right >= block.left and self.rect.left < block.centerx:
            if self.rect.midright[1] - block.midleft[1] < offset and self.rect.midright[1] - block.midleft[1] > -offset:
                self.rightside = True
        #TOP of sprite
        if self.rect.top <= block.bottom and self.rect.bottom> block.centery:
            if self.rect.midtop[0] - block.midbottom[0]< offset and self.rect.midtop[0]- block.midbottom[0]> -offset:
                self.topside = True
        #BOTTOM of sprite
        if self.rect.bottom >= block.top and self.rect.top< block.centery:
            if self.rect.midbottom[0] - block.midtop[0] < offset and self.rect.midbottom[0] - block.midtop[0] > -offset:
                self.bottomside = True
| gpl-2.0 |
divio/djangocms-accordion | djangocms_accordion/cms_plugins.py | 1 | 1171 | # -*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
from cms.plugin_pool import plugin_pool
from cms.plugin_base import CMSPluginBase
from djangocms_accordion.models import Accordion, AccordionEntry
class AccordionPlugin(CMSPluginBase):
    """CMS plugin that renders the accordion container; its children are
    AccordionEntryPlugin instances."""
    model = Accordion
    name = _('Accordion')
    module = _('Accordion')
    render_template = 'djangocms_accordion/accordion.html'
    allow_children = True
    child_classes = ['AccordionEntryPlugin']

    def render(self, context, instance, placeholder):
        """Expose the accordion instance and placeholder to the template."""
        extra = {
            'accordion': instance,
            'placeholder': placeholder,
        }
        context.update(extra)
        return context
class AccordionEntryPlugin(CMSPluginBase):
    """CMS plugin that renders a single accordion entry (one panel)."""
    model = AccordionEntry
    name = _('Accordion Entry')
    module = _('Accordion')
    render_template = 'djangocms_accordion/accordion_entry.html'
    allow_children = True

    def render(self, context, instance, placeholder):
        """Expose the entry instance and placeholder to the template."""
        extra = {
            'instance': instance,
            'placeholder': placeholder,
        }
        context.update(extra)
        return context
# Make both plugins available in the CMS plugin picker.
plugin_pool.register_plugin(AccordionPlugin)
plugin_pool.register_plugin(AccordionEntryPlugin)
| bsd-3-clause |
Johnzero/OE7 | openerp/addons-modules/account/project/report/account_journal.py | 61 | 2142 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.report import report_sxw
#
# Use period and Journal for selection or resources
#
class journal_print(report_sxw.rml_parse):
    """RML parser for the analytic journal report: exposes the journal's
    analytic lines and their total amount to the report template."""

    def lines(self, journal_id, *args):
        """Return the journal's analytic lines, ordered by date then id."""
        self.cr.execute('select id from account_analytic_line where journal_id=%s order by date,id', (journal_id,))
        # List comprehension instead of map(lambda ...): identical result on
        # Python 2 and also correct on Python 3 (where map is lazy).
        ids = [row[0] for row in self.cr.fetchall()]
        res = self.pool.get('account.analytic.line').browse(self.cr, self.uid, ids)
        return res

    def _sum_lines(self, journal_id):
        """Return the total amount of the journal's lines (0.0 when empty)."""
        self.cr.execute('select sum(amount) from account_analytic_line where journal_id=%s', (journal_id,))
        return self.cr.fetchone()[0] or 0.0

    def __init__(self, cr, uid, name, context):
        super(journal_print, self).__init__(cr, uid, name, context=context)
        # Names made available to the RML template.
        self.localcontext = {
            'time': time,
            'lines': self.lines,
            'sum_lines': self._sum_lines,
        }
# Register the report under the 'report.account.analytic.journal.print'
# service name, rendered from the analytic_journal.rml template.
report_sxw.report_sxw('report.account.analytic.journal.print', 'account.analytic.journal', 'addons/account/project/report/analytic_journal.rml',parser=journal_print)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
hwroitzsch/BikersLifeSaver | lib/python3.5/site-packages/numpy/lib/tests/test_type_check.py | 103 | 10247 | from __future__ import division, absolute_import, print_function
import numpy as np
from numpy.compat import long
from numpy.testing import (
TestCase, assert_, assert_equal, assert_array_equal, run_module_suite
)
from numpy.lib.type_check import (
common_type, mintypecode, isreal, iscomplex, isposinf, isneginf,
nan_to_num, isrealobj, iscomplexobj, asfarray, real_if_close
)
def assert_all(x):
    """Assert that every element of *x* is truthy, showing *x* on failure."""
    assert_(np.all(x), x)
class TestCommonType(TestCase):
    """Tests for common_type: the smallest inexact type that holds the
    given arrays (integer input promotes to float64)."""
    def test_basic(self):
        ai32 = np.array([[1, 2], [3, 4]], dtype=np.int32)
        af16 = np.array([[1, 2], [3, 4]], dtype=np.float16)
        af32 = np.array([[1, 2], [3, 4]], dtype=np.float32)
        af64 = np.array([[1, 2], [3, 4]], dtype=np.float64)
        acs = np.array([[1+5j, 2+6j], [3+7j, 4+8j]], dtype=np.csingle)
        acd = np.array([[1+5j, 2+6j], [3+7j, 4+8j]], dtype=np.cdouble)
        assert_(common_type(ai32) == np.float64)
        assert_(common_type(af16) == np.float16)
        assert_(common_type(af32) == np.float32)
        assert_(common_type(af64) == np.float64)
        assert_(common_type(acs) == np.csingle)
        assert_(common_type(acd) == np.cdouble)
class TestMintypecode(TestCase):
    """Tests for mintypecode: the minimal typecode ('f', 'd', 'F' or 'D')
    that can safely hold all of the given typecodes."""
    def test_default_1(self):
        # Any single integer/bool typecode falls back to 'd'.
        for itype in '1bcsuwil':
            assert_equal(mintypecode(itype), 'd')
        assert_equal(mintypecode('f'), 'f')
        assert_equal(mintypecode('d'), 'd')
        assert_equal(mintypecode('F'), 'F')
        assert_equal(mintypecode('D'), 'D')
    def test_default_2(self):
        # Pairs: the inexact typecode wins over integer typecodes.
        for itype in '1bcsuwil':
            assert_equal(mintypecode(itype+'f'), 'f')
            assert_equal(mintypecode(itype+'d'), 'd')
            assert_equal(mintypecode(itype+'F'), 'F')
            assert_equal(mintypecode(itype+'D'), 'D')
        assert_equal(mintypecode('ff'), 'f')
        assert_equal(mintypecode('fd'), 'd')
        assert_equal(mintypecode('fF'), 'F')
        assert_equal(mintypecode('fD'), 'D')
        assert_equal(mintypecode('df'), 'd')
        assert_equal(mintypecode('dd'), 'd')
        #assert_equal(mintypecode('dF',savespace=1),'F')
        assert_equal(mintypecode('dF'), 'D')
        assert_equal(mintypecode('dD'), 'D')
        assert_equal(mintypecode('Ff'), 'F')
        #assert_equal(mintypecode('Fd',savespace=1),'F')
        assert_equal(mintypecode('Fd'), 'D')
        assert_equal(mintypecode('FF'), 'F')
        assert_equal(mintypecode('FD'), 'D')
        assert_equal(mintypecode('Df'), 'D')
        assert_equal(mintypecode('Dd'), 'D')
        assert_equal(mintypecode('DF'), 'D')
        assert_equal(mintypecode('DD'), 'D')
    def test_default_3(self):
        # Triples: double precision plus complex forces 'D'.
        assert_equal(mintypecode('fdF'), 'D')
        #assert_equal(mintypecode('fdF',savespace=1),'F')
        assert_equal(mintypecode('fdD'), 'D')
        assert_equal(mintypecode('fFD'), 'D')
        assert_equal(mintypecode('dFD'), 'D')
        assert_equal(mintypecode('ifd'), 'd')
        assert_equal(mintypecode('ifF'), 'F')
        assert_equal(mintypecode('ifD'), 'D')
        assert_equal(mintypecode('idF'), 'D')
        #assert_equal(mintypecode('idF',savespace=1),'F')
        assert_equal(mintypecode('idD'), 'D')
class TestIsscalar(TestCase):
    """Tests for np.isscalar: scalars yes, containers no."""
    def test_basic(self):
        assert_(np.isscalar(3))
        assert_(not np.isscalar([3]))
        assert_(not np.isscalar((3,)))
        assert_(np.isscalar(3j))
        assert_(np.isscalar(long(10)))
        assert_(np.isscalar(4.0))
class TestReal(TestCase):
    """Tests for np.real: real part of real and complex arrays."""
    def test_real(self):
        # A real array is returned unchanged.
        y = np.random.rand(10,)
        assert_array_equal(y, np.real(y))
    def test_cmplx(self):
        y = np.random.rand(10,)+1j*np.random.rand(10,)
        assert_array_equal(y.real, np.real(y))
class TestImag(TestCase):
    """Tests for np.imag: imaginary part of real and complex arrays."""
    def test_real(self):
        # A real array has zero imaginary part.
        y = np.random.rand(10,)
        assert_array_equal(0, np.imag(y))
    def test_cmplx(self):
        y = np.random.rand(10,)+1j*np.random.rand(10,)
        assert_array_equal(y.imag, np.imag(y))
class TestIscomplex(TestCase):
    """Tests for iscomplex: elementwise "has a nonzero imaginary part"."""
    def test_fail(self):
        # A purely real array contains no complex elements.
        z = np.array([-1, 0, 1])
        res = iscomplex(z)
        # NOTE(review): np.sometrue is a deprecated alias of np.any in
        # newer numpy releases.
        assert_(not np.sometrue(res, axis=0))
    def test_pass(self):
        z = np.array([-1j, 1, 0])
        res = iscomplex(z)
        assert_array_equal(res, [1, 0, 0])
class TestIsreal(TestCase):
    """Tests for isreal: elementwise "imaginary part is zero"."""
    def test_pass(self):
        z = np.array([-1, 0, 1j])
        res = isreal(z)
        assert_array_equal(res, [1, 1, 0])
    def test_fail(self):
        z = np.array([-1j, 1, 0])
        res = isreal(z)
        assert_array_equal(res, [0, 1, 1])
class TestIscomplexobj(TestCase):
    """Tests for iscomplexobj: whether the array's dtype is complex
    (a single complex element makes the whole array complex-typed)."""
    def test_basic(self):
        z = np.array([-1, 0, 1])
        assert_(not iscomplexobj(z))
        z = np.array([-1j, 0, -1])
        assert_(iscomplexobj(z))
class TestIsrealobj(TestCase):
    """Tests for isrealobj: whether the array's dtype is non-complex."""
    def test_basic(self):
        z = np.array([-1, 0, 1])
        assert_(isrealobj(z))
        z = np.array([-1j, 0, -1])
        assert_(not isrealobj(z))
class TestIsnan(TestCase):
    """Tests for np.isnan.  Special values are produced via division by
    zero with the resulting warnings suppressed by np.errstate."""
    def test_goodvalues(self):
        z = np.array((-1., 0., 1.))
        res = np.isnan(z) == 0
        assert_all(np.all(res, axis=0))
    def test_posinf(self):
        with np.errstate(divide='ignore'):
            assert_all(np.isnan(np.array((1.,))/0.) == 0)
    def test_neginf(self):
        with np.errstate(divide='ignore'):
            assert_all(np.isnan(np.array((-1.,))/0.) == 0)
    def test_ind(self):
        # 0./0. yields NaN ("ind" = indeterminate form).
        with np.errstate(divide='ignore', invalid='ignore'):
            assert_all(np.isnan(np.array((0.,))/0.) == 1)
    def test_integer(self):
        assert_all(np.isnan(1) == 0)
    def test_complex(self):
        assert_all(np.isnan(1+1j) == 0)
    def test_complex1(self):
        with np.errstate(divide='ignore', invalid='ignore'):
            assert_all(np.isnan(np.array(0+0j)/0.) == 1)
class TestIsfinite(TestCase):
    """Tests for np.isfinite: False for inf, -inf and NaN."""
    # Fixme, wrong place, isfinite now ufunc
    def test_goodvalues(self):
        z = np.array((-1., 0., 1.))
        res = np.isfinite(z) == 1
        assert_all(np.all(res, axis=0))
    def test_posinf(self):
        with np.errstate(divide='ignore', invalid='ignore'):
            assert_all(np.isfinite(np.array((1.,))/0.) == 0)
    def test_neginf(self):
        with np.errstate(divide='ignore', invalid='ignore'):
            assert_all(np.isfinite(np.array((-1.,))/0.) == 0)
    def test_ind(self):
        # 0./0. yields NaN, which is not finite either.
        with np.errstate(divide='ignore', invalid='ignore'):
            assert_all(np.isfinite(np.array((0.,))/0.) == 0)
    def test_integer(self):
        assert_all(np.isfinite(1) == 1)
    def test_complex(self):
        assert_all(np.isfinite(1+1j) == 1)
    def test_complex1(self):
        with np.errstate(divide='ignore', invalid='ignore'):
            assert_all(np.isfinite(np.array(1+1j)/0.) == 0)
class TestIsinf(TestCase):
    """Tests for np.isinf: True for +/-inf, False for finite values and
    NaN, for both array and scalar inputs."""
    # Fixme, wrong place, isinf now ufunc
    def test_goodvalues(self):
        z = np.array((-1., 0., 1.))
        res = np.isinf(z) == 0
        assert_all(np.all(res, axis=0))
    def test_posinf(self):
        with np.errstate(divide='ignore', invalid='ignore'):
            assert_all(np.isinf(np.array((1.,))/0.) == 1)
    def test_posinf_scalar(self):
        with np.errstate(divide='ignore', invalid='ignore'):
            assert_all(np.isinf(np.array(1.,)/0.) == 1)
    def test_neginf(self):
        with np.errstate(divide='ignore', invalid='ignore'):
            assert_all(np.isinf(np.array((-1.,))/0.) == 1)
    def test_neginf_scalar(self):
        with np.errstate(divide='ignore', invalid='ignore'):
            assert_all(np.isinf(np.array(-1.)/0.) == 1)
    def test_ind(self):
        # NaN (0./0.) is not infinite.
        with np.errstate(divide='ignore', invalid='ignore'):
            assert_all(np.isinf(np.array((0.,))/0.) == 0)
class TestIsposinf(TestCase):
    """Tests for isposinf: True only for +inf entries."""
    def test_generic(self):
        # (-1, 0, 1)/0 gives (-inf, nan, +inf); only the last is posinf.
        with np.errstate(divide='ignore', invalid='ignore'):
            vals = isposinf(np.array((-1., 0, 1))/0.)
            assert_(vals[0] == 0)
            assert_(vals[1] == 0)
            assert_(vals[2] == 1)
class TestIsneginf(TestCase):
    """Checks isneginf flags only negative infinity."""
    def test_generic(self):
        with np.errstate(divide='ignore', invalid='ignore'):
            vals = isneginf(np.array((-1., 0, 1))/0.)
        # vals is (-inf, nan, +inf): only the first entry is -inf.
        assert_(vals[0] == 1)
        assert_(vals[1] == 0)
        assert_(vals[2] == 0)
class TestNanToNum(TestCase):
    """Checks nan_to_num: nan -> 0, +/-inf -> huge finite values."""
    def test_generic(self):
        with np.errstate(divide='ignore', invalid='ignore'):
            vals = nan_to_num(np.array((-1., 0, 1))/0.)
        # BUG FIX: the original chained two assertion helpers with `and`
        # (`assert_all(a) and assert_all(b)`); assert_all() returns None,
        # so the right-hand assertion was never evaluated.  Each
        # assertion now runs as its own statement.
        assert_all(vals[0] < -1e10)
        assert_all(np.isfinite(vals[0]))
        assert_(vals[1] == 0)
        assert_all(vals[2] > 1e10)
        assert_all(np.isfinite(vals[2]))
    def test_integer(self):
        vals = nan_to_num(1)
        assert_all(vals == 1)
        vals = nan_to_num([1])
        # Builtin `int` instead of the deprecated `np.int` alias
        # (removed in NumPy 1.24); both name the same dtype here.
        assert_array_equal(vals, np.array([1], int))
    def test_complex_good(self):
        vals = nan_to_num(1+1j)
        assert_all(vals == 1+1j)
    def test_complex_bad(self):
        with np.errstate(divide='ignore', invalid='ignore'):
            v = 1 + 1j
            v += np.array(0+1.j)/0.
            vals = nan_to_num(v)
            # !! This is actually (unexpectedly) zero
            assert_all(np.isfinite(vals))
    def test_complex_bad2(self):
        with np.errstate(divide='ignore', invalid='ignore'):
            v = 1 + 1j
            v += np.array(-1+1.j)/0.
            vals = nan_to_num(v)
            assert_all(np.isfinite(vals))
            # Fixme
            #assert_all(vals.imag > 1e10) and assert_all(np.isfinite(vals))
            # !! This is actually (unexpectedly) positive
            # !! inf. Comment out for now, and see if it
            # !! changes
            #assert_all(vals.real < -1e10) and assert_all(np.isfinite(vals))
class TestRealIfClose(TestCase):
    """Checks real_if_close strips negligible imaginary parts."""
    def test_basic(self):
        a = np.random.rand(10)
        # Imaginary part far below the default tolerance is dropped.
        b = real_if_close(a+1e-15j)
        assert_all(isrealobj(b))
        assert_array_equal(a, b)
        # 1e-7 exceeds the default tolerance: result stays complex.
        b = real_if_close(a+1e-7j)
        assert_all(iscomplexobj(b))
        # ...unless the tolerance is raised explicitly.
        b = real_if_close(a+1e-7j, tol=1e-6)
        assert_all(isrealobj(b))
class TestArrayConversion(TestCase):
    """Checks asfarray converts to a floating-point ndarray."""
    def test_asfarray(self):
        a = asfarray(np.array([1, 2, 3]))
        assert_equal(a.__class__, np.ndarray)
        # np.floating instead of the deprecated np.float alias (an alias
        # of builtin float, removed in NumPy 1.24); the subdtype check
        # accepts the same float dtypes.
        assert_(np.issubdtype(a.dtype, np.floating))
if __name__ == "__main__":
run_module_suite()
| mit |
dezelin/snaily-lib | 3rd_party/gtest-1.6.0/test/gtest_xml_output_unittest.py | 397 | 11279 | #!/usr/bin/env python
#
# Copyright 2006, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test for the gtest_xml_output module"""
__author__ = 'eefacm@gmail.com (Sean Mcafee)'
import errno
import os
import sys
from xml.dom import minidom, Node
import gtest_test_utils
import gtest_xml_test_utils
GTEST_OUTPUT_FLAG = "--gtest_output"
GTEST_DEFAULT_OUTPUT_FILE = "test_detail.xml"
GTEST_PROGRAM_NAME = "gtest_xml_output_unittest_"
SUPPORTS_STACK_TRACES = False
if SUPPORTS_STACK_TRACES:
STACK_TRACE_TEMPLATE = "\nStack trace:\n*"
else:
STACK_TRACE_TEMPLATE = ""
EXPECTED_NON_EMPTY_XML = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="23" failures="4" disabled="2" errors="0" time="*" name="AllTests">
<testsuite name="SuccessfulTest" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="Succeeds" status="run" time="*" classname="SuccessfulTest"/>
</testsuite>
<testsuite name="FailedTest" tests="1" failures="1" disabled="0" errors="0" time="*">
<testcase name="Fails" status="run" time="*" classname="FailedTest">
<failure message="Value of: 2
Expected: 1" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Value of: 2
Expected: 1%(stack)s]]></failure>
</testcase>
</testsuite>
<testsuite name="MixedResultTest" tests="3" failures="1" disabled="1" errors="0" time="*">
<testcase name="Succeeds" status="run" time="*" classname="MixedResultTest"/>
<testcase name="Fails" status="run" time="*" classname="MixedResultTest">
<failure message="Value of: 2
Expected: 1" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Value of: 2
Expected: 1%(stack)s]]></failure>
<failure message="Value of: 3
Expected: 2" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Value of: 3
Expected: 2%(stack)s]]></failure>
</testcase>
<testcase name="DISABLED_test" status="notrun" time="*" classname="MixedResultTest"/>
</testsuite>
<testsuite name="XmlQuotingTest" tests="1" failures="1" disabled="0" errors="0" time="*">
<testcase name="OutputsCData" status="run" time="*" classname="XmlQuotingTest">
<failure message="Failed
XML output: <?xml encoding="utf-8"><top><![CDATA[cdata text]]></top>" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Failed
XML output: <?xml encoding="utf-8"><top><![CDATA[cdata text]]>]]><![CDATA[</top>%(stack)s]]></failure>
</testcase>
</testsuite>
<testsuite name="InvalidCharactersTest" tests="1" failures="1" disabled="0" errors="0" time="*">
<testcase name="InvalidCharactersInMessage" status="run" time="*" classname="InvalidCharactersTest">
<failure message="Failed
Invalid characters in brackets []" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Failed
Invalid characters in brackets []%(stack)s]]></failure>
</testcase>
</testsuite>
<testsuite name="DisabledTest" tests="1" failures="0" disabled="1" errors="0" time="*">
<testcase name="DISABLED_test_not_run" status="notrun" time="*" classname="DisabledTest"/>
</testsuite>
<testsuite name="PropertyRecordingTest" tests="4" failures="0" disabled="0" errors="0" time="*">
<testcase name="OneProperty" status="run" time="*" classname="PropertyRecordingTest" key_1="1"/>
<testcase name="IntValuedProperty" status="run" time="*" classname="PropertyRecordingTest" key_int="1"/>
<testcase name="ThreeProperties" status="run" time="*" classname="PropertyRecordingTest" key_1="1" key_2="2" key_3="3"/>
<testcase name="TwoValuesForOneKeyUsesLastValue" status="run" time="*" classname="PropertyRecordingTest" key_1="2"/>
</testsuite>
<testsuite name="NoFixtureTest" tests="3" failures="0" disabled="0" errors="0" time="*">
<testcase name="RecordProperty" status="run" time="*" classname="NoFixtureTest" key="1"/>
<testcase name="ExternalUtilityThatCallsRecordIntValuedProperty" status="run" time="*" classname="NoFixtureTest" key_for_utility_int="1"/>
<testcase name="ExternalUtilityThatCallsRecordStringValuedProperty" status="run" time="*" classname="NoFixtureTest" key_for_utility_string="1"/>
</testsuite>
<testsuite name="Single/ValueParamTest" tests="4" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasValueParamAttribute/0" value_param="33" status="run" time="*" classname="Single/ValueParamTest" />
<testcase name="HasValueParamAttribute/1" value_param="42" status="run" time="*" classname="Single/ValueParamTest" />
<testcase name="AnotherTestThatHasValueParamAttribute/0" value_param="33" status="run" time="*" classname="Single/ValueParamTest" />
<testcase name="AnotherTestThatHasValueParamAttribute/1" value_param="42" status="run" time="*" classname="Single/ValueParamTest" />
</testsuite>
<testsuite name="TypedTest/0" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="TypedTest/0" />
</testsuite>
<testsuite name="TypedTest/1" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="TypedTest/1" />
</testsuite>
<testsuite name="Single/TypeParameterizedTestCase/0" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="Single/TypeParameterizedTestCase/0" />
</testsuite>
<testsuite name="Single/TypeParameterizedTestCase/1" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="Single/TypeParameterizedTestCase/1" />
</testsuite>
</testsuites>""" % {'stack': STACK_TRACE_TEMPLATE}
EXPECTED_EMPTY_XML = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="0" failures="0" disabled="0" errors="0" time="*" name="AllTests">
</testsuites>"""
class GTestXMLOutputUnitTest(gtest_xml_test_utils.GTestXMLTestCase):
"""
Unit test for Google Test's XML output functionality.
"""
def testNonEmptyXmlOutput(self):
"""
Runs a test program that generates a non-empty XML output, and
tests that the XML output is expected.
"""
self._TestXmlOutput(GTEST_PROGRAM_NAME, EXPECTED_NON_EMPTY_XML, 1)
def testEmptyXmlOutput(self):
"""
Runs a test program that generates an empty XML output, and
tests that the XML output is expected.
"""
self._TestXmlOutput("gtest_no_test_unittest",
EXPECTED_EMPTY_XML, 0)
def testDefaultOutputFile(self):
"""
Confirms that Google Test produces an XML output file with the expected
default name if no name is explicitly specified.
"""
output_file = os.path.join(gtest_test_utils.GetTempDir(),
GTEST_DEFAULT_OUTPUT_FILE)
gtest_prog_path = gtest_test_utils.GetTestExecutablePath(
"gtest_no_test_unittest")
try:
os.remove(output_file)
except OSError, e:
if e.errno != errno.ENOENT:
raise
p = gtest_test_utils.Subprocess(
[gtest_prog_path, "%s=xml" % GTEST_OUTPUT_FLAG],
working_dir=gtest_test_utils.GetTempDir())
self.assert_(p.exited)
self.assertEquals(0, p.exit_code)
self.assert_(os.path.isfile(output_file))
def testSuppressedXmlOutput(self):
"""
Tests that no XML file is generated if the default XML listener is
shut down before RUN_ALL_TESTS is invoked.
"""
xml_path = os.path.join(gtest_test_utils.GetTempDir(),
GTEST_PROGRAM_NAME + "out.xml")
if os.path.isfile(xml_path):
os.remove(xml_path)
gtest_prog_path = gtest_test_utils.GetTestExecutablePath(GTEST_PROGRAM_NAME)
command = [gtest_prog_path,
"%s=xml:%s" % (GTEST_OUTPUT_FLAG, xml_path),
"--shut_down_xml"]
p = gtest_test_utils.Subprocess(command)
if p.terminated_by_signal:
self.assert_(False,
"%s was killed by signal %d" % (gtest_prog_name, p.signal))
else:
self.assert_(p.exited)
self.assertEquals(1, p.exit_code,
"'%s' exited with code %s, which doesn't match "
"the expected exit code %s."
% (command, p.exit_code, 1))
self.assert_(not os.path.isfile(xml_path))
def _TestXmlOutput(self, gtest_prog_name, expected_xml, expected_exit_code):
"""
Asserts that the XML document generated by running the program
gtest_prog_name matches expected_xml, a string containing another
XML document. Furthermore, the program's exit code must be
expected_exit_code.
"""
xml_path = os.path.join(gtest_test_utils.GetTempDir(),
gtest_prog_name + "out.xml")
gtest_prog_path = gtest_test_utils.GetTestExecutablePath(gtest_prog_name)
command = [gtest_prog_path, "%s=xml:%s" % (GTEST_OUTPUT_FLAG, xml_path)]
p = gtest_test_utils.Subprocess(command)
if p.terminated_by_signal:
self.assert_(False,
"%s was killed by signal %d" % (gtest_prog_name, p.signal))
else:
self.assert_(p.exited)
self.assertEquals(expected_exit_code, p.exit_code,
"'%s' exited with code %s, which doesn't match "
"the expected exit code %s."
% (command, p.exit_code, expected_exit_code))
expected = minidom.parseString(expected_xml)
actual = minidom.parse(xml_path)
self.NormalizeXml(actual.documentElement)
self.AssertEquivalentNodes(expected.documentElement,
actual.documentElement)
expected.unlink()
actual .unlink()
if __name__ == '__main__':
os.environ['GTEST_STACK_TRACE_DEPTH'] = '1'
gtest_test_utils.Main()
| gpl-3.0 |
criteo/biggraphite | biggraphite/cli/command_stats.py | 1 | 13270 | #!/usr/bin/env python
# Copyright 2016 Criteo
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Stats Command."""
from __future__ import print_function
import collections
import datetime
import re
import random
import time
import socket
import logging
from os import environ
import tabulate
from six.moves.configparser import ConfigParser
from biggraphite.cli import command
from prometheus_client import write_to_textfile, CollectorRegistry, Gauge
# Hack to add some more formats.
# TODO: Add Graphite support.
# TODO: Remove padding.
# Register an extra "csv" output format on tabulate's format table:
# semicolon-separated values, no borders and no padding.
# NOTE(review): this pokes tabulate's private API (_table_formats) and
# may break with newer tabulate releases — confirm on upgrade.
tabulate._table_formats["csv"] = tabulate.TableFormat(
    lineabove=None,
    linebelowheader=None,
    linebetweenrows=None,
    linebelow=None,
    headerrow=tabulate.DataRow("", ";", ""),
    datarow=tabulate.DataRow("", ";", ""),
    padding=0,
    with_header_hide=None,
)
# Refresh the public list of format names so "csv" is selectable.
tabulate.tabulate_formats = list(sorted(tabulate._table_formats.keys()))
class Namespaces(object):
    r"""Maps metric names to namespaces via regex patterns.

    The config file would look like:
    ```
    [carbon-relay]
    pattern = carbon\.relay\.*
    [carbon-cache]
    pattern = carbon\.agents\.*
    [carbon-aggregator]
    pattern = carbon\.aggregator\.*
    [prometheus]
    pattern = prometheus\.*
    ```

    Each section declares a ``pattern`` regex; a metric belongs to the
    first section whose pattern matches its name.  Without a config
    file, everything falls into a single ``total`` namespace.
    """

    def __init__(self, filename=None):
        """Build the pattern table from *filename* (or a catch-all)."""
        self.config = ConfigParser({}, collections.OrderedDict)
        self.patterns = collections.OrderedDict()
        if filename:
            self.config.read(filename)
            for name in self.config.sections():
                regex = re.compile(self.config.get(name, "pattern"))
                self.patterns[regex] = name
        else:
            # No configuration: one catch-all namespace named "total".
            self.patterns[re.compile(".*")] = "total"
            self.config.add_section("total")

    def lookup(self, metric_name):
        """Return (namespace, config items) for *metric_name*."""
        for regex, name in self.patterns.items():
            if regex.match(metric_name):
                return name, self.config.items(name)
        return "none", None
class CommandStats(command.BaseCommand):
    """Stats for metrics."""

    NAME = "stats"
    # BUG FIX: typo in the user-visible help ("usage if" -> "usage of").
    HELP = "disk usage of one or several specific metrics."

    def __init__(self, *args, **kwargs):
        """Initialize."""
        super(CommandStats, self).__init__(*args, **kwargs)
        self.ns = None
        self._n_metrics = collections.defaultdict(int)
        self._n_points = collections.defaultdict(int)
        # Metrics not updated within this many days count as expired.
        expiration_days = 15
        env_expiration_days = environ.get('STATS_ANALYSIS_EXPIRATION')
        if env_expiration_days is not None and env_expiration_days.isdigit():
            expiration_days = int(env_expiration_days)
        self.delta = datetime.timedelta(days=expiration_days)
        self.cutoff = datetime.datetime.utcnow() - self.delta
        self.oldest = datetime.datetime.utcnow()
        self.current_oldest = datetime.datetime.utcnow()
        self._n_count = 0
        self._n_range_size = 0
        self._n_count_expired = 0
        self._n_dir_count = 0
        self._n_dir_empty = 0
        self.metrics_file_path = ""

    def add_arguments(self, parser):
        """Add custom arguments.

        See command.CommandBase.
        """
        command.add_sharding_arguments(parser)
        parser.add_argument(
            "-c", "--conf", help="Configuration file for namespaces", dest="conf"
        )
        formats = tabulate.tabulate_formats
        formats.append("graphite")
        parser.add_argument(
            "-f", "--format", help="Format: %s" % ", ".join(formats), dest="fmt"
        )
        parser.add_argument(
            "--carbon",
            help="Carbon host:port to send points to when using graphite output."
        )
        parser.add_argument(
            "--prefix",
            help="Prefix to add to every section name.",
            default='',
        )
        parser.add_argument(
            "--metadata-analysis",
            help="Run a statistical analysis on metadata contents.",
            default=False,
        )
        parser.add_argument(
            "--metrics-file-path",
            help="Dump metrics in file",
            type=str,
            default="",
            action="store"
        )
        # NOTE(review): these counters are also created in __init__;
        # re-creating them here resets any totals from a previous run.
        self._n_metrics = collections.defaultdict(int)
        self._n_points = collections.defaultdict(int)

    def statistical_analysis_stats(self, metric, done, total):
        """Aggregate stats for given metrics."""
        if metric.updated_on is not None:
            if self.oldest > metric.updated_on:
                self.oldest = metric.updated_on
            if self.current_oldest > metric.updated_on:
                self.current_oldest = metric.updated_on
            if metric.updated_on < self.cutoff:
                self._n_count_expired += 1
        self._n_count += 1

    def statistical_analysis_stats_directories(self, dir, result):
        """Aggregate stats for given directories."""
        if result == []:
            self._n_dir_empty += 1
        self._n_dir_count += 1

    def write_metrics(self):
        """Write metrics in textfile."""
        registry = CollectorRegistry()
        metric_scanned_range = Gauge(
            'scanned_range',
            'Total of offset ranges scanned',
            registry=registry)
        metric_scanned_range.set(self._n_range_size)
        metric_total = Gauge(
            'metric_total',
            'Total of metric found in offset ranges scanned',
            registry=registry)
        metric_total.set(self._n_count)
        metric_total_expired = Gauge(
            'metric_expired',
            'Total of expired metric found in offset ranges scanned',
            registry=registry)
        metric_total_expired.set(self._n_count_expired)
        # Scale factor from the sampled ranges up to the full 2**64
        # token space.
        multiplier = 2**64 / self._n_range_size
        metric_estimated_total = Gauge(
            'metric_estimated_total',
            'Estimated total of metric in database',
            registry=registry)
        metric_estimated_total.set(int(self._n_count * multiplier))
        metric_estimated_total_expired = Gauge(
            'metric_estimated_expired',
            'Estimated total of expired metric in database',
            registry=registry)
        metric_estimated_total_expired.set(int(self._n_count_expired * multiplier))
        directories_total = Gauge(
            'directories_total',
            'Total of directories found in offset ranges scanned',
            registry=registry)
        directories_total.set(self._n_dir_count)
        directories_total_empty = Gauge(
            'directories_empty',
            'Total of empty directories found in offset ranges scanned',
            registry=registry)
        directories_total_empty.set(self._n_dir_empty)
        directories_estimated_total = Gauge(
            'directories_estimated_total',
            'Estimated total of directories in database',
            registry=registry)
        directories_estimated_total.set(int(self._n_dir_count * multiplier))
        directories_estimated_total_empty = Gauge(
            'directories_estimated_empty',
            'Estimated total of empty directories in database',
            registry=registry)
        directories_estimated_total_empty.set(int(self._n_dir_empty * multiplier))
        # Final metric dump
        write_to_textfile(self.metrics_file_path, registry)

    def statistical_analysis(self, accessor):
        """Access cassandra database and get compute metrics."""
        range_size = 2**48
        env_exponent = environ.get('STATS_ANALYSIS_EXPONENT')
        if env_exponent is not None and env_exponent.isdigit():
            range_size = 2**int(env_exponent)
        scanned_ranges = 20
        env_ranges = environ.get('STATS_ANALYSIS_RANGES')
        if env_ranges is not None and env_ranges.isdigit():
            scanned_ranges = int(env_ranges)
        stats_debug = False
        stats_debug_env = environ.get('STATS_DEBUG')
        if stats_debug_env is not None:
            stats_debug = True
        current_offset = 0
        for i in range(0, scanned_ranges):
            # BUG FIX: use floor division (//).  Under Python 3 the
            # original true division produced floats, and
            # random.randrange() rejects non-integral arguments.
            start_offset = random.randrange(
                current_offset,
                current_offset + (2**64)//scanned_ranges - range_size
            ) - 2**63
            current_offset += (2**64)//scanned_ranges
            o_metric = self._n_count
            o_metric_expired = self._n_count_expired
            o_directory = self._n_dir_count
            o_directory_empty = self._n_dir_empty
            self.current_oldest = datetime.datetime.utcnow()
            self._n_range_size += range_size
            # Scan metrics
            accessor.sync_map(
                self.statistical_analysis_stats,
                start_key=start_offset,
                end_key=start_offset + range_size,
            )
            # Scan directories
            accessor.map_empty_directories_sync(
                self.statistical_analysis_stats_directories,
                start_key=start_offset,
                end_key=start_offset + range_size,
            )
            if stats_debug:
                print("range:%d from:%d to:%d metrics:%d "
                      "expired:%d dirs:%d empty:%d oldest:%s" % (
                          i,
                          start_offset,
                          start_offset + range_size,
                          self._n_count - o_metric,
                          self._n_count_expired - o_metric_expired,
                          self._n_dir_count - o_directory,
                          self._n_dir_empty - o_directory_empty,
                          self.current_oldest))
        print("Range: %d (%f%%)" % (self._n_range_size, self._n_range_size / 2.**64))
        # BUG FIX: typo "extrated" -> "extracted" in the report line.
        print("Metrics extracted: %d; Outdated: %d (%.2f%%)" % (
            self._n_count,
            self._n_count_expired,
            100. * self._n_count_expired / self._n_count))
        multiplier = 2**64 / self._n_range_size
        print("Extrapolation: %d; Estimated outdated: %d" % (
            self._n_count * multiplier,
            self._n_count_expired * multiplier))
        print("Oldest metric found: %s" % self.oldest)
        print("Directories found: %d / Empty: %d" % (self._n_dir_count, self._n_dir_empty))
        print("Extrapolation: %d; Estimated empty: %d" % (
            self._n_dir_count * multiplier,
            self._n_dir_empty * multiplier))
        if self.metrics_file_path != "":
            self.write_metrics()

    def run(self, accessor, opts):
        """Disk usage of metrics.

        See command.CommandBase.
        """
        self.ns = Namespaces(opts.conf)
        accessor.connect()
        self.metrics_file_path = opts.metrics_file_path
        if accessor.TYPE.startswith("elasticsearch+"):
            accessor = accessor._metadata_accessor
        if accessor.TYPE == "cassandra" and opts.metadata_analysis:
            self.statistical_analysis(accessor)
            return
        if accessor.TYPE == "elasticsearch":
            # Elasticsearch has a better implementation.
            self._n_metrics, self._n_points = accessor.metric_stats(self.ns)
        else:
            accessor.map(
                self.stats,
                start_key=opts.start_key,
                end_key=opts.end_key,
                shard=opts.shard,
                nshards=opts.nshards,
            )
        columns = ("Namespace", "Metrics", "Points")
        rows = [columns]
        if opts.fmt == "graphite":
            now = int(time.time())
            output = ""
            for k, v in self._n_metrics.items():
                output += "%smetrics.%s %s %s\n" % (opts.prefix, k, v, now)
            for k, v in self._n_points.items():
                output += "%spoints.%s %s %s\n" % (opts.prefix, k, v, now)
            if not opts.carbon:
                print(output)
            else:
                # This is a very-very cheap implementation of a carbon client.
                host, port = opts.carbon.split(':')
                logging.info("Sending data to %s:%s" % (host, port))
                sock = socket.socket()
                sock.connect((host, int(port)))
                # BUG FIX: sockets require bytes; sending a plain str
                # fails on Python 3.  The payload is ASCII-safe, so
                # utf-8 encoding is lossless.
                sock.sendall(output.encode('utf-8'))
                sock.close()
            return
        for k in self._n_metrics.keys():
            data = (
                '%s%s' % (opts.prefix, k),
                self._n_metrics.get(k),
                self._n_points.get(k)
            )
            rows.append(data)
        print(tabulate.tabulate(rows, headers="firstrow", tablefmt=opts.fmt))

    def stats(self, metric, done, total):
        """Compute stats."""
        ns, _ = self.ns.lookup(metric.name)
        self._n_metrics[ns] += 1
        self._n_points[ns] += metric.metadata.retention.points
| apache-2.0 |
kaplun/invenio | modules/bibencode/lib/bibencode_utils.py | 24 | 12430 | # -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2011, 2013 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""BibEncode helper functions.
Functions that are used throughout the BibEncode module
"""
import os
import subprocess
import unicodedata
import re
import sys
import time
try:
from uuid import uuid4
except ImportError:
import random
def uuid4():
return "%x" % random.getrandbits(16*8)
from invenio.bibencode_config import (
CFG_BIBENCODE_FFMPEG_PROBE_LOG,
CFG_BIBENCODE_FFMPEG_PROBE_COMMAND,
CFD_BIBENCODE_FFMPEG_OUT_RE_CONFIGURATION,
CFG_BIBENCODE_FFMPEG_CONFIGURATION_REQUIRED,
CFG_BIBENCODE_MEDIAINFO_COMMAND
)
from invenio.config import CFG_PATH_FFPROBE
## The timestamp for the process. Used to identify Logfiles.
def generate_timestamp():
    """ Return a unique timestamp string used to name log files.

    The value is the current UTC time ("YYYYmmddHHMMSS") followed by a
    dash and a random UUID, so concurrent runs never collide.
    """
    now = time.strftime("%Y%m%d%H%M%S", time.gmtime())
    return "{0}-{1}".format(now, str(uuid4()))
## The following functions are for ffmpeg specific timecodes
## The format is HH:MM:SS.ss
def timecode_to_seconds(timecode):
    """ Converts a timecode to a total duration in seconds.

    @param timecode: timecode string of the form hh:mm:ss.ss or hh:mm:ss
    @return: total duration in seconds as a float
    @raise ValueError: if the argument is not a valid timecode
    """
    ## BUG FIX: build the message with %s formatting.  The original
    ## concatenated `timecode + "..."`, which raised TypeError instead of
    ## the intended ValueError whenever the argument was not a string.
    error = "%s is not a valid timecode, the format is hh:mm:ss.ss or hh:mm:ss"
    if not isinstance(timecode, str):
        raise ValueError(error % (timecode,))
    try:
        hours, minutes, seconds = timecode.split(':')
        return int(hours) * 3600 + int(minutes) * 60 + float(seconds)
    except ValueError:
        raise ValueError(error % (timecode,))
def seconds_to_timecode(total_seconds):
    """ Converts seconds to a timecode.

    @param total_seconds: duration as int, float or a numeric string
    @return: timecode string of the form hh:mm:ss.ss
    @raise ValueError: if a string argument is not a valid number
    @raise TypeError: if the argument is not int, float or string
    """
    if not isinstance(total_seconds, (int, float, str)):
        raise TypeError("seconds must be given as integer or float or string values")
    ## Cast to float
    try:
        total_seconds = float(total_seconds)
    except ValueError:
        ## BUG FIX: the original constructed this ValueError without
        ## raising it, so a malformed string fell through and crashed
        ## below with an unrelated TypeError.
        raise ValueError("string must be of format '1.1' or '1'")
    hours = int(total_seconds / 3600)
    minutes = int(total_seconds / 60) - hours * 60
    seconds = total_seconds % 60
    return "%02d:%02d:%05.2f" % (hours, minutes, seconds)
def is_timecode(value):
    """ Checks if the given string is a timecode (hh:mm:ss or hh:mm:ss.ss).

    @param value: candidate value
    @return: True if *value* is a timecode string, else False
    """
    ## isinstance covers str and unicode in one test (the original
    ## compared concrete types, which also rejected string subclasses).
    if not isinstance(value, basestring):
        return False
    return re.match(r"^\d\d:\d\d:\d\d(\.\d+)?$", value) is not None
def is_seconds(value):
    """ Checks if the given value represents seconds.

    Integers, floats and strings like '12' or '12.5' are accepted.
    """
    if type(value) in (int, float):
        return True
    if type(value) == str:
        return re.match(r"^\d+(\.\d+)?$", value) is not None
    return False
## Try to parse anything to unicode
## http://www.codigomanso.com/en/2010/05/una-de-python-force_unicode/
def force_unicode(seq, encoding='utf-8', errors='ignore'):
    """
    Returns a unicode object representing 'seq'. Treats bytestrings using the
    'encoding' codec.
    """
    # NOTE(review): 'codecs' appears unused in this function — confirm
    # before removing.
    import codecs
    if seq is None:
        return ''
    try:
        # NOTE(review): the trailing comma in `isinstance(seq, basestring,)`
        # is harmless but looks accidental.
        if not isinstance(seq, basestring,):
            if hasattr(seq, '__unicode__'):
                seq = unicode(seq)
            else:
                try:
                    seq = unicode(str(seq), encoding, errors)
                except UnicodeEncodeError:
                    # Only Exception instances get the lenient treatment
                    # below; anything else propagates the encode error.
                    if not isinstance(seq, Exception):
                        raise
                    # If we get to here, the caller has passed in an Exception
                    # subclass populated with non-ASCII data without special
                    # handling to display as a string. We need to handle this
                    # without raising a further exception. We do an
                    # approximation to what the Exception's standard str()
                    # output should be.
                    seq = ' '.join([force_unicode(arg, encoding, errors) for arg in seq])
        elif not isinstance(seq, unicode):
            # Note: We use .decode() here, instead of unicode(seq, encoding,
            # errors), so that if seq is a SafeString, it ends up being a
            # SafeUnicode at the end.
            seq = seq.decode(encoding, errors)
    except UnicodeDecodeError, e:
        if not isinstance(seq, Exception):
            raise UnicodeDecodeError (seq, *e.args)
        else:
            # If we get to here, the caller has passed in an Exception
            # subclass populated with non-ASCII bytestring data without a
            # working unicode method. Try to handle this without raising a
            # further exception by individually forcing the exception args
            # to unicode.
            seq = ' '.join([force_unicode(arg, encoding, errors) for arg in seq])
    return seq
def normalize_string(value):
    """
    Normalizes string, converts to lowercase, removes non-alpha characters,
    and converts spaces to hyphens.
    Returns a unicode object
    """
    # Decompose accents, then strip everything that is not ASCII.
    ascii_text = unicodedata.normalize('NFKD', force_unicode(value)).encode('ascii', 'ignore')
    # Drop punctuation, lowercase, and collapse whitespace/dashes to '-'.
    slug = unicode(re.sub(r'[^\w\s-]', '', ascii_text).strip().lower())
    return re.sub(r'[-\s]+', '-', slug)
def filename_convert(input_file):
    """ Converts /foo/bar/Example.mp4 to Example_mp4.

    Used for the generation of log filenames.
    """
    basename = os.path.basename(input_file)
    stem, extension = os.path.splitext(basename)
    return "%s_%s" % (stem, extension[1:])
def get_probe_log_filename(input_file):
    """ Returns the path of the ffprobe logfile for *input_file*. """
    return CFG_BIBENCODE_FFMPEG_PROBE_LOG % filename_convert(input_file)
def get_lines_from_probe_log(input_file):
    """ Probes the file using FFprobe and returns the lines of the probe log.

    This will create a log file as a side effect.

    @param input_file: path of the media file to probe
    @return: list of log lines, or None if probing failed
    """
    log_filename = get_probe_log_filename(input_file)
    ## Build command for ffprobe execution
    command = (CFG_BIBENCODE_FFMPEG_PROBE_COMMAND % input_file).split()
    ## BUG FIX: the original never closed the write handle before
    ## reopening the log for reading (and leaked the read handle too on
    ## some paths); both handles are now closed deterministically.
    log_file = open(log_filename, 'w')
    try:
        ## Start process and wait until it finishes
        process = subprocess.Popen(command, stderr=log_file)
        returncode = process.wait()
    finally:
        log_file.close()
    ## A nonzero return code (including -15/1) means probing failed.
    if returncode != 0:
        return None
    ## Read the log back and return its lines.
    log_file = open(log_filename)
    try:
        return log_file.read().splitlines()
    finally:
        log_file.close()
## Simple function to receive ffprobe results
def probe(input_file, parsable=False):
""" Probes the file using FFprobe and returns the output as a string
"""
command = (CFG_BIBENCODE_FFMPEG_PROBE_COMMAND % input_file).split()
process = subprocess.Popen(command, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
returncode = process.wait()
if returncode == 0:
if not parsable:
return process.communicate()[1]
else:
return process.communicate()[0]
else:
return None
def mediainfo(input_file):
    """ Receives XML output from the mediainfo CLI.

    @param input_file: path of the media file to inspect
    @return: XML output string, or None on failure
    """
    command = (CFG_BIBENCODE_MEDIAINFO_COMMAND % input_file).split()
    process = subprocess.Popen(command, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
    ## BUG FIX: communicate() instead of wait()-then-communicate(); the
    ## original could deadlock once mediainfo's output exceeded a pipe
    ## buffer.
    stdout, _ = process.communicate()
    if process.returncode == 0:
        return stdout
    return None
def check_ffmpeg_configuration():
    """ Uses ffprobe to check if ffmpeg is compiled with the right
    options to integrate in BibEncode
    @return: Returns a list of missing options
    @rtype: set
    """
    ## Use ffprobe to get the current ffmpeg configuration
    try:
        process = subprocess.Popen(CFG_PATH_FFPROBE, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
    except OSError:
        return ["FFMPEG/FFPROBE does not seem to be installed!"]
    # NOTE(review): wait() before communicate() with PIPEs can deadlock
    # if ffprobe writes more than one pipe buffer; `returncode` is also
    # unused below — confirm before restructuring.
    returncode = process.wait()
    ## This reads the stream from PIPE
    output = process.communicate()[1]
    ## find the configuration text inside ffprobes output
    ## and parse all the configuration arguments
    ## 'options' is a list configuration flags
    options = CFD_BIBENCODE_FFMPEG_OUT_RE_CONFIGURATION.findall(output)
    ## check if the neccessary configuration is availables
    ## This should be at least --enable-libvpx, ...libtheora, ...libvorbis
    ## ...gpl, ..version3, ...nonfree,
    ## libx264 and libfaac should be recommended in the manual but with
    ## regards about the licensing and patenting issues
    ## !!! Warn: For old Python versions, there is the sets module
    ## For newer ones, this is deprecated, set is a build in type now
    if sys.version_info < (2, 6):
        import sets
        o = sets.Set(options)
        s = sets.Set(CFG_BIBENCODE_FFMPEG_CONFIGURATION_REQUIRED)
        if not s.issubset(o):
            return o.difference(s)
    else:
        if not set(CFG_BIBENCODE_FFMPEG_CONFIGURATION_REQUIRED).issubset(options):
            return set(CFG_BIBENCODE_FFMPEG_CONFIGURATION_REQUIRED).difference(options)
    # Implicitly returns None when every required option is present.
def check_mediainfo_configuration():
    """ Checks if the mediainfo CLI tool is installed.

    @return: Returns a list of error messages, or False when mediainfo
        is available (no missings)
    @rtype: list or bool
    """
    try:
        process = subprocess.Popen('mediainfo', stderr=subprocess.PIPE, stdout=subprocess.PIPE)
    except OSError:
        return ["MEDIAINFO does not seem to be installed!"]
    ## BUG FIX: reap the child so it does not linger as a zombie (the
    ## original never waited on the spawned process).
    process.communicate()
    return False
def getval(dictionary, key, fallback=None):
    """ Returns a value from a dict. If the key doesn't exist, returns fallback.

    @param dictionary: a dictionary with the value to access
    @type dictionary: dict
    @param key: key of the value to access
    @type key: object
    @param fallback: a fallback value if the key does not exist
    @type fallback: object
    @return: the value of the key or the fallback
    @rtype: object
    """
    ## Only plain dicts are looked up (matching the original exact-type
    ## check); anything else yields the fallback.
    if type(dictionary) is dict and key in dictionary:
        return dictionary[key]
    return fallback
def chose(primary, fallback_key, fallback_dict):
    """ Returns a fallback from a dictionary if the primary is not available
    @param primary: value to take first
    @type primary: object
    @param fallback_key: key of the fallback
    @type fallback_key: object
    @param fallback_dict: the dictionary where the fallback key is stored
    @type fallback_dict: dict
    @return: primary, fallback value or None
    @rtype: object
    """
    ## Guard clause: a truthy primary wins outright
    if primary:
        return primary
    return getval(fallback_dict, fallback_key)
def chose2(key, primary_dict, fallback_dict):
    """ Tries to receive a key from one dictionary and falls back to another
    """
    ## Look up the fallback first, then use it as the default for the
    ## primary lookup (same evaluation order as the original nested call)
    fallback_value = getval(fallback_dict, key)
    return getval(primary_dict, key, fallback_value)
def aspect_string_to_float(aspect_as_string):
    """
    Transforms a string containing an aspect ratio to a float
    @param aspect_as_string: Aspect ratio as a String eg '16:9'
    @type aspect_as_string: string
    @return: Aspect ratio as a float eg '1.77777'
    @rtype: float
    @raise ValueError: if the string is not '<x>:<y>' with numeric parts
    @raise ZeroDivisionError: if the denominator part is zero
    """
    ## The original wrapped this in a bare 'try: ... except: raise', which
    ## is a no-op; exceptions propagate identically without it.
    aspect_x, aspect_y = aspect_as_string.split(':')
    return float(aspect_x) / float(aspect_y)
| gpl-2.0 |
mgrman/BitportViewer | BitportViewer_Kodi/BitportAPI.py | 1 | 4087 | import sys
import json
import requests
import re
class BitportAPI:
    """ Thin client for the Bitport.io v2 REST API.

    Loads an OAuth access token from a JSON file and exposes helpers to
    list cloud folders and to build authorized stream/download URLs.
    """

    ## Bearer token sent in the Authorization header of every request
    access_token = ""
    apiBaseUrl = "https://api.bitport.io/v2"
    ## Matches season/episode markers such as 'S01E02' or 's1e2'
    isTvShowRegex = re.compile("[Ss]\d{1,2}[Ee]\d{1,2}")
    ## Group 1: movie title before a year '2xxx'; groups 2+3: show title
    ## and its season/episode marker
    getNameRegex = re.compile("(^[a-zA-Z. _\-0-9()'\"]*?)\(?2\d\d\d|(^[a-zA-Z. _\-0-9()'\"]*)([Ss]\d{1,2}[Ee]\d{1,2})")

    def __init__(self, tokenPath):
        """ Reads the access token from the JSON file at tokenPath. """
        ## 'with' guarantees the token file is closed; the original left
        ## the file handle open.
        with open(tokenPath, 'r') as tokenJsonFile:
            token = json.loads(tokenJsonFile.read())
        self.access_token = token.get("access_token")

    def getFolder_raw(self, code):
        """ Fetches one cloud folder and returns its raw file/folder dicts.

        @param code: folder code, or None for the cloud root
        @return: BP_RawResult whose 'files' and 'folders' are lists
                 (both empty on any missing or malformed response)
        """
        getFolderUrl = self.apiBaseUrl + "/cloud"
        if code is not None:
            getFolderUrl = getFolderUrl + "/" + code
        resp = requests.get(getFolderUrl, headers={'Authorization': 'Bearer ' + self.access_token})
        data = resp.json()
        if data is None:
            return BP_RawResult()
        data = data.get("data")
        if data is None or len(data) == 0:
            return BP_RawResult()
        data = data[0]
        if data is None:
            return BP_RawResult()
        files = data.get("files")
        if files is None:
            files = []
        folders = data.get("folders")
        if folders is None:
            ## BUGFIX: the original assigned a misspelled local
            ## ('filesfolders'), so 'folders' stayed None here and iterating
            ## the result in getFolder() raised a TypeError.
            folders = []
        result = BP_RawResult()
        result.folders = folders
        result.files = files
        return result

    def getUrl(self, code, converted):
        """ Builds an authorized Kodi-style URL for a file.

        @param code: file code
        @param converted: True for the 'stream' endpoint, False for 'download'
        @return: endpoint URL with an appended '|Authorization=...' hint
        """
        if converted:
            operation = "stream"
        else:
            operation = "download"
        getFileUrlUrl = self.apiBaseUrl + "/files/" + code + "/" + operation
        return getFileUrlUrl + "|Authorization=Bearer " + self.access_token

    def convertFile(self, file):
        """ Converts a raw API file dict into a BP_File with a display name. """
        resultFile = BP_File()
        resultFile.code = file.get("code")
        resultFile.filename = file.get("name")
        resultFile.converted = file.get("conversion_status") == "converted"
        nameMatch = self.getNameRegex.match(resultFile.filename)
        if nameMatch is not None:
            movieGroup = nameMatch.group(1)
            tvShowGroup = nameMatch.group(2)
            tvShowEpisodeGroup = nameMatch.group(3)
            if movieGroup is not None:
                ## Movie: turn 'Some.Movie.Title' into 'Some Movie Title'
                resultFile.name = movieGroup.replace(".", " ").strip().title()
            elif tvShowGroup is not None:
                resultFile.name = tvShowGroup.replace(".", " ").strip().title() + " " + tvShowEpisodeGroup.upper()
            else:
                resultFile.name = resultFile.filename
        else:
            resultFile.name = resultFile.filename
        if file.get("video"):
            ## A season/episode marker distinguishes TV shows from movies
            if self.isTvShowRegex.match(resultFile.name) is not None:
                resultFile.type = BP_FileType.tv_show
            else:
                resultFile.type = BP_FileType.movie
        else:
            resultFile.type = BP_FileType.other
        return resultFile

    def convertFolder(self, folder):
        """ Converts a raw API folder dict into a BP_Folder. """
        resultFile = BP_Folder()
        resultFile.code = folder.get("code")
        resultFile.name = folder.get("name")
        return resultFile

    def getFolder(self, code):
        """ Lists a folder's contents as BP_File/BP_Folder objects. """
        rawResult = self.getFolder_raw(code)
        result = []
        for file in rawResult.files:
            result.append(self.convertFile(file))
        for folder in rawResult.folders:
            result.append(self.convertFolder(folder))
        return result
class BP_RawResult:
    """ Raw listing of one cloud folder: API dicts for files and subfolders. """
    def __init__(self):
        ## Per-instance lists; the original used mutable class attributes,
        ## so every BP_RawResult() shared (and could corrupt) the same two
        ## list objects.
        self.folders = []
        self.files = []
class BP_FileType:
    """ Enumeration of content categories for a Bitport file. """
    ## Plain integer constants, pre-enum-module style to match the file
    movie, tv_show, other = range(1, 4)
class BP_File:
    """ Display item for a single file returned by the API.

    Defaults live on the class; BitportAPI.convertFile overwrites them
    per instance (and additionally sets a 'converted' flag).
    """
    type = BP_FileType.other  # one of the BP_FileType constants
    code = ""  # API file code used to build stream/download URLs
    name = ""  # cleaned-up display name
    filename = ""  # original file name as reported by the API
class BP_Folder:
    """ Display item for a subfolder returned by the API. """
    code = ""  # API folder code, used to fetch the folder's contents
    name = ""  # folder display name
| gpl-3.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.