text
stringlengths 4
1.02M
| meta
dict |
|---|---|
"""Offer device oriented automation."""
import voluptuous as vol
from homeassistant.components.device_automation import (
DEVICE_TRIGGER_BASE_SCHEMA,
async_get_device_automation_platform,
)
from homeassistant.const import CONF_DOMAIN
from .exceptions import InvalidDeviceAutomationConfig
# mypy: allow-untyped-defs, no-check-untyped-defs
TRIGGER_SCHEMA = DEVICE_TRIGGER_BASE_SCHEMA.extend({}, extra=vol.ALLOW_EXTRA)
async def async_validate_trigger_config(hass, config):
"""Validate config."""
platform = await async_get_device_automation_platform(
hass, config[CONF_DOMAIN], "trigger"
)
if not hasattr(platform, "async_validate_trigger_config"):
return platform.TRIGGER_SCHEMA(config)
try:
return await getattr(platform, "async_validate_trigger_config")(hass, config)
except InvalidDeviceAutomationConfig as err:
raise vol.Invalid(str(err) or "Invalid trigger configuration") from err
async def async_attach_trigger(hass, config, action, automation_info):
"""Listen for trigger."""
platform = await async_get_device_automation_platform(
hass, config[CONF_DOMAIN], "trigger"
)
return await platform.async_attach_trigger(hass, config, action, automation_info)
|
{
"content_hash": "09545ffefb84a601d7eb0e2d14b86e1f",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 85,
"avg_line_length": 34.888888888888886,
"alnum_prop": 0.73328025477707,
"repo_name": "jawilson/home-assistant",
"id": "1a63dcb9e9ba0c3a899d2ad8e2b8393608db1170",
"size": "1256",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "homeassistant/components/device_automation/trigger.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2782"
},
{
"name": "Python",
"bytes": "40129467"
},
{
"name": "Shell",
"bytes": "4910"
}
],
"symlink_target": ""
}
|
try:
from nose.plugins.skip import SkipTest
except ImportError:
pass
else:
def setup_module():
raise SkipTest('Doctests fail for externals.validator')
"""
The Validator object is used to check that supplied values
conform to a specification.
The value can be supplied as a string - e.g. from a config file.
In this case the check will also *convert* the value to
the required type. This allows you to add validation
as a transparent layer to access data stored as strings.
The validation checks that the data is correct *and*
converts it to the expected type.
Some standard checks are provided for basic data types.
Additional checks are easy to write. They can be
provided when the ``Validator`` is instantiated or
added afterwards.
The standard functions work with the following basic data types :
* integers
* floats
* booleans
* strings
* ip_addr
plus lists of these datatypes
Adding additional checks is done through coding simple functions.
The full set of standard checks are :
* 'integer': matches integer values (including negative)
Takes optional 'min' and 'max' arguments : ::
integer()
integer(3, 9) # any value from 3 to 9
integer(min=0) # any positive value
integer(max=9)
* 'float': matches float values
Has the same parameters as the integer check.
* 'boolean': matches boolean values - ``True`` or ``False``
Acceptable string values for True are :
true, on, yes, 1
Acceptable string values for False are :
false, off, no, 0
Any other value raises an error.
* 'ip_addr': matches an Internet Protocol address, v.4, represented
by a dotted-quad string, i.e. '1.2.3.4'.
* 'string': matches any string.
Takes optional keyword args 'min' and 'max'
to specify min and max lengths of the string.
* 'list': matches any list.
Takes optional keyword args 'min', and 'max' to specify min and
max sizes of the list. (Always returns a list.)
* 'tuple': matches any tuple.
Takes optional keyword args 'min', and 'max' to specify min and
max sizes of the tuple. (Always returns a tuple.)
* 'int_list': Matches a list of integers.
Takes the same arguments as list.
* 'float_list': Matches a list of floats.
Takes the same arguments as list.
* 'bool_list': Matches a list of boolean values.
Takes the same arguments as list.
* 'ip_addr_list': Matches a list of IP addresses.
Takes the same arguments as list.
* 'string_list': Matches a list of strings.
Takes the same arguments as list.
* 'mixed_list': Matches a list with different types in
specific positions. List size must match
the number of arguments.
Each position can be one of :
'integer', 'float', 'ip_addr', 'string', 'boolean'
So to specify a list with two strings followed
by two integers, you write the check as : ::
mixed_list('string', 'string', 'integer', 'integer')
* 'pass': This check matches everything ! It never fails
and the value is unchanged.
It is also the default if no check is specified.
* 'option': This check matches any from a list of options.
You specify this check with : ::
option('option 1', 'option 2', 'option 3')
You can supply a default value (returned if no value is supplied)
using the default keyword argument.
You specify a list argument for default using a list constructor syntax in
the check : ::
checkname(arg1, arg2, default=list('val 1', 'val 2', 'val 3'))
A badly formatted set of arguments will raise a ``VdtParamError``.
"""
__version__ = '1.0.1'
__all__ = (
'__version__',
'dottedQuadToNum',
'numToDottedQuad',
'ValidateError',
'VdtUnknownCheckError',
'VdtParamError',
'VdtTypeError',
'VdtValueError',
'VdtValueTooSmallError',
'VdtValueTooBigError',
'VdtValueTooShortError',
'VdtValueTooLongError',
'VdtMissingValue',
'Validator',
'is_integer',
'is_float',
'is_boolean',
'is_list',
'is_tuple',
'is_ip_addr',
'is_string',
'is_int_list',
'is_bool_list',
'is_float_list',
'is_string_list',
'is_ip_addr_list',
'is_mixed_list',
'is_option',
'__docformat__',
)
import re
_list_arg = re.compile(r'''
(?:
([a-zA-Z_][a-zA-Z0-9_]*)\s*=\s*list\(
(
(?:
\s*
(?:
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'",\s\)][^,\)]*?) # unquoted
)
\s*,\s*
)*
(?:
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'",\s\)][^,\)]*?) # unquoted
)? # last one
)
\)
)
''', re.VERBOSE | re.DOTALL) # two groups
_list_members = re.compile(r'''
(
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'",\s=][^,=]*?) # unquoted
)
(?:
(?:\s*,\s*)|(?:\s*$) # comma
)
''', re.VERBOSE | re.DOTALL) # one group
_paramstring = r'''
(?:
(
(?:
[a-zA-Z_][a-zA-Z0-9_]*\s*=\s*list\(
(?:
\s*
(?:
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'",\s\)][^,\)]*?) # unquoted
)
\s*,\s*
)*
(?:
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'",\s\)][^,\)]*?) # unquoted
)? # last one
\)
)|
(?:
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'",\s=][^,=]*?)| # unquoted
(?: # keyword argument
[a-zA-Z_][a-zA-Z0-9_]*\s*=\s*
(?:
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'",\s=][^,=]*?) # unquoted
)
)
)
)
(?:
(?:\s*,\s*)|(?:\s*$) # comma
)
)
'''
_matchstring = '^%s*' % _paramstring
# Python pre 2.2.1 doesn't have bool
try:
bool
except NameError:
def bool(val):
"""Simple boolean equivalent function. """
if val:
return 1
else:
return 0
def dottedQuadToNum(ip):
"""
Convert decimal dotted quad string to long integer
>>> int(dottedQuadToNum('1 '))
1
>>> int(dottedQuadToNum(' 1.2'))
16777218
>>> int(dottedQuadToNum(' 1.2.3 '))
16908291
>>> int(dottedQuadToNum('1.2.3.4'))
16909060
>>> dottedQuadToNum('1.2.3. 4')
16909060
>>> dottedQuadToNum('255.255.255.255')
4294967295L
>>> dottedQuadToNum('255.255.255.256')
Traceback (most recent call last):
ValueError: Not a good dotted-quad IP: 255.255.255.256
"""
# import here to avoid it when ip_addr values are not used
import socket, struct
try:
return struct.unpack('!L',
socket.inet_aton(ip.strip()))[0]
except socket.error:
# bug in inet_aton, corrected in Python 2.3
if ip.strip() == '255.255.255.255':
return 0xFFFFFFFFL
else:
raise ValueError('Not a good dotted-quad IP: %s' % ip)
return
def numToDottedQuad(num):
"""
Convert long int to dotted quad string
>>> numToDottedQuad(-1L)
Traceback (most recent call last):
ValueError: Not a good numeric IP: -1
>>> numToDottedQuad(1L)
'0.0.0.1'
>>> numToDottedQuad(16777218L)
'1.0.0.2'
>>> numToDottedQuad(16908291L)
'1.2.0.3'
>>> numToDottedQuad(16909060L)
'1.2.3.4'
>>> numToDottedQuad(4294967295L)
'255.255.255.255'
>>> numToDottedQuad(4294967296L)
Traceback (most recent call last):
ValueError: Not a good numeric IP: 4294967296
"""
# import here to avoid it when ip_addr values are not used
import socket, struct
# no need to intercept here, 4294967295L is fine
if num > 4294967295L or num < 0:
raise ValueError('Not a good numeric IP: %s' % num)
try:
return socket.inet_ntoa(
struct.pack('!L', long(num)))
except (socket.error, struct.error, OverflowError):
raise ValueError('Not a good numeric IP: %s' % num)
class ValidateError(Exception):
"""
This error indicates that the check failed.
It can be the base class for more specific errors.
Any check function that fails ought to raise this error.
(or a subclass)
>>> raise ValidateError
Traceback (most recent call last):
ValidateError
"""
class VdtMissingValue(ValidateError):
"""No value was supplied to a check that needed one."""
class VdtUnknownCheckError(ValidateError):
"""An unknown check function was requested"""
def __init__(self, value):
"""
>>> raise VdtUnknownCheckError('yoda')
Traceback (most recent call last):
VdtUnknownCheckError: the check "yoda" is unknown.
"""
ValidateError.__init__(self, 'the check "%s" is unknown.' % (value,))
class VdtParamError(SyntaxError):
"""An incorrect parameter was passed"""
def __init__(self, name, value):
"""
>>> raise VdtParamError('yoda', 'jedi')
Traceback (most recent call last):
VdtParamError: passed an incorrect value "jedi" for parameter "yoda".
"""
SyntaxError.__init__(self, 'passed an incorrect value "%s" for parameter "%s".' % (value, name))
class VdtTypeError(ValidateError):
"""The value supplied was of the wrong type"""
def __init__(self, value):
"""
>>> raise VdtTypeError('jedi')
Traceback (most recent call last):
VdtTypeError: the value "jedi" is of the wrong type.
"""
ValidateError.__init__(self, 'the value "%s" is of the wrong type.' % (value,))
class VdtValueError(ValidateError):
"""The value supplied was of the correct type, but was not an allowed value."""
def __init__(self, value):
"""
>>> raise VdtValueError('jedi')
Traceback (most recent call last):
VdtValueError: the value "jedi" is unacceptable.
"""
ValidateError.__init__(self, 'the value "%s" is unacceptable.' % (value,))
class VdtValueTooSmallError(VdtValueError):
"""The value supplied was of the correct type, but was too small."""
def __init__(self, value):
"""
>>> raise VdtValueTooSmallError('0')
Traceback (most recent call last):
VdtValueTooSmallError: the value "0" is too small.
"""
ValidateError.__init__(self, 'the value "%s" is too small.' % (value,))
class VdtValueTooBigError(VdtValueError):
"""The value supplied was of the correct type, but was too big."""
def __init__(self, value):
"""
>>> raise VdtValueTooBigError('1')
Traceback (most recent call last):
VdtValueTooBigError: the value "1" is too big.
"""
ValidateError.__init__(self, 'the value "%s" is too big.' % (value,))
class VdtValueTooShortError(VdtValueError):
"""The value supplied was of the correct type, but was too short."""
def __init__(self, value):
"""
>>> raise VdtValueTooShortError('jed')
Traceback (most recent call last):
VdtValueTooShortError: the value "jed" is too short.
"""
ValidateError.__init__(
self,
'the value "%s" is too short.' % (value,))
class VdtValueTooLongError(VdtValueError):
"""The value supplied was of the correct type, but was too long."""
def __init__(self, value):
"""
>>> raise VdtValueTooLongError('jedie')
Traceback (most recent call last):
VdtValueTooLongError: the value "jedie" is too long.
"""
ValidateError.__init__(self, 'the value "%s" is too long.' % (value,))
class Validator(object):
"""
Validator is an object that allows you to register a set of 'checks'.
These checks take input and test that it conforms to the check.
This can also involve converting the value from a string into
the correct datatype.
The ``check`` method takes an input string which configures which
check is to be used and applies that check to a supplied value.
An example input string would be:
'int_range(param1, param2)'
You would then provide something like:
>>> def int_range_check(value, min, max):
... # turn min and max from strings to integers
... min = int(min)
... max = int(max)
... # check that value is of the correct type.
... # possible valid inputs are integers or strings
... # that represent integers
... if not isinstance(value, (int, long, basestring)):
... raise VdtTypeError(value)
... elif isinstance(value, basestring):
... # if we are given a string
... # attempt to convert to an integer
... try:
... value = int(value)
... except ValueError:
... raise VdtValueError(value)
... # check the value is between our constraints
... if not min <= value:
... raise VdtValueTooSmallError(value)
... if not value <= max:
... raise VdtValueTooBigError(value)
... return value
>>> fdict = {'int_range': int_range_check}
>>> vtr1 = Validator(fdict)
>>> vtr1.check('int_range(20, 40)', '30')
30
>>> vtr1.check('int_range(20, 40)', '60')
Traceback (most recent call last):
VdtValueTooBigError: the value "60" is too big.
New functions can be added with : ::
>>> vtr2 = Validator()
>>> vtr2.functions['int_range'] = int_range_check
Or by passing in a dictionary of functions when Validator
is instantiated.
Your functions *can* use keyword arguments,
but the first argument should always be 'value'.
If the function doesn't take additional arguments,
the parentheses are optional in the check.
It can be written with either of : ::
keyword = function_name
keyword = function_name()
The first program to utilise Validator() was Michael Foord's
ConfigObj, an alternative to ConfigParser which supports lists and
can validate a config file using a config schema.
For more details on using Validator with ConfigObj see:
http://www.voidspace.org.uk/python/configobj.html
"""
# this regex does the initial parsing of the checks
_func_re = re.compile(r'(.+?)\((.*)\)', re.DOTALL)
# this regex takes apart keyword arguments
_key_arg = re.compile(r'^([a-zA-Z_][a-zA-Z0-9_]*)\s*=\s*(.*)$', re.DOTALL)
# this regex finds keyword=list(....) type values
_list_arg = _list_arg
# this regex takes individual values out of lists - in one pass
_list_members = _list_members
# These regexes check a set of arguments for validity
# and then pull the members out
_paramfinder = re.compile(_paramstring, re.VERBOSE | re.DOTALL)
_matchfinder = re.compile(_matchstring, re.VERBOSE | re.DOTALL)
def __init__(self, functions=None):
"""
>>> vtri = Validator()
"""
self.functions = {
'': self._pass,
'integer': is_integer,
'float': is_float,
'boolean': is_boolean,
'ip_addr': is_ip_addr,
'string': is_string,
'list': is_list,
'tuple': is_tuple,
'int_list': is_int_list,
'float_list': is_float_list,
'bool_list': is_bool_list,
'ip_addr_list': is_ip_addr_list,
'string_list': is_string_list,
'mixed_list': is_mixed_list,
'pass': self._pass,
'option': is_option,
'force_list': force_list,
}
if functions is not None:
self.functions.update(functions)
# tekNico: for use by ConfigObj
self.baseErrorClass = ValidateError
self._cache = {}
def check(self, check, value, missing=False):
"""
Usage: check(check, value)
Arguments:
check: string representing check to apply (including arguments)
value: object to be checked
Returns value, converted to correct type if necessary
If the check fails, raises a ``ValidateError`` subclass.
>>> vtor.check('yoda', '')
Traceback (most recent call last):
VdtUnknownCheckError: the check "yoda" is unknown.
>>> vtor.check('yoda()', '')
Traceback (most recent call last):
VdtUnknownCheckError: the check "yoda" is unknown.
>>> vtor.check('string(default="")', '', missing=True)
''
"""
fun_name, fun_args, fun_kwargs, default = self._parse_with_caching(check)
if missing:
if default is None:
# no information needed here - to be handled by caller
raise VdtMissingValue()
value = self._handle_none(default)
if value is None:
return None
return self._check_value(value, fun_name, fun_args, fun_kwargs)
def _handle_none(self, value):
if value == 'None':
value = None
elif value in ("'None'", '"None"'):
# Special case a quoted None
value = self._unquote(value)
return value
def _parse_with_caching(self, check):
if check in self._cache:
fun_name, fun_args, fun_kwargs, default = self._cache[check]
# We call list and dict below to work with *copies* of the data
# rather than the original (which are mutable of course)
fun_args = list(fun_args)
fun_kwargs = dict(fun_kwargs)
else:
fun_name, fun_args, fun_kwargs, default = self._parse_check(check)
fun_kwargs = dict([(str(key), value) for (key, value) in fun_kwargs.items()])
self._cache[check] = fun_name, list(fun_args), dict(fun_kwargs), default
return fun_name, fun_args, fun_kwargs, default
def _check_value(self, value, fun_name, fun_args, fun_kwargs):
try:
fun = self.functions[fun_name]
except KeyError:
raise VdtUnknownCheckError(fun_name)
else:
return fun(value, *fun_args, **fun_kwargs)
def _parse_check(self, check):
fun_match = self._func_re.match(check)
if fun_match:
fun_name = fun_match.group(1)
arg_string = fun_match.group(2)
arg_match = self._matchfinder.match(arg_string)
if arg_match is None:
# Bad syntax
raise VdtParamError('Bad syntax in check "%s".' % check)
fun_args = []
fun_kwargs = {}
# pull out args of group 2
for arg in self._paramfinder.findall(arg_string):
# args may need whitespace removing (before removing quotes)
arg = arg.strip()
listmatch = self._list_arg.match(arg)
if listmatch:
key, val = self._list_handle(listmatch)
fun_kwargs[key] = val
continue
keymatch = self._key_arg.match(arg)
if keymatch:
val = keymatch.group(2)
if not val in ("'None'", '"None"'):
# Special case a quoted None
val = self._unquote(val)
fun_kwargs[keymatch.group(1)] = val
continue
fun_args.append(self._unquote(arg))
else:
# allows for function names without (args)
return check, (), {}, None
# Default must be deleted if the value is specified too,
# otherwise the check function will get a spurious "default" keyword arg
try:
default = fun_kwargs.pop('default', None)
except AttributeError:
# Python 2.2 compatibility
default = None
try:
default = fun_kwargs['default']
del fun_kwargs['default']
except KeyError:
pass
return fun_name, fun_args, fun_kwargs, default
def _unquote(self, val):
"""Unquote a value if necessary."""
if (len(val) >= 2) and (val[0] in ("'", '"')) and (val[0] == val[-1]):
val = val[1:-1]
return val
def _list_handle(self, listmatch):
"""Take apart a ``keyword=list('val, 'val')`` type string."""
out = []
name = listmatch.group(1)
args = listmatch.group(2)
for arg in self._list_members.findall(args):
out.append(self._unquote(arg))
return name, out
def _pass(self, value):
"""
Dummy check that always passes
>>> vtor.check('', 0)
0
>>> vtor.check('', '0')
'0'
"""
return value
def get_default_value(self, check):
"""
Given a check, return the default value for the check
(converted to the right type).
If the check doesn't specify a default value then a
``KeyError`` will be raised.
"""
fun_name, fun_args, fun_kwargs, default = self._parse_with_caching(check)
if default is None:
raise KeyError('Check "%s" has no default value.' % check)
value = self._handle_none(default)
if value is None:
return value
return self._check_value(value, fun_name, fun_args, fun_kwargs)
def _is_num_param(names, values, to_float=False):
"""
Return numbers from inputs or raise VdtParamError.
Lets ``None`` pass through.
Pass in keyword argument ``to_float=True`` to
use float for the conversion rather than int.
>>> _is_num_param(('', ''), (0, 1.0))
[0, 1]
>>> _is_num_param(('', ''), (0, 1.0), to_float=True)
[0.0, 1.0]
>>> _is_num_param(('a'), ('a'))
Traceback (most recent call last):
VdtParamError: passed an incorrect value "a" for parameter "a".
"""
fun = to_float and float or int
out_params = []
for (name, val) in zip(names, values):
if val is None:
out_params.append(val)
elif isinstance(val, (int, long, float, basestring)):
try:
out_params.append(fun(val))
except ValueError, e:
raise VdtParamError(name, val)
else:
raise VdtParamError(name, val)
return out_params
# built in checks
# you can override these by setting the appropriate name
# in Validator.functions
# note: if the params are specified wrongly in your input string,
# you will also raise errors.
def is_integer(value, min=None, max=None):
"""
A check that tests that a given value is an integer (int, or long)
and optionally, between bounds. A negative value is accepted, while
a float will fail.
If the value is a string, then the conversion is done - if possible.
Otherwise a VdtError is raised.
>>> vtor.check('integer', '-1')
-1
>>> vtor.check('integer', '0')
0
>>> vtor.check('integer', 9)
9
>>> vtor.check('integer', 'a')
Traceback (most recent call last):
VdtTypeError: the value "a" is of the wrong type.
>>> vtor.check('integer', '2.2')
Traceback (most recent call last):
VdtTypeError: the value "2.2" is of the wrong type.
>>> vtor.check('integer(10)', '20')
20
>>> vtor.check('integer(max=20)', '15')
15
>>> vtor.check('integer(10)', '9')
Traceback (most recent call last):
VdtValueTooSmallError: the value "9" is too small.
>>> vtor.check('integer(10)', 9)
Traceback (most recent call last):
VdtValueTooSmallError: the value "9" is too small.
>>> vtor.check('integer(max=20)', '35')
Traceback (most recent call last):
VdtValueTooBigError: the value "35" is too big.
>>> vtor.check('integer(max=20)', 35)
Traceback (most recent call last):
VdtValueTooBigError: the value "35" is too big.
>>> vtor.check('integer(0, 9)', False)
0
"""
(min_val, max_val) = _is_num_param(('min', 'max'), (min, max))
if not isinstance(value, (int, long, basestring)):
raise VdtTypeError(value)
if isinstance(value, basestring):
# if it's a string - does it represent an integer ?
try:
value = int(value)
except ValueError:
raise VdtTypeError(value)
if (min_val is not None) and (value < min_val):
raise VdtValueTooSmallError(value)
if (max_val is not None) and (value > max_val):
raise VdtValueTooBigError(value)
return value
def is_float(value, min=None, max=None):
"""
A check that tests that a given value is a float
(an integer will be accepted), and optionally - that it is between bounds.
If the value is a string, then the conversion is done - if possible.
Otherwise a VdtError is raised.
This can accept negative values.
>>> vtor.check('float', '2')
2.0
From now on we multiply the value to avoid comparing decimals
>>> vtor.check('float', '-6.8') * 10
-68.0
>>> vtor.check('float', '12.2') * 10
122.0
>>> vtor.check('float', 8.4) * 10
84.0
>>> vtor.check('float', 'a')
Traceback (most recent call last):
VdtTypeError: the value "a" is of the wrong type.
>>> vtor.check('float(10.1)', '10.2') * 10
102.0
>>> vtor.check('float(max=20.2)', '15.1') * 10
151.0
>>> vtor.check('float(10.0)', '9.0')
Traceback (most recent call last):
VdtValueTooSmallError: the value "9.0" is too small.
>>> vtor.check('float(max=20.0)', '35.0')
Traceback (most recent call last):
VdtValueTooBigError: the value "35.0" is too big.
"""
(min_val, max_val) = _is_num_param(
('min', 'max'), (min, max), to_float=True)
if not isinstance(value, (int, long, float, basestring)):
raise VdtTypeError(value)
if not isinstance(value, float):
# if it's a string - does it represent a float ?
try:
value = float(value)
except ValueError:
raise VdtTypeError(value)
if (min_val is not None) and (value < min_val):
raise VdtValueTooSmallError(value)
if (max_val is not None) and (value > max_val):
raise VdtValueTooBigError(value)
return value
bool_dict = {
True: True, 'on': True, '1': True, 'true': True, 'yes': True,
False: False, 'off': False, '0': False, 'false': False, 'no': False,
}
def is_boolean(value):
"""
Check if the value represents a boolean.
>>> vtor.check('boolean', 0)
0
>>> vtor.check('boolean', False)
0
>>> vtor.check('boolean', '0')
0
>>> vtor.check('boolean', 'off')
0
>>> vtor.check('boolean', 'false')
0
>>> vtor.check('boolean', 'no')
0
>>> vtor.check('boolean', 'nO')
0
>>> vtor.check('boolean', 'NO')
0
>>> vtor.check('boolean', 1)
1
>>> vtor.check('boolean', True)
1
>>> vtor.check('boolean', '1')
1
>>> vtor.check('boolean', 'on')
1
>>> vtor.check('boolean', 'true')
1
>>> vtor.check('boolean', 'yes')
1
>>> vtor.check('boolean', 'Yes')
1
>>> vtor.check('boolean', 'YES')
1
>>> vtor.check('boolean', '')
Traceback (most recent call last):
VdtTypeError: the value "" is of the wrong type.
>>> vtor.check('boolean', 'up')
Traceback (most recent call last):
VdtTypeError: the value "up" is of the wrong type.
"""
if isinstance(value, basestring):
try:
return bool_dict[value.lower()]
except KeyError:
raise VdtTypeError(value)
# we do an equality test rather than an identity test
# this ensures Python 2.2 compatibilty
# and allows 0 and 1 to represent True and False
if value == False:
return False
elif value == True:
return True
else:
raise VdtTypeError(value)
def is_ip_addr(value):
"""
Check that the supplied value is an Internet Protocol address, v.4,
represented by a dotted-quad string, i.e. '1.2.3.4'.
>>> vtor.check('ip_addr', '1 ')
'1'
>>> vtor.check('ip_addr', ' 1.2')
'1.2'
>>> vtor.check('ip_addr', ' 1.2.3 ')
'1.2.3'
>>> vtor.check('ip_addr', '1.2.3.4')
'1.2.3.4'
>>> vtor.check('ip_addr', '0.0.0.0')
'0.0.0.0'
>>> vtor.check('ip_addr', '255.255.255.255')
'255.255.255.255'
>>> vtor.check('ip_addr', '255.255.255.256')
Traceback (most recent call last):
VdtValueError: the value "255.255.255.256" is unacceptable.
>>> vtor.check('ip_addr', '1.2.3.4.5')
Traceback (most recent call last):
VdtValueError: the value "1.2.3.4.5" is unacceptable.
>>> vtor.check('ip_addr', 0)
Traceback (most recent call last):
VdtTypeError: the value "0" is of the wrong type.
"""
if not isinstance(value, basestring):
raise VdtTypeError(value)
value = value.strip()
try:
dottedQuadToNum(value)
except ValueError:
raise VdtValueError(value)
return value
def is_list(value, min=None, max=None):
"""
Check that the value is a list of values.
You can optionally specify the minimum and maximum number of members.
It does no check on list members.
>>> vtor.check('list', ())
[]
>>> vtor.check('list', [])
[]
>>> vtor.check('list', (1, 2))
[1, 2]
>>> vtor.check('list', [1, 2])
[1, 2]
>>> vtor.check('list(3)', (1, 2))
Traceback (most recent call last):
VdtValueTooShortError: the value "(1, 2)" is too short.
>>> vtor.check('list(max=5)', (1, 2, 3, 4, 5, 6))
Traceback (most recent call last):
VdtValueTooLongError: the value "(1, 2, 3, 4, 5, 6)" is too long.
>>> vtor.check('list(min=3, max=5)', (1, 2, 3, 4))
[1, 2, 3, 4]
>>> vtor.check('list', 0)
Traceback (most recent call last):
VdtTypeError: the value "0" is of the wrong type.
>>> vtor.check('list', '12')
Traceback (most recent call last):
VdtTypeError: the value "12" is of the wrong type.
"""
(min_len, max_len) = _is_num_param(('min', 'max'), (min, max))
if isinstance(value, basestring):
raise VdtTypeError(value)
try:
num_members = len(value)
except TypeError:
raise VdtTypeError(value)
if min_len is not None and num_members < min_len:
raise VdtValueTooShortError(value)
if max_len is not None and num_members > max_len:
raise VdtValueTooLongError(value)
return list(value)
def is_tuple(value, min=None, max=None):
"""
Check that the value is a tuple of values.
You can optionally specify the minimum and maximum number of members.
It does no check on members.
>>> vtor.check('tuple', ())
()
>>> vtor.check('tuple', [])
()
>>> vtor.check('tuple', (1, 2))
(1, 2)
>>> vtor.check('tuple', [1, 2])
(1, 2)
>>> vtor.check('tuple(3)', (1, 2))
Traceback (most recent call last):
VdtValueTooShortError: the value "(1, 2)" is too short.
>>> vtor.check('tuple(max=5)', (1, 2, 3, 4, 5, 6))
Traceback (most recent call last):
VdtValueTooLongError: the value "(1, 2, 3, 4, 5, 6)" is too long.
>>> vtor.check('tuple(min=3, max=5)', (1, 2, 3, 4))
(1, 2, 3, 4)
>>> vtor.check('tuple', 0)
Traceback (most recent call last):
VdtTypeError: the value "0" is of the wrong type.
>>> vtor.check('tuple', '12')
Traceback (most recent call last):
VdtTypeError: the value "12" is of the wrong type.
"""
return tuple(is_list(value, min, max))
def is_string(value, min=None, max=None):
"""
Check that the supplied value is a string.
You can optionally specify the minimum and maximum number of members.
>>> vtor.check('string', '0')
'0'
>>> vtor.check('string', 0)
Traceback (most recent call last):
VdtTypeError: the value "0" is of the wrong type.
>>> vtor.check('string(2)', '12')
'12'
>>> vtor.check('string(2)', '1')
Traceback (most recent call last):
VdtValueTooShortError: the value "1" is too short.
>>> vtor.check('string(min=2, max=3)', '123')
'123'
>>> vtor.check('string(min=2, max=3)', '1234')
Traceback (most recent call last):
VdtValueTooLongError: the value "1234" is too long.
"""
if not isinstance(value, basestring):
raise VdtTypeError(value)
(min_len, max_len) = _is_num_param(('min', 'max'), (min, max))
try:
num_members = len(value)
except TypeError:
raise VdtTypeError(value)
if min_len is not None and num_members < min_len:
raise VdtValueTooShortError(value)
if max_len is not None and num_members > max_len:
raise VdtValueTooLongError(value)
return value
def is_int_list(value, min=None, max=None):
"""
Check that the value is a list of integers.
You can optionally specify the minimum and maximum number of members.
Each list member is checked that it is an integer.
>>> vtor.check('int_list', ())
[]
>>> vtor.check('int_list', [])
[]
>>> vtor.check('int_list', (1, 2))
[1, 2]
>>> vtor.check('int_list', [1, 2])
[1, 2]
>>> vtor.check('int_list', [1, 'a'])
Traceback (most recent call last):
VdtTypeError: the value "a" is of the wrong type.
"""
return [is_integer(mem) for mem in is_list(value, min, max)]
def is_bool_list(value, min=None, max=None):
"""
Check that the value is a list of booleans.
You can optionally specify the minimum and maximum number of members.
Each list member is checked that it is a boolean.
>>> vtor.check('bool_list', ())
[]
>>> vtor.check('bool_list', [])
[]
>>> check_res = vtor.check('bool_list', (True, False))
>>> check_res == [True, False]
1
>>> check_res = vtor.check('bool_list', [True, False])
>>> check_res == [True, False]
1
>>> vtor.check('bool_list', [True, 'a'])
Traceback (most recent call last):
VdtTypeError: the value "a" is of the wrong type.
"""
return [is_boolean(mem) for mem in is_list(value, min, max)]
def is_float_list(value, min=None, max=None):
"""
Check that the value is a list of floats.
You can optionally specify the minimum and maximum number of members.
Each list member is checked that it is a float.
>>> vtor.check('float_list', ())
[]
>>> vtor.check('float_list', [])
[]
>>> vtor.check('float_list', (1, 2.0))
[1.0, 2.0]
>>> vtor.check('float_list', [1, 2.0])
[1.0, 2.0]
>>> vtor.check('float_list', [1, 'a'])
Traceback (most recent call last):
VdtTypeError: the value "a" is of the wrong type.
"""
return [is_float(mem) for mem in is_list(value, min, max)]
def is_string_list(value, min=None, max=None):
"""
Check that the value is a list of strings.
You can optionally specify the minimum and maximum number of members.
Each list member is checked that it is a string.
>>> vtor.check('string_list', ())
[]
>>> vtor.check('string_list', [])
[]
>>> vtor.check('string_list', ('a', 'b'))
['a', 'b']
>>> vtor.check('string_list', ['a', 1])
Traceback (most recent call last):
VdtTypeError: the value "1" is of the wrong type.
>>> vtor.check('string_list', 'hello')
Traceback (most recent call last):
VdtTypeError: the value "hello" is of the wrong type.
"""
if isinstance(value, basestring):
raise VdtTypeError(value)
return [is_string(mem) for mem in is_list(value, min, max)]
def is_ip_addr_list(value, min=None, max=None):
"""
Check that the value is a list of IP addresses.
You can optionally specify the minimum and maximum number of members.
Each list member is checked that it is an IP address.
>>> vtor.check('ip_addr_list', ())
[]
>>> vtor.check('ip_addr_list', [])
[]
>>> vtor.check('ip_addr_list', ('1.2.3.4', '5.6.7.8'))
['1.2.3.4', '5.6.7.8']
>>> vtor.check('ip_addr_list', ['a'])
Traceback (most recent call last):
VdtValueError: the value "a" is unacceptable.
"""
return [is_ip_addr(mem) for mem in is_list(value, min, max)]
def force_list(value, min=None, max=None):
"""
Check that a value is a list, coercing strings into
a list with one member. Useful where users forget the
trailing comma that turns a single value into a list.
You can optionally specify the minimum and maximum number of members.
A minumum of greater than one will fail if the user only supplies a
string.
>>> vtor.check('force_list', ())
[]
>>> vtor.check('force_list', [])
[]
>>> vtor.check('force_list', 'hello')
['hello']
"""
if not isinstance(value, (list, tuple)):
value = [value]
return is_list(value, min, max)
fun_dict = {
'integer': is_integer,
'float': is_float,
'ip_addr': is_ip_addr,
'string': is_string,
'boolean': is_boolean,
}
def is_mixed_list(value, *args):
"""
Check that the value is a list.
Allow specifying the type of each member.
Work on lists of specific lengths.
You specify each member as a positional argument specifying type
Each type should be one of the following strings :
'integer', 'float', 'ip_addr', 'string', 'boolean'
So you can specify a list of two strings, followed by
two integers as :
mixed_list('string', 'string', 'integer', 'integer')
The length of the list must match the number of positional
arguments you supply.
>>> mix_str = "mixed_list('integer', 'float', 'ip_addr', 'string', 'boolean')"
>>> check_res = vtor.check(mix_str, (1, 2.0, '1.2.3.4', 'a', True))
>>> check_res == [1, 2.0, '1.2.3.4', 'a', True]
1
>>> check_res = vtor.check(mix_str, ('1', '2.0', '1.2.3.4', 'a', 'True'))
>>> check_res == [1, 2.0, '1.2.3.4', 'a', True]
1
>>> vtor.check(mix_str, ('b', 2.0, '1.2.3.4', 'a', True))
Traceback (most recent call last):
VdtTypeError: the value "b" is of the wrong type.
>>> vtor.check(mix_str, (1, 2.0, '1.2.3.4', 'a'))
Traceback (most recent call last):
VdtValueTooShortError: the value "(1, 2.0, '1.2.3.4', 'a')" is too short.
>>> vtor.check(mix_str, (1, 2.0, '1.2.3.4', 'a', 1, 'b'))
Traceback (most recent call last):
VdtValueTooLongError: the value "(1, 2.0, '1.2.3.4', 'a', 1, 'b')" is too long.
>>> vtor.check(mix_str, 0)
Traceback (most recent call last):
VdtTypeError: the value "0" is of the wrong type.
This test requires an elaborate setup, because of a change in error string
output from the interpreter between Python 2.2 and 2.3 .
>>> res_seq = (
... 'passed an incorrect value "',
... 'yoda',
... '" for parameter "mixed_list".',
... )
>>> res_str = "'".join(res_seq)
>>> try:
... vtor.check('mixed_list("yoda")', ('a'))
... except VdtParamError, err:
... str(err) == res_str
1
"""
try:
length = len(value)
except TypeError:
raise VdtTypeError(value)
if length < len(args):
raise VdtValueTooShortError(value)
elif length > len(args):
raise VdtValueTooLongError(value)
try:
return [fun_dict[arg](val) for arg, val in zip(args, value)]
except KeyError, e:
raise VdtParamError('mixed_list', e)
def is_option(value, *options):
"""
This check matches the value to any of a set of options.
>>> vtor.check('option("yoda", "jedi")', 'yoda')
'yoda'
>>> vtor.check('option("yoda", "jedi")', 'jed')
Traceback (most recent call last):
VdtValueError: the value "jed" is unacceptable.
>>> vtor.check('option("yoda", "jedi")', 0)
Traceback (most recent call last):
VdtTypeError: the value "0" is of the wrong type.
"""
if not isinstance(value, basestring):
raise VdtTypeError(value)
if not value in options:
raise VdtValueError(value)
return value
def _test(value, *args, **keywargs):
"""
A function that exists for test purposes.
>>> checks = [
... '3, 6, min=1, max=3, test=list(a, b, c)',
... '3',
... '3, 6',
... '3,',
... 'min=1, test="a b c"',
... 'min=5, test="a, b, c"',
... 'min=1, max=3, test="a, b, c"',
... 'min=-100, test=-99',
... 'min=1, max=3',
... '3, 6, test="36"',
... '3, 6, test="a, b, c"',
... '3, max=3, test=list("a", "b", "c")',
... '''3, max=3, test=list("'a'", 'b', "x=(c)")''',
... "test='x=fish(3)'",
... ]
>>> v = Validator({'test': _test})
>>> for entry in checks:
... print v.check(('test(%s)' % entry), 3)
(3, ('3', '6'), {'test': ['a', 'b', 'c'], 'max': '3', 'min': '1'})
(3, ('3',), {})
(3, ('3', '6'), {})
(3, ('3',), {})
(3, (), {'test': 'a b c', 'min': '1'})
(3, (), {'test': 'a, b, c', 'min': '5'})
(3, (), {'test': 'a, b, c', 'max': '3', 'min': '1'})
(3, (), {'test': '-99', 'min': '-100'})
(3, (), {'max': '3', 'min': '1'})
(3, ('3', '6'), {'test': '36'})
(3, ('3', '6'), {'test': 'a, b, c'})
(3, ('3',), {'test': ['a', 'b', 'c'], 'max': '3'})
(3, ('3',), {'test': ["'a'", 'b', 'x=(c)'], 'max': '3'})
(3, (), {'test': 'x=fish(3)'})
>>> v = Validator()
>>> v.check('integer(default=6)', '3')
3
>>> v.check('integer(default=6)', None, True)
6
>>> v.get_default_value('integer(default=6)')
6
>>> v.get_default_value('float(default=6)')
6.0
>>> v.get_default_value('pass(default=None)')
>>> v.get_default_value("string(default='None')")
'None'
>>> v.get_default_value('pass')
Traceback (most recent call last):
KeyError: 'Check "pass" has no default value.'
>>> v.get_default_value('pass(default=list(1, 2, 3, 4))')
['1', '2', '3', '4']
>>> v = Validator()
>>> v.check("pass(default=None)", None, True)
>>> v.check("pass(default='None')", None, True)
'None'
>>> v.check('pass(default="None")', None, True)
'None'
>>> v.check('pass(default=list(1, 2, 3, 4))', None, True)
['1', '2', '3', '4']
Bug test for unicode arguments
>>> v = Validator()
>>> v.check(u'string(min=4)', u'test')
u'test'
>>> v = Validator()
>>> v.get_default_value(u'string(min=4, default="1234")')
u'1234'
>>> v.check(u'string(min=4, default="1234")', u'test')
u'test'
>>> v = Validator()
>>> default = v.get_default_value('string(default=None)')
>>> default == None
1
"""
return (value, args, keywargs)
def _test2():
"""
>>>
>>> v = Validator()
>>> v.get_default_value('string(default="#ff00dd")')
'#ff00dd'
>>> v.get_default_value('integer(default=3) # comment')
3
"""
def _test3():
r"""
>>> vtor.check('string(default="")', '', missing=True)
''
>>> vtor.check('string(default="\n")', '', missing=True)
'\n'
>>> print vtor.check('string(default="\n")', '', missing=True),
<BLANKLINE>
>>> vtor.check('string()', '\n')
'\n'
>>> vtor.check('string(default="\n\n\n")', '', missing=True)
'\n\n\n'
>>> vtor.check('string()', 'random \n text goes here\n\n')
'random \n text goes here\n\n'
>>> vtor.check('string(default=" \nrandom text\ngoes \n here\n\n ")',
... '', missing=True)
' \nrandom text\ngoes \n here\n\n '
>>> vtor.check("string(default='\n\n\n')", '', missing=True)
'\n\n\n'
>>> vtor.check("option('\n','a','b',default='\n')", '', missing=True)
'\n'
>>> vtor.check("string_list()", ['foo', '\n', 'bar'])
['foo', '\n', 'bar']
>>> vtor.check("string_list(default=list('\n'))", '', missing=True)
['\n']
"""
if __name__ == '__main__':
# run the code tests in doctest format
import sys
import doctest
m = sys.modules.get('__main__')
globs = m.__dict__.copy()
globs.update({
'vtor': Validator(),
})
doctest.testmod(m, globs=globs)
|
{
"content_hash": "2b241ef9bd2b677c5c76c68dc2d5d8ff",
"timestamp": "",
"source": "github",
"line_count": 1452,
"max_line_length": 104,
"avg_line_length": 31.74862258953168,
"alnum_prop": 0.5421158810386343,
"repo_name": "arokem/nipy",
"id": "27918d5fbb1647ea54411c16908227c77c73ee62",
"size": "46864",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "nipy/externals/validate.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "1601255"
},
{
"name": "C++",
"bytes": "999"
},
{
"name": "Makefile",
"bytes": "2786"
},
{
"name": "Matlab",
"bytes": "5508"
},
{
"name": "Python",
"bytes": "3047221"
}
],
"symlink_target": ""
}
|
from .settings import settings
import requests
import io
import dill
from os import path
from requests_toolbelt import MultipartEncoder, MultipartEncoderMonitor
import sys
if settings.IS_IPYTHON:
from tqdm import tqdm_notebook as tqdm
else:
from tqdm import tqdm
def save_var_cloud(data_var, data_name):
if not isinstance(data_name, str):
print("data_name must be a string")
return
user_hash = settings.API_KEY
data_buffer = io.BytesIO(dill.dumps(data_var))
print('Uploading %s...' % data_name)
url = 'http://%s/api/save/getUploadUrl' % settings.CATALEARN_URL
r = requests.post(url, data={
'type': 'variable',
'user_hash': user_hash,
'file_name': data_name
})
if r.status_code != 200:
raise RuntimeError(r.text)
presigned_url = r.content
r = requests.put(presigned_url, data=data_buffer)
if (r.status_code != 200):
print("Error saving %s\: %s" % (data_name, r.content))
else:
print("Successfully uploaded %s" % data_name)
return
def save_file_cloud(file_path):
if not path.exists(file_path):
print('%s does not exist' % file_path)
return
user_hash = settings.API_KEY
save_name = path.basename(file_path)
print('Uploading %s...' % save_name)
url = 'http://%s/api/save/getUploadUrl' % settings.CATALEARN_URL
r = requests.post(url, data={
'type': 'file',
'user_hash': user_hash,
'file_name': save_name
})
if r.status_code != 200:
raise RuntimeError(r.text)
presigned_url = r.content
with open(file_path, 'rb') as f:
r = requests.put(presigned_url, data=f)
if (r.status_code != 200):
print("Error uploading %s\: %s" % (save_name, r.content))
else:
print("Successfully uploaded %s" % save_name)
return
def download_progress(res, stream, save_name):
total_size = int(res.headers.get('content-length', 0));
chunk_size = 32 * 1024
pbar = tqdm(total=total_size, unit='B', unit_scale=True, desc=save_name)
for data in res.iter_content(chunk_size):
stream.write(data)
pbar.update(chunk_size)
def download_var_cloud(data_name):
if not isinstance(data_name, str):
print("data_name must be a string")
return
user_hash = settings.API_KEY
url = 'http://%s/api/save/getDownloadUrl' % settings.CATALEARN_URL
r = requests.post(url, data={
'type': 'variable',
'user_hash': user_hash,
'file_name': data_name
})
if r.status_code != 200:
raise RuntimeError(r.text)
presigned_url = r.content
# Now send the post request to the catalearn server
res = requests.get(presigned_url, stream=True)
raw = io.BytesIO()
download_progress(res, raw, data_name)
result = dill.loads(raw.getvalue())
return result
def download_file_cloud(file_name):
if not isinstance(file_name, str):
print("file_name must be a string")
return
user_hash = settings.API_KEY
url = 'http://%s/api/save/getDownloadUrl' % settings.CATALEARN_URL
r = requests.post(url, data={
'type': 'file',
'user_hash': user_hash,
'file_name': file_name
})
if r.status_code != 200:
raise RuntimeError(r.text)
presigned_url = r.content
# Now send the post request to the catalearn server
res = requests.get(presigned_url, stream=True)
with open(file_name, 'wb')as file_handle:
download_progress(res, file_handle, file_name)
settings.record_file_download(file_name)
def download_file_url(url):
if not isinstance(url, str):
print("url must be a string")
return
file_name = path.basename(url)
res = requests.get(url, stream=True)
with open(file_name, 'wb')as file_handle:
download_progress(res, file_handle, file_name)
settings.record_file_download(file_name)
|
{
"content_hash": "0b8d10ab10353273c708964ee565ed72",
"timestamp": "",
"source": "github",
"line_count": 144,
"max_line_length": 76,
"avg_line_length": 27.40277777777778,
"alnum_prop": 0.6259503294475418,
"repo_name": "Catalearn/catalearn",
"id": "b00ac3551be649c6327b5c690a99fc76dfc03e4e",
"size": "3946",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "catalearn/saver.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "28241"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
from sentry.utils.compat.mock import MagicMock
from sentry.testutils.cases import RuleTestCase
from sentry.rules.actions.notify_event import NotifyEventAction
from sentry.rules.actions.services import LegacyPluginService
class NotifyEventActionTest(RuleTestCase):
rule_cls = NotifyEventAction
def test_applies_correctly(self):
event = self.get_event()
plugin = MagicMock()
rule = self.get_rule()
rule.get_plugins = lambda: (LegacyPluginService(plugin),)
results = list(rule.after(event=event, state=self.get_state()))
assert len(results) == 1
assert plugin.should_notify.call_count == 1
assert results[0].callback is plugin.rule_notify
|
{
"content_hash": "d58ec244c502f0d316bf9c0a73b837cc",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 71,
"avg_line_length": 31.458333333333332,
"alnum_prop": 0.7178807947019867,
"repo_name": "beeftornado/sentry",
"id": "e40f37e469bcfa3ec0c00a59b534a40979044bc4",
"size": "755",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/sentry/rules/actions/test_notify_event.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "157195"
},
{
"name": "HTML",
"bytes": "197026"
},
{
"name": "JavaScript",
"bytes": "380379"
},
{
"name": "Makefile",
"bytes": "2832"
},
{
"name": "Python",
"bytes": "6473603"
}
],
"symlink_target": ""
}
|
import os
import numpy as np
from collections import deque
from sge.utils import MOVE_ACTS, AGENT, BLOCK, WATER, EMPTY, KEY, OBJ_BIAS,\
TYPE_PICKUP, TYPE_TRANSFORM
__PATH__ = os.path.abspath(os.path.dirname(__file__))
class Mazemap(object):
def __init__(self, game_name, game_config):
if game_name not in ['playground', 'mining']:
raise ValueError("Unsupported : {}".format(game_name))
# load game config (outcome)
self.gamename = game_name
self.config = game_config
self.operation_list = self.config.operation_list
self.legal_actions = self.config.legal_actions
self.step_penalty = 0.0
self.w = game_config.width
self.h = game_config.height
# map tensor
self.obs = np.zeros(
(self.config.nb_obj_type+3, self.w, self.h), dtype=np.uint8)
self.wall_mask = np.zeros((self.w, self.h), dtype=np.bool_)
self.item_map = np.zeros((self.w, self.h), dtype=np.int16)
def reset(self, subtask_id_list):
self.subtask_id_list = subtask_id_list
self.nb_subtask = len(subtask_id_list)
self.obs.fill(0)
self.wall_mask.fill(0)
self.item_map.fill(-1)
self.empty_list = []
self._add_blocks()
self._add_targets()
def act(self, action):
oid = -1
assert action in self.legal_actions, 'Illegal action: '
if action in {KEY.UP, KEY.DOWN, KEY.LEFT, KEY.RIGHT}: # move
new_x = self.agent_x
new_y = self.agent_y
if action == KEY.RIGHT:
new_x += 1
elif action == KEY.LEFT:
new_x -= 1
elif action == KEY.DOWN:
new_y += 1
elif action == KEY.UP:
new_y -= 1
# wall_collision
item_id = self.item_map[new_x, new_y]
if not (item_id == BLOCK or item_id == WATER): # If not block or water, agent can move
self.obs[AGENT, self.agent_x, self.agent_y] = 0
self.agent_x = new_x
self.agent_y = new_y
self.obs[AGENT, new_x, new_y] = 1
else: # perform
iid = self._get_cur_item()
if iid > -1:
oid = iid-3
self._perform(action, oid) # perform action in the map
self._process_obj() # moving objects
return oid
def get_obs(self):
return self.obs
def _process_obj(self):
for obj in self.object_list:
oid = obj['oid']
obj_param = self.config.object_param_list[oid]
if 'speed' in obj_param and obj_param['speed'] > 0 and np.random.uniform() < obj_param['speed']:
# randomly move
x, y = obj['pos']
candidates = [(x+1, y), (x-1, y), (x, y+1), (x, y-1)]
pool = []
for nx, ny in candidates:
if self.item_map[nx, ny] == -1:
pool.append((nx, ny))
if len(pool) > 0:
new_pos = tuple(np.random.permutation(pool)[0])
# remove and push
self._remove_item(obj)
self._add_item(oid, new_pos)
def _remove_item(self, obj):
oid = obj['oid']
x, y = obj['pos']
self.obs[oid+OBJ_BIAS, x, y] = 0
self.item_map[x, y] = -1
self.object_list.remove(obj)
def _add_item(self, oid, pos):
obj = dict(oid=oid, pos=pos)
self.obs[oid+OBJ_BIAS, pos[0], pos[1]] = 1
self.item_map[pos[0], pos[1]] = oid+OBJ_BIAS
self.object_list.append(obj)
def _perform(self, action, oid):
assert(action not in MOVE_ACTS)
act_type = self.operation_list[action]['oper_type']
obj = None
for oind in range(len(self.object_list)):
o = self.object_list[oind]
if o['pos'] == (self.agent_x, self.agent_y):
obj = o
break
assert obj is not None
# pickup
if act_type == TYPE_PICKUP and self.config.object_param_list[oid]['pickable']:
self._remove_item(obj)
# transform
elif act_type == TYPE_TRANSFORM and self.config.object_param_list[oid]['transformable']:
self._remove_item(obj)
outcome_oid = self.config.object_param_list[oid]['outcome']
self._add_item(outcome_oid, (self.agent_x, self.agent_y))
def _add_blocks(self):
# boundary
self.item_map[0:self.w:self.w-1, :] = BLOCK # left, right wall
self.item_map[:, 0:self.h:self.h-1] = BLOCK # top, bottom wall
# empty list
self.empty_list = np.argwhere(self.item_map == EMPTY).tolist()
self.empty_list = [tuple(l) for l in self.empty_list]
# random block
if self.config.nb_block[0] < self.config.nb_block[1]:
nb_block = np.random.randint(
self.config.nb_block[0], self.config.nb_block[1]+1)
num_candidate = len(self.empty_list)
pool = np.random.permutation(self.empty_list)
pool_idx = 0
for block_idx in range(nb_block):
success = False
while pool_idx < num_candidate:
# 1. pop from candidate
x, y = pool[pool_idx]
pool_idx += 1
# 2. check connectivity
self.empty_list.remove((x, y))
self.item_map[x, y] = BLOCK
if self._check_connectivity(self.empty_list): # if okay, add the block
success = True
break
else: # if not, revert
self.empty_list.append((x, y))
self.item_map[x, y] = EMPTY
if not success:
import ipdb; ipdb.set_trace()
raise RuntimeError('Cannot generate a map without\
inaccessible regions! Decrease the #waters or #blocks')
# random water
if self.config.nb_water[0] < self.config.nb_water[1]:
nb_water = np.random.randint(
self.config.nb_water[0], self.config.nb_water[1]+1)
num_candidate = len(self.empty_list)
pool = np.random.permutation(self.empty_list)
pool_idx = 0
for water_idx in range(nb_water):
success = False
while pool_idx < num_candidate:
# 1. pop from candidate
x, y = pool[pool_idx]
pool_idx += 1
# 2. check connectivity
self.empty_list.remove((x, y))
self.item_map[x, y] = WATER
if self._check_connectivity(self.empty_list): # if okay, add the water
self.obs[WATER, x, y] = 1
success = True
break
else: # if not, revert
self.empty_list.append((x, y))
self.item_map[x, y] = EMPTY
if not success:
import ipdb; ipdb.set_trace()
raise RuntimeError('Cannot generate a map without\
inaccessible regions! Decrease the #waters or #blocks')
# Apply changes to self.obs
self.obs[BLOCK][self.item_map == BLOCK] = True
self.obs[WATER][self.item_map == WATER] = True
def _add_targets(self):
# reset
self.object_list = []
self.omask = np.zeros((self.config.nb_obj_type), dtype=np.int8)
# create objects
# 1. create required objects
pool = np.random.permutation(self.empty_list)
for tind in range(self.nb_subtask):
# make sure each subtask is executable
self._place_object(tind, (pool[tind][0], pool[tind][1]))
# 2. create additional objects
index = self.nb_subtask
for obj_param in self.config.object_param_list:
if 'max' in obj_param:
oid = obj_param['oid']
nb_obj = np.random.randint(0, obj_param['max']+1)
for i in range(nb_obj):
self._add_item(oid, (pool[index][0], pool[index][1]))
index += 1
# create agent
(self.agent_init_pos_x, self.agent_init_pos_y) = pool[index]
self.agent_x = self.agent_init_pos_x
self.agent_y = self.agent_init_pos_y
self.obs[AGENT, self.agent_x, self.agent_y] = 1
def _place_object(self, task_ind, pos):
subid = self.subtask_id_list[task_ind]
(_, oid) = self.config.subtask_param_list[subid]
if ('unique' not in self.config.object_param_list[oid]) or \
(not self.config.object_param_list[oid]['unique']) or \
(self.omask[oid] == 0):
self.omask[oid] = 1
self._add_item(oid, pos)
def _check_connectivity(self, empty_list):
nb_empty = len(empty_list)
mask = np.copy(self.item_map)
#
queue = deque([empty_list[0]])
x, y = empty_list[0]
mask[x, y] = 1
count = 0
while len(queue) > 0:
[x, y] = queue.popleft()
count += 1
candidate = [(x+1, y), (x-1, y), (x, y+1), (x, y-1)]
for item in candidate:
if mask[item[0], item[1]] == -1: # if empty
mask[item[0], item[1]] = 1
queue.append(item)
if count > nb_empty:
print('Bug in the code')
import ipdb; ipdb.set_trace()
return count == nb_empty
def _get_cur_item(self):
return self.item_map[self.agent_x, self.agent_y]
|
{
"content_hash": "4381624f310b1f7557543250f011911b",
"timestamp": "",
"source": "github",
"line_count": 256,
"max_line_length": 108,
"avg_line_length": 38.51953125,
"alnum_prop": 0.5038028597505324,
"repo_name": "srsohn/subtask-graph-execution-light",
"id": "e05a4dd7c599c717dcbca0e621ae29fdb0231d9b",
"size": "9861",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sge/mazemap.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "35392"
}
],
"symlink_target": ""
}
|
from logger import Logger
class DeviceMerger:
""" DeviceMerger
Merges several devices into logical groups.
Since most Devices have the same peripherals, but differ in other
parameters, like Flash/RAM/EEPROM sizes, packaging or other minor
differences, it makes sense to group them accordingly.
Please not that this class only makes the resulting XML files more user
friendly to manually edit, since the user does not have to apply the
changes to multiple files. Ideally - reality might differ :(.
"""
def __init__(self, devices, logger=None):
self.mergedDevices = list(devices)
if logger == None:
self.log = Logger()
else:
self.log = logger
def mergedByPlatform(self, platform):
if platform == 'avr':
self.mergedByType()
self.mergedByName()
elif platform == 'stm32':
self.mergedBySize()
self.mergedByName()
def mergedByType(self):
self.mergedDevices = self._mergeDevicesByType(self.mergedDevices)
def mergedByName(self):
self.mergedDevices = self._mergeDevicesByName(self.mergedDevices)
def mergedBySize(self):
self.mergedDevices = self._mergeDevicesBySize(self.mergedDevices)
def _mergeDevicesByName(self, devices):
"""
This is a simple helper method to merge devices based on name.
"""
avrDevices = []
xmegaDevices = []
stm32Devices = []
result = []
for dev in devices:
if dev.ids.intersection.platform == 'avr':
if dev.ids.intersection.family == 'xmega':
xmegaDevices.append(dev)
else:
avrDevices.append(dev)
elif dev.ids.intersection.platform == 'stm32':
stm32Devices.append(dev)
else:
result.append(dev)
avrDevices = self._mergeDevicesByNameAVR(avrDevices)
xmegaDevices = self._mergeDevicesByNameXMEGA(xmegaDevices)
stm32Devices = self._mergeDevicesByNameSTM32(stm32Devices)
result.extend(avrDevices)
result.extend(xmegaDevices)
result.extend(stm32Devices)
return result
def _mergeDevicesByNameSTM32(self, devices):
"""
This checks the size-id and name of the devices, and merges the devices
based on the observation, that the size-id only influences the size of
memories, i.e. FLASH, RAM.
"""
# copy the devices, since this array will be modified
devs = list(devices)
merged = []
while len(devs) > 0:
current = devs[0]
devs.remove(current)
matches = []
name_ids = self._getCategoryNameSTM32(current)
size_ids = self._getCategorySizeSTM32(current)
self.log.info("ByName: Searching for device with names '%s' and size-ids '%s'" % (name_ids, size_ids))
for dev in devs:
if dev.ids.getAttribute('name')[0] in name_ids and \
dev.ids.getAttribute('size_id')[0] in size_ids:
matches.append(dev)
for match in matches:
devs.remove(match)
current = current.getMergedDevice(match)
if len(matches) == 0:
self.log.info("ByName: no match for device: " + current.id.string)
self.log.debug("ByName:\nResulting device:\n" + str(current))
merged.append(current)
return merged
def _getCategoryNameSTM32(self, device):
names = device.ids.getAttribute('name')
family = device.id.family
if family == 'f0':
categories = [ ['030', '050', '070'],
['031', '051', '071', '091'],
['042', '072'],
['038', '048', '058', '078', '098']]
elif family == 'f1':
categories = [ ['100'],
['101', '102'],
['103'],
['105', '107'] ]
elif family == 'f2':
categories = [ ['205', '207', '215', '217'] ]
elif family == 'f3':
categories = [ ['301'],
['302'],
['303'],
['334'],
['318', '328', '358', '378', '398'],
['373'] ]
elif family == 'f4':
categories = [ ['401', '411'],
['410'],
['412'],
['405', '415', '407', '417'],
['427', '437', '429', '439'],
['446'],
['469', '479'] ]
elif family == 'f7':
categories = [ ['745', '746', '756'],
['765', '767', '768', '769', '777', '778', '779'] ]
# make sure that only one category is used!
for cat in categories:
if names[0] in cat:
return cat
return categories[0]
def _mergeDevicesByNameXMEGA(self, devices):
"""
This checks the size-id and name of the devices, and merges the devices
based on the observation, that the size-id only influences the size of
memories, i.e. FLASH, RAM, and EEPROM.
"""
# copy the devices, since this array will be modified
devs = list(devices)
merged = []
while len(devs) > 0:
current = devs[0]
devs.remove(current)
matches = []
device_type = current.ids.getAttribute('type')[0]
if device_type != None:
self.log.info("ByName: Searching for device with type '%s'" % device_type)
for dev in devs:
if dev.ids.getAttribute('type')[0] == device_type:
# A3 none|b and bu|u are different enough to warrant
# a new device file
if device_type == 'a3':
if dev.ids.getAttribute('pin_id')[0] in self._getCategoryPinIdAVR(current):
matches.append(dev)
else:
matches.append(dev)
for match in matches:
devs.remove(match)
current = current.getMergedDevice(match)
if len(matches) == 0:
self.log.info("ByName: no match for device: " + current.id.string)
self.log.debug("ByName:\nResulting device:\n" + str(current))
merged.append(current)
return merged
def _mergeDevicesByNameAVR(self, devices):
"""
This checks the size-id and name of the devices, and merges the devices
based on the observation, that the size-id only influences the size of
memories, i.e. FLASH, RAM, and EEPROM.
"""
# copy the devices, since this array will be modified
devs = list(devices)
merged = []
while len(devs) > 0:
current = devs[0]
devs.remove(current)
matches = []
size_id = current.ids.getAttribute('size_id')[0]
if size_id != None:
name = current.ids.getAttribute('name')[0]
device_type = current.ids.getAttribute('type')[0]
family = name[len(size_id):]
if not (family == "" and device_type == None):
device_type = self._getCategoryTypeAVR(current)
self.log.info("ByName: Searching for device ending in '"
+ family + "' and '" + str(device_type) + "'")
for dev in devs:
dname = dev.ids.getAttribute('name')[0]
dsize_id = dev.ids.getAttribute('size_id')[0]
# if they do not have a size-id they are probably unmergable
if dsize_id != None:
dfamily = dname[len(dsize_id):]
# perpare for type comparison
# we should only merge when the family is the same,
# and if the type is the same
if dfamily == family and dev.ids.getAttribute('type')[0] in device_type:
matches.append(dev)
# The following code is Atmel's fault with their stupid naming schemes.
# the AT90's, ATmega's and ATtiny's have special merging rules
if current.id.family == "at90":
name = current.id.name
# Some Devices are just not in the same group
if name in ['1', '2', '3', '216', '316', '646', '647', '1286', '1287']:
# these are not the matches you are looking for *move hand*
matches = []
# these are not the devices you want to matched with
for match in matches:
if match.id.name in ['1', '2', '3', '216', '316', '646', '647', '1286', '1287']:
matches.remove(match)
break
# but these are:
namesA = [ ['1', '2', '216'], ['3', '316'], ['646', '647', '1286', '1287'] ]
for names in namesA:
if name in names:
for dev in [d for d in devs if dev.id.family == "at90"]:
for dname in dev.ids.getAttribute('name'):
if dname in names:
matches.append(dev)
if current.id.family == "atmega":
name = current.id.name
if current.ids.getAttribute('type')[0] in [None, 'none', 'p', 'a', 'pa']:
# Some Devices are just not in the same group
if name in ['8', '16', '32', '64', '128']:
# these are not the matches you are looking for *move hand*
matches = []
# these are not the devices you want to be matched with
for match in matches:
if match.id.name in ['8', '16', '32', '64', '128']:
matches.remove(match)
break
# but these are:
namesA = [ ['16', '32'], ['64', '128'] ]
for names in namesA:
if name in names:
for dev in devs:
if dev.id.family == "atmega" and dev.ids.getAttribute('type')[0] in [None, 'none', 'p', 'a', 'pa']:
for dname in dev.ids.getAttribute('name'):
if dname in names:
matches.append(dev)
if current.id.family == "attiny":
name = current.id.name
names = ['4', '5', '9', '10']
if name in names:
for dev in devs:
if dev.id.family == "attiny":
for dname in dev.ids.getAttribute('name'):
if dname in names:
matches.append(dev)
# Some Devices are just not in the same group
if name in ['28', '20', '40']:
# these are not the matches you are looking for *move hand*
matches = []
# these are not the devices you want to matched with
for match in matches:
if match.id.name in ['28', '20', '40']:
matches.remove(match)
break
for match in matches:
devs.remove(match)
current = current.getMergedDevice(match)
if len(matches) == 0:
self.log.info("ByName: no match for device: " + current.id.string)
self.log.debug("ByName:\nResulting device:\n" + str(current))
merged.append(current)
return merged
def _mergeDevicesBySize(self, devices):
"""
This is a simple helper method to merge devices based on size.
"""
stm32Devices = []
result = []
for dev in devices:
if dev.id.platform == 'stm32':
stm32Devices.append(dev)
else:
result.append(dev)
stm32Devices = self._mergeDevicesBySizeSTM32(stm32Devices)
result.extend(stm32Devices)
return result
def _mergeDevicesBySizeSTM32(self, devices):
"""
This checks the size-id and name of the devices, and merges the devices
based on the observation, that the size-id only influences the size of
memories, i.e. FLASH, RAM.
"""
# copy the devices, since this array will be modified
devs = list(devices)
merged = []
while len(devs) > 0:
current = devs[0]
devs.remove(current)
matches = []
size_ids = self._getCategorySizeSTM32(current)
name = current.ids.getAttribute('name')[0]
self.log.info("BySize: Searching for device with size-id '%s'" % size_ids)
for dev in devs:
if dev.ids.getAttribute('name')[0] == name and \
dev.ids.getAttribute('size_id')[0] in size_ids:
matches.append(dev)
matches.sort(key=lambda k : int(k.getProperty('pin-count').values[0].value), reverse=True)
for match in matches:
devs.remove(match)
current = current.getMergedDevice(match)
if len(matches) == 0:
self.log.info("BySize: no match for device: " + current.id.string)
self.log.debug("BySize:\nResulting device:\n" + str(current))
merged.append(current)
return merged
def _getCategorySizeSTM32(self, device):
size_ids = device.ids.getAttribute('size_id')
family = device.id.family
name = device.ids.getAttribute('name')[0]
# these categories are dependent on name
# these are the categories of mergeable size-ids
if family == 'f0':
categories = [ ['4', '6'],
['8'],
['b', 'c'] ]
if name in ['072', '042']:
categories = [['4', '6'], ['8', 'b']]
elif family == 'f1':
categories = [ ['4', '6'], # low density
['8', 'b'], # medium density
['c', 'd', 'e'], # high density
['f', 'g'] ] # super high density
if name in ['105', '107']:
categories = [ ['8', 'b', 'c'] ] # medium and high density
elif family == 'f2':
categories = [ ['b', 'c', 'd', 'e', 'f', 'g'] ] # high density
elif family == 'f3':
categories = [ ['4', '6', '8'], ['b', 'c', 'd', 'e'] ]
if name in ['373']:
categories = [['8', 'b', 'c']]
elif family == 'f4':
categories = [ ['8', 'b', 'c', 'd'],
['e', 'g', 'i'] ]
if name in ['401']:
categories = [ ['b', 'c', 'd', 'e'] ]
if name in ['411', '412', '446']:
categories = [['c', 'e', 'g']]
elif family == 'f7':
categories = [['e', 'g', 'i']]
# make sure that only one category is used!
for cat in categories:
if size_ids[0] in cat:
return cat
return categories[0]
def _mergeDevicesByType(self, devices):
"""
This is a simple helper method to merge devices based on type.
"""
avrDevices = []
result = []
for dev in devices:
if dev.id.platform == 'avr' and dev.id.family != 'xmega':
avrDevices.append(dev)
else:
result.append(dev)
avrDevices = self._mergeDevicesByTypeAVR(avrDevices)
result.extend(avrDevices)
return result
def _mergeDevicesByTypeAVR(self, devices):
"""
This checks the name suffix (for example 'P', 'A', 'PA') of the
devices and merges them based on the observation, that the suffix
does not have anything to do with the mapping of peripherals.
"""
devs = list(devices)
merged = []
while len(devs) > 0:
current = devs[0]
devs.remove(current)
props = current.id
if props.valid == False:
continue
matches = []
suffix = self._getCategoryTypeAVR(current)
self.log.info("ByType: Searching for device ending in " + str(suffix))
for dev in devs:
if dev.id.name == props.name and dev.id.type in suffix:
matches.append(dev)
for match in matches:
devs.remove(match)
current = current.getMergedDevice(match)
if len(matches) == 0:
self.log.info("ByType: No match for device: " + current.id.string)
self.log.debug("ByType:\nResulting device:\n" + str(current))
merged.append(current)
return merged
def _getCategoryPinIdAVR(self, device):
device_type = device.ids.getAttribute('pin_id')[0]
# these are the categories of mergable types
categories = [ # Xmega devices
[None, 'none', 'b'],
['bu', 'u'],
]
# make sure that only one category is used!
for cat in categories:
if device_type in cat:
return cat
return categories[0]
def _getCategoryTypeAVR(self, device):
device_type = device.ids.getAttribute('type')[0]
# these are the categories of mergable types
categories = [ # ATmega devices
[None, 'none', 'p', 'a', 'pa'],
['rfa1', 'rfa2', 'rfr1', 'rfr2'],
['hvb', 'hvbrevb'],
['hve2'],
['hva'],
['u2'],
['u4', 'u6'],
['m1', 'c1'],
# AT90 devices
['can'],
['pwm'],
['usb'],
]
# make sure that only one category is used!
for cat in categories:
if device_type in cat:
return cat
return categories[0]
|
{
"content_hash": "50dc1453a0e32a7befbc50154c9e54c8",
"timestamp": "",
"source": "github",
"line_count": 504,
"max_line_length": 107,
"avg_line_length": 29.071428571428573,
"alnum_prop": 0.6145236145236145,
"repo_name": "dergraaf/xpcc",
"id": "f8233bf03a5daa40e32dad2f37946c90b9cd83dd",
"size": "14982",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "tools/device_file_generator/dfg/merger.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "54265378"
},
{
"name": "C++",
"bytes": "3595390"
},
{
"name": "Objective-C",
"bytes": "24080"
},
{
"name": "Python",
"bytes": "177057"
},
{
"name": "Shell",
"bytes": "1071"
}
],
"symlink_target": ""
}
|
"""
Common directory service interfaces
"""
from zope.interface.interface import Interface, Attribute
__all__ = [
"IStoreDirectoryService",
"IStoreDirectoryRecord",
]
class IStoreDirectoryError(Exception):
"""
Base class for directory related errors.
"""
class DirectoryRecordNotFoundError(Exception):
"""
Directory record not found.
"""
class IStoreDirectoryService(Interface):
"""
Directory Service for looking up users.
"""
def recordWithUID(uid): # @NoSelf
"""
Return the record for the specified store uid.
@return: the record.
@rtype: L{IStoreDirectoryRecord}
"""
def recordWithGUID(guid): # @NoSelf
"""
Return the record for the specified store guid.
@return: the record.
@rtype: L{IStoreDirectoryRecord}
"""
class IStoreDirectoryRecord(Interface):
"""
Directory record object
A record identifies a "user" in the system.
"""
uid = Attribute("The record UID: C{str}")
shortNames = Attribute("Short names of the record: C{tuple}")
fullName = Attribute("Full name for the entity associated with the record: C{str}")
displayName = Attribute("Display name for entity associated with the record: C{str}")
def serverURI(): # @NoSelf
"""
Return the URI for the record's server "pod".
@return: a URI.
@rtype: C{str}
"""
def server(): # @NoSelf
"""
Return the L{txdav.caldav.datastore.scheduling.localservers.Server} for the record's server "pod".
@return: a pod server record.
@rtype: L{txdav.caldav.datastore.scheduling.localservers.Server}
"""
def thisServer(): # @NoSelf
"""
Indicates whether the record is hosted on this server "pod".
@return: C{True} if hosted by this service.
@rtype: C{bool}
"""
|
{
"content_hash": "48a3bceb586ac9a445819895a07e64e8",
"timestamp": "",
"source": "github",
"line_count": 85,
"max_line_length": 106,
"avg_line_length": 22.68235294117647,
"alnum_prop": 0.6161825726141079,
"repo_name": "macosforge/ccs-calendarserver",
"id": "bfae63cf29bb81ce23f958562d1a56b468ce932c",
"size": "2535",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "txdav/common/idirectoryservice.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
from django.conf.urls import patterns
from django.conf.urls import url
from conveyordashboard.plans import views
PLAN = r'^(?P<plan_id>[^/]+)/%s$'
urlpatterns = patterns(
'conveyordashboard.plans.views',
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^create$', views.CreateView.as_view(), name='create'),
url(PLAN % 'clone', views.CloneView.as_view(), name='clone'),
# url(r'^migrate$', views.MigrateView.as_view(), name='migrate'),
url(r'^import$', views.ImportView.as_view(), name='import'),
url(PLAN % '', views.DetailView.as_view(), name='detail'),
url(PLAN % 'export', views.ExportView.as_view(), name='export'),
url(PLAN % 'destination', views.DestinationView.as_view(),
name='destination'),
)
|
{
"content_hash": "de93e14973b7c75204e6ccd1235da990",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 69,
"avg_line_length": 40,
"alnum_prop": 0.6526315789473685,
"repo_name": "Hybrid-Cloud/conveyor-dashboard",
"id": "1a0f0604fbe10a0bfdc54774204fb675219d9674",
"size": "1369",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "conveyordashboard/plans/urls.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "1820"
},
{
"name": "HTML",
"bytes": "37568"
},
{
"name": "JavaScript",
"bytes": "112981"
},
{
"name": "Python",
"bytes": "185901"
}
],
"symlink_target": ""
}
|
from ..script import tools
from ... import encoding
from ...networks import address_prefix_for_netcode
from ...serialize import b2h
from ..exceptions import SolvingError
from .ScriptType import ScriptType
class ScriptPayToAddress(ScriptType):
TEMPLATE = tools.compile("OP_DUP OP_HASH160 OP_PUBKEYHASH OP_EQUALVERIFY OP_CHECKSIG")
def __init__(self, hash160):
self.hash160 = hash160
self._address = None
self._script = None
@classmethod
def from_script(cls, script):
r = cls.match(script)
if r:
hash160 = r["PUBKEYHASH_LIST"][0]
s = cls(hash160)
return s
raise ValueError("bad script")
def script(self):
if self._script is None:
# create the script
STANDARD_SCRIPT_OUT = "OP_DUP OP_HASH160 %s OP_EQUALVERIFY OP_CHECKSIG"
script_text = STANDARD_SCRIPT_OUT % b2h(self.hash160)
self._script = tools.compile(script_text)
return self._script
def solve(self, **kwargs):
"""
The kwargs required depend upon the script type.
hash160_lookup:
dict-like structure that returns a secret exponent for a hash160
sign_value:
the integer value to sign (derived from the transaction hash)
signature_type:
usually SIGHASH_ALL (1)
"""
# we need a hash160 => secret_exponent lookup
db = kwargs.get("hash160_lookup")
if db is None:
raise SolvingError("missing hash160_lookup parameter")
result = db.get(self.hash160)
if result is None:
raise SolvingError("can't find secret exponent for %s" % self.address())
# we got it
sign_value = kwargs.get("sign_value")
signature_type = kwargs.get("signature_type")
secret_exponent, public_pair, compressed = result
binary_signature = self._create_script_signature(secret_exponent, sign_value, signature_type)
binary_public_pair_sec = encoding.public_pair_to_sec(public_pair, compressed=compressed)
solution = tools.bin_script([binary_signature, binary_public_pair_sec])
return solution
def info(self, netcode='BTC'):
address_prefix = address_prefix_for_netcode(netcode)
address = encoding.hash160_sec_to_bitcoin_address(self.hash160, address_prefix=address_prefix)
return dict(type="pay to address", address=address, hash160=self.hash160,
script=self._script, address_prefix=address_prefix, summary=address)
def __repr__(self):
return "<Script: pay to %s>" % self.address()
|
{
"content_hash": "537812b9412c7531af224c220b14d766",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 102,
"avg_line_length": 35.82432432432432,
"alnum_prop": 0.6333459072048284,
"repo_name": "shayanb/pycoin",
"id": "7058e5ef0bd16c7ab726be51190790f39bf133f5",
"size": "2651",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "pycoin/tx/pay_to/ScriptPayToAddress.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "115"
},
{
"name": "Python",
"bytes": "454923"
},
{
"name": "Shell",
"bytes": "198"
}
],
"symlink_target": ""
}
|
import unittest
import numpy
import six.moves.cPickle as pickle
import chainer
from chainer.backends import cuda
from chainer import functions as F
from chainer import links
from chainer import testing
from chainer.testing import attr
from chainer.utils import conv
@testing.parameterize(*testing.product({
'x_dtype': [numpy.float16, numpy.float32, numpy.float64],
'W_dtype': [numpy.float16, numpy.float32, numpy.float64],
}))
@testing.inject_backend_tests(
None,
# CPU tests
[{}]
# GPU tests
+ testing.product({
'use_cuda': [True],
'use_cudnn': ['never', 'always'],
'cuda_device': [0, 1],
})
+ [
{'use_chainerx': True, 'chainerx_device': 'native:0'},
{'use_chainerx': True, 'chainerx_device': 'cuda:0'},
{'use_chainerx': True, 'chainerx_device': 'cuda:1'},
])
class TestConvolution2D(testing.LinkTestCase):
param_names = ('W', 'b')
skip_double_backward_test = True
def setUp(self):
self.N = 2
self.in_channels = 3
self.out_channels = 2
self.ksize = 3
self.stride = 2
self.pad = 1
if self.x_dtype == numpy.float16 or self.W_dtype == numpy.float16:
self.check_forward_options.update({'atol': 5e-3, 'rtol': 5e-2})
self.check_backward_options.update({'atol': 3e-2, 'rtol': 5e-2})
def before_test(self, test_name):
# cuDNN 5 and 5.1 results suffer from precision issues
using_old_cudnn = (self.backend_config.xp is cuda.cupy
and self.backend_config.use_cudnn == 'always'
and cuda.cuda.cudnn.getVersion() < 6000)
if using_old_cudnn:
self.check_backward_options.update({'atol': 3e-2, 'rtol': 5e-2})
def generate_params(self):
initialW = chainer.initializers.Normal(1, self.W_dtype)
initial_bias = chainer.initializers.Normal(1, self.x_dtype)
return initialW, initial_bias
def create_link(self, initializers):
initialW, initial_bias = initializers
link = links.Convolution2D(
self.in_channels, self.out_channels, self.ksize,
stride=self.stride, pad=self.pad,
initialW=initialW,
initial_bias=initial_bias)
return link
def generate_inputs(self):
h, w = 4, 3
x = numpy.random.uniform(-1, 1,
(self.N, self.in_channels,
h, w)).astype(self.x_dtype)
return x,
def forward_expected(self, link, inputs):
x, = inputs
W = link.W
b = link.b
y = F.convolution_2d(
x, W, b,
pad=self.pad,
stride=self.stride)
return y.array,
def test_pickling(self, backend_config):
x_data, = self.generate_inputs()
link = self.create_link(self.generate_params())
link.to_device(backend_config.device)
x = chainer.Variable(x_data)
x.to_device(backend_config.device)
y = link(x)
y_data1 = y.data
del x, y
pickled = pickle.dumps(link, -1)
del link
link = pickle.loads(pickled)
x = chainer.Variable(x_data)
x.to_device(backend_config.device)
y = link(x)
y_data2 = y.data
testing.assert_allclose(y_data1, y_data2, atol=0, rtol=0)
def test_from_params(self, backend_config):
if (
(backend_config.use_cuda and
backend_config.cuda_device == 1) or
(backend_config.use_chainerx and
'cuda' in backend_config.chainerx_device)):
raise unittest.SkipTest()
link1 = self.create_link(self.generate_params())
link1.to_device(backend_config.device)
link2 = links.Convolution2D.from_params(
link1.W, link1.b, stride=self.stride, pad=self.pad)
assert link2.W.shape == link1.W.shape
assert link2.b.shape == link2.b.shape
assert link2.stride == link1.stride
assert link2.pad == link1.pad
@testing.parameterize(*testing.product({
'x_dtype': [numpy.float16, numpy.float32, numpy.float64],
'W_dtype': [numpy.float16, numpy.float32, numpy.float64],
}))
class TestConvolution2DIm2ColConsistency(unittest.TestCase):
def setUp(self):
self.x = numpy.random.uniform(-1, 1,
(2, 3, 4, 3)).astype(self.x_dtype)
@attr.gpu
def test_im2col_consistency(self):
col_cpu = conv.im2col_cpu(self.x, 3, 3, 2, 2, 1, 1)
col_gpu = conv.im2col_gpu(cuda.to_gpu(self.x), 3, 3, 2, 2, 1, 1)
testing.assert_allclose(col_cpu, col_gpu.get(), atol=0, rtol=0)
@attr.gpu
def test_col2im_consistency(self):
col = conv.im2col_cpu(self.x, 3, 3, 2, 2, 1, 1)
h, w = self.x.shape[2:]
im_cpu = conv.col2im_cpu(col, 2, 2, 1, 1, h, w)
im_gpu = conv.col2im_gpu(cuda.to_gpu(col), 2, 2, 1, 1, h, w)
testing.assert_allclose(im_cpu, im_gpu.get())
@testing.parameterize(*testing.product({
'conv_args': [((None, 2, 3, 2, 1), {}),
((2, 3), {'stride': 2, 'pad': 1})],
}))
@testing.inject_backend_tests(
None,
# CPU tests
[{}]
# GPU tests
+ testing.product({
'use_cuda': [True],
'use_cudnn': ['never', 'always'],
'cuda_device': [0, 1],
})
+ [
{'use_chainerx': True, 'chainerx_device': 'native:0'},
{'use_chainerx': True, 'chainerx_device': 'cuda:0'},
{'use_chainerx': True, 'chainerx_device': 'cuda:1'},
])
class TestConvolution2DParameterShapePlaceholder(testing.LinkTestCase):
param_names = ('W', 'b')
skip_double_backward_test = True
def before_test(self, test_name):
# cuDNN 5 and 5.1 results suffer from precision issues
using_old_cudnn = (self.backend_config.xp is cuda.cupy
and self.backend_config.use_cudnn == 'always'
and cuda.cuda.cudnn.getVersion() < 6000)
if using_old_cudnn:
self.check_backward_options.update({'atol': 3e-2, 'rtol': 5e-2})
def generate_params(self):
return ()
def create_link(self, initializers):
args, kwargs = self.conv_args
link = links.Convolution2D(*args, **kwargs)
b = link.b.data
b[...] = numpy.random.uniform(-1, 1, b.shape)
return link
def generate_inputs(self):
x = numpy.random.uniform(-1, 1,
(2, 3, 4, 3)).astype(numpy.float32)
return x,
def forward_expected(self, link, inputs):
x, = inputs
y = link(x).array
return y,
def test_pickling(self, backend_config):
x_data, = self.generate_inputs()
link = self.create_link(self.generate_params())
link.to_device(backend_config.device)
x = chainer.Variable(x_data)
x.to_device(backend_config.device)
y = link(x)
y_data1 = y.data
del x, y
pickled = pickle.dumps(link, -1)
del link
link = pickle.loads(pickled)
x = chainer.Variable(x_data)
x.to_device(backend_config.device)
y = link(x)
y_data2 = y.data
testing.assert_allclose(y_data1, y_data2, atol=0, rtol=0)
testing.run_module(__name__, __file__)
|
{
"content_hash": "c87b8c30441c07711fe6925c391a666f",
"timestamp": "",
"source": "github",
"line_count": 234,
"max_line_length": 76,
"avg_line_length": 31.478632478632477,
"alnum_prop": 0.5680152049959273,
"repo_name": "niboshi/chainer",
"id": "8f9acd545f85d509f64ab555a150497e4ab879f0",
"size": "7366",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/chainer_tests/links_tests/connection_tests/test_convolution_2d.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "3796"
},
{
"name": "C",
"bytes": "1099"
},
{
"name": "C++",
"bytes": "1685561"
},
{
"name": "CMake",
"bytes": "51563"
},
{
"name": "Cuda",
"bytes": "191182"
},
{
"name": "Dockerfile",
"bytes": "6422"
},
{
"name": "PowerShell",
"bytes": "7197"
},
{
"name": "Python",
"bytes": "6334795"
},
{
"name": "Shell",
"bytes": "47473"
}
],
"symlink_target": ""
}
|
import pytest
from thefuck.rules.man import match, get_new_command
from tests.utils import Command
@pytest.mark.parametrize('command', [
Command('man read'),
Command('man 2 read'),
Command('man 3 read'),
Command('man -s2 read'),
Command('man -s3 read'),
Command('man -s 2 read'),
Command('man -s 3 read')])
def test_match(command):
assert match(command)
@pytest.mark.parametrize('command', [
Command('man'),
Command('man ')])
def test_not_match(command):
assert not match(command)
@pytest.mark.parametrize('command, new_command', [
(Command('man read'), ['man 3 read', 'man 2 read', 'read --help']),
(Command('man missing', stderr="No manual entry for missing\n"), ['missing --help']),
(Command('man 2 read'), 'man 3 read'),
(Command('man 3 read'), 'man 2 read'),
(Command('man -s2 read'), 'man -s3 read'),
(Command('man -s3 read'), 'man -s2 read'),
(Command('man -s 2 read'), 'man -s 3 read'),
(Command('man -s 3 read'), 'man -s 2 read')])
def test_get_new_command(command, new_command):
assert get_new_command(command) == new_command
|
{
"content_hash": "34b7364c3b4327009a8872917956c45b",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 89,
"avg_line_length": 32.05714285714286,
"alnum_prop": 0.6229946524064172,
"repo_name": "mlk/thefuck",
"id": "c4714881f96faeea9b70425411cc0dbcf1275055",
"size": "1122",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/rules/test_man.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "391804"
},
{
"name": "Shell",
"bytes": "134"
}
],
"symlink_target": ""
}
|
"""
Elephant is a package for the analysis of neurophysiology data, based on Neo.
:copyright: Copyright 2014-2015 by the Elephant team, see AUTHORS.txt.
:license: Modified BSD, see LICENSE.txt for details.
"""
from . import (statistics,
spike_train_generation,
spike_train_correlation,
spectral,
spike_train_surrogates,
signal_processing,
sta,
conversion,
neo_tools)
try:
from . import pandas_bridge
except ImportError:
pass
__version__ = "0.2.0"
|
{
"content_hash": "e2426bfb3b180f85c61e27e15d641182",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 77,
"avg_line_length": 25.17391304347826,
"alnum_prop": 0.5906735751295337,
"repo_name": "neuroelectro/elephant",
"id": "0f760c273dcd069519ca69eda52e85c426457cf4",
"size": "603",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "elephant/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "451628"
},
{
"name": "Shell",
"bytes": "3848"
}
],
"symlink_target": ""
}
|
from nova.tests.functional.api_sample_tests import test_servers
from nova.tests.unit.api.openstack.compute import test_fping
class FpingSampleJsonTests(test_servers.ServersSampleBase):
sample_dir = "os-fping"
def setUp(self):
super(FpingSampleJsonTests, self).setUp()
def fake_check_fping(self):
pass
self.stub_out("nova.utils.execute", test_fping.execute)
self.stub_out("nova.api.openstack.compute.fping."
"FpingController.check_fping",
fake_check_fping)
def test_get_fping(self):
self._post_server()
response = self._do_get('os-fping')
self._verify_response('fping-get-resp', {}, response, 200)
def test_get_fping_details(self):
uuid = self._post_server()
response = self._do_get('os-fping/%s' % (uuid))
self._verify_response('fping-get-details-resp', {}, response, 200)
|
{
"content_hash": "e48802a5f2e932479c72f312c35a7141",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 74,
"avg_line_length": 35.80769230769231,
"alnum_prop": 0.6283566058002148,
"repo_name": "rajalokan/nova",
"id": "a6e4d369a71385ceb10d5274dd40af7046cdd086",
"size": "1563",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "nova/tests/functional/api_sample_tests/test_fping.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "601"
},
{
"name": "PHP",
"bytes": "4503"
},
{
"name": "Python",
"bytes": "19100322"
},
{
"name": "Shell",
"bytes": "26793"
},
{
"name": "Smarty",
"bytes": "299237"
}
],
"symlink_target": ""
}
|
'''
Created by auto_sdk on 2015.04.21
'''
from aliyun.api.base import RestApi
class Cdn20141111RefreshObjectCachesRequest(RestApi):
def __init__(self,domain='cdn.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.ObjectPath = None
self.ObjectType = None
def getapiname(self):
return 'cdn.aliyuncs.com.RefreshObjectCaches.2014-11-11'
|
{
"content_hash": "26476f038c3e489b83d49f0a4107bc6b",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 58,
"avg_line_length": 30.833333333333332,
"alnum_prop": 0.7216216216216216,
"repo_name": "wanghe4096/website",
"id": "8c48302dab937ac82420808e03fab6e55b5d401b",
"size": "370",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "aliyun/api/rest/Cdn20141111RefreshObjectCachesRequest.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "121965"
},
{
"name": "HTML",
"bytes": "163477"
},
{
"name": "JavaScript",
"bytes": "227130"
},
{
"name": "Lua",
"bytes": "5653"
},
{
"name": "Python",
"bytes": "325945"
},
{
"name": "Shell",
"bytes": "1359"
}
],
"symlink_target": ""
}
|
from __future__ import annotations
import subprocess
import sys
from importlib.metadata import EntryPoint
from typing import NamedTuple
import pytest
import ibis
from ibis.backends.base import BaseBackend
def test_backends_are_cached():
assert isinstance(ibis.sqlite, BaseBackend)
del ibis.sqlite # delete to force recreation
assert isinstance(ibis.sqlite, BaseBackend)
assert ibis.sqlite is ibis.sqlite
def test_backends_tab_completion():
assert isinstance(ibis.sqlite, BaseBackend)
del ibis.sqlite # delete to ensure not real attr
assert "sqlite" in dir(ibis)
assert isinstance(ibis.sqlite, BaseBackend)
assert "sqlite" in dir(ibis) # in dir even if already created
def test_missing_backend():
msg = "module 'ibis' has no attribute 'foo'."
with pytest.raises(AttributeError, match=msg):
ibis.foo
def test_multiple_backends(mocker):
class Distribution(NamedTuple):
entry_points: list[EntryPoint]
entrypoints = [
EntryPoint(
name="foo",
value='ibis.backends.backend1',
group="ibis.backends",
),
EntryPoint(
name="foo",
value='ibis.backends.backend2',
group="ibis.backends",
),
]
if sys.version_info < (3, 10):
return_value = {"ibis.backends": entrypoints}
else:
return_value = entrypoints
mocker.patch("importlib.metadata.entry_points", return_value=return_value)
msg = r"\d+ packages found for backend 'foo'"
with pytest.raises(RuntimeError, match=msg):
ibis.foo
def test_no_import_pandas():
script = """\
import ibis
import sys
assert "pandas" not in sys.modules"""
subprocess.check_call([sys.executable], text=script)
|
{
"content_hash": "1c9bde725b17de687e18671dd4714587",
"timestamp": "",
"source": "github",
"line_count": 70,
"max_line_length": 78,
"avg_line_length": 25.285714285714285,
"alnum_prop": 0.6655367231638418,
"repo_name": "cpcloud/ibis",
"id": "b89f876196dd2bb8265a2a26a148aa88ac96874b",
"size": "1770",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "ibis/tests/test_api.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "44931"
},
{
"name": "CMake",
"bytes": "1862"
},
{
"name": "Dockerfile",
"bytes": "70"
},
{
"name": "JavaScript",
"bytes": "2713"
},
{
"name": "Nix",
"bytes": "12592"
},
{
"name": "Python",
"bytes": "2958224"
},
{
"name": "Shell",
"bytes": "3167"
}
],
"symlink_target": ""
}
|
from django import template
from django.utils.safestring import mark_safe
from django.utils.html import escape
from django.utils.text import capfirst
from django_easyfilters.filters import FILTER_ADD, FILTER_REMOVE, FILTER_DISPLAY, \
ValuesFilter, ChoicesFilter, ForeignKeyFilter, ManyToManyFilter, DateTimeFilter, NumericRangeFilter
def non_breaking_spaces(val):
# This helps a lot with presentation, by stopping the links+count from being
# split over a line end.
val = val.replace(u'-', u'\u2011')
return mark_safe(u' '.join(escape(part) for part in val.split(u' ')))
class cachedproperty(object):
"""
Decorator that creates converts a method with a single
self argument into a property cached on the instance.
"""
def __init__(self, func):
self.func = func
def __get__(self, instance, type):
res = instance.__dict__[self.func.__name__] = self.func(instance)
return res
class FilterSet(object):
template = u"""
<div class="filterline"><span class="filterlabel">{{ filterlabel }}:</span>
{% for choice in choices %}
{% if choice.link_type == 'add' %}
<span class="addfilter"><a href="{{ choice.url }}" title="Add filter">{{ choice.label }} ({{ choice.count }})</a></span>
{% else %}
{% if choice.link_type == 'remove' %}
<span class="removefilter"><a href="{{ choice.url }}" title="Remove filter">{{ choice.label }} « </a></span>
{% else %}
<span class="displayfilter">{{ choice.label }}</span>
{% endif %}
{% endif %}
{% endfor %}
</div>
"""
title_fields = None
def __init__(self, queryset, params):
self.params = params
self.model = queryset.model
self.filters = self.setup_filters()
self.qs = self.apply_filters(queryset)
@cachedproperty
def title(self):
return self.make_title()
def get_filter_choices(self, filter_field):
if not hasattr(self, '_cached_filter_choices'):
self._cached_filter_choices = dict((f.field, f.get_choices(self.qs))
for f in self.filters)
return self._cached_filter_choices[filter_field]
def apply_filters(self, queryset):
for f in self.filters:
queryset = f.apply_filter(queryset)
return queryset
def render_filter(self, filter_):
field_obj = self.model._meta.get_field(filter_.field)
choices = self.get_filter_choices(filter_.field)
ctx = {'filterlabel': capfirst(field_obj.verbose_name)}
ctx['choices'] = [dict(label=non_breaking_spaces(c.label),
url=u'?' + c.params.urlencode() \
if c.link_type != FILTER_DISPLAY else None,
link_type=c.link_type,
count=c.count)
for c in choices]
return self.get_template(filter_.field).render(template.Context(ctx))
def get_template(self, field_name):
return template.Template(self.template)
def render(self):
return mark_safe(u'\n'.join(self.render_filter(f) for f in self.filters))
def get_fields(self):
return self.fields
def get_filter_for_field(self, field):
f, model, direct, m2m = self.model._meta.get_field_by_name(field)
if f.rel is not None:
if m2m:
return ManyToManyFilter
else:
return ForeignKeyFilter
elif f.choices:
return ChoicesFilter
else:
type_ = f.get_internal_type()
if type_ == 'DateField' or type_ == 'DateTimeField':
return DateTimeFilter
elif type_ == 'DecimalField' or type_ == 'FloatField':
return NumericRangeFilter
else:
return ValuesFilter
def setup_filters(self):
filters = []
for i, f in enumerate(self.get_fields()):
klass = None
if isinstance(f, basestring):
opts = {}
field_name = f
else:
opts = f[1]
field_name = f[0]
if len(f) > 2:
klass = f[2]
if klass is None:
klass = self.get_filter_for_field(field_name)
filters.append(klass(field_name, self.model, self.params, **opts))
return filters
def make_title(self):
if self.title_fields is None:
title_fields = [filter_.field for filter_ in self.filters]
else:
title_fields = self.title_fields
return u", ".join(c.label
for f in title_fields
for c in self.get_filter_choices(f)
if c.link_type == FILTER_REMOVE)
def __unicode__(self):
return self.render()
|
{
"content_hash": "7e2c5121f30525b2c21bd436781ace28",
"timestamp": "",
"source": "github",
"line_count": 137,
"max_line_length": 141,
"avg_line_length": 35.88321167883212,
"alnum_prop": 0.5673311635475997,
"repo_name": "georgemarshall/django-easyfilters",
"id": "7dd12b1828cb5b0fb46c798c8d8a2538c634d969",
"size": "4916",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "django_easyfilters/filterset.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "104478"
}
],
"symlink_target": ""
}
|
"""
pyClanSphere Test Suite
~~~~~~~~~~~~~~~~~~~~~~~
This is the pyClanSphere test suite. It collects all modules in the pyClanSphere
package, builds a TestSuite with their doctests and executes them. It also
collects the tests from the text files in this directory (which are too
extensive to put them into the code without cluttering it up).
Plus all python files that start with test* will be added to the TestSuite.
Please note that coverage reporting and doctest don't play well together
and your reports will probably miss some of the executed code. Doctest can
be patched to remove this incompatibility, the patch is at
http://tinyurl.com/doctest-patch
:copyright: (c) 2009 by the pyClanSphere Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import sys
import os
from tempfile import mkdtemp
from os.path import join, dirname
from unittest import TestSuite, TextTestRunner, TestCase
from unittest2 import defaultTestLoader
from doctest import DocTestSuite, DocFileSuite
from pyClanSphere.utils.crypto import gen_pwhash, gen_secret_key, new_iid
try:
import coverage
except ImportError:
coverage = None
def create_temporary_instance():
"""Create a sqlite based test instance in a temporary directory"""
dbname = 'sqlite://database.db'
instance_folder = mkdtemp(prefix='pyclanspheretest')
# create database and all tables
from pyClanSphere.database import db, init_database
e = db.create_engine(dbname, instance_folder)
from pyClanSphere.schema import users, user_privileges, privileges
init_database(e)
# create admin account
from pyClanSphere.privileges import CLAN_ADMIN
user_id = e.execute(users.insert(),
username=u'TestAdmin',
pw_hash=gen_pwhash('TestPassWord'),
email=u'Somewhere@example.com',
real_name=u'',
description=u'',
extra={},
display_name='$username'
).inserted_primary_key[0]
# insert a privilege for the user
privilege_id = e.execute(privileges.insert(),
name=CLAN_ADMIN.name
).inserted_primary_key[0]
e.execute(user_privileges.insert(),
user_id=user_id,
privilege_id=privilege_id
)
# set up the initial config
from pyClanSphere.config import Configuration
config_filename = join(instance_folder, 'pyClanSphere.ini')
cfg = Configuration(config_filename)
t = cfg.edit()
t.update(
maintenance_mode=False,
site_url='http://localtest',
secret_key=gen_secret_key(),
database_uri=dbname,
iid=new_iid()
)
t.commit()
from pyClanSphere import setup
from pyClanSphere.upgrades.webapp import WebUpgrades
instance = setup(instance_folder)
if str(type(instance)) == "<class 'pyClanSphere.upgrades.webapp.WebUpgrades'>":
# Fast Migration
from pyClanSphere.upgrades import ManageDatabase
manage = ManageDatabase(instance)
upgrade = manage.cmd_upgrade()
while True:
try:
upgrade.next()
except StopIteration:
break
from pyClanSphere._core import _unload_pyClanSphere
_unload_pyClanSphere()
instance = setup(instance_folder)
if str(type(instance)) == "<class 'pyClanSphere.upgrades.webapp.WebUpgrades'>":
sys.stderr.write('Automatic db migration failed, check your scripts!\n')
sys.exit(1)
return instance, instance_folder
def suite(app, modnames=[], return_covermods=False):
"""Generate the test suite.
First argument is always the instance to use. Use a real one or a temporary.
The second argument is a list of modules to be tested. If it is empty (which
it is by default), all sub-modules of the pyClanSphere package are tested.
If the second argument is True, this function returns two objects: a
TestSuite instance and a list of the names of the tested modules. Otherwise
(which is the default) it only returns the former. This is done so that
this function can be used as setuptools' test_suite.
"""
# the app object is used for two purposes:
# 1) plugins are not usable (i.e. not testable) without an initialised app
# 2) for functions that require an application object as argument, you can
# write >>> my_function(app, ...) in the tests
# The instance directory of this object is located in the tests directory.
#
# setup isn't imported at module level because this way coverage
# can track the whole pyClanSphere imports
if return_covermods:
covermods = []
suite = TestSuite()
if modnames == []:
modnames = find_tp_modules()
test_files = os.listdir(dirname(__file__))
for modname in modnames:
# the fromlist must contain something, otherwise the pyClanSphere
# package is returned, not our module
try:
mod = __import__(modname, None, None, [''])
except ImportError, exc:
# some plugins can have external dependencies (e.g. creoleparser,
# pygments) that are not installed on the machine the tests are
# run on. Therefore, just skip those (with an error message)
if 'plugins.' in modname:
if 'versions.' not in modname and 'tests.' not in modname:
sys.stderr.write('could not import plugin %s: %s\n' % (modname, exc))
continue
else:
raise
suites = [DocTestSuite(mod, extraglobs={'app': app})]
filename = modname[10:] + '.txt'
if filename in test_files:
globs = {'app': app}
globs.update(mod.__dict__)
suites.append(DocFileSuite(filename, globs=globs))
for i, subsuite in enumerate(suites):
# skip modules without any tests
if subsuite.countTestCases():
suite.addTest(subsuite)
if return_covermods and i == 0:
covermods.append(mod)
if 'tests.' in modname:
suite.addTests(defaultTestLoader.discover(modname))
if return_covermods:
return suite, covermods
else:
return suite
def find_tp_modules():
"""Find all sub-modules of the pyClanSphere package."""
modules = []
import pyClanSphere
base = dirname(pyClanSphere.__file__)
start = len(dirname(base))
if base != 'pyClanSphere':
start += 1
for path, dirnames, filenames in os.walk(base):
for filename in filenames:
if filename.endswith('.py'):
fullpath = join(path, filename)
if filename == '__init__.py':
stripped = fullpath[start:-12]
else:
stripped = fullpath[start:-3]
modname = stripped.replace('/', '.')
modules.append(modname)
return modules
def main():
from optparse import OptionParser
usage = ('Usage: %prog [option] [modules to be tested]\n'
'Modules names have to be given in the form utils.mail (without '
'pyClanSphere.)\nIf no module names are given, all tests are run')
parser = OptionParser(usage=usage)
parser.add_option('-c', '--coverage', action='store_true', dest='coverage',
help='show coverage information (slow!)')
parser.add_option('-v', '--verbose', action='store_true', dest='verbose',
default=False, help='show which tests are run')
parser.add_option('--real-instance', dest='instance',
help='instance to use instead of a temporary one, only use it if you know what you are doing!')
options, args = parser.parse_args(sys.argv[1:])
modnames = ['pyClanSphere.' + modname for modname in args]
if options.coverage:
if coverage is not None:
use_coverage = True
else:
sys.stderr.write("coverage information requires Ned Batchelder's "
"coverage.py to be installed!\n")
sys.exit(1)
else:
use_coverage = False
# get our instance
if options.instance:
sys.stdout.write("Opening given instance ... ")
sys.stdout.flush()
from pyClanSphere.upgrades.webapp import WebUpgrades
from pyClanSphere import setup
instance = setup(options.instance)
if isinstance(instance, WebUpgrades):
sys.stderr.write("Please migrate your instance to latest schema first!\n")
sys.exit(1)
else:
sys.stdout.write("Creating temporary instance ... ")
sys.stdout.flush()
instance, instance_folder = create_temporary_instance()
sys.stdout.write("ok\nCollecting tests ... ")
sys.stdout.flush()
if use_coverage:
coverage.erase()
coverage.start()
s, covermods = suite(instance, modnames, True)
else:
s = suite(instance, modnames)
sys.stdout.write("ok\n")
TextTestRunner(verbosity=options.verbose + 1).run(s)
if use_coverage:
coverage.stop()
print '\n\n' + '=' * 25 + ' coverage information ' + '=' * 25
coverage.report(covermods)
if not options.instance:
try:
for root, dirs, files in os.walk(instance_folder, topdown=False):
for name in files:
os.remove(os.path.join(root, name))
for name in dirs:
os.rmdir(os.path.join(root, name))
except OSError:
print "Could not remove all tempfiles, please remove", \
instance_folder, "yourself"
pass
class pyClanSphereTestCase(TestCase):
def setUp(self):
from pyClanSphere.api import get_application
from pyClanSphere.database import db, init_database
self.app = get_application()
self.db = db
# just in case the table(s) for the test haven't been created
init_database(self.app.database_engine)
|
{
"content_hash": "f9764809801078244b3f8938bf561c1c",
"timestamp": "",
"source": "github",
"line_count": 268,
"max_line_length": 117,
"avg_line_length": 37.58582089552239,
"alnum_prop": 0.6282140375260598,
"repo_name": "jokey2k/pyClanSphere",
"id": "7ad7121025f451087e1d17156235a6fcc200c7f8",
"size": "10097",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyClanSphere/tests/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "188174"
},
{
"name": "Python",
"bytes": "891594"
}
],
"symlink_target": ""
}
|
from swgpy.object import *
def create(kernel):
result = Building()
result.template = "object/building/poi/shared_dathomir_freedprisonerscamp_medium1.iff"
result.attribute_template_id = -1
result.stfName("poi_n","base_poi_building")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
{
"content_hash": "9c493a0d28a64fd348532a78651d1fcc",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 87,
"avg_line_length": 24.692307692307693,
"alnum_prop": 0.7071651090342679,
"repo_name": "obi-two/Rebelion",
"id": "484ef2100c0d82d1fd8d9dd65b51dcc93386eed4",
"size": "466",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "data/scripts/templates/object/building/poi/shared_dathomir_freedprisonerscamp_medium1.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "11818"
},
{
"name": "C",
"bytes": "7699"
},
{
"name": "C++",
"bytes": "2293610"
},
{
"name": "CMake",
"bytes": "39727"
},
{
"name": "PLSQL",
"bytes": "42065"
},
{
"name": "Python",
"bytes": "7499185"
},
{
"name": "SQLPL",
"bytes": "41864"
}
],
"symlink_target": ""
}
|
"""Create a blockchain cache.
Creating a cache of the blockchain speeds up test execution when running
multiple functional tests. This helper script is executed by test_runner when multiple
tests are being run in parallel.
"""
from test_framework.test_framework import DoriancoinTestFramework
class CreateCache(DoriancoinTestFramework):
# Test network and test nodes are not required:
def set_test_params(self):
self.num_nodes = 0
self.supports_cli = True
def setup_network(self):
pass
def run_test(self):
pass
if __name__ == '__main__':
CreateCache().main()
|
{
"content_hash": "cdac151004a2dbad887158e385ee762f",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 86,
"avg_line_length": 25.75,
"alnum_prop": 0.7038834951456311,
"repo_name": "doriancoins/doriancoin",
"id": "ac830b74859f442146903af3f808604181776576",
"size": "835",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/functional/create_cache.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "28453"
},
{
"name": "C",
"bytes": "685584"
},
{
"name": "C++",
"bytes": "5472199"
},
{
"name": "HTML",
"bytes": "21860"
},
{
"name": "Java",
"bytes": "30314"
},
{
"name": "M4",
"bytes": "196107"
},
{
"name": "Makefile",
"bytes": "114818"
},
{
"name": "Objective-C",
"bytes": "2171"
},
{
"name": "Objective-C++",
"bytes": "6765"
},
{
"name": "Python",
"bytes": "1309467"
},
{
"name": "QMake",
"bytes": "759"
},
{
"name": "Shell",
"bytes": "66893"
}
],
"symlink_target": ""
}
|
from argparse import ArgumentParser
import socket
import time
import os
from novaclient.client import Client
DEFAULT_SCHEME = '{}.nova.hypervisors'.format(socket.gethostname())
METRIC_KEYS = (
'current_workload',
'disk_available_least',
'local_gb',
'local_gb_used',
'memory_mb',
'memory_mb_used',
'running_vms',
'vcpus',
'vcpus_used',
)
def output_metric(name, value):
print '{}\t{}\t{}'.format(name, value, int(time.time()))
def main():
parser = ArgumentParser()
parser.add_argument('-u', '--user', default=os.environ['OS_USERNAME'])
parser.add_argument('-p', '--password', default=os.environ['OS_PASSWORD'])
parser.add_argument('-t', '--tenant', default=os.environ['OS_TENANT_NAME'])
parser.add_argument('-a', '--auth-url', default=os.environ['OS_AUTH_URL'])
parser.add_argument('-S', '--service-type', default='compute')
parser.add_argument('-H', '--host')
parser.add_argument('-s', '--scheme', default=DEFAULT_SCHEME)
args = parser.parse_args()
args.user
client = Client(version=2, username=args.user, api_key=args.password,
project_id=args.tenant, auth_url=args.auth_url,
service_type=args.service_type)
if args.host:
hypervisors = client.hypervisors.search(args.host)
else:
hypervisors = client.hypervisors.list()
for hv in hypervisors:
hostname = hv.hypervisor_hostname.split('.')[0]
for key, value in hv.to_dict().iteritems():
if key in METRIC_KEYS:
output_metric('{}.{}.{}'.format(args.scheme, hostname, key), value)
if __name__ == '__main__':
main()
|
{
"content_hash": "344f4dbff7d04a8905c8616ba77b64f9",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 83,
"avg_line_length": 30.925925925925927,
"alnum_prop": 0.6191616766467066,
"repo_name": "sivakom/ursula-monitoring",
"id": "c17a461cd55ceb4491fdf19260846b08ce75bbe2",
"size": "2019",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sensu/plugins/metrics-nova.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "1262"
},
{
"name": "Perl",
"bytes": "54837"
},
{
"name": "Python",
"bytes": "124237"
},
{
"name": "Ruby",
"bytes": "94702"
},
{
"name": "Shell",
"bytes": "12139"
}
],
"symlink_target": ""
}
|
import tornado
import tornado.web
import tornado.websocket
import tornado.options
import os
import json
import uuid
import argparse
import logging
logger = logging.getLogger('gateway')
args = None
def parse_args():
global args
static_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'static'))
parser = argparse.ArgumentParser(description='Gateway server')
parser.add_argument('-v', '--verbose', help='verbose logging', action='store_true')
parser.add_argument('-s', '--static-path', help='path for static files [default: %(default)s]', default=static_path)
parser.add_argument('-p', '--listen-port', help='port to listen on [default: %(default)s]', default=9000, type=int, metavar='PORT')
parser.add_argument('-i', '--listen-interface', help='interface to listen on. [default: %(default)s]', default='0.0.0.0', metavar='IFACE')
args = parser.parse_args()
connections = set()
class ChatHandler(tornado.websocket.WebSocketHandler):
def open(self):
connections.add(self)
return None
def on_message(self, msg):
for c in connections:
if c is self:
continue
c.write_message(msg)
def on_close(self):
connections.remove(self)
def main():
global logger
#tornado.options.parse_command_line()
parse_args()
if args.verbose:
tornado.options.enable_pretty_logging()
logger = logging.getLogger()
logger.setLevel(logging.INFO)
application = tornado.web.Application([
(r"/chat", ChatHandler),
(r"/(.*)", tornado.web.StaticFileHandler, {"path": args.static_path, "default_filename":'index.html'}),
],
)
print "Listening on %s:%s" % (args.listen_interface, args.listen_port)
application.listen(args.listen_port, args.listen_interface)
tornado.ioloop.IOLoop.instance().start()
if __name__ == "__main__":
main()
|
{
"content_hash": "c19d831f931974415cf7785ec9e45755",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 142,
"avg_line_length": 25.61842105263158,
"alnum_prop": 0.6456086286594761,
"repo_name": "polyphony-ot/polyphony-ios-demo",
"id": "24000548b8591df6e2818b695ca2dbbee9e7fc9d",
"size": "1969",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "SocketRocket/TestChatServer/py/chatroom.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "221880"
},
{
"name": "C++",
"bytes": "5626"
},
{
"name": "Go",
"bytes": "1429"
},
{
"name": "JavaScript",
"bytes": "2437"
},
{
"name": "Objective-C",
"bytes": "93716"
},
{
"name": "Objective-C++",
"bytes": "139"
},
{
"name": "Python",
"bytes": "5557"
},
{
"name": "Ruby",
"bytes": "726"
},
{
"name": "Shell",
"bytes": "790"
}
],
"symlink_target": ""
}
|
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
def plot():
fig = plt.figure()
x, y = np.ogrid[-10:10:100j, -10:10:100j]
extent = (x.min(), x.max(), y.min(), y.max())
cmap = matplotlib.cm.get_cmap("gray")
plt.imshow(x * y, extent=extent, cmap=cmap)
plt.colorbar()
return fig
def test():
from .helpers import assert_equality
assert_equality(plot, "test_heat_reference.tex")
|
{
"content_hash": "a7869ac8c5bff5462cf8f41ef1dd36f7",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 52,
"avg_line_length": 22.894736842105264,
"alnum_prop": 0.639080459770115,
"repo_name": "nschloe/matplotlib2tikz",
"id": "8e84b7ea45c719081dee36cd7676cc09d1aa62f2",
"size": "435",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tests/test_heat.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "812"
},
{
"name": "Python",
"bytes": "161690"
},
{
"name": "TeX",
"bytes": "259177"
}
],
"symlink_target": ""
}
|
"""Operations that operate on whole columns.
.. autosummary::
ColumnRemovalOperation
ColumnRenameOperation
ColumnMoveOperation
column_reorder_operation
column_addition_operation
"""
from .base import operation
from ..expressions import compile_expression
@operation('column-removal')
class ColumnRemovalOperation:
"""Remove a specified column from the dataset.
Expects a ``dict`` as loaded from OpenRefine JSON script.
Args:
parameters['description'] (str): Human-readable description
parameters['columnName'] (str): Column to remove
"""
def __init__(self, parameters):
"""Initialise the operation."""
self.description = parameters['description']
self.column = parameters['columnName']
def __call__(self, data):
"""Remove the specified column from ``data``.
The column to remove is given by ``self.column``.
Args:
data (DataFrame): The data to transform. Not guaranteed
immutable.
Returns:
DataFrame: The transformed data.
"""
return data.drop(self.column, axis=1)
@operation('column-rename')
class ColumnRenameOperation:
"""Rename a specified column in the dataset.
Expects a ``dict`` as loaded from OpenRefine JSON script.
Args:
parameters['description'] (str): Human-readable description
parameters['oldColumnName'] (str): Column to rename
parameters['newColumnName'] (str): New name for column
"""
def __init__(self, parameters):
"""Initialise the operation."""
self.description = parameters['description']
self.transform = {parameters['oldColumnName']:
parameters['newColumnName']}
def __call__(self, data):
"""Execute the operation.
Args:
data (DataFrame): The data to transform. Not guaranteed
immutable.
Returns:
DataFrame: The transformed data.
"""
return data.rename(columns=self.transform)
@operation('column-move')
class ColumnMoveOperation:
"""Move a specified column to a different position."""
def __init__(self, parameters):
"""Initialise the operation."""
self.description = parameters['description']
self.column = parameters['columnName']
self.index = parameters['index']
def __call__(self, data):
"""Execute the operation.
Args:
data (DataFrame): The data to transform. Not guaranteed
immutable.
Returns:
DataFrame: The transformed data.
Raises:
:exc:`IndexError`: If the target column index is less than 0 or
past the last column.
:exc:`KeyError`: If the column to be moved is not found.
"""
cols = list(data.columns)
if not (0 <= self.index < len(cols)):
raise IndexError("Target column {} outside range (0, {})"
.format(self.index, len(cols) - 1))
if self.column not in cols:
raise KeyError("Column '{}' not found in {}"
.format(self.column, cols))
cols.remove(self.column)
cols.insert(self.index, self.column)
return data[cols]
@operation('column-reorder')
def column_reorder_operation(params):
"""Move a specified column to a different position."""
def exec_column_reorder_operation(data):
return data[params['columnNames']]
return exec_column_reorder_operation
@operation('column-addition')
def column_addition_operation(params):
"""Add a new column based on an existing one."""
base_column_name = params['baseColumnName']
new_column_name = params['newColumnName']
insert_index = params['columnInsertIndex']
expression = compile_expression(params['expression'],
on_error=params['onError'])
def exec_column_addition_operation(data):
new_cols = data.columns.insert(insert_index, new_column_name)
return data.assign(**{new_column_name:
lambda row: expression(row[base_column_name])}) \
.reindex(columns=new_cols)
return exec_column_addition_operation
|
{
"content_hash": "2ab06963434cb140d222a190d75874d1",
"timestamp": "",
"source": "github",
"line_count": 144,
"max_line_length": 79,
"avg_line_length": 29.881944444444443,
"alnum_prop": 0.6109690913316291,
"repo_name": "jezcope/pyrefine",
"id": "fb09bb8c04f29d9f97274f7b3f1ff0851a666aa2",
"size": "4303",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "pyrefine/ops/column.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "2278"
},
{
"name": "Python",
"bytes": "51026"
}
],
"symlink_target": ""
}
|
from datetime import datetime
import collections
import pytest
import numpy as np
import pandas as pd
from pandas import Series, DataFrame
from pandas.compat import StringIO, u
from pandas.util.testing import (assert_series_equal, assert_almost_equal,
assert_frame_equal, ensure_clean)
import pandas.util.testing as tm
from .common import TestData
class TestSeriesToCSV(TestData):
def read_csv(self, path, **kwargs):
params = dict(squeeze=True, index_col=0,
header=None, parse_dates=True)
params.update(**kwargs)
header = params.get("header")
out = pd.read_csv(path, **params)
if header is None:
out.name = out.index.name = None
return out
def test_from_csv_deprecation(self):
# see gh-17812
with ensure_clean() as path:
self.ts.to_csv(path)
with tm.assert_produces_warning(FutureWarning,
check_stacklevel=False):
ts = self.read_csv(path)
depr_ts = Series.from_csv(path)
assert_series_equal(depr_ts, ts)
def test_from_csv(self):
with ensure_clean() as path:
self.ts.to_csv(path)
ts = self.read_csv(path)
assert_series_equal(self.ts, ts, check_names=False)
assert ts.name is None
assert ts.index.name is None
with tm.assert_produces_warning(FutureWarning,
check_stacklevel=False):
depr_ts = Series.from_csv(path)
assert_series_equal(depr_ts, ts)
# see gh-10483
self.ts.to_csv(path, header=True)
ts_h = self.read_csv(path, header=0)
assert ts_h.name == "ts"
self.series.to_csv(path)
series = self.read_csv(path)
assert_series_equal(self.series, series, check_names=False)
assert series.name is None
assert series.index.name is None
self.series.to_csv(path, header=True)
series_h = self.read_csv(path, header=0)
assert series_h.name == "series"
outfile = open(path, "w")
outfile.write("1998-01-01|1.0\n1999-01-01|2.0")
outfile.close()
series = self.read_csv(path, sep="|")
check_series = Series({datetime(1998, 1, 1): 1.0,
datetime(1999, 1, 1): 2.0})
assert_series_equal(check_series, series)
series = self.read_csv(path, sep="|", parse_dates=False)
check_series = Series({"1998-01-01": 1.0, "1999-01-01": 2.0})
assert_series_equal(check_series, series)
def test_to_csv(self):
import io
with ensure_clean() as path:
self.ts.to_csv(path)
with io.open(path, newline=None) as f:
lines = f.readlines()
assert (lines[1] != '\n')
self.ts.to_csv(path, index=False)
arr = np.loadtxt(path)
assert_almost_equal(arr, self.ts.values)
def test_to_csv_unicode_index(self):
buf = StringIO()
s = Series([u("\u05d0"), "d2"], index=[u("\u05d0"), u("\u05d1")])
s.to_csv(buf, encoding="UTF-8")
buf.seek(0)
s2 = self.read_csv(buf, index_col=0, encoding="UTF-8")
assert_series_equal(s, s2)
def test_to_csv_float_format(self):
with ensure_clean() as filename:
ser = Series([0.123456, 0.234567, 0.567567])
ser.to_csv(filename, float_format="%.2f")
rs = self.read_csv(filename)
xp = Series([0.12, 0.23, 0.57])
assert_series_equal(rs, xp)
def test_to_csv_list_entries(self):
s = Series(['jack and jill', 'jesse and frank'])
split = s.str.split(r'\s+and\s+')
buf = StringIO()
split.to_csv(buf)
def test_to_csv_path_is_none(self):
# GH 8215
# Series.to_csv() was returning None, inconsistent with
# DataFrame.to_csv() which returned string
s = Series([1, 2, 3])
csv_str = s.to_csv(path=None)
assert isinstance(csv_str, str)
class TestSeriesIO(TestData):
def test_to_frame(self):
self.ts.name = None
rs = self.ts.to_frame()
xp = pd.DataFrame(self.ts.values, index=self.ts.index)
assert_frame_equal(rs, xp)
self.ts.name = 'testname'
rs = self.ts.to_frame()
xp = pd.DataFrame(dict(testname=self.ts.values), index=self.ts.index)
assert_frame_equal(rs, xp)
rs = self.ts.to_frame(name='testdifferent')
xp = pd.DataFrame(
dict(testdifferent=self.ts.values), index=self.ts.index)
assert_frame_equal(rs, xp)
def test_timeseries_periodindex(self):
# GH2891
from pandas import period_range
prng = period_range('1/1/2011', '1/1/2012', freq='M')
ts = Series(np.random.randn(len(prng)), prng)
new_ts = tm.round_trip_pickle(ts)
assert new_ts.index.freq == 'M'
def test_pickle_preserve_name(self):
for n in [777, 777., 'name', datetime(2001, 11, 11), (1, 2)]:
unpickled = self._pickle_roundtrip_name(tm.makeTimeSeries(name=n))
assert unpickled.name == n
def _pickle_roundtrip_name(self, obj):
with ensure_clean() as path:
obj.to_pickle(path)
unpickled = pd.read_pickle(path)
return unpickled
def test_to_frame_expanddim(self):
# GH 9762
class SubclassedSeries(Series):
@property
def _constructor_expanddim(self):
return SubclassedFrame
class SubclassedFrame(DataFrame):
pass
s = SubclassedSeries([1, 2, 3], name='X')
result = s.to_frame()
assert isinstance(result, SubclassedFrame)
expected = SubclassedFrame({'X': [1, 2, 3]})
assert_frame_equal(result, expected)
@pytest.mark.parametrize('mapping', (
dict,
collections.defaultdict(list),
collections.OrderedDict))
def test_to_dict(self, mapping):
# GH16122
ts = TestData().ts
tm.assert_series_equal(
Series(ts.to_dict(mapping), name='ts'), ts)
from_method = Series(ts.to_dict(collections.Counter))
from_constructor = Series(collections.Counter(ts.iteritems()))
tm.assert_series_equal(from_method, from_constructor)
|
{
"content_hash": "a39633aa669bebe8a0e565ccc89518f6",
"timestamp": "",
"source": "github",
"line_count": 206,
"max_line_length": 78,
"avg_line_length": 31.771844660194176,
"alnum_prop": 0.5599694423223835,
"repo_name": "zfrenchee/pandas",
"id": "ad51261a47c5c3fd901ddcdfbe414b17be592e2c",
"size": "6595",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "pandas/tests/series/test_io.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "3847"
},
{
"name": "C",
"bytes": "470171"
},
{
"name": "C++",
"bytes": "17193"
},
{
"name": "HTML",
"bytes": "551706"
},
{
"name": "Makefile",
"bytes": "989"
},
{
"name": "PowerShell",
"bytes": "2970"
},
{
"name": "Python",
"bytes": "12658422"
},
{
"name": "Shell",
"bytes": "25785"
},
{
"name": "Smarty",
"bytes": "2045"
}
],
"symlink_target": ""
}
|
from distutils.core import setup, Extension
import numpy
mod = Extension('CorrCoef',
include_dirs = [numpy.get_include()],
sources = ['CorrCoef.c'],
extra_compile_args=['-fopenmp'],
extra_link_args=['-lgomp']
)
setup (name = 'CorrCoef',
author = 'Aljoscha Rheinwalt',
author_email = 'aljoscha.rheinwalt@uni-potsdam.de',
ext_modules = [mod]
)
|
{
"content_hash": "4284cfca19bfc8d0709fec1b97762039",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 55,
"avg_line_length": 24.8,
"alnum_prop": 0.6559139784946236,
"repo_name": "Rheinwalt/CorrCoef",
"id": "c761c6f99b4158576dc491de61b99afbd65095b2",
"size": "372",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "3065"
},
{
"name": "Python",
"bytes": "1002"
}
],
"symlink_target": ""
}
|
import pygame, math
ais={}
def register_ai(name):
def _internal(cls):
ais[name]=cls
cls.name=name
return cls
return _internal
@register_ai("empty")
class AI(object):
def update(self, dt):
pass
def get_angle(x1, x2, y1, y2):
delta_angle=math.degrees(math.atan2(y2-y1, -(x2-x1)))+90
return delta_angle
def get_rel_angle(delta_angle, self_angle):
return math.degrees(math.atan2(
math.sin(math.radians(delta_angle)-math.radians(self_angle)),
math.cos(math.radians(delta_angle)-math.radians(self_angle))
))
@register_ai("projectile_ai")
class ProjectileAI(AI):
def __init__(self):
self.create_time=pygame.time.get_ticks()
def update(self, dt):
self.ship.accel_direction=1
self.ship.turn_direction=0
if pygame.time.get_ticks()-self.create_time>self.ship.type.raw.get("lifetime", 5000):
self.ship.marked_for_death=True
for ship in game.client.owned_entities.values():
if ship.faction!=self.ship.faction and ship is not self.ship:
# print(ship.rect, self.ship.rect)
if ship.rect.colliderect(self.ship.rect) and not ship.type.raw.get("is_projectile"):
self.ship.marked_for_death=True
ship.take_damage(self.ship.type.raw.get("damage", 10))
if self.ship.target:
if self.ship.type.max_rot_speed>0 and not self.ship.target.marked_for_death:
angle=get_rel_angle(get_angle(self.ship.rect.centerx, self.ship.target.rect.centerx,
self.ship.rect.centery, self.ship.target.rect.centery), self.ship.angle)
if abs(angle)>10:
if angle>0:
self.ship.turn_direction=1
else:
self.ship.turn_direction=-1
@register_ai("hostile_ai")
class HostileAI(AI):
def update(self, dt):
if self.ship.target:
if self.ship.type.max_rot_speed>0 and not self.ship.target.marked_for_death:
angle=get_rel_angle(get_angle(self.ship.rect.centerx, self.ship.target.rect.centerx,
self.ship.rect.centery, self.ship.target.rect.centery), self.ship.angle)
if abs(angle)>5:
self.ship.accel_direction=0
if angle>0:
self.ship.turn_direction=1
else:
self.ship.turn_direction=-1
else:
self.ship.turn_direction=0
self.ship.accel_direction=1
self.ship.fire_selected()
min_dist=-1
min_ship=self.ship.target
for e in game.client.owned_entities.values():
if e.faction!=self.ship.faction:
dist=math.sqrt((self.ship.rect.centerx-e.rect.centerx)**2+(self.ship.rect.centery-e.rect.centery)**2)
if dist<min_dist or min_dist==-1:
min_dist=dist
min_ship=e
for e in game.client.remote_entities.values():
if e.faction!=self.ship.faction:
dist=math.sqrt((self.ship.rect.centerx-e.rect.centerx)**2+(self.ship.rect.centery-e.rect.centery)**2)
if dist<min_dist or min_dist==-1:
min_dist=dist
min_ship=e
self.ship.target=min_ship
print(self.ship.target)
import game
|
{
"content_hash": "366d7248cf52b138dcfc9394bb2e1236",
"timestamp": "",
"source": "github",
"line_count": 89,
"max_line_length": 105,
"avg_line_length": 31.640449438202246,
"alnum_prop": 0.6967329545454546,
"repo_name": "602p/starfighter_revival",
"id": "6d2e31db411b672a72a2593730a22d64766e31d1",
"size": "2816",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ai.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "24097"
}
],
"symlink_target": ""
}
|
import sys
sys.path.insert(1, "../../")
import h2o, tests
def get_model_test(ip,port):
prostate = h2o.import_file(path=h2o.locate("smalldata/logreg/prostate.csv"))
r = prostate[0].runif()
train = prostate[r < 0.70]
test = prostate[r >= 0.70]
# Regression
regression_gbm1 = h2o.gbm(y=train[1], x=train[2:9], distribution="gaussian")
predictions1 = regression_gbm1.predict(test)
regression_gbm2 = h2o.get_model(regression_gbm1._id)
assert regression_gbm2._model_json['output']['model_category'] == "Regression"
predictions2 = regression_gbm2.predict(test)
for r in range(predictions1.nrow):
p1 = predictions1[r,0]
p2 = predictions2[r,0]
assert p1 == p2, "expected regression predictions to be the same for row {}, but got {} and {}".format(r, p1, p2)
# Binomial
train[1] = train[1].asfactor()
bernoulli_gbm1 = h2o.gbm(y=train[1], x=train[2:], distribution="bernoulli")
predictions1 = bernoulli_gbm1.predict(test)
bernoulli_gbm2 = h2o.get_model(bernoulli_gbm1._id)
assert bernoulli_gbm2._model_json['output']['model_category'] == "Binomial"
predictions2 = bernoulli_gbm2.predict(test)
for r in range(predictions1.nrow):
p1 = predictions1[r,0]
p2 = predictions2[r,0]
assert p1 == p2, "expected binomial predictions to be the same for row {}, but got {} and {}".format(r, p1, p2)
# Clustering
benign_h2o = h2o.import_file(path=h2o.locate("smalldata/logreg/benign.csv"))
km_h2o = h2o.kmeans(x=benign_h2o, k=3)
benign_km = h2o.get_model(km_h2o._id)
assert benign_km._model_json['output']['model_category'] == "Clustering"
# Multinomial
train[4] = train[4].asfactor()
multinomial_dl1 = h2o.deeplearning(x=train[0:2], y=train[4], loss='CrossEntropy')
predictions1 = multinomial_dl1.predict(test)
multinomial_dl2 = h2o.get_model(multinomial_dl1._id)
assert multinomial_dl2._model_json['output']['model_category'] == "Multinomial"
predictions2 = multinomial_dl2.predict(test)
for r in range(predictions1.nrow):
p1 = predictions1[r,0]
p2 = predictions2[r,0]
assert p1 == p2, "expected multinomial predictions to be the same for row {0}, but got {1} and {2}" \
"".format(r, p1, p2)
if __name__ == "__main__":
tests.run_test(sys.argv, get_model_test)
|
{
"content_hash": "a93d6857cdc988ab0935b25ed2b38085",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 121,
"avg_line_length": 37.34375,
"alnum_prop": 0.6435146443514644,
"repo_name": "bospetersen/h2o-3",
"id": "c98532c84b2ee96f69ddc4863f5705289bd7419c",
"size": "2390",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "h2o-py/tests/testdir_misc/pyunit_get_model.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "5090"
},
{
"name": "CSS",
"bytes": "163561"
},
{
"name": "CoffeeScript",
"bytes": "262107"
},
{
"name": "Emacs Lisp",
"bytes": "8914"
},
{
"name": "Groovy",
"bytes": "78"
},
{
"name": "HTML",
"bytes": "146874"
},
{
"name": "Java",
"bytes": "5441396"
},
{
"name": "JavaScript",
"bytes": "88331"
},
{
"name": "Makefile",
"bytes": "31513"
},
{
"name": "Python",
"bytes": "2021301"
},
{
"name": "R",
"bytes": "1829960"
},
{
"name": "Rebol",
"bytes": "3997"
},
{
"name": "Ruby",
"bytes": "299"
},
{
"name": "Scala",
"bytes": "16336"
},
{
"name": "Shell",
"bytes": "44718"
},
{
"name": "TeX",
"bytes": "470617"
}
],
"symlink_target": ""
}
|
import taskflow.engines
from taskflow.patterns import linear_flow
from taskflow.types import failure
from cinder import test
from cinder.tests.unit.volume.drivers.dell_emc.vnx import fake_exception \
as vnx_ex
from cinder.tests.unit.volume.drivers.dell_emc.vnx import res_mock
import cinder.volume.drivers.dell_emc.vnx.taskflows as vnx_taskflow
class TestTaskflow(test.TestCase):
def setUp(self):
super(TestTaskflow, self).setUp()
self.work_flow = linear_flow.Flow('test_task')
@res_mock.patch_client
def test_copy_snapshot_task(self, client, mocked):
store_spec = {'client': client,
'snap_name': 'original_name',
'new_snap_name': 'new_name'
}
self.work_flow.add(vnx_taskflow.CopySnapshotTask())
engine = taskflow.engines.load(self.work_flow,
store=store_spec)
engine.run()
@res_mock.patch_client
def test_copy_snapshot_task_revert(self, client, mocked):
store_spec = {'client': client,
'snap_name': 'original_name',
'new_snap_name': 'new_name'
}
self.work_flow.add(vnx_taskflow.CopySnapshotTask())
engine = taskflow.engines.load(self.work_flow,
store=store_spec)
self.assertRaises(vnx_ex.VNXSnapError,
engine.run)
@res_mock.patch_client
def test_create_smp_task(self, client, mocked):
store_spec = {
'client': client,
'smp_name': 'mount_point_name',
'base_lun_name': 'base_name'
}
self.work_flow.add(vnx_taskflow.CreateSMPTask())
engine = taskflow.engines.load(self.work_flow,
store=store_spec)
engine.run()
smp_id = engine.storage.fetch('smp_id')
self.assertEqual(15, smp_id)
@res_mock.patch_client
def test_create_smp_task_revert(self, client, mocked):
store_spec = {
'client': client,
'smp_name': 'mount_point_name',
'base_lun_name': 'base_name'
}
self.work_flow.add(vnx_taskflow.CreateSMPTask())
engine = taskflow.engines.load(self.work_flow,
store=store_spec)
self.assertRaises(vnx_ex.VNXCreateLunError,
engine.run)
smp_id = engine.storage.fetch('smp_id')
self.assertIsInstance(smp_id, failure.Failure)
@res_mock.patch_client
def test_attach_snap_task(self, client, mocked):
store_spec = {
'client': client,
'smp_name': 'mount_point_name',
'snap_name': 'snap_name'
}
self.work_flow.add(vnx_taskflow.AttachSnapTask())
engine = taskflow.engines.load(self.work_flow,
store=store_spec)
engine.run()
@res_mock.patch_client
def test_attach_snap_task_revert(self, client, mocked):
store_spec = {
'client': client,
'smp_name': 'mount_point_name',
'snap_name': 'snap_name'
}
self.work_flow.add(vnx_taskflow.AttachSnapTask())
engine = taskflow.engines.load(self.work_flow,
store=store_spec)
self.assertRaises(vnx_ex.VNXAttachSnapError,
engine.run)
@res_mock.patch_client
def test_create_snapshot_task(self, client, mocked):
store_spec = {
'client': client,
'lun_id': 12,
'snap_name': 'snap_name'
}
self.work_flow.add(vnx_taskflow.CreateSnapshotTask())
engine = taskflow.engines.load(self.work_flow,
store=store_spec)
engine.run()
@res_mock.patch_client
def test_create_snapshot_task_revert(self, client, mocked):
store_spec = {
'client': client,
'lun_id': 13,
'snap_name': 'snap_name'
}
self.work_flow.add(vnx_taskflow.CreateSnapshotTask())
engine = taskflow.engines.load(self.work_flow,
store=store_spec)
self.assertRaises(vnx_ex.VNXCreateSnapError,
engine.run)
@res_mock.patch_client
def test_allow_read_write_task(self, client, mocked):
store_spec = {
'client': client,
'snap_name': 'snap_name'
}
self.work_flow.add(vnx_taskflow.ModifySnapshotTask())
engine = taskflow.engines.load(self.work_flow,
store=store_spec)
engine.run()
@res_mock.patch_client
def test_allow_read_write_task_revert(self, client, mocked):
store_spec = {
'client': client,
'snap_name': 'snap_name'
}
self.work_flow.add(vnx_taskflow.ModifySnapshotTask())
engine = taskflow.engines.load(self.work_flow,
store=store_spec)
self.assertRaises(vnx_ex.VNXSnapError,
engine.run)
@res_mock.patch_client
def test_create_cg_snapshot_task(self, client, mocked):
store_spec = {
'client': client,
'cg_name': 'test_cg',
'cg_snap_name': 'my_snap_name'
}
self.work_flow.add(vnx_taskflow.CreateCGSnapshotTask())
engine = taskflow.engines.load(self.work_flow,
store=store_spec)
engine.run()
snap_name = engine.storage.fetch('new_cg_snap_name')
self.assertIsInstance(snap_name, res_mock.StorageObjectMock)
@res_mock.patch_client
def test_create_cg_snapshot_task_revert(self, client, mocked):
store_spec = {
'client': client,
'cg_name': 'test_cg',
'cg_snap_name': 'my_snap_name'
}
self.work_flow.add(vnx_taskflow.CreateCGSnapshotTask())
engine = taskflow.engines.load(self.work_flow,
store=store_spec)
self.assertRaises(vnx_ex.VNXCreateSnapError,
engine.run)
@res_mock.patch_client
def test_extend_smp_task(self, client, mocked):
store_spec = {
'client': client,
'smp_name': 'lun_test_extend_smp_task',
'lun_size': 100
}
self.work_flow.add(vnx_taskflow.ExtendSMPTask())
engine = taskflow.engines.load(self.work_flow,
store=store_spec)
engine.run()
@res_mock.patch_client
def test_extend_smp_task_skip_small_size(self, client, mocked):
store_spec = {
'client': client,
'smp_name': 'lun_test_extend_smp_task',
'lun_size': 1
}
self.work_flow.add(vnx_taskflow.ExtendSMPTask())
engine = taskflow.engines.load(self.work_flow,
store=store_spec)
engine.run()
@res_mock.patch_client
def test_extend_smp_task_skip_thick(self, client, mocked):
store_spec = {
'client': client,
'smp_name': 'lun_test_extend_smp_task_skip_thick',
'lun_size': 100
}
self.work_flow.add(vnx_taskflow.ExtendSMPTask())
engine = taskflow.engines.load(self.work_flow,
store=store_spec)
engine.run()
|
{
"content_hash": "c37a548baf081995c1dddfe0dbd66b2b",
"timestamp": "",
"source": "github",
"line_count": 203,
"max_line_length": 74,
"avg_line_length": 37.05911330049261,
"alnum_prop": 0.5395453941246843,
"repo_name": "eharney/cinder",
"id": "559075c5f50512a24c0f076d40345191c79828d7",
"size": "8137",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "cinder/tests/unit/volume/drivers/dell_emc/vnx/test_taskflows.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "561"
},
{
"name": "Python",
"bytes": "19839107"
},
{
"name": "Shell",
"bytes": "6453"
}
],
"symlink_target": ""
}
|
from django import shortcuts
from horizon import forms
from horizon.test import helpers as test
class FormMixinTests(test.TestCase):
def _prepare_view(self, cls, request_headers, *args, **kwargs):
req = self.factory.get('/my_url/', **request_headers)
req.user = self.user
view = cls()
view.request = req
view.args = args
view.kwargs = kwargs
view.template_name = 'test_template'
# Note(Itxaka): ModalFormView requires a form_class to behave properly
view.form_class = TestForm
return view
def test_modal_form_mixin_hide_true_if_ajax(self):
view = self._prepare_view(
forms.views.ModalFormView,
dict(HTTP_X_REQUESTED_WITH='XMLHttpRequest'))
context = view.get_context_data()
self.assertTrue(context['hide'])
def test_modal_form_mixin_add_to_field_header_set(self):
return self._test_form_mixin_add_to_field_header(add_field=True)
def test_modal_form_mixin_add_to_field_header_not_set(self):
return self._test_form_mixin_add_to_field_header(add_field=False)
def _test_form_mixin_add_to_field_header(self, add_field=False):
options = dict(HTTP_X_REQUESTED_WITH='XMLHttpRequest')
if add_field:
options[forms.views.ADD_TO_FIELD_HEADER] = "keepme"
view = self._prepare_view(forms.views.ModalFormView, options)
context = view.get_context_data()
if add_field:
self.assertEqual("keepme", context['add_to_field'])
else:
self.assertNotIn('add_to_field', context)
def test_template_name_change_based_on_ajax_request(self):
view = self._prepare_view(
forms.views.ModalFormView,
dict(HTTP_X_REQUESTED_WITH='XMLHttpRequest'))
self.assertEqual('_' + view.template_name,
view.get_template_names())
view = self._prepare_view(forms.views.ModalFormView, {})
self.assertEqual(view.template_name, view.get_template_names())
class TestForm(forms.SelfHandlingForm):
name = forms.CharField(max_length=255)
def handle(self, request, data):
return True
class FormErrorTests(test.TestCase):
template = 'horizon/common/_form_fields.html'
def setUp(self):
super(FormErrorTests, self).setUp()
# Note(Itxaka): We pass data to the form so its bound and has the
# proper cleaned_data fields
self.form = TestForm(self.request, data={'fake': 'data'})
def _render_form(self):
return shortcuts.render(self.request, self.template,
{'form': self.form})
def test_set_warning(self):
warning_text = 'WARNING 29380'
self.form.set_warning(warning_text)
self.assertEqual([warning_text], self.form.warnings)
resp = self._render_form()
self.assertIn(warning_text.encode('utf-8'), resp.content)
def test_api_error(self):
error_text = 'ERROR 12938'
self.form.full_clean()
self.form.api_error(error_text)
self.assertEqual([error_text], self.form.non_field_errors())
resp = self._render_form()
self.assertIn(error_text.encode('utf-8'), resp.content)
|
{
"content_hash": "7e8da8e6d0972a4e053317315d584427",
"timestamp": "",
"source": "github",
"line_count": 93,
"max_line_length": 78,
"avg_line_length": 34.946236559139784,
"alnum_prop": 0.6313846153846154,
"repo_name": "NeCTAR-RC/horizon",
"id": "58539917a9067e8b40e84b10c4404f6a544ad626",
"size": "3825",
"binary": false,
"copies": "1",
"ref": "refs/heads/nectar/train",
"path": "horizon/test/unit/forms/test_forms.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "598098"
},
{
"name": "JavaScript",
"bytes": "2474550"
},
{
"name": "Python",
"bytes": "5323984"
},
{
"name": "SCSS",
"bytes": "132603"
},
{
"name": "Shell",
"bytes": "7466"
}
],
"symlink_target": ""
}
|
'''
tcp_message Inline Script Hook API Demonstration
------------------------------------------------
* modifies packets containing "foo" to "bar"
* prints various details for each packet.
example cmdline invocation:
mitmdump -T --host --tcp ".*" -q -s examples/tcp_message.py
'''
from netlib.utils import clean_bin
def tcp_message(ctx, tcp_msg):
modified_msg = tcp_msg.message.replace("foo", "bar")
is_modified = False if modified_msg == tcp_msg.message else True
tcp_msg.message = modified_msg
print("[tcp_message{}] from {} {} to {} {}:\r\n{}".format(
" (modified)" if is_modified else "",
"client" if tcp_msg.sender == tcp_msg.client_conn else "server",
tcp_msg.sender.address,
"server" if tcp_msg.receiver == tcp_msg.server_conn else "client",
tcp_msg.receiver.address, clean_bin(tcp_msg.message)))
|
{
"content_hash": "365b8cc6b646b180a71f75b7b5787c58",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 74,
"avg_line_length": 36.041666666666664,
"alnum_prop": 0.623121387283237,
"repo_name": "ParthGanatra/mitmproxy",
"id": "c63368e4fc0868f061baa4dd6b44669024983a5e",
"size": "865",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "examples/tcp_message.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "410"
},
{
"name": "CSS",
"bytes": "211484"
},
{
"name": "HTML",
"bytes": "57472"
},
{
"name": "JavaScript",
"bytes": "1755960"
},
{
"name": "Python",
"bytes": "1190792"
},
{
"name": "Shell",
"bytes": "3990"
}
],
"symlink_target": ""
}
|
from grab import DataNotFound, GrabMisuseError
import six
from test.util import build_grab
from test.util import BaseGrabTestCase
HTML = u"""
<head>
<title>фыва</title>
<meta http-equiv="Content-Type" content="text/html; charset=cp1251" />
</head>
<body>
<div id="bee">
<div class="wrapper">
# russian LA
<strong id="bee-strong">пче</strong><em id="bee-em">ла</em>
</div>
<script type="text/javascript">
mozilla = 777;
</script>
<style type="text/css">
body { color: green; }
</style>
</div>
<div id="fly">
# russian XA
<strong id="fly-strong">му\n</strong><em id="fly-em">ха</em>
</div>
<ul id="num">
<li id="num-1">item #100 2</li>
<li id="num-2">item #2</li>
</ul>
""".encode('cp1251')
class TextExtensionTest(BaseGrabTestCase):
def setUp(self):
self.server.reset()
# Create fake grab instance with fake response
self.g = build_grab()
self.g.fake_response(HTML, charset='cp1251')
def test_search(self):
self.assertTrue(self.g.search(u'фыва'.encode('cp1251'), byte=True))
self.assertTrue(self.g.search(u'фыва'))
self.assertFalse(self.g.search(u'фыва2'))
def test_search_usage_errors(self):
self.assertRaises(GrabMisuseError, self.g.search, u'фыва', byte=True)
anchor = 'фыва'
# py3 hack
if six.PY3:
anchor = anchor.encode('utf-8')
self.assertRaises(GrabMisuseError, self.g.search, anchor)
def test_assert_substring(self):
self.g.assert_substring(u'фыва')
self.g.assert_substring(u'фыва'.encode('cp1251'), byte=True)
self.assertRaises(DataNotFound, self.g.assert_substring, u'фыва2')
def test_assert_substrings(self):
self.g.assert_substrings((u'фыва',))
self.g.assert_substrings((u'фывы нет', u'фыва'))
self.g.assert_substrings((u'фыва'.encode('cp1251'), 'где ты фыва?'),
byte=True)
self.assertRaises(DataNotFound, self.g.assert_substrings,
(u'фыва, вернись', u'фыва-а-а-а'))
|
{
"content_hash": "375f0eb6481277f500c3d86684fe6de6",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 77,
"avg_line_length": 32.088235294117645,
"alnum_prop": 0.5857011915673694,
"repo_name": "liorvh/grab",
"id": "2c1139842e2cdef31bacb81f92a7a5c0b24f3a78",
"size": "2289",
"binary": false,
"copies": "13",
"ref": "refs/heads/master",
"path": "test/ext_text.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "5434"
},
{
"name": "Makefile",
"bytes": "910"
},
{
"name": "PostScript",
"bytes": "2788"
},
{
"name": "Python",
"bytes": "405459"
}
],
"symlink_target": ""
}
|
from flask import Blueprint
fit = Blueprint('fit', __name__, static_folder='static', template_folder='templates', url_prefix='/fitness')
import views
|
{
"content_hash": "8c720b755e2098e2fc3157f652252910",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 108,
"avg_line_length": 30.2,
"alnum_prop": 0.7350993377483444,
"repo_name": "SNET-Entrance/Entrance-UM",
"id": "364c838e5faf64cf55fccddbdda322839b780c95",
"size": "151",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/fit/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "803"
},
{
"name": "CSS",
"bytes": "390071"
},
{
"name": "HTML",
"bytes": "1750213"
},
{
"name": "PHP",
"bytes": "1684"
},
{
"name": "Python",
"bytes": "2215661"
},
{
"name": "Shell",
"bytes": "52"
}
],
"symlink_target": ""
}
|
"""
An example of using shards directly to construct the MagnaDoodle component,
using the PygameComponentShard as a base.
Generated code is in MagnaDoodle.py
"""
from PygameComponentShard import pygameComponentShard
# import shards and inline shards from these files
from ExampleMagnaShards import __INIT__
from ExampleMagnaShards import *
from ExampleInlineShards import *
from ExampleShards import *
from ModuleShard import moduleShard
from BranchShard import *
from LoopShard import *
# the event handling loop imported uses a method provided by its
# its superclass (which no longer exists), so reconstruct it here
# using shards (see LoopOverPygameEvents in ExampleInlineShards.py)
# construct mouse event handling switch
mousehandler = switchShard('mouseHandler', switchVar = 'event.type',
conditions = ['pygame.MOUSEBUTTONDOWN',
'pygame.MOUSEBUTTONUP',
'pygame.MOUSEMOTION'],
shards = [MOUSEBUTTONDOWN_handler,
MOUSEBUTTONUP_handler,
MOUSEMOTION_handler])
# wrap switch in loop that reads from inbox
pyeventloop = forShard(name = 'eventhandler', forVars = ['event'], inVar = r'self.recv("inbox")',
shards = [mousehandler])
# wrap event loop in inbox checking loop so that no invalid reads are performed
pyeventloop = whileShard(name = 'LoopOverPygameEvents', condition = r'self.dataReady("inbox")',
shards = [pyeventloop])
# shard list, contains mainly the imported shards
shards = [blitToSurface, waitBox, drawBG, addListenEvent, __INIT__,
SetEventOptions, ShutdownHandler, RequestDisplay, GrabDisplay,
pyeventloop] # replace previous eventloop here
# construct magnadoodle class from the above chassis
chassis = pygameComponentShard(name = "MagnaDoodle", shards = shards)
# wrap magna with the necessary imports
file = moduleShard("MagnaDoodle", importmodules = ['pygame', 'Axon'],
importfrom = {'Kamaelia.UI.PygameDisplay': ['PygameDisplay']},
shards = [chassis])
if __name__ == '__main__':
file.writeFile() #writes MagnaDoodle.py
# import from created file
from MagnaDoodle import *
MagnaDoodle(size=(800,600)).run()
|
{
"content_hash": "db214bcff30f6557450e90ffafcb2e7f",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 97,
"avg_line_length": 42.63934426229508,
"alnum_prop": 0.6093810073048828,
"repo_name": "bbc/kamaelia",
"id": "1ea6f8e71b62ab5532a413c1a88480cd1b964213",
"size": "3407",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "Sketches/TG/gui/MagnaGen.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "62985"
},
{
"name": "C",
"bytes": "212854"
},
{
"name": "C++",
"bytes": "327546"
},
{
"name": "CSS",
"bytes": "114434"
},
{
"name": "ChucK",
"bytes": "422"
},
{
"name": "Diff",
"bytes": "483"
},
{
"name": "Gettext Catalog",
"bytes": "3919909"
},
{
"name": "HTML",
"bytes": "1288960"
},
{
"name": "Java",
"bytes": "31832"
},
{
"name": "JavaScript",
"bytes": "829491"
},
{
"name": "Makefile",
"bytes": "5768"
},
{
"name": "NSIS",
"bytes": "18867"
},
{
"name": "PHP",
"bytes": "49059"
},
{
"name": "Perl",
"bytes": "31234"
},
{
"name": "Processing",
"bytes": "2885"
},
{
"name": "Pure Data",
"bytes": "7485482"
},
{
"name": "Python",
"bytes": "18896320"
},
{
"name": "Ruby",
"bytes": "4165"
},
{
"name": "Shell",
"bytes": "711244"
}
],
"symlink_target": ""
}
|
from TreeNode import TreeNode
import collections
class Solution(object):
# https: // discuss.leetcode.com / topic / 21363 / python - solutions - dfs - stack - bfs - queue - dfs - recursively
# dfs + stack
def sumNumbers1(self, root):
if not root:
return 0
stack, res = [(root, root.val)], 0
while stack:
node, value = stack.pop()
if node:
if not node.left and not node.right:
res += value
if node.right:
stack.append((node.right, value * 10 + node.right.val))
if node.left:
stack.append((node.left, value * 10 + node.left.val))
return res
# bfs + queue
def sumNumbers2(self, root):
if not root:
return 0
queue, res = collections.deque([(root, root.val)]), 0
while queue:
node, value = queue.popleft()
if node:
if not node.left and not node.right:
res += value
if node.left:
queue.append((node.left, value * 10 + node.left.val))
if node.right:
queue.append((node.right, value * 10 + node.right.val))
return res
# recursively
def sumNumbers(self, root):
self.res = 0
self.dfs(root, 0)
return self.res
def dfs(self, root, value):
if root:
# if not root.left and not root.right:
# self.res += value*10 + root.val
self.dfs(root.left, value * 10 + root.val)
# if not root.left and not root.right:
# self.res += value*10 + root.val
self.dfs(root.right, value * 10 + root.val)
if not root.left and not root.right:
self.res += value * 10 + root.val
|
{
"content_hash": "3303429b73c0d267d61973360ac7a4b4",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 117,
"avg_line_length": 35.76923076923077,
"alnum_prop": 0.5043010752688172,
"repo_name": "menghanY/LeetCode-Python",
"id": "dcd5489db82e3255b5b9dd5edfb1cfdcee836032",
"size": "1860",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Tree/SumRootToLeafNumbers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "69719"
}
],
"symlink_target": ""
}
|
"""The tests for Lock device actions."""
import pytest
import homeassistant.components.automation as automation
from homeassistant.components.lock import DOMAIN
from homeassistant.const import CONF_PLATFORM
from homeassistant.helpers import device_registry
from homeassistant.setup import async_setup_component
from tests.common import (
MockConfigEntry,
assert_lists_same,
async_get_device_automations,
async_mock_service,
mock_device_registry,
mock_registry,
)
@pytest.fixture
def device_reg(hass):
"""Return an empty, loaded, registry."""
return mock_device_registry(hass)
@pytest.fixture
def entity_reg(hass):
"""Return an empty, loaded, registry."""
return mock_registry(hass)
async def test_get_actions_support_open(hass, device_reg, entity_reg):
"""Test we get the expected actions from a lock which supports open."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entity_reg.async_get_or_create(
DOMAIN,
"test",
platform.ENTITIES["support_open"].unique_id,
device_id=device_entry.id,
)
expected_actions = [
{
"domain": DOMAIN,
"type": "lock",
"device_id": device_entry.id,
"entity_id": "lock.support_open_lock",
},
{
"domain": DOMAIN,
"type": "unlock",
"device_id": device_entry.id,
"entity_id": "lock.support_open_lock",
},
{
"domain": DOMAIN,
"type": "open",
"device_id": device_entry.id,
"entity_id": "lock.support_open_lock",
},
]
actions = await async_get_device_automations(hass, "action", device_entry.id)
assert_lists_same(actions, expected_actions)
async def test_get_actions_not_support_open(hass, device_reg, entity_reg):
"""Test we get the expected actions from a lock which doesn't support open."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entity_reg.async_get_or_create(
DOMAIN,
"test",
platform.ENTITIES["no_support_open"].unique_id,
device_id=device_entry.id,
)
expected_actions = [
{
"domain": DOMAIN,
"type": "lock",
"device_id": device_entry.id,
"entity_id": "lock.no_support_open_lock",
},
{
"domain": DOMAIN,
"type": "unlock",
"device_id": device_entry.id,
"entity_id": "lock.no_support_open_lock",
},
]
actions = await async_get_device_automations(hass, "action", device_entry.id)
assert_lists_same(actions, expected_actions)
async def test_action(hass):
"""Test for lock actions."""
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {"platform": "event", "event_type": "test_event_lock"},
"action": {
"domain": DOMAIN,
"device_id": "abcdefgh",
"entity_id": "lock.entity",
"type": "lock",
},
},
{
"trigger": {"platform": "event", "event_type": "test_event_unlock"},
"action": {
"domain": DOMAIN,
"device_id": "abcdefgh",
"entity_id": "lock.entity",
"type": "unlock",
},
},
{
"trigger": {"platform": "event", "event_type": "test_event_open"},
"action": {
"domain": DOMAIN,
"device_id": "abcdefgh",
"entity_id": "lock.entity",
"type": "open",
},
},
]
},
)
lock_calls = async_mock_service(hass, "lock", "lock")
unlock_calls = async_mock_service(hass, "lock", "unlock")
open_calls = async_mock_service(hass, "lock", "open")
hass.bus.async_fire("test_event_lock")
await hass.async_block_till_done()
assert len(lock_calls) == 1
assert len(unlock_calls) == 0
assert len(open_calls) == 0
hass.bus.async_fire("test_event_unlock")
await hass.async_block_till_done()
assert len(lock_calls) == 1
assert len(unlock_calls) == 1
assert len(open_calls) == 0
hass.bus.async_fire("test_event_open")
await hass.async_block_till_done()
assert len(lock_calls) == 1
assert len(unlock_calls) == 1
assert len(open_calls) == 1
|
{
"content_hash": "928a0eeeaa777312cb580fbc27985341",
"timestamp": "",
"source": "github",
"line_count": 170,
"max_line_length": 88,
"avg_line_length": 32.311764705882354,
"alnum_prop": 0.548152193701074,
"repo_name": "postlund/home-assistant",
"id": "0fc98d9460e69debb1d1852e84c163b8648233b6",
"size": "5493",
"binary": false,
"copies": "7",
"ref": "refs/heads/dev",
"path": "tests/components/lock/test_device_action.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "20215859"
},
{
"name": "Shell",
"bytes": "6663"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
from __future__ import unicode_literals
from functools import reduce
import six
from .const import LABEL_CONTAINER_NUMBER
from .const import LABEL_PROJECT
from .const import LABEL_SERVICE
class Container(object):
"""
Represents a Docker container, constructed from the output of
GET /containers/:id:/json.
"""
def __init__(self, client, dictionary, has_been_inspected=False):
self.client = client
self.dictionary = dictionary
self.has_been_inspected = has_been_inspected
self.log_stream = None
@classmethod
def from_ps(cls, client, dictionary, **kwargs):
"""
Construct a container object from the output of GET /containers/json.
"""
name = get_container_name(dictionary)
if name is None:
return None
new_dictionary = {
'Id': dictionary['Id'],
'Image': dictionary['Image'],
'Name': '/' + name,
}
return cls(client, new_dictionary, **kwargs)
@classmethod
def from_id(cls, client, id):
return cls(client, client.inspect_container(id), has_been_inspected=True)
@classmethod
def create(cls, client, **options):
response = client.create_container(**options)
return cls.from_id(client, response['Id'])
@property
def id(self):
return self.dictionary['Id']
@property
def image(self):
return self.dictionary['Image']
@property
def image_config(self):
return self.client.inspect_image(self.image)
@property
def short_id(self):
return self.id[:12]
@property
def name(self):
return self.dictionary['Name'][1:]
@property
def service(self):
return self.labels.get(LABEL_SERVICE)
@property
def name_without_project(self):
project = self.labels.get(LABEL_PROJECT)
if self.name.startswith('{0}_{1}'.format(project, self.service)):
return '{0}_{1}'.format(self.service, self.number)
else:
return self.name
@property
def number(self):
number = self.labels.get(LABEL_CONTAINER_NUMBER)
if not number:
raise ValueError("Container {0} does not have a {1} label".format(
self.short_id, LABEL_CONTAINER_NUMBER))
return int(number)
@property
def ports(self):
self.inspect_if_not_inspected()
return self.get('NetworkSettings.Ports') or {}
@property
def human_readable_ports(self):
def format_port(private, public):
if not public:
return private
return '{HostIp}:{HostPort}->{private}'.format(
private=private, **public[0])
return ', '.join(format_port(*item)
for item in sorted(six.iteritems(self.ports)))
@property
def labels(self):
return self.get('Config.Labels') or {}
@property
def stop_signal(self):
return self.get('Config.StopSignal')
@property
def log_config(self):
return self.get('HostConfig.LogConfig') or None
@property
def human_readable_state(self):
if self.is_paused:
return 'Paused'
if self.is_restarting:
return 'Restarting'
if self.is_running:
return 'Ghost' if self.get('State.Ghost') else 'Up'
else:
return 'Exit %s' % self.get('State.ExitCode')
@property
def human_readable_command(self):
entrypoint = self.get('Config.Entrypoint') or []
cmd = self.get('Config.Cmd') or []
return ' '.join(entrypoint + cmd)
@property
def environment(self):
def parse_env(var):
if '=' in var:
return var.split("=", 1)
return var, None
return dict(parse_env(var) for var in self.get('Config.Env') or [])
@property
def exit_code(self):
return self.get('State.ExitCode')
@property
def is_running(self):
return self.get('State.Running')
@property
def is_restarting(self):
return self.get('State.Restarting')
@property
def is_paused(self):
return self.get('State.Paused')
@property
def log_driver(self):
return self.get('HostConfig.LogConfig.Type')
@property
def has_api_logs(self):
log_type = self.log_driver
return not log_type or log_type != 'none'
def attach_log_stream(self):
"""A log stream can only be attached if the container uses a json-file
log driver.
"""
if self.has_api_logs:
self.log_stream = self.attach(stdout=True, stderr=True, stream=True)
def get(self, key):
"""Return a value from the container or None if the value is not set.
:param key: a string using dotted notation for nested dictionary
lookups
"""
self.inspect_if_not_inspected()
def get_value(dictionary, key):
return (dictionary or {}).get(key)
return reduce(get_value, key.split('.'), self.dictionary)
def get_local_port(self, port, protocol='tcp'):
port = self.ports.get("%s/%s" % (port, protocol))
return "{HostIp}:{HostPort}".format(**port[0]) if port else None
def get_mount(self, mount_dest):
for mount in self.get('Mounts'):
if mount['Destination'] == mount_dest:
return mount
return None
def start(self, **options):
return self.client.start(self.id, **options)
def stop(self, **options):
return self.client.stop(self.id, **options)
def pause(self, **options):
return self.client.pause(self.id, **options)
def unpause(self, **options):
return self.client.unpause(self.id, **options)
def kill(self, **options):
return self.client.kill(self.id, **options)
def restart(self, **options):
return self.client.restart(self.id, **options)
def remove(self, **options):
return self.client.remove_container(self.id, **options)
def create_exec(self, command, **options):
return self.client.exec_create(self.id, command, **options)
def start_exec(self, exec_id, **options):
return self.client.exec_start(exec_id, **options)
def rename_to_tmp_name(self):
"""Rename the container to a hopefully unique temporary container name
by prepending the short id.
"""
self.client.rename(
self.id,
'%s_%s' % (self.short_id, self.name)
)
def inspect_if_not_inspected(self):
if not self.has_been_inspected:
self.inspect()
def wait(self):
return self.client.wait(self.id)
def logs(self, *args, **kwargs):
return self.client.logs(self.id, *args, **kwargs)
def inspect(self):
self.dictionary = self.client.inspect_container(self.id)
self.has_been_inspected = True
return self.dictionary
def attach(self, *args, **kwargs):
return self.client.attach(self.id, *args, **kwargs)
def __repr__(self):
return '<Container: %s (%s)>' % (self.name, self.id[:6])
def __eq__(self, other):
if type(self) != type(other):
return False
return self.id == other.id
def __hash__(self):
return self.id.__hash__()
def get_container_name(container):
if not container.get('Name') and not container.get('Names'):
return None
# inspect
if 'Name' in container:
return container['Name']
# ps
shortest_name = min(container['Names'], key=lambda n: len(n.split('/')))
return shortest_name.split('/')[-1]
|
{
"content_hash": "964d9f0572ab5cc328be07ae94d50a7b",
"timestamp": "",
"source": "github",
"line_count": 272,
"max_line_length": 81,
"avg_line_length": 28.488970588235293,
"alnum_prop": 0.5910440056781521,
"repo_name": "andrewgee/compose",
"id": "2c16863df9561d4162e8a646f2b6fd291a97ac3a",
"size": "7749",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "compose/container.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "PowerShell",
"bytes": "2600"
},
{
"name": "Python",
"bytes": "628075"
},
{
"name": "Shell",
"bytes": "25585"
}
],
"symlink_target": ""
}
|
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'rapidsms-celery-router'
copyright = u'2012, RapidSMS development community'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.0.1'
# The full version, including alpha/beta/rc tags.
release = '0.0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'rapidsms-celery-routerdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'rapidsms-celery-router.tex', u'rapidsms-celery-router Documentation',
u'RapidSMS development community', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'rapidsms-celery-router', u'rapidsms-celery-router Documentation',
[u'RapidSMS development community'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'rapidsms-celery-router', u'rapidsms-celery-router Documentation',
u'RapidSMS development community', 'rapidsms-celery-router', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
|
{
"content_hash": "a4375734afb039d9f84f8f7b2dd485ce",
"timestamp": "",
"source": "github",
"line_count": 229,
"max_line_length": 99,
"avg_line_length": 32.81659388646288,
"alnum_prop": 0.7060545575515635,
"repo_name": "rapidsms/rapidsms-celery-router",
"id": "e179adeae5b02e9803ef005c7637dcbf7eca814f",
"size": "7948",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/conf.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "14369"
}
],
"symlink_target": ""
}
|
import testtools
from tempest.api.compute import base
from tempest import config
from tempest import test
CONF = config.CONF
class LiveBlockMigrationTestJSON(base.BaseV2ComputeAdminTest):
_host_key = 'OS-EXT-SRV-ATTR:host'
@classmethod
def setup_clients(cls):
super(LiveBlockMigrationTestJSON, cls).setup_clients()
cls.admin_hosts_client = cls.os_adm.hosts_client
cls.admin_servers_client = cls.os_adm.servers_client
@classmethod
def resource_setup(cls):
super(LiveBlockMigrationTestJSON, cls).resource_setup()
cls.created_server_ids = []
def _get_compute_hostnames(self):
body = self.admin_hosts_client.list_hosts()
return [
host_record['host_name']
for host_record in body
if host_record['service'] == 'compute'
]
def _get_server_details(self, server_id):
body = self.admin_servers_client.get_server(server_id)
return body
def _get_host_for_server(self, server_id):
return self._get_server_details(server_id)[self._host_key]
def _migrate_server_to(self, server_id, dest_host):
body = self.admin_servers_client.live_migrate_server(
server_id, dest_host,
CONF.compute_feature_enabled.block_migration_for_live_migration)
return body
def _get_host_other_than(self, host):
for target_host in self._get_compute_hostnames():
if host != target_host:
return target_host
def _get_server_status(self, server_id):
return self._get_server_details(server_id)['status']
def _get_an_active_server(self):
for server_id in self.created_server_ids:
if 'ACTIVE' == self._get_server_status(server_id):
return server_id
else:
server = self.create_test_server(wait_until="ACTIVE")
server_id = server['id']
self.created_server_ids.append(server_id)
return server_id
def _volume_clean_up(self, server_id, volume_id):
body = self.volumes_client.get_volume(volume_id)
if body['status'] == 'in-use':
self.servers_client.detach_volume(server_id, volume_id)
self.volumes_client.wait_for_volume_status(volume_id, 'available')
self.volumes_client.delete_volume(volume_id)
def _test_live_block_migration(self, state='ACTIVE'):
"""Tests live block migration between two hosts.
Requires CONF.compute_feature_enabled.live_migration to be True.
:param state: The vm_state the migrated server should be in before and
after the live migration. Supported values are 'ACTIVE'
and 'PAUSED'.
"""
# Live block migrate an instance to another host
if len(self._get_compute_hostnames()) < 2:
raise self.skipTest(
"Less than 2 compute nodes, skipping migration test.")
server_id = self._get_an_active_server()
actual_host = self._get_host_for_server(server_id)
target_host = self._get_host_other_than(actual_host)
if state == 'PAUSED':
self.admin_servers_client.pause_server(server_id)
self.admin_servers_client.wait_for_server_status(server_id, state)
self._migrate_server_to(server_id, target_host)
self.servers_client.wait_for_server_status(server_id, state)
self.assertEqual(target_host, self._get_host_for_server(server_id))
@test.idempotent_id('1dce86b8-eb04-4c03-a9d8-9c1dc3ee0c7b')
@testtools.skipUnless(CONF.compute_feature_enabled.live_migration,
'Live migration not available')
def test_live_block_migration(self):
self._test_live_block_migration()
@test.idempotent_id('1e107f21-61b2-4988-8f22-b196e938ab88')
@testtools.skipUnless(CONF.compute_feature_enabled.live_migration,
'Live migration not available')
@testtools.skipUnless(CONF.compute_feature_enabled.pause,
'Pause is not available.')
@testtools.skipUnless(CONF.compute_feature_enabled
.live_migrate_paused_instances,
'Live migration of paused instances is not '
'available.')
def test_live_block_migration_paused(self):
self._test_live_block_migration(state='PAUSED')
@test.idempotent_id('e19c0cc6-6720-4ed8-be83-b6603ed5c812')
@testtools.skipIf(not CONF.compute_feature_enabled.live_migration or not
CONF.compute_feature_enabled.
block_migration_for_live_migration,
'Block Live migration not available')
@testtools.skipIf(not CONF.compute_feature_enabled.
block_migrate_cinder_iscsi,
'Block Live migration not configured for iSCSI')
def test_iscsi_volume(self):
# Live block migrate an instance to another host
if len(self._get_compute_hostnames()) < 2:
raise self.skipTest(
"Less than 2 compute nodes, skipping migration test.")
server_id = self._get_an_active_server()
actual_host = self._get_host_for_server(server_id)
target_host = self._get_host_other_than(actual_host)
volume = self.volumes_client.create_volume(display_name='test')
self.volumes_client.wait_for_volume_status(volume['id'],
'available')
self.addCleanup(self._volume_clean_up, server_id, volume['id'])
# Attach the volume to the server
self.servers_client.attach_volume(server_id, volume['id'],
device='/dev/xvdb')
self.volumes_client.wait_for_volume_status(volume['id'], 'in-use')
self._migrate_server_to(server_id, target_host)
self.servers_client.wait_for_server_status(server_id, 'ACTIVE')
self.assertEqual(target_host, self._get_host_for_server(server_id))
|
{
"content_hash": "d2700c6130f8315182c2a64719582604",
"timestamp": "",
"source": "github",
"line_count": 144,
"max_line_length": 78,
"avg_line_length": 42.05555555555556,
"alnum_prop": 0.6213672391017173,
"repo_name": "danielmellado/tempest",
"id": "d3b1f5e7fe21670d0336c6d33694d922bb00a49e",
"size": "6693",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tempest/api/compute/admin/test_live_migration.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "2724850"
},
{
"name": "Shell",
"bytes": "8560"
}
],
"symlink_target": ""
}
|
from . import cpuinfo, progressbar
from .misc import *
from .time import *
|
{
"content_hash": "2ab4103f1304549b1f277c21ba9fbe21",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 34,
"avg_line_length": 25,
"alnum_prop": 0.7466666666666667,
"repo_name": "MAndelkovic/pybinding",
"id": "2aad9ea62f7bd0dd1af7793fc0ad1ee08a4f9cc3",
"size": "75",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "pybinding/utils/__init__.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C++",
"bytes": "427572"
},
{
"name": "CMake",
"bytes": "12854"
},
{
"name": "Cuda",
"bytes": "10135"
},
{
"name": "Python",
"bytes": "332184"
},
{
"name": "Shell",
"bytes": "431"
}
],
"symlink_target": ""
}
|
import functools
import fixtures
import netaddr
from neutron_lib.api.definitions import portbindings
from neutron_lib import constants
from neutronclient.common import exceptions
from neutron.common import utils
def _safe_method(f):
@functools.wraps(f)
def delete(*args, **kwargs):
try:
return f(*args, **kwargs)
except exceptions.NotFound:
pass
return delete
class ClientFixture(fixtures.Fixture):
"""Manage and cleanup neutron resources."""
def __init__(self, client):
super(ClientFixture, self).__init__()
self.client = client
def _create_resource(self, resource_type, spec):
create = getattr(self.client, 'create_%s' % resource_type)
delete = getattr(self.client, 'delete_%s' % resource_type)
body = {resource_type: spec}
resp = create(body=body)
data = resp[resource_type]
self.addCleanup(_safe_method(delete), data['id'])
return data
def _update_resource(self, resource_type, id, spec):
update = getattr(self.client, 'update_%s' % resource_type)
body = {resource_type: spec}
resp = update(id, body=body)
return resp[resource_type]
def _delete_resource(self, resource_type, id):
delete = getattr(self.client, 'delete_%s' % resource_type)
return delete(id)
def create_router(self, tenant_id, name=None, ha=False,
external_network=None):
resource_type = 'router'
name = name or utils.get_rand_name(prefix=resource_type)
spec = {'tenant_id': tenant_id, 'name': name, 'ha': ha}
if external_network:
spec['external_gateway_info'] = {"network_id": external_network}
return self._create_resource(resource_type, spec)
def create_network(self, tenant_id, name=None, external=False,
network_type=None, segmentation_id=None,
physical_network=None, mtu=None):
resource_type = 'network'
name = name or utils.get_rand_name(prefix=resource_type)
spec = {'tenant_id': tenant_id, 'name': name}
spec['router:external'] = external
if segmentation_id is not None:
spec['provider:segmentation_id'] = segmentation_id
if network_type is not None:
spec['provider:network_type'] = network_type
if physical_network is not None:
spec['provider:physical_network'] = physical_network
if mtu is not None:
spec['mtu'] = mtu
return self._create_resource(resource_type, spec)
def update_network(self, id, **kwargs):
return self._update_resource('network', id, kwargs)
def delete_network(self, id):
return self._delete_resource('network', id)
def create_subnet(self, tenant_id, network_id,
cidr, gateway_ip=None, name=None, enable_dhcp=True,
ipv6_address_mode='slaac', ipv6_ra_mode='slaac'):
resource_type = 'subnet'
name = name or utils.get_rand_name(prefix=resource_type)
ip_version = netaddr.IPNetwork(cidr).version
spec = {'tenant_id': tenant_id, 'network_id': network_id, 'name': name,
'cidr': cidr, 'enable_dhcp': enable_dhcp,
'ip_version': ip_version}
if ip_version == constants.IP_VERSION_6:
spec['ipv6_address_mode'] = ipv6_address_mode
spec['ipv6_ra_mode'] = ipv6_ra_mode
if gateway_ip:
spec['gateway_ip'] = gateway_ip
return self._create_resource(resource_type, spec)
def list_ports(self, retrieve_all=True, **kwargs):
resp = self.client.list_ports(retrieve_all=retrieve_all, **kwargs)
return resp['ports']
def create_port(self, tenant_id, network_id, hostname=None,
qos_policy_id=None, security_groups=None, **kwargs):
spec = {
'network_id': network_id,
'tenant_id': tenant_id,
}
spec.update(kwargs)
if hostname is not None:
spec[portbindings.HOST_ID] = hostname
if qos_policy_id:
spec['qos_policy_id'] = qos_policy_id
if security_groups:
spec['security_groups'] = security_groups
return self._create_resource('port', spec)
def update_port(self, port_id, **kwargs):
return self._update_resource('port', port_id, kwargs)
def create_floatingip(self, tenant_id, floating_network_id,
fixed_ip_address, port_id):
spec = {
'floating_network_id': floating_network_id,
'tenant_id': tenant_id,
'fixed_ip_address': fixed_ip_address,
'port_id': port_id
}
return self._create_resource('floatingip', spec)
def add_router_interface(self, router_id, subnet_id):
body = {'subnet_id': subnet_id}
router_interface_info = self.client.add_interface_router(
router=router_id, body=body)
self.addCleanup(_safe_method(self.client.remove_interface_router),
router=router_id, body=body)
return router_interface_info
def create_qos_policy(self, tenant_id, name, description, shared,
is_default):
policy = self.client.create_qos_policy(
body={'policy': {'name': name,
'description': description,
'shared': shared,
'tenant_id': tenant_id,
'is_default': is_default}})
def detach_and_delete_policy():
qos_policy_id = policy['policy']['id']
ports_with_policy = self.client.list_ports()['ports']
for port in ports_with_policy:
if qos_policy_id == port['qos_policy_id']:
self.client.update_port(
port['id'],
body={'port': {'qos_policy_id': None}})
self.client.delete_qos_policy(qos_policy_id)
# NOTE: We'll need to add support for detaching from network once
# create_network() supports qos_policy_id.
self.addCleanup(_safe_method(detach_and_delete_policy))
return policy['policy']
def create_bandwidth_limit_rule(self, tenant_id, qos_policy_id, limit=None,
burst=None, direction=None):
rule = {'tenant_id': tenant_id}
if limit:
rule['max_kbps'] = limit
if burst:
rule['max_burst_kbps'] = burst
if direction:
rule['direction'] = direction
rule = self.client.create_bandwidth_limit_rule(
policy=qos_policy_id,
body={'bandwidth_limit_rule': rule})
self.addCleanup(_safe_method(self.client.delete_bandwidth_limit_rule),
rule['bandwidth_limit_rule']['id'],
qos_policy_id)
return rule['bandwidth_limit_rule']
def create_dscp_marking_rule(self, tenant_id, qos_policy_id, dscp_mark=0):
rule = {'tenant_id': tenant_id}
if dscp_mark:
rule['dscp_mark'] = dscp_mark
rule = self.client.create_dscp_marking_rule(
policy=qos_policy_id,
body={'dscp_marking_rule': rule})
self.addCleanup(_safe_method(self.client.delete_dscp_marking_rule),
rule['dscp_marking_rule']['id'],
qos_policy_id)
return rule['dscp_marking_rule']
def create_trunk(self, tenant_id, port_id, name=None,
admin_state_up=None, sub_ports=None):
"""Create a trunk via API.
:param tenant_id: ID of the tenant.
:param port_id: Parent port of trunk.
:param name: Name of the trunk.
:param admin_state_up: Admin state of the trunk.
:param sub_ports: List of subport dictionaries in format
{'port_id': <ID of neutron port for subport>,
'segmentation_type': 'vlan',
'segmentation_id': <VLAN tag>}
:return: Dictionary with trunk's data returned from Neutron API.
"""
spec = {
'port_id': port_id,
'tenant_id': tenant_id,
}
if name is not None:
spec['name'] = name
if sub_ports is not None:
spec['sub_ports'] = sub_ports
if admin_state_up is not None:
spec['admin_state_up'] = admin_state_up
trunk = self.client.create_trunk({'trunk': spec})['trunk']
if sub_ports:
self.addCleanup(
_safe_method(self.trunk_remove_subports),
tenant_id, trunk['id'], trunk['sub_ports'])
self.addCleanup(_safe_method(self.client.delete_trunk), trunk['id'])
return trunk
def trunk_add_subports(self, tenant_id, trunk_id, sub_ports):
"""Add subports to the trunk.
:param tenant_id: ID of the tenant.
:param trunk_id: ID of the trunk.
:param sub_ports: List of subport dictionaries to be added in format
{'port_id': <ID of neutron port for subport>,
'segmentation_type': 'vlan',
'segmentation_id': <VLAN tag>}
"""
spec = {
'tenant_id': tenant_id,
'sub_ports': sub_ports,
}
trunk = self.client.trunk_add_subports(trunk_id, spec)
sub_ports_to_remove = [
sub_port for sub_port in trunk['sub_ports']
if sub_port in sub_ports]
self.addCleanup(
_safe_method(self.trunk_remove_subports), tenant_id, trunk_id,
sub_ports_to_remove)
def trunk_remove_subports(self, tenant_id, trunk_id, sub_ports):
"""Remove subports from the trunk.
:param trunk_id: ID of the trunk.
:param sub_ports: List of subport port IDs.
"""
spec = {
'tenant_id': tenant_id,
'sub_ports': sub_ports,
}
return self.client.trunk_remove_subports(trunk_id, spec)
def create_security_group(self, tenant_id, name=None):
resource_type = 'security_group'
name = name or utils.get_rand_name(prefix=resource_type)
spec = {'tenant_id': tenant_id, 'name': name}
return self._create_resource(resource_type, spec)
def create_security_group_rule(self, tenant_id, security_group_id,
**kwargs):
resource_type = 'security_group_rule'
spec = {'tenant_id': tenant_id,
'security_group_id': security_group_id}
spec.update(kwargs)
return self._create_resource(resource_type, spec)
def create_network_log(self, tenant_id, resource_type,
enabled=True, **kwargs):
spec = {'project_id': tenant_id,
'resource_type': resource_type,
'enabled': enabled}
spec.update(kwargs)
net_log = self.client.create_network_log({'log': spec})
self.addCleanup(
_safe_method(self.client.delete_network_log), net_log['log']['id'])
return net_log
|
{
"content_hash": "9cc6971163a49a449272c8e2f4fd89c1",
"timestamp": "",
"source": "github",
"line_count": 306,
"max_line_length": 79,
"avg_line_length": 36.55882352941177,
"alnum_prop": 0.5669080182354519,
"repo_name": "noironetworks/neutron",
"id": "441c3e506617446b34eca7e8ff6b20c3a64fc283",
"size": "11804",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "neutron/tests/fullstack/resources/client.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "1047"
},
{
"name": "Python",
"bytes": "11420614"
},
{
"name": "Shell",
"bytes": "38791"
}
],
"symlink_target": ""
}
|
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'GovInfoScraper'
db.create_table(u'scrapers_govinfoscraper', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('bill_name', self.gf('django.db.models.fields.CharField')(max_length=100)),
('bill_code', self.gf('django.db.models.fields.CharField')(max_length=10)),
('comment_startdate', self.gf('django.db.models.fields.DateField')()),
('comment_enddate', self.gf('django.db.models.fields.DateField')()),
('scrape_date', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('url', self.gf('django.db.models.fields.URLField')(max_length=200, null=True, blank=True)),
('reviewed', self.gf('django.db.models.fields.BooleanField')(default=False)),
))
db.send_create_signal(u'scrapers', ['GovInfoScraper'])
# Adding model 'ParliamentMinutesScraper'
db.create_table(u'scrapers_parliamentminutesscraper', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('filename', self.gf('django.db.models.fields.files.FileField')(max_length=100)),
('house', self.gf('django.db.models.fields.CharField')(max_length=20)),
('language', self.gf('django.db.models.fields.CharField')(max_length=20)),
('date', self.gf('django.db.models.fields.DateField')()),
('scrape_date', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('url', self.gf('django.db.models.fields.URLField')(max_length=200)),
))
db.send_create_signal(u'scrapers', ['ParliamentMinutesScraper'])
def backwards(self, orm):
# Deleting model 'GovInfoScraper'
db.delete_table(u'scrapers_govinfoscraper')
# Deleting model 'ParliamentMinutesScraper'
db.delete_table(u'scrapers_parliamentminutesscraper')
models = {
u'scrapers.govinfoscraper': {
'Meta': {'object_name': 'GovInfoScraper'},
'bill_code': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'bill_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'comment_enddate': ('django.db.models.fields.DateField', [], {}),
'comment_startdate': ('django.db.models.fields.DateField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'reviewed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'scrape_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
u'scrapers.parliamentminutesscraper': {
'Meta': {'object_name': 'ParliamentMinutesScraper'},
'date': ('django.db.models.fields.DateField', [], {}),
'filename': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
'house': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'scrape_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
}
}
complete_apps = ['scrapers']
|
{
"content_hash": "82d3a480db0c5c8b9561f2ab760802c3",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 116,
"avg_line_length": 55.61764705882353,
"alnum_prop": 0.5978318350079324,
"repo_name": "adieyal/billtracker",
"id": "a0ab5406e2e2ea44a322561198b442f690516fe7",
"size": "3806",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "code/billtracker/scrapers/migrations/0001_initial.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "81095"
}
],
"symlink_target": ""
}
|
import json
from oauth2client import xsrfutil
from oauth2client.file import Storage
from django.conf import settings
from django.contrib import messages
from django.core.exceptions import PermissionDenied
from django.core.urlresolvers import reverse
from django.http import HttpResponseBadRequest, HttpResponseRedirect, Http404
from django.utils.translation import ugettext as _
from django.views.generic import TemplateView, View, UpdateView
from conf import settings as ls
class AppOptionView(TemplateView):
template_name = 'admin/options.html'
def dispatch(self, request, *args, **kwargs):
if not request.user.has_perm('yawdadmin.change_appoption'):
raise PermissionDenied
return super(AppOptionView, self).dispatch(request, *args, **kwargs)
def get_form_kwargs(self):
"""
Returns the keyword arguments for instantiating the
form. Copied form the generic FormView class-based view
"""
kwargs = {}
if self.request.method in ('POST', 'PUT'):
kwargs.update({
'data':self.request.POST,
'files':self.request.FILES,
})
return kwargs
def get_context_data(self, **kwargs):
from yawdadmin import admin_site
context = super(AppOptionView, self).get_context_data(**kwargs)
context['optionset_admin'] = admin_site.get_optionset_admin(self.kwargs['optionset_label'])(**self.get_form_kwargs())
context['title'] = '%s' % (unicode(context['optionset_admin'].verbose_name))
return context
def post(self, request, *args, **kwargs):
"""
Validate the form and save the options upon success
"""
context = self.get_context_data()
if context['optionset_admin'].form.is_valid():
context['optionset_admin'].save()
messages.add_message(self.request, messages.SUCCESS, _('The options were succesfully saved.'))
return self.render_to_response(context)
def put(self, request, *args, **kwargs):
return self.post(request, *args, **kwargs)
class AnalyticsAuthView(View):
"""
This view implements the oauth2 authentication callback.
It stores the user credential to a file and redirects the user
to the admin index page on success.
"""
permanent = False
def get(self, request, *args, **kwargs):
#check view
valid_analytics_view(request)
if not ('state' in request.REQUEST and xsrfutil.validate_token(settings.SECRET_KEY, request.REQUEST['state'], request.user)):
return HttpResponseBadRequest()
credential = ls.ADMIN_GOOGLE_ANALYTICS_FLOW.step2_exchange(request.REQUEST) #@UndefinedVariable
storage = Storage(ls.ADMIN_GOOGLE_ANALYTICS['token_file_name'])
storage.put(credential)
messages.add_message(self.request, messages.SUCCESS, _('The user was successfully connected.'))
return HttpResponseRedirect(reverse('admin:analytics'))
class AnalyticsConfigView(TemplateView):
"""
Admin view for the google analytics functionality. The view is
accessible through the top bar navigation.
"""
template_name = 'admin/analytics.html'
def get_context_data(self, **kwargs):
#check view
valid_analytics_view(self.request)
#get original context data
context = super(AnalyticsConfigView, self).get_context_data(**kwargs)
#load the token file
try:
dat_file = open(ls.ADMIN_GOOGLE_ANALYTICS['token_file_name'], 'r')
analytics = json.loads(dat_file.read())
dat_file.close()
except (IOError, ValueError):
analytics = {}
context['analytics_info'] = {
'profile' : ls.ADMIN_GOOGLE_ANALYTICS['profile_id'],
'interval' : ls.ADMIN_GOOGLE_ANALYTICS['interval'],
'data' : analytics
}
return context
class AnalyticsConnectView(View):
"""
Connect a new user to the Google Analytics API
"""
def get(self, request, *args, **kwargs):
#check view
valid_analytics_view(request)
try:
#Empty the token file
dat_file = open(ls.ADMIN_GOOGLE_ANALYTICS['token_file_name'], 'w+')
dat_file.write('')
dat_file.close()
except:
messages.add_message(self.request, messages.ERROR, _('The server does not have permissions to write to the token file. Please contact your system administrator.'))
return HttpResponseRedirect(reverse('admin:analytics'))
#Initialize flow
ls.ADMIN_GOOGLE_ANALYTICS_FLOW.params['state'] = xsrfutil.generate_token(settings.SECRET_KEY, request.user) #@UndefinedVariable
return HttpResponseRedirect(ls.ADMIN_GOOGLE_ANALYTICS_FLOW.step1_get_authorize_url()) #@UndefinedVariable
def valid_analytics_view(request):
"""
Check if the user is superuser and analytics functionality is enabled.
"""
if not request.user.is_superuser:
raise PermissionDenied
if not ls.ADMIN_GOOGLE_ANALYTICS_FLOW:
raise Http404
class MyAccountView(UpdateView):
template_name = 'registration/my_account.html'
form_class = ls.ADMIN_USER_MODELFORM
def __init__(self, *args, **kwargs):
super(MyAccountView, self).__init__(*args, **kwargs)
self.success_url = reverse('admin:my-account')
def form_valid(self, form):
messages.add_message(self.request, messages.SUCCESS,
_('Your account has been updated successfuly.'))
return super(MyAccountView, self).form_valid(form)
def get_object(self):
return self.request.user
def get_context_data(self, **kwargs):
context = super(MyAccountView, self).get_context_data(**kwargs)
context['title'] = _('My account')
return context
|
{
"content_hash": "09354099afac1a319e41adebb405505d",
"timestamp": "",
"source": "github",
"line_count": 161,
"max_line_length": 175,
"avg_line_length": 37.298136645962735,
"alnum_prop": 0.6406328059950042,
"repo_name": "GDGLima/contentbox",
"id": "cd07dc6a48bf6ef60115051821a43b240e70782c",
"size": "6005",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "third_party/yawdadmin/views.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "420520"
},
{
"name": "HTML",
"bytes": "54100"
},
{
"name": "JavaScript",
"bytes": "1778"
},
{
"name": "Python",
"bytes": "49359"
},
{
"name": "Ruby",
"bytes": "413"
}
],
"symlink_target": ""
}
|
from swgpy.object import *
def create(kernel):
result = Weapon()
result.template = "object/weapon/melee/sword/shared_sword_lightsaber_sleekblack.iff"
result.attribute_template_id = 10
result.stfName("weapon_name","sword_lightsaber_sleekblack")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
{
"content_hash": "af00ee36805b243052a9462cff7e98f4",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 85,
"avg_line_length": 25.615384615384617,
"alnum_prop": 0.7177177177177178,
"repo_name": "anhstudios/swganh",
"id": "8e12fd69ecb4cb1f27461df8c98894cf978f3c25",
"size": "478",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "data/scripts/templates/object/weapon/melee/sword/shared_sword_lightsaber_sleekblack.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "11887"
},
{
"name": "C",
"bytes": "7699"
},
{
"name": "C++",
"bytes": "2357839"
},
{
"name": "CMake",
"bytes": "41264"
},
{
"name": "PLSQL",
"bytes": "42065"
},
{
"name": "Python",
"bytes": "7503510"
},
{
"name": "SQLPL",
"bytes": "42770"
}
],
"symlink_target": ""
}
|
import os
import json
from http import client
from unittest import mock
import pytest
import aiohttpretty
from waterbutler.core import metadata
from waterbutler.core import exceptions
from waterbutler.core.path import WaterButlerPath
from waterbutler.providers.osfstorage.provider import OSFStorageProvider
from waterbutler.providers.osfstorage.metadata import (OsfStorageFileMetadata,
OsfStorageFolderMetadata,
OsfStorageRevisionMetadata)
from waterbutler.providers.osfstorage.exceptions import OsfStorageQuotaExceededError
from tests import utils
from tests.providers.osfstorage.fixtures import (auth, credentials, settings,
settings_region_one, settings_region_two,
provider_one, provider_two,
provider_and_mock_one, provider_and_mock_two,
file_stream, file_like, file_content,
file_lineage, file_metadata,
file_metadata_object, file_path,
folder_lineage, folder_metadata,
folder_children_metadata, folder_path,
revisions_metadata, revision_metadata_object,
download_response, download_path,
upload_response, upload_path, root_path,
mock_time, mock_inner_provider,)
def build_signed_url_without_auth(provider, method, *segments, **params):
data = params.pop('data', None)
base_url = provider.build_url(*segments, **params)
url, _, params = provider.build_signed_url(method, base_url, data=data)
return url, params
def build_signed_url_with_auth(provider, method, *segments, **params):
data = params.pop('data', None)
base_url = provider.build_url(*segments, **params)
url, _, params = provider.build_signed_url(method,
base_url,
data=data,
params={'user': provider.auth['id']})
return url, params
class TestCreateFolder:
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_create_folder(self, folder_path, provider_one, folder_metadata, mock_time):
data = json.dumps(folder_metadata)
url, params = build_signed_url_without_auth(provider_one, 'POST',
folder_path.parent.identifier,
'children', data=data)
aiohttpretty.register_json_uri('POST', url, body=folder_metadata, status=201, params=params)
resp = await provider_one.create_folder(folder_path)
assert isinstance(resp, OsfStorageFolderMetadata)
class TestDownload:
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_download_with_auth(self, provider_and_mock_one, download_response,
download_path, mock_time):
provider, inner_provider = provider_and_mock_one
uri, params = build_signed_url_with_auth(provider, 'GET', download_path.identifier,
'download', version=None, mode=None)
aiohttpretty.register_json_uri('GET', uri, body=download_response, params=params)
await provider.download(download_path)
assert provider.make_provider.called
assert inner_provider.download.called
assert aiohttpretty.has_call(method='GET', uri=uri, params=params)
provider.make_provider.assert_called_once_with(download_response['settings'])
expected_path = WaterButlerPath('/' + download_response['data']['path'])
expected_display_name = download_response['data']['name']
inner_provider.download.assert_called_once_with(path=expected_path,
display_name=expected_display_name)
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_download_without_auth(self, provider_and_mock_one, download_response,
download_path, mock_time):
provider, inner_provider = provider_and_mock_one
provider.auth = {}
url, params = build_signed_url_without_auth(provider, 'GET', download_path.identifier,
'download', version=None, mode=None)
aiohttpretty.register_json_uri('GET', url, params=params, body=download_response)
await provider.download(download_path)
assert provider.make_provider.called
assert inner_provider.download.called
assert aiohttpretty.has_call(method='GET', uri=url, params=params)
provider.make_provider.assert_called_once_with(download_response['settings'])
expected_path = WaterButlerPath('/' + download_response['data']['path'])
expected_display_name = download_response['data']['name']
inner_provider.download.assert_called_once_with(path=expected_path,
display_name=expected_display_name)
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_download_without_id(self, provider_one, download_response, file_path,
mock_time):
url, params = build_signed_url_without_auth(provider_one, 'GET', file_path.identifier,
'download', version=None, mode=None)
aiohttpretty.register_json_uri('GET', url, params=params, body=download_response)
file_path._parts[-1]._id = None
with pytest.raises(exceptions.NotFoundError):
await provider_one.download(file_path)
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
@pytest.mark.parametrize("display_name_arg,expected_name", [
('meow.txt', 'meow.txt'),
('', 'doc.rst'),
(None, 'doc.rst'),
])
async def test_download_with_display_name(self, provider_and_mock_one, download_response,
download_path, mock_time, display_name_arg,
expected_name):
provider, inner_provider = provider_and_mock_one
uri, params = build_signed_url_with_auth(provider, 'GET', download_path.identifier,
'download', version=None, mode=None)
aiohttpretty.register_json_uri('GET', uri, body=download_response, params=params)
await provider.download(download_path, display_name=display_name_arg)
assert provider.make_provider.called
assert inner_provider.download.called
assert aiohttpretty.has_call(method='GET', uri=uri, params=params)
provider.make_provider.assert_called_once_with(download_response['settings'])
expected_path = WaterButlerPath('/' + download_response['data']['path'])
inner_provider.download.assert_called_once_with(path=expected_path,
display_name=expected_name)
class TestDelete:
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_delete(self, provider_one, file_path, mock_time):
url, params = build_signed_url_with_auth(provider_one, 'DELETE', file_path.identifier)
aiohttpretty.register_uri('DELETE', url, status_code=200, params=params)
await provider_one.delete(file_path)
assert aiohttpretty.has_call(method='DELETE', uri=url, check_params=False)
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_delete_without_id(self, provider_one, file_path, mock_time):
url, params = build_signed_url_without_auth(provider_one, 'DELETE', file_path.identifier)
aiohttpretty.register_uri('DELETE', url, status_code=200)
file_path._parts[-1]._id = None
with pytest.raises(exceptions.NotFoundError):
await provider_one.delete(file_path)
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_delete_root(self, provider_one, root_path, mock_time):
provider_one._delete_folder_contents = utils.MockCoroutine()
url, params = build_signed_url_without_auth(provider_one, 'DELETE', root_path.identifier)
aiohttpretty.register_uri('DELETE', url, status_code=200)
with pytest.raises(exceptions.DeleteError):
await provider_one.delete(root_path)
provider_one._delete_folder_contents.assert_not_called()
await provider_one.delete(root_path, confirm_delete=1)
provider_one._delete_folder_contents.assert_called_once_with(root_path)
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_delete_folder_contents(self, provider_one, file_path, folder_path,
folder_children_metadata, mock_time):
provider_one.validate_path = utils.MockCoroutine(return_value=file_path)
provider_one.delete = utils.MockCoroutine()
children_url, params = build_signed_url_without_auth(provider_one, 'GET',
folder_path.identifier, 'children',
user_id=provider_one.auth['id'])
aiohttpretty.register_json_uri('GET', children_url, params=params, status=200,
body=folder_children_metadata)
await provider_one._delete_folder_contents(folder_path)
provider_one.delete.assert_called_with(file_path)
assert provider_one.delete.call_count == 4
class TestMetadata:
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_provider_metadata_empty(self, provider_one, folder_path, mock_time):
url, params = build_signed_url_without_auth(provider_one, 'GET', folder_path.identifier,
'children', user_id=provider_one.auth['id'])
aiohttpretty.register_json_uri('GET', url, params=params, status_code=200, body=[])
res = await provider_one.metadata(folder_path)
assert res == []
assert aiohttpretty.has_call(method='GET', uri=url, params=params)
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_provider_metadata_folder(self, provider_one, folder_path,
folder_children_metadata, mock_time):
url, params = build_signed_url_without_auth(provider_one, 'GET', folder_path.identifier,
'children', user_id=provider_one.auth['id'])
aiohttpretty.register_json_uri('GET', url, params=params, status=200,
body=folder_children_metadata)
res = await provider_one.metadata(folder_path)
assert isinstance(res, list)
for item in res:
assert isinstance(item, metadata.BaseMetadata)
assert item.name is not None
assert item.path is not None
assert item.provider == 'osfstorage'
assert aiohttpretty.has_call(method='GET', uri=url, params=params)
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_provider_metadata_file(self, provider_one, file_path, file_metadata, mock_time):
url, params = build_signed_url_without_auth(provider_one, 'GET', file_path.identifier)
aiohttpretty.register_json_uri('GET', url, params=params, status=200, body=file_metadata)
res = await provider_one.metadata(file_path)
assert isinstance(res, OsfStorageFileMetadata)
assert res.name is not None
assert res.path is not None
assert res.provider == 'osfstorage'
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_provider_metadata_without_id(self, provider_one, folder_path,
folder_children_metadata, mock_time):
url, params = build_signed_url_without_auth(provider_one, 'GET', folder_path.identifier,
'children')
aiohttpretty.register_json_uri('GET', url, params=params, status=200,
body=folder_children_metadata)
folder_path._parts[-1]._id = None
with pytest.raises(exceptions.MetadataError):
await provider_one.metadata(folder_path)
class TestRevisions:
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_revisions(self, provider_one, file_path, revisions_metadata, mock_time):
url, params = build_signed_url_without_auth(provider_one, 'GET', file_path.identifier,
'revisions')
aiohttpretty.register_json_uri('GET', url, params=params, status=200,
body=revisions_metadata)
response = await provider_one.revisions(file_path)
assert isinstance(response, list)
for index, revision in enumerate(response):
assert isinstance(revision, OsfStorageRevisionMetadata)
assert revision.raw == revisions_metadata['revisions'][index]
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_revisions_without_id(self, provider_one, file_path, revisions_metadata,
mock_time):
url, params = build_signed_url_without_auth(provider_one, 'GET', file_path.identifier,
'revisions')
aiohttpretty.register_json_uri('GET', url, params=params, status=200,
body=revisions_metadata)
file_path._parts[-1]._id = None
with pytest.raises(exceptions.MetadataError):
await provider_one.revisions(file_path)
class TestIntraMoveCopy:
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
@pytest.mark.parametrize('action, method_name', [
('move', 'intra_move'),
('copy', 'intra_copy'),
])
async def test_intra_foo_folder(self, provider_one, provider_two, folder_children_metadata,
mock_time, action, method_name):
src_provider = provider_one
src_provider.delete = utils.MockCoroutine()
src_provider.validate_v1_path = utils.MockCoroutine()
src_provider._children_metadata = utils.MockCoroutine()
dest_provider = provider_two
dest_provider.delete = utils.MockCoroutine()
dest_provider.validate_v1_path = utils.MockCoroutine(
return_value=WaterButlerPath('/folder1/', _ids=('RootId', 'folder1'))
)
dest_provider._children_metadata = utils.MockCoroutine(
return_value=folder_children_metadata
)
src_path = WaterButlerPath('/folder1/', _ids=['RootId', 'folder1'], folder=True)
dest_path = WaterButlerPath('/folder1/', _ids=['RootId'], folder=True)
data = json.dumps({
'user': src_provider.auth['id'],
'source': src_path.identifier,
'destination': {
'name': dest_path.name,
'node': dest_provider.nid,
'parent': dest_path.parent.identifier
}
})
url, params = build_signed_url_without_auth(src_provider, 'POST', 'hooks', action,
data=data)
body = {'path': '/folder1/', 'id': 'folder1', 'kind': 'folder', 'name': 'folder1'}
aiohttpretty.register_json_uri('POST', url, params=params, status=201, body=body)
method = getattr(src_provider, method_name)
folder_meta, created = await method(dest_provider, src_path, dest_path)
assert created
assert isinstance(folder_meta, OsfStorageFolderMetadata)
assert len(folder_meta.children) == 4
# these should be called on dest_provider (if at all), not src_provider
src_provider.delete.assert_not_called()
src_provider.validate_v1_path.assert_not_called()
src_provider._children_metadata.assert_not_called()
# delete isn't called, b/c dest_path doesn't already exist
dest_provider.delete.assert_not_called()
dest_provider.validate_v1_path.assert_called_once_with('/folder1/')
dest_provider._children_metadata.assert_called_once_with(WaterButlerPath('/folder1/'))
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
@pytest.mark.parametrize('action, method_name', [
('move', 'intra_move'),
('copy', 'intra_copy'),
])
async def test_intra_foo_file(self, provider_one, provider_two, file_metadata, mock_time,
action, method_name):
src_provider = provider_one
src_provider.delete = utils.MockCoroutine()
src_provider.validate_v1_path = utils.MockCoroutine()
src_provider._children_metadata = utils.MockCoroutine()
dest_provider = provider_two
dest_provider.delete = utils.MockCoroutine()
dest_provider.validate_v1_path = utils.MockCoroutine()
dest_provider._children_metadata = utils.MockCoroutine()
src_path = WaterButlerPath('/test_file', _ids=['RootId', 'fileId'], folder=False)
dest_path = WaterButlerPath('/folder1/test_file', _ids=['RootId', 'folderId'],
folder=False)
data = json.dumps({
'user': src_provider.auth['id'],
'source': src_path.identifier,
'destination': {
'name': dest_path.name,
'node': dest_provider.nid,
'parent': dest_path.parent.identifier
}
})
url, params = build_signed_url_without_auth(src_provider, 'POST', 'hooks', action,
data=data)
aiohttpretty.register_json_uri('POST', url, params=params, status=201, body=file_metadata)
method = getattr(src_provider, method_name)
file_meta, created = await method(dest_provider, src_path, dest_path)
assert created == True
assert isinstance(file_meta, OsfStorageFileMetadata)
assert file_meta.name == 'doc.rst'
# these should be called on dest_provider (if at all), not src_provider
src_provider.delete.assert_not_called()
src_provider.validate_v1_path.assert_not_called()
src_provider._children_metadata.assert_not_called()
# delete isn't called, b/c dest_path doesn't already exist
# others aren't called b/c copied entity isa file
dest_provider.delete.assert_not_called()
dest_provider.validate_v1_path.assert_not_called()
dest_provider._children_metadata.assert_not_called()
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
@pytest.mark.parametrize('action, method_name', [
('move', 'intra_move'),
('copy', 'intra_copy'),
])
async def test_intra_foo_folder_overwrite(self, provider_one, provider_two,
folder_children_metadata, mock_time, action,
method_name):
src_provider = provider_one
src_provider.delete = utils.MockCoroutine()
src_provider.validate_v1_path = utils.MockCoroutine()
src_provider._children_metadata = utils.MockCoroutine()
dest_provider = provider_two
dest_provider.delete = utils.MockCoroutine()
dest_provider.validate_v1_path = utils.MockCoroutine(
return_value=WaterButlerPath('/folder1/', _ids=('RootId', 'folder1'))
)
dest_provider._children_metadata = utils.MockCoroutine(
return_value=folder_children_metadata
)
src_path = WaterButlerPath('/folder1/', _ids=['RootId', 'folder1'], folder=True)
dest_path = WaterButlerPath('/folder1/', _ids=['RootId', 'doomedFolder'], folder=True)
data = json.dumps({
'user': src_provider.auth['id'],
'source': src_path.identifier,
'destination': {
'name': dest_path.name,
'node': dest_provider.nid,
'parent': dest_path.parent.identifier
}
})
url, params = build_signed_url_without_auth(src_provider, 'POST', 'hooks', action,
data=data)
body = {'path': '/folder1/', 'id': 'folder1', 'kind': 'folder', 'name': 'folder1'}
aiohttpretty.register_json_uri('POST', url, params=params, status=201, body=body)
method = getattr(src_provider, method_name)
folder_meta, created = await method(dest_provider, src_path, dest_path)
assert not created
assert isinstance(folder_meta, OsfStorageFolderMetadata)
assert len(folder_meta.children) == 4
# these should be called on dest_provider (if at all), not src_provider
src_provider.delete.assert_not_called()
src_provider.validate_v1_path.assert_not_called()
src_provider._children_metadata.assert_not_called()
dest_provider.delete.assert_called_once_with(WaterButlerPath('/folder1/'))
dest_provider.validate_v1_path.assert_called_once_with('/folder1/')
dest_provider._children_metadata.assert_called_once_with(WaterButlerPath('/folder1/'))
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
@pytest.mark.parametrize('action, method_name', [
('move', 'intra_move'),
('copy', 'intra_copy'),
])
async def test_intra_foo_file_overwrite(self, provider_one, provider_two,
file_metadata, mock_time, action, method_name):
src_provider = provider_one
src_provider.delete = utils.MockCoroutine()
src_provider.validate_v1_path = utils.MockCoroutine()
src_provider._children_metadata = utils.MockCoroutine()
dest_provider = provider_two
dest_provider.delete = utils.MockCoroutine()
dest_provider.validate_v1_path = utils.MockCoroutine()
dest_provider._children_metadata = utils.MockCoroutine()
src_path = WaterButlerPath('/test_file', _ids=['RootId', 'fileId'], folder=False)
dest_path = WaterButlerPath('/folder1/test_file',
_ids=['RootId', 'folder1Id', 'doomedFile'],
folder=False)
data = json.dumps({
'user': src_provider.auth['id'],
'source': src_path.identifier,
'destination': {
'name': dest_path.name,
'node': dest_provider.nid,
'parent': dest_path.parent.identifier
}
})
url, params = build_signed_url_without_auth(src_provider, 'POST', 'hooks', action,
data=data)
aiohttpretty.register_json_uri('POST', url, params=params, status=201, body=file_metadata)
method = getattr(src_provider, method_name)
file_meta, created = await method(dest_provider, src_path, dest_path)
assert not created
assert isinstance(file_meta, OsfStorageFileMetadata)
assert file_meta.name == 'doc.rst'
# these should be called on dest_provider (if at all), not src_provider
src_provider.delete.assert_not_called()
src_provider.validate_v1_path.assert_not_called()
src_provider._children_metadata.assert_not_called()
# vvp & _cm aren't called b/c copied entity isa file
dest_provider.delete.assert_called_once_with(WaterButlerPath('/folder1/test_file'))
dest_provider.validate_v1_path.assert_not_called()
dest_provider._children_metadata.assert_not_called()
class TestUtils:
def test_is_same_region_true(self, provider_one):
assert provider_one.is_same_region(provider_one)
def test_is_same_region_false(self, provider_one, provider_two):
assert not provider_one.is_same_region(provider_two)
def test_is_same_region_error(self, provider_one):
with pytest.raises(AssertionError) as exc:
provider_one.is_same_region(str())
assert str(exc.value) == 'Cannot compare region for providers of different provider ' \
'classes.'
def test_can_intra_move_copy_true(self, provider_one):
assert provider_one.can_intra_copy(provider_one)
assert provider_one.can_intra_move(provider_one)
def test_can_intra_move_copy_false_region_mismatch(self, provider_one, provider_two):
assert not provider_one.can_intra_copy(provider_two)
assert not provider_one.can_intra_move(provider_two)
def test_can_intra_move_copy_false_class_mismatch(self, provider_one):
assert not provider_one.can_intra_copy(str())
assert not provider_one.can_intra_move(str())
def test_can_duplicate_names(self, provider_one):
assert provider_one.can_duplicate_names()
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test__check_resource_quota_retries_zero(self, provider_one, monkeypatch, mock_time):
monkeypatch.setattr('waterbutler.providers.osfstorage.settings.QUOTA_RETRIES', 2)
monkeypatch.setattr('waterbutler.providers.osfstorage.settings.QUOTA_RETRIES_DELAY', 1)
responses = [
{
'body': json.dumps({'over_quota': True}),
'status': 200,
'headers': {'Content-Type': 'application/json'},
},
]
quota_url, quota_params = build_signed_url_without_auth(provider_one, 'GET', 'quota_status')
aiohttpretty.register_json_uri('GET', quota_url, params=quota_params, responses=responses)
resp = await provider_one._check_resource_quota()
assert resp == {'over_quota': True}
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test__check_resource_quota_retries_one(self, provider_one, monkeypatch, mock_time):
monkeypatch.setattr('waterbutler.providers.osfstorage.settings.QUOTA_RETRIES', 2)
monkeypatch.setattr('waterbutler.providers.osfstorage.settings.QUOTA_RETRIES_DELAY', 1)
responses = [
{'status': 202,},
{
'body': json.dumps({'over_quota': True}),
'status': 200,
'headers': {'Content-Type': 'application/json'},
},
]
quota_url, quota_params = build_signed_url_without_auth(provider_one, 'GET', 'quota_status')
aiohttpretty.register_json_uri('GET', quota_url, params=quota_params, responses=responses)
resp = await provider_one._check_resource_quota()
assert resp == {'over_quota': True}
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test__check_resource_quota_retries_two(self, provider_one, monkeypatch, mock_time):
monkeypatch.setattr('waterbutler.providers.osfstorage.settings.QUOTA_RETRIES', 2)
monkeypatch.setattr('waterbutler.providers.osfstorage.settings.QUOTA_RETRIES_DELAY', 1)
responses = [
{'status': 202,},
{'status': 202,},
{
'body': json.dumps({'over_quota': True}),
'status': 200,
'headers': {'Content-Type': 'application/json'},
},
]
quota_url, quota_params = build_signed_url_without_auth(provider_one, 'GET', 'quota_status')
aiohttpretty.register_json_uri('GET', quota_url, params=quota_params, responses=responses)
resp = await provider_one._check_resource_quota()
assert resp == {'over_quota': True}
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test__check_resource_quota_exhaust_retries(self, provider_one, monkeypatch, mock_time):
monkeypatch.setattr('waterbutler.providers.osfstorage.settings.QUOTA_RETRIES', 2)
monkeypatch.setattr('waterbutler.providers.osfstorage.settings.QUOTA_RETRIES_DELAY', 1)
responses = [
{'status': 202,},
{'status': 202,},
{'status': 202,},
{
'body': json.dumps({'over_quota': True}),
'status': 200,
'headers': {'Content-Type': 'application/json'},
},
]
quota_url, quota_params = build_signed_url_without_auth(provider_one, 'GET', 'quota_status')
aiohttpretty.register_json_uri('GET', quota_url, params=quota_params, responses=responses)
resp = await provider_one._check_resource_quota()
assert resp == {'over_quota': False}
class TestValidatePath:
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_validate_path_root(self, provider_one, root_path, mock_time):
assert root_path == await provider_one.validate_path('/')
assert root_path == await provider_one.validate_v1_path('/')
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_validate_path_file(self, provider_one, file_lineage, mock_time):
file_id = file_lineage['data'][0]['id']
url, params = build_signed_url_without_auth(provider_one, 'GET', file_id, 'lineage')
aiohttpretty.register_json_uri('GET', url, params=params, status=200, body=file_lineage)
with pytest.raises(exceptions.NotFoundError) as exc:
await provider_one.validate_v1_path('/' + file_id + '/')
assert exc.value.code == client.NOT_FOUND
wb_path_v0 = await provider_one.validate_path('/' + file_id)
wb_path_v1 = await provider_one.validate_v1_path('/' + file_id)
expected = WaterButlerPath('/doc.rst')
assert wb_path_v0 == expected
assert wb_path_v1 == expected
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_validate_path_folder(self, provider_one, folder_lineage, mock_time):
folder_id = folder_lineage['data'][0]['id']
url, params = build_signed_url_without_auth(provider_one, 'GET', folder_id, 'lineage')
aiohttpretty.register_json_uri('GET', url, params=params, status=200, body=folder_lineage)
with pytest.raises(exceptions.NotFoundError):
await provider_one.validate_v1_path('/' + folder_id)
wb_path_v0 = await provider_one.validate_path('/' + folder_id)
wb_path_v1 = await provider_one.validate_v1_path('/' + folder_id + '/')
expected = WaterButlerPath('/New Folder/')
assert wb_path_v0 == expected
assert wb_path_v1 == expected
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_validate_path_404s(self, provider_one, file_lineage, mock_time):
file_id = file_lineage['data'][0]['id']
url, params = build_signed_url_without_auth(provider_one, 'GET', file_id, 'lineage')
aiohttpretty.register_json_uri('GET', url, params=params, status=404, body=file_lineage)
with pytest.raises(exceptions.UnhandledProviderError):
await provider_one.validate_v1_path('/' + file_id)
wb_path_v0 = await provider_one.validate_path('/' + file_id)
assert wb_path_v0 == WaterButlerPath(file_lineage['data'][0]['path'], prepend=None)
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_revalidate_path_new(self, provider_one, folder_path, folder_children_metadata,
mock_time):
url, params = build_signed_url_without_auth(provider_one, 'GET', folder_path.identifier,
'children', user_id=provider_one.auth['id'])
aiohttpretty.register_json_uri('GET', url, params=params, status=200,
body=folder_children_metadata)
revalidated_path = await provider_one.revalidate_path(folder_path, 'new_file', folder=False)
assert revalidated_path.name == 'new_file'
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_revalidate_path_existing(self, provider_one, folder_path,
folder_children_metadata, mock_time):
url, params = build_signed_url_without_auth(provider_one, 'GET', folder_path.identifier,
'children', user_id=provider_one.auth['id'])
aiohttpretty.register_json_uri('GET', url, params=params, status=200,
body=folder_children_metadata)
revalidated_path = await provider_one.revalidate_path(folder_path,
folder_children_metadata[1]['name'],
folder=False)
assert revalidated_path.name == 'one'
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_validate_path_nested(self, provider_one, file_lineage, folder_lineage,
mock_time):
file_id = file_lineage['data'][0]['id']
url, params = build_signed_url_without_auth(provider_one, 'GET', file_id, 'lineage')
aiohttpretty.register_json_uri('GET', url, params=params, status=200, body=file_lineage)
url, params = build_signed_url_without_auth(provider_one, 'GET', 'New Folder', 'lineage')
aiohttpretty.register_json_uri('GET', url, params=params, status=200, body=folder_lineage)
wb_path_v0 = await provider_one.validate_path('New Folder/' + file_id)
assert len(wb_path_v0._parts) == 3
assert wb_path_v0.name == '59a9b628b7d1c903ab5a8f52'
class TestUploads:
def patch_uuid(self, monkeypatch):
basepath = 'waterbutler.providers.osfstorage.provider.{}'
monkeypatch.setattr(basepath.format('uuid.uuid4'), lambda: 'patched_path')
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_upload_new(self, monkeypatch, provider_and_mock_one, file_stream,
upload_response, upload_path, mock_time):
self.patch_uuid(monkeypatch)
url = 'https://waterbutler.io/{}/children/'.format(upload_path.parent.identifier)
aiohttpretty.register_json_uri('POST', url, status=201, body=upload_response)
provider, inner_provider = provider_and_mock_one
inner_provider.metadata = utils.MockCoroutine(return_value=utils.MockFileMetadata())
quota_url, quota_params = build_signed_url_without_auth(provider, 'GET', 'quota_status')
aiohttpretty.register_json_uri('GET', quota_url, params=quota_params, status=200,
body={'over_quota': False})
res, created = await provider.upload(file_stream, upload_path)
assert created is True
assert res.name == '[TEST]'
assert res.extra['version'] == 8
assert res.provider == 'osfstorage'
assert res.extra['downloads'] == 0
assert res.extra['checkout'] is None
assert upload_path.identifier_path == res.path
inner_provider.delete.assert_called_once_with(WaterButlerPath('/patched_path'))
expected_path = WaterButlerPath('/' + file_stream.writers['sha256'].hexdigest)
inner_provider.metadata.assert_called_once_with(expected_path)
inner_provider.upload.assert_called_once_with(file_stream, WaterButlerPath('/patched_path'),
check_created=False, fetch_metadata=False)
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_upload_existing(self, monkeypatch, provider_and_mock_one, file_stream,
upload_path, upload_response, mock_time):
self.patch_uuid(monkeypatch)
provider, inner_provider = provider_and_mock_one
url = 'https://waterbutler.io/{}/children/'.format(upload_path.parent.identifier)
quota_url, quota_params = build_signed_url_without_auth(provider, 'GET', 'quota_status')
aiohttpretty.register_json_uri('GET', quota_url, params=quota_params, status=200,
body={'over_quota': False})
inner_provider.move.return_value = (utils.MockFileMetadata(), True)
inner_provider.metadata.side_effect = exceptions.MetadataError('Boom!', code=404)
aiohttpretty.register_json_uri('POST', url, status=200, body=upload_response)
res, created = await provider.upload(file_stream, upload_path)
assert created is False
assert res.name == '[TEST]'
assert res.extra['version'] == 8
assert res.provider == 'osfstorage'
assert res.extra['downloads'] == 0
assert res.extra['checkout'] is None
assert upload_path.identifier_path == res.path
expected_path = WaterButlerPath('/' + file_stream.writers['sha256'].hexdigest)
inner_provider.metadata.assert_called_once_with(expected_path)
inner_provider.upload.assert_called_once_with(file_stream,
WaterButlerPath('/patched_path'),
check_created=False,
fetch_metadata=False)
inner_provider.move.assert_called_once_with(inner_provider,
WaterButlerPath('/patched_path'),
expected_path)
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_upload_catch_non_404_errors(self, monkeypatch, provider_and_mock_one, file_stream,
upload_path, mock_time):
self.patch_uuid(monkeypatch)
provider, inner_provider = provider_and_mock_one
quota_url, quota_params = build_signed_url_without_auth(provider, 'GET', 'quota_status')
aiohttpretty.register_json_uri('GET', quota_url, params=quota_params, status=200,
body={'over_quota': False})
url = 'https://waterbutler.io/{}/children/'.format(upload_path.parent.identifier)
inner_provider.metadata.side_effect = exceptions.MetadataError('Boom!', code=500)
aiohttpretty.register_json_uri('POST', url, status=500)
with pytest.raises(exceptions.MetadataError):
await provider.upload(file_stream, upload_path)
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_upload_fails(self, monkeypatch, provider_and_mock_one, file_stream,
upload_response, mock_time):
self.patch_uuid(monkeypatch)
provider, inner_provider = provider_and_mock_one
path = WaterButlerPath('/{}'.format(upload_response['data']['name']),
_ids=('Test', upload_response['data']['id']))
url = 'https://waterbutler.io/{}/children/'.format(path.parent.identifier)
aiohttpretty.register_json_uri('POST', url, status=201, body=upload_response)
inner_provider.metadata = utils.MockCoroutine(return_value=utils.MockFileMetadata())
inner_provider.upload.side_effect = Exception()
quota_url, quota_params = build_signed_url_without_auth(provider, 'GET', 'quota_status')
aiohttpretty.register_json_uri('GET', quota_url, params=quota_params, status=200,
body={'over_quota': False})
with pytest.raises(Exception):
await provider.upload(file_stream, path)
inner_provider.upload.assert_called_once_with(
file_stream,
WaterButlerPath('/patched_path'),
check_created=False,
fetch_metadata=False
)
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_upload_reject_quota(self, monkeypatch, provider_and_mock_one, file_stream,
upload_path, mock_time):
self.patch_uuid(monkeypatch)
provider, inner_provider = provider_and_mock_one
provider._send_to_storage_provider = utils.MockCoroutine()
provider._send_to_metadata_provider = utils.MockCoroutine()
quota_url, quota_params = build_signed_url_without_auth(provider, 'GET', 'quota_status')
aiohttpretty.register_json_uri('GET', quota_url, params=quota_params, status=200,
body={'over_quota': True})
with pytest.raises(OsfStorageQuotaExceededError):
await provider.upload(file_stream, upload_path)
provider._send_to_storage_provider.assert_not_called()
provider._send_to_metadata_provider.assert_not_called()
class TestCrossRegionMove:
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_move_file(self, provider_one, provider_two, file_stream, upload_response):
# aliased for clarity
src_provider, dst_provider = provider_one, provider_two
src_provider.download = utils.MockCoroutine(return_value=file_stream)
src_provider.intra_move = utils.MockCoroutine(return_value=(upload_response, True))
dst_provider._send_to_storage_provider = utils.MockCoroutine()
src_path = WaterButlerPath('/foo', _ids=('Test', '56ab34'))
dest_path = WaterButlerPath('/', _ids=('Test',))
quota_url, quota_params = build_signed_url_without_auth(dst_provider, 'GET', 'quota_status')
aiohttpretty.register_json_uri('GET', quota_url, params=quota_params, status=200,
body={'over_quota': False})
metadata, created = await src_provider.move(dst_provider, src_path, dest_path,
handle_naming=False);
assert metadata is not None
assert created is True
src_provider.download.assert_called_once_with(WaterButlerPath('/foo'))
dst_provider._send_to_storage_provider.assert_called_once_with(file_stream,
WaterButlerPath('/'),
rename=None,
conflict='replace')
src_provider.intra_move.assert_called_once_with(dst_provider, WaterButlerPath('/foo'),
WaterButlerPath('/'))
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_move_file_reject_quota(self, provider_one, provider_two):
# aliased for clarity
src_provider, dst_provider = provider_one, provider_two
src_provider.download = utils.MockCoroutine()
src_provider.intra_move = utils.MockCoroutine()
dst_provider._send_to_storage_provider = utils.MockCoroutine()
src_path = WaterButlerPath('/foo', _ids=('Test', '56ab34'))
dest_path = WaterButlerPath('/', _ids=('Test',))
quota_url, quota_params = build_signed_url_without_auth(dst_provider, 'GET', 'quota_status')
aiohttpretty.register_json_uri('GET', quota_url, params=quota_params, status=200,
body={'over_quota': True})
with pytest.raises(OsfStorageQuotaExceededError):
await src_provider.move(dst_provider, src_path, dest_path, handle_naming=False)
src_provider.download.assert_not_called()
dst_provider._send_to_storage_provider.assert_not_called()
src_provider.intra_move.assert_not_called()
@pytest.mark.asyncio
async def test_move_folder(self, provider_one, provider_two):
# aliased for clarity
src_provider, dst_provider = provider_one, provider_two
src_provider._folder_file_op = utils.MockCoroutine(return_value=(upload_response, True))
src_provider.delete = utils.MockCoroutine()
src_path = WaterButlerPath('/foo/', _ids=('Test', '56ab34'), folder=True)
dest_path = WaterButlerPath('/', _ids=('Test',), folder=True)
metadata, created = await src_provider.move(dst_provider, src_path, dest_path,
handle_naming=False);
assert metadata is not None
assert created is True
src_provider._folder_file_op.assert_called_once_with(src_provider.move,
dst_provider,
WaterButlerPath('/foo/'),
WaterButlerPath('/'),
rename=None,
conflict='replace')
src_provider.delete.assert_called_once_with(WaterButlerPath('/foo/'))
@pytest.mark.asyncio
async def test_move_cross_provider(self, monkeypatch, provider_one, provider_two):
# aliased for clarity
src_provider, dst_provider = provider_one, provider_two
src_provider.download = utils.MockCoroutine()
dst_provider.NAME = 'not-osfstorage'
core_move = utils.MockCoroutine()
monkeypatch.setattr('waterbutler.core.provider.BaseProvider.move', core_move)
src_path = WaterButlerPath('/foo', _ids=('Test', '56ab34'))
dest_path = WaterButlerPath('/', _ids=('Test',))
await src_provider.move(dst_provider, src_path, dest_path, handle_naming=False);
core_move.assert_called_once_with(dst_provider, src_path, dest_path, rename=None,
conflict='replace', handle_naming=False);
src_provider.download.assert_not_called()
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_move_but_intra_move(self, provider_one, auth, credentials,
settings_region_one):
"""OSFStorageProvider.move checks to see if intra_move can be called as an optimization.
If the destination is not `osfstorage`, delegate to the parent method. Otherwise, check
whether we can optimize by doing an `intra_move` action. `intra_move` is permissable when
both `osfstorage` providers are in the same region."""
# aliased for clarity
src_provider = provider_one
settings_region_one['nid'] = 'fake-nid'
dst_provider = OSFStorageProvider(auth, credentials, settings_region_one)
src_provider.can_intra_move = mock.Mock(return_value=True)
src_provider.intra_move = utils.MockCoroutine()
src_provider.download = utils.MockCoroutine()
src_path = WaterButlerPath('/foo', _ids=('Test', '56ab34'))
dest_path = WaterButlerPath('/', _ids=('Test',))
quota_url, quota_params = build_signed_url_without_auth(dst_provider, 'GET', 'quota_status')
aiohttpretty.register_json_uri('GET', quota_url, params=quota_params, status=200,
body={'over_quota': False})
await src_provider.move(dst_provider, src_path, dest_path, handle_naming=False);
src_provider.can_intra_move.assert_called_once_with(dst_provider, src_path)
src_provider.intra_move.assert_called_once_with(dst_provider, src_path, dest_path)
src_provider.download.assert_not_called()
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_intra_move_reject_by_quota(self, provider_one, auth, credentials,
settings_region_one):
"""Same as previous, but assume the destination node is not the same as the source node
and is subject to storage caps."""
# aliased for clarity
src_provider = provider_one
settings_region_one['nid'] = 'fake-nid'
dst_provider = OSFStorageProvider(auth, credentials, settings_region_one)
src_provider.can_intra_move = mock.Mock(return_value=True)
src_provider.intra_move = utils.MockCoroutine()
src_provider.download = utils.MockCoroutine()
src_path = WaterButlerPath('/foo', _ids=('Test', '56ab34'))
dest_path = WaterButlerPath('/', _ids=('Test',))
quota_url, quota_params = build_signed_url_without_auth(dst_provider, 'GET', 'quota_status')
aiohttpretty.register_json_uri('GET', quota_url, params=quota_params, status=200,
body={'over_quota': True})
with pytest.raises(OsfStorageQuotaExceededError):
await src_provider.move(dst_provider, src_path, dest_path, handle_naming=False);
src_provider.can_intra_move.assert_called_once_with(dst_provider, src_path)
src_provider.intra_move.assert_not_called()
src_provider.download.assert_not_called()
class TestCrossRegionCopy:
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_copy_file(self, provider_one, provider_two, file_stream, upload_response):
# aliased for clarity
src_provider, dst_provider = provider_one, provider_two
src_provider.download = utils.MockCoroutine(return_value=file_stream)
src_provider.intra_copy = utils.MockCoroutine(return_value=(upload_response, True))
dst_provider._send_to_storage_provider = utils.MockCoroutine()
src_path = WaterButlerPath('/foo', _ids=('Test', '56ab34'))
dest_path = WaterButlerPath('/', _ids=('Test',))
quota_url, quota_params = build_signed_url_without_auth(dst_provider, 'GET', 'quota_status')
aiohttpretty.register_json_uri('GET', quota_url, params=quota_params, status=200,
body={'over_quota': False})
metadata, created = await src_provider.copy(dst_provider, src_path, dest_path,
handle_naming=False);
assert metadata is not None
assert created is True
src_provider.download.assert_called_once_with(WaterButlerPath('/foo'))
dst_provider._send_to_storage_provider.assert_called_once_with(file_stream,
WaterButlerPath('/'),
rename=None,
conflict='replace')
src_provider.intra_copy.assert_called_once_with(dst_provider, WaterButlerPath('/foo'),
WaterButlerPath('/'))
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_copy_file_reject_quota(self, provider_one, provider_two):
# aliased for clarity
src_provider, dst_provider = provider_one, provider_two
src_provider.download = utils.MockCoroutine()
src_provider.intra_copy = utils.MockCoroutine()
dst_provider._send_to_storage_provider = utils.MockCoroutine()
src_path = WaterButlerPath('/foo', _ids=('Test', '56ab34'))
dest_path = WaterButlerPath('/', _ids=('Test',))
quota_url, quota_params = build_signed_url_without_auth(dst_provider, 'GET', 'quota_status')
aiohttpretty.register_json_uri('GET', quota_url, params=quota_params, status=200,
body={'over_quota': True})
with pytest.raises(OsfStorageQuotaExceededError):
await src_provider.copy(dst_provider, src_path, dest_path, handle_naming=False)
src_provider.download.assert_not_called()
dst_provider._send_to_storage_provider.assert_not_called()
src_provider.intra_copy.assert_not_called()
@pytest.mark.asyncio
async def test_copy_folder(self, provider_one, provider_two):
# aliased for clarity
src_provider, dst_provider = provider_one, provider_two
src_provider._folder_file_op = utils.MockCoroutine(return_value=(upload_response, True))
src_path = WaterButlerPath('/foo/', _ids=('Test', '56ab34'), folder=True)
dest_path = WaterButlerPath('/', _ids=('Test',), folder=True)
metadata, created = await src_provider.copy(dst_provider, src_path, dest_path,
handle_naming=False);
assert metadata is not None
assert created is True
src_provider._folder_file_op.assert_called_once_with(src_provider.copy,
dst_provider,
WaterButlerPath('/foo/'),
WaterButlerPath('/'),
rename=None,
conflict='replace')
@pytest.mark.asyncio
async def test_copy_cross_provider(self, monkeypatch, provider_one, provider_two):
# aliased for clarity
src_provider, dst_provider = provider_one, provider_two
src_provider.download = utils.MockCoroutine()
dst_provider.NAME = 'not-osfstorage'
core_copy = utils.MockCoroutine()
monkeypatch.setattr('waterbutler.core.provider.BaseProvider.copy', core_copy)
src_path = WaterButlerPath('/foo', _ids=('Test', '56ab34'))
dest_path = WaterButlerPath('/', _ids=('Test',))
await src_provider.copy(dst_provider, src_path, dest_path, handle_naming=False);
core_copy.assert_called_once_with(dst_provider, src_path, dest_path, rename=None,
conflict='replace', handle_naming=False);
src_provider.download.assert_not_called()
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_copy_but_intra_copy(self, provider_one, auth, credentials,
settings_region_one):
"""OSFStorageProvider.copy checks to see if intra_copy can be called as an optimization.
If the destination is not `osfstorage`, delegate to the parent method. Otherwise, check
whether we can optimize by doing an `intra_copy` action. `intra_copy` is permissable when
both `osfstorage` providers are in the same region."""
# aliased for clarity
src_provider = provider_one
settings_region_one['nid'] = 'fake-nid'
dst_provider = OSFStorageProvider(auth, credentials, settings_region_one)
src_provider.can_intra_copy = mock.Mock(return_value=True)
src_provider.intra_copy = utils.MockCoroutine()
src_provider.download = utils.MockCoroutine()
src_path = WaterButlerPath('/foo', _ids=('Test', '56ab34'))
dest_path = WaterButlerPath('/', _ids=('Test',))
quota_url, quota_params = build_signed_url_without_auth(dst_provider, 'GET', 'quota_status')
aiohttpretty.register_json_uri('GET', quota_url, params=quota_params, status=200,
body={'over_quota': False})
await src_provider.copy(dst_provider, src_path, dest_path, handle_naming=False);
src_provider.can_intra_copy.assert_called_once_with(dst_provider, src_path)
src_provider.intra_copy.assert_called_once_with(dst_provider, src_path, dest_path)
src_provider.download.assert_not_called()
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_intra_copy_reject_by_quota(self, provider_one, auth, credentials,
settings_region_one):
"""Same as previous, but assume the destination node is not the same as the source node
and is subject to storage caps."""
# aliased for clarity
src_provider = provider_one
settings_region_one['nid'] = 'fake-nid'
dst_provider = OSFStorageProvider(auth, credentials, settings_region_one)
src_provider.can_intra_copy = mock.Mock(return_value=True)
src_provider.intra_copy = utils.MockCoroutine()
src_provider.download = utils.MockCoroutine()
src_path = WaterButlerPath('/foo', _ids=('Test', '56ab34'))
dest_path = WaterButlerPath('/', _ids=('Test',))
quota_url, quota_params = build_signed_url_without_auth(dst_provider, 'GET', 'quota_status')
aiohttpretty.register_json_uri('GET', quota_url, params=quota_params, status=200,
body={'over_quota': True})
with pytest.raises(OsfStorageQuotaExceededError):
await src_provider.copy(dst_provider, src_path, dest_path, handle_naming=False);
src_provider.can_intra_copy.assert_called_once_with(dst_provider, src_path)
src_provider.intra_copy.assert_not_called()
src_provider.download.assert_not_called()
|
{
"content_hash": "7d1fb0cf6c89968f9607b24472734f7c",
"timestamp": "",
"source": "github",
"line_count": 1255,
"max_line_length": 101,
"avg_line_length": 45.516334661354584,
"alnum_prop": 0.6024018346375365,
"repo_name": "felliott/waterbutler",
"id": "7ee414a7738e8151f88923e67b397a2e8238e7b6",
"size": "57123",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "tests/providers/osfstorage/test_provider.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "922"
},
{
"name": "Python",
"bytes": "1673806"
}
],
"symlink_target": ""
}
|
import ddt
import mock
from cinder import exception
from cinder import test
from cinder.tests.unit.volume.drivers.netapp.dataontap.performance \
import fakes as fake
from cinder.volume.drivers.netapp.dataontap.performance import perf_base
@ddt.ddt
class PerformanceLibraryTestCase(test.TestCase):
def setUp(self):
super(PerformanceLibraryTestCase, self).setUp()
with mock.patch.object(perf_base.PerformanceLibrary,
'_init_counter_info'):
self.zapi_client = mock.Mock()
self.perf_library = perf_base.PerformanceLibrary(self.zapi_client)
self.perf_library.system_object_name = 'system'
self.perf_library.avg_processor_busy_base_counter_name = (
'cpu_elapsed_time1')
def test_init(self):
mock_zapi_client = mock.Mock()
mock_init_counter_info = self.mock_object(
perf_base.PerformanceLibrary, '_init_counter_info')
library = perf_base.PerformanceLibrary(mock_zapi_client)
self.assertEqual(mock_zapi_client, library.zapi_client)
mock_init_counter_info.assert_called_once_with()
def test_init_counter_info(self):
self.perf_library._init_counter_info()
self.assertIsNone(self.perf_library.system_object_name)
self.assertIsNone(
self.perf_library.avg_processor_busy_base_counter_name)
def test_get_node_utilization_kahuna_overutilized(self):
mock_get_kahuna_utilization = self.mock_object(
self.perf_library, '_get_kahuna_utilization',
mock.Mock(return_value=61.0))
mock_get_average_cpu_utilization = self.mock_object(
self.perf_library, '_get_average_cpu_utilization',
mock.Mock(return_value=25.0))
result = self.perf_library._get_node_utilization('fake1',
'fake2',
'fake_node')
self.assertAlmostEqual(100.0, result)
mock_get_kahuna_utilization.assert_called_once_with('fake1', 'fake2')
self.assertFalse(mock_get_average_cpu_utilization.called)
@ddt.data({'cpu': -0.01, 'cp_time': 10000, 'poll_time': 0},
{'cpu': 1.01, 'cp_time': 0, 'poll_time': 1000},
{'cpu': 0.50, 'cp_time': 0, 'poll_time': 0})
@ddt.unpack
def test_get_node_utilization_zero_time(self, cpu, cp_time, poll_time):
mock_get_kahuna_utilization = self.mock_object(
self.perf_library, '_get_kahuna_utilization',
mock.Mock(return_value=59.0))
mock_get_average_cpu_utilization = self.mock_object(
self.perf_library, '_get_average_cpu_utilization',
mock.Mock(return_value=cpu))
mock_get_total_consistency_point_time = self.mock_object(
self.perf_library, '_get_total_consistency_point_time',
mock.Mock(return_value=cp_time))
mock_get_consistency_point_p2_flush_time = self.mock_object(
self.perf_library, '_get_consistency_point_p2_flush_time',
mock.Mock(return_value=cp_time))
mock_get_total_time = self.mock_object(
self.perf_library, '_get_total_time',
mock.Mock(return_value=poll_time))
mock_get_adjusted_consistency_point_time = self.mock_object(
self.perf_library, '_get_adjusted_consistency_point_time')
result = self.perf_library._get_node_utilization('fake1',
'fake2',
'fake_node')
expected = max(min(100.0, 100.0 * cpu), 0)
self.assertEqual(expected, result)
mock_get_kahuna_utilization.assert_called_once_with('fake1', 'fake2')
mock_get_average_cpu_utilization.assert_called_once_with('fake1',
'fake2')
mock_get_total_consistency_point_time.assert_called_once_with('fake1',
'fake2')
mock_get_consistency_point_p2_flush_time.assert_called_once_with(
'fake1', 'fake2')
mock_get_total_time.assert_called_once_with('fake1',
'fake2',
'total_cp_msecs')
self.assertFalse(mock_get_adjusted_consistency_point_time.called)
@ddt.data({'cpu': 0.75, 'adjusted_cp_time': 8000, 'expected': 80},
{'cpu': 0.80, 'adjusted_cp_time': 7500, 'expected': 80},
{'cpu': 0.50, 'adjusted_cp_time': 11000, 'expected': 100})
@ddt.unpack
def test_get_node_utilization(self, cpu, adjusted_cp_time, expected):
mock_get_kahuna_utilization = self.mock_object(
self.perf_library, '_get_kahuna_utilization',
mock.Mock(return_value=59.0))
mock_get_average_cpu_utilization = self.mock_object(
self.perf_library, '_get_average_cpu_utilization',
mock.Mock(return_value=cpu))
mock_get_total_consistency_point_time = self.mock_object(
self.perf_library, '_get_total_consistency_point_time',
mock.Mock(return_value=90.0))
mock_get_consistency_point_p2_flush_time = self.mock_object(
self.perf_library, '_get_consistency_point_p2_flush_time',
mock.Mock(return_value=50.0))
mock_get_total_time = self.mock_object(
self.perf_library, '_get_total_time',
mock.Mock(return_value=10000))
mock_get_adjusted_consistency_point_time = self.mock_object(
self.perf_library, '_get_adjusted_consistency_point_time',
mock.Mock(return_value=adjusted_cp_time))
result = self.perf_library._get_node_utilization('fake1',
'fake2',
'fake_node')
self.assertEqual(expected, result)
mock_get_kahuna_utilization.assert_called_once_with('fake1', 'fake2')
mock_get_average_cpu_utilization.assert_called_once_with('fake1',
'fake2')
mock_get_total_consistency_point_time.assert_called_once_with('fake1',
'fake2')
mock_get_consistency_point_p2_flush_time.assert_called_once_with(
'fake1', 'fake2')
mock_get_total_time.assert_called_once_with('fake1',
'fake2',
'total_cp_msecs')
mock_get_adjusted_consistency_point_time.assert_called_once_with(
90.0, 50.0)
def test_get_node_utilization_calculation_error(self):
self.mock_object(self.perf_library,
'_get_kahuna_utilization',
mock.Mock(return_value=59.0))
self.mock_object(self.perf_library,
'_get_average_cpu_utilization',
mock.Mock(return_value=25.0))
self.mock_object(self.perf_library,
'_get_total_consistency_point_time',
mock.Mock(return_value=90.0))
self.mock_object(self.perf_library,
'_get_consistency_point_p2_flush_time',
mock.Mock(return_value=50.0))
self.mock_object(self.perf_library,
'_get_total_time',
mock.Mock(return_value=10000))
self.mock_object(self.perf_library,
'_get_adjusted_consistency_point_time',
mock.Mock(side_effect=ZeroDivisionError))
result = self.perf_library._get_node_utilization('fake1',
'fake2',
'fake_node')
self.assertEqual(perf_base.DEFAULT_UTILIZATION, result)
def test_get_kahuna_utilization(self):
mock_get_performance_counter = self.mock_object(
self.perf_library,
'_get_performance_counter_average_multi_instance',
mock.Mock(return_value=[0.2, 0.3]))
result = self.perf_library._get_kahuna_utilization('fake_t1',
'fake_t2')
self.assertAlmostEqual(50.0, result)
mock_get_performance_counter.assert_called_once_with(
'fake_t1', 'fake_t2', 'domain_busy:kahuna',
'processor_elapsed_time')
def test_get_average_cpu_utilization(self):
mock_get_performance_counter_average = self.mock_object(
self.perf_library, '_get_performance_counter_average',
mock.Mock(return_value=0.45))
result = self.perf_library._get_average_cpu_utilization('fake_t1',
'fake_t2')
self.assertAlmostEqual(0.45, result)
mock_get_performance_counter_average.assert_called_once_with(
'fake_t1', 'fake_t2', 'avg_processor_busy', 'cpu_elapsed_time1')
def test_get_total_consistency_point_time(self):
mock_get_performance_counter_delta = self.mock_object(
self.perf_library, '_get_performance_counter_delta',
mock.Mock(return_value=500))
result = self.perf_library._get_total_consistency_point_time(
'fake_t1', 'fake_t2')
self.assertEqual(500, result)
mock_get_performance_counter_delta.assert_called_once_with(
'fake_t1', 'fake_t2', 'total_cp_msecs')
def test_get_consistency_point_p2_flush_time(self):
mock_get_performance_counter_delta = self.mock_object(
self.perf_library, '_get_performance_counter_delta',
mock.Mock(return_value=500))
result = self.perf_library._get_consistency_point_p2_flush_time(
'fake_t1', 'fake_t2')
self.assertEqual(500, result)
mock_get_performance_counter_delta.assert_called_once_with(
'fake_t1', 'fake_t2', 'cp_phase_times:p2_flush')
def test_get_total_time(self):
mock_find_performance_counter_timestamp = self.mock_object(
self.perf_library, '_find_performance_counter_timestamp',
mock.Mock(side_effect=[100, 105]))
result = self.perf_library._get_total_time('fake_t1',
'fake_t2',
'fake_counter')
self.assertEqual(5000, result)
mock_find_performance_counter_timestamp.assert_has_calls([
mock.call('fake_t1', 'fake_counter'),
mock.call('fake_t2', 'fake_counter')])
def test_get_adjusted_consistency_point_time(self):
result = self.perf_library._get_adjusted_consistency_point_time(
500, 200)
self.assertAlmostEqual(250, result)
def test_get_performance_counter_delta(self):
result = self.perf_library._get_performance_counter_delta(
fake.COUNTERS_T1, fake.COUNTERS_T2, 'total_cp_msecs')
self.assertEqual(1482, result)
def test_get_performance_counter_average(self):
result = self.perf_library._get_performance_counter_average(
fake.COUNTERS_T1, fake.COUNTERS_T2, 'domain_busy:kahuna',
'processor_elapsed_time', 'processor0')
self.assertAlmostEqual(0.00281954360981, result)
def test_get_performance_counter_average_multi_instance(self):
result = (
self.perf_library._get_performance_counter_average_multi_instance(
fake.COUNTERS_T1, fake.COUNTERS_T2, 'domain_busy:kahuna',
'processor_elapsed_time'))
expected = [0.002819543609809441, 0.0033421611147606135]
self.assertAlmostEqual(expected, result)
def test_find_performance_counter_value(self):
result = self.perf_library._find_performance_counter_value(
fake.COUNTERS_T1, 'domain_busy:kahuna',
instance_name='processor0')
self.assertEqual('2712467226', result)
def test_find_performance_counter_value_not_found(self):
self.assertRaises(
exception.NotFound,
self.perf_library._find_performance_counter_value,
fake.COUNTERS_T1, 'invalid', instance_name='processor0')
def test_find_performance_counter_timestamp(self):
result = self.perf_library._find_performance_counter_timestamp(
fake.COUNTERS_T1, 'domain_busy')
self.assertEqual('1453573777', result)
def test_find_performance_counter_timestamp_not_found(self):
self.assertRaises(
exception.NotFound,
self.perf_library._find_performance_counter_timestamp,
fake.COUNTERS_T1, 'invalid', instance_name='processor0')
def test_expand_performance_array(self):
counter_info = {
'labels': ['idle', 'kahuna', 'storage', 'exempt'],
'name': 'domain_busy',
}
self.zapi_client.get_performance_counter_info = mock.Mock(
return_value=counter_info)
counter = {
'node-name': 'cluster1-01',
'instance-uuid': 'cluster1-01:kernel:processor0',
'domain_busy': '969142314286,2567571412,2131582146,5383861579',
'instance-name': 'processor0',
'timestamp': '1453512244',
}
self.perf_library._expand_performance_array('wafl',
'domain_busy',
counter)
modified_counter = {
'node-name': 'cluster1-01',
'instance-uuid': 'cluster1-01:kernel:processor0',
'domain_busy': '969142314286,2567571412,2131582146,5383861579',
'instance-name': 'processor0',
'timestamp': '1453512244',
'domain_busy:idle': '969142314286',
'domain_busy:kahuna': '2567571412',
'domain_busy:storage': '2131582146',
'domain_busy:exempt': '5383861579',
}
self.assertEqual(modified_counter, counter)
def test_get_base_counter_name(self):
counter_info = {
'base-counter': 'cpu_elapsed_time',
'labels': [],
'name': 'avg_processor_busy',
}
self.zapi_client.get_performance_counter_info = mock.Mock(
return_value=counter_info)
result = self.perf_library._get_base_counter_name(
'system:constituent', 'avg_processor_busy')
self.assertEqual('cpu_elapsed_time', result)
|
{
"content_hash": "a4ed52b3a2001d202ae681fbcd55d639",
"timestamp": "",
"source": "github",
"line_count": 351,
"max_line_length": 78,
"avg_line_length": 42.12535612535613,
"alnum_prop": 0.5680373326119302,
"repo_name": "cloudbase/cinder",
"id": "a6efa60ca0d668c1ae5a75e61cf89009185bceef",
"size": "15420",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "cinder/tests/unit/volume/drivers/netapp/dataontap/performance/test_perf_base.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "17586629"
},
{
"name": "Shell",
"bytes": "8187"
}
],
"symlink_target": ""
}
|
import unittest
import os
from conans.paths import CONANFILE, CONAN_MANIFEST
from conans.util.files import save, load
from conans.model.ref import ConanFileReference
from conans.test.utils.cpp_test_files import cpp_hello_conan_files
from conans.model.manifest import FileTreeManifest
from conans.test.utils.tools import TestClient
import stat
from parameterized import parameterized
class ExportSettingsTest(unittest.TestCase):
def test_basic(self):
client = TestClient()
conanfile = """
from conans import ConanFile
class TestConan(ConanFile):
name = "Hello"
version = "1.2"
settings = {"os": ["Linux"]}
"""
files = {CONANFILE: conanfile}
client.save(files)
client.run("export . lasote/stable")
self.assertIn("WARN: Conanfile doesn't have 'license'", client.user_io.out)
client.run("install Hello/1.2@lasote/stable -s os=Windows", ignore_error=True)
self.assertIn("'Windows' is not a valid 'settings.os' value", client.user_io.out)
self.assertIn("Possible values are ['Linux']", client.user_io.out)
def export_without_full_reference_test(self):
client = TestClient()
client.save({"conanfile.py": """from conans import ConanFile
class MyPkg(ConanFile):
pass
"""})
error = client.run("export . lasote/stable", ignore_error=True)
self.assertTrue(error)
self.assertIn("conanfile didn't specify name", client.out)
client.save({"conanfile.py": """from conans import ConanFile
class MyPkg(ConanFile):
name="Lib"
"""})
error = client.run("export . lasote/stable", ignore_error=True)
self.assertTrue(error)
self.assertIn("conanfile didn't specify version", client.out)
client.save({"conanfile.py": """from conans import ConanFile
class MyPkg(ConanFile):
pass
"""})
client.run("export . lib/1.0@lasote/channel")
self.assertIn("lib/1.0@lasote/channel: A new conanfile.py version was exported",
client.out)
client.save({"conanfile.py": """from conans import ConanFile
class MyPkg(ConanFile):
name="Lib"
version="1.0"
"""})
error = client.run("export . lasote", ignore_error=True)
self.assertTrue(error)
self.assertIn("Invalid parameter 'lasote', specify the full reference or user/channel",
client.out)
def test_export_read_only(self):
client = TestClient()
conanfile = """
from conans import ConanFile
class TestConan(ConanFile):
name = "Hello"
version = "1.2"
exports = "file1.txt"
exports_sources = "file2.txt"
"""
ref = ConanFileReference.loads("Hello/1.2@lasote/stable")
export_path = client.client_cache.export(ref)
export_src_path = client.client_cache.export_sources(ref)
files = {CONANFILE: conanfile,
"file1.txt": "",
"file2.txt": ""}
client.save(files)
mode1 = os.stat(os.path.join(client.current_folder, "file1.txt")).st_mode
mode2 = os.stat(os.path.join(client.current_folder, "file2.txt")).st_mode
os.chmod(os.path.join(client.current_folder, "file1.txt"), mode1 &~ stat.S_IWRITE)
os.chmod(os.path.join(client.current_folder, "file2.txt"), mode2 &~ stat.S_IWRITE)
client.run("export . lasote/stable")
self.assertEqual(load(os.path.join(export_path, "file1.txt")), "")
self.assertEqual(load(os.path.join(export_src_path, "file2.txt")), "")
with self.assertRaises(IOError):
save(os.path.join(export_path, "file1.txt"), "")
with self.assertRaises(IOError):
save(os.path.join(export_src_path, "file2.txt"), "")
self.assertIn("WARN: Conanfile doesn't have 'license'", client.user_io.out)
files = {CONANFILE: conanfile,
"file1.txt": "file1",
"file2.txt": "file2"}
os.chmod(os.path.join(client.current_folder, "file1.txt"), mode1 | stat.S_IWRITE)
os.chmod(os.path.join(client.current_folder, "file2.txt"), mode2 | stat.S_IWRITE)
client.save(files)
client.run("export . lasote/stable")
self.assertEqual(load(os.path.join(export_path, "file1.txt")), "file1")
self.assertEqual(load(os.path.join(export_src_path, "file2.txt")), "file2")
client.run("install Hello/1.2@lasote/stable --build=missing")
self.assertIn("Hello/1.2@lasote/stable: Generating the package", client.out)
files = {CONANFILE: conanfile,
"file1.txt": "",
"file2.txt": ""}
client.save(files)
os.chmod(os.path.join(client.current_folder, "file1.txt"), mode1 &~ stat.S_IWRITE)
os.chmod(os.path.join(client.current_folder, "file2.txt"), mode2 &~ stat.S_IWRITE)
client.run("export . lasote/stable")
self.assertEqual(load(os.path.join(export_path, "file1.txt")), "")
self.assertEqual(load(os.path.join(export_src_path, "file2.txt")), "")
client.run("install Hello/1.2@lasote/stable --build=Hello")
self.assertIn("Hello/1.2@lasote/stable: Generating the package", client.out)
def test_code_parent(self):
""" when referencing the parent, the relative folder "sibling" will be kept
"""
base = """
from conans import ConanFile
class TestConan(ConanFile):
name = "Hello"
version = "1.2"
exports = "../*.txt"
"""
for conanfile in (base, base.replace("../*.txt", "../sibling*")):
client = TestClient()
files = {"recipe/conanfile.py": conanfile,
"sibling/file.txt": "Hello World!"}
client.save(files)
client.current_folder = os.path.join(client.current_folder, "recipe")
client.run("export . lasote/stable")
conan_ref = ConanFileReference("Hello", "1.2", "lasote", "stable")
export_path = client.paths.export(conan_ref)
content = load(os.path.join(export_path, "sibling/file.txt"))
self.assertEqual("Hello World!", content)
def test_code_sibling(self):
# if provided a path with slash, it will use as a export base
client = TestClient()
conanfile = """
from conans import ConanFile
class TestConan(ConanFile):
name = "Hello"
version = "1.2"
exports = "../sibling/*.txt"
"""
files = {"recipe/conanfile.py": conanfile,
"sibling/file.txt": "Hello World!"}
client.save(files)
client.current_folder = os.path.join(client.current_folder, "recipe")
client.run("export . lasote/stable")
conan_ref = ConanFileReference("Hello", "1.2", "lasote", "stable")
export_path = client.paths.export(conan_ref)
content = load(os.path.join(export_path, "file.txt"))
self.assertEqual("Hello World!", content)
def test_code_several_sibling(self):
# if provided a path with slash, it will use as a export base
client = TestClient()
conanfile = """
from conans import ConanFile
class TestConan(ConanFile):
name = "Hello"
version = "1.2"
exports_sources = "../test/src/*", "../cpp/*", "../include/*"
"""
files = {"recipe/conanfile.py": conanfile,
"test/src/file.txt": "Hello World!",
"cpp/file.cpp": "Hello World!",
"include/file.h": "Hello World!"}
client.save(files)
client.current_folder = os.path.join(client.current_folder, "recipe")
client.run("export . lasote/stable")
conan_ref = ConanFileReference("Hello", "1.2", "lasote", "stable")
export_path = client.paths.export_sources(conan_ref)
self.assertEqual(sorted(['file.txt', 'file.cpp', 'file.h']),
sorted(os.listdir(export_path)))
@parameterized.expand([("myconanfile.py", ), ("Conanfile.py", )])
def test_filename(self, filename):
client = TestClient()
conanfile = """
from conans import ConanFile
class TestConan(ConanFile):
name = "Hello"
version = "1.2"
"""
client.save({filename: conanfile})
client.run("export %s lasote/stable" % filename)
self.assertIn("Hello/1.2@lasote/stable: A new conanfile.py version was exported",
client.user_io.out)
conan_ref = ConanFileReference("Hello", "1.2", "lasote", "stable")
export_path = client.paths.export(conan_ref)
conanfile = load(os.path.join(export_path, "conanfile.py"))
self.assertIn('name = "Hello"', conanfile)
manifest = load(os.path.join(export_path, "conanmanifest.txt"))
self.assertIn('conanfile.py: cac514c81a0af0d87fa379b0bf16fbaa', manifest)
def test_exclude_basic(self):
client = TestClient()
conanfile = """
from conans import ConanFile
class TestConan(ConanFile):
name = "Hello"
version = "1.2"
exports = "*.txt", "!*file1.txt"
exports_sources = "*.cpp", "!*temp.cpp"
"""
client.save({CONANFILE: conanfile,
"file.txt": "",
"file1.txt": "",
"file.cpp": "",
"file_temp.cpp": ""})
client.run("export . lasote/stable")
conan_ref = ConanFileReference("Hello", "1.2", "lasote", "stable")
export_path = client.paths.export(conan_ref)
exports_sources_path = client.paths.export_sources(conan_ref)
self.assertTrue(os.path.exists(os.path.join(export_path, "file.txt")))
self.assertFalse(os.path.exists(os.path.join(export_path, "file1.txt")))
self.assertTrue(os.path.exists(os.path.join(exports_sources_path, "file.cpp")))
self.assertFalse(os.path.exists(os.path.join(exports_sources_path, "file_temp.cpp")))
def test_exclude_folders(self):
client = TestClient()
conanfile = """
from conans import ConanFile
class TestConan(ConanFile):
name = "Hello"
version = "1.2"
exports = "*.txt", "!*/temp/*"
"""
client.save({CONANFILE: conanfile,
"file.txt": "",
"any/temp/file1.txt": "",
"other/sub/file2.txt": ""})
client.run("export . lasote/stable")
conan_ref = ConanFileReference("Hello", "1.2", "lasote", "stable")
export_path = client.paths.export(conan_ref)
self.assertTrue(os.path.exists(os.path.join(export_path, "file.txt")))
self.assertFalse(os.path.exists(os.path.join(export_path, "any/temp/file1.txt")))
self.assertTrue(os.path.exists(os.path.join(export_path, "other/sub/file2.txt")))
class ExportTest(unittest.TestCase):
def setUp(self):
self.conan = TestClient()
self.files = cpp_hello_conan_files("Hello0", "0.1")
self.conan_ref = ConanFileReference("Hello0", "0.1", "lasote", "stable")
self.conan.save(self.files)
self.conan.run("export . lasote/stable")
def test_basic(self):
""" simple registration of a new conans
"""
reg_path = self.conan.paths.export(self.conan_ref)
manif = FileTreeManifest.load(self.conan.paths.export(self.conan_ref))
self.assertIn('%s: A new conanfile.py version was exported' % str(self.conan_ref),
self.conan.user_io.out)
self.assertIn('%s: Folder: %s' % (str(self.conan_ref), reg_path), self.conan.user_io.out)
self.assertTrue(os.path.exists(reg_path))
for name in list(self.files.keys()):
self.assertTrue(os.path.exists(os.path.join(reg_path, name)))
expected_sums = {'hello.cpp': '4f005274b2fdb25e6113b69774dac184',
'main.cpp': '0479f3c223c9a656a718f3148e044124',
'CMakeLists.txt': '52546396c42f16be3daf72ecf7ab7143',
'conanfile.py': '355949fbf0b4fc32b8f1c5a338dfe1ae',
'executable': '68b329da9893e34099c7d8ad5cb9c940',
'helloHello0.h': '9448df034392fc8781a47dd03ae71bdd'}
self.assertEqual(expected_sums, manif.file_sums)
def test_case_sensitive(self):
self.files = cpp_hello_conan_files("hello0", "0.1")
self.conan_ref = ConanFileReference("hello0", "0.1", "lasote", "stable")
self.conan.save(self.files)
error = self.conan.run("export . lasote/stable", ignore_error=True)
self.assertTrue(error)
self.assertIn("ERROR: Cannot export package with same name but different case",
self.conan.user_io.out)
def test_export_filter(self):
content = """
from conans import ConanFile
class OpenSSLConan(ConanFile):
name = "openssl"
version = "2.0.1"
"""
save(os.path.join(self.conan.current_folder, CONANFILE), content)
self.conan.run("export . lasote/stable")
reg_path = self.conan.paths.export(ConanFileReference.loads('openssl/2.0.1@lasote/stable'))
self.assertEqual(sorted(os.listdir(reg_path)),
[CONANFILE, CONAN_MANIFEST])
content = """
from conans import ConanFile
class OpenSSLConan(ConanFile):
name = "openssl"
version = "2.0.1"
exports = ('*.txt', '*.h')
"""
save(os.path.join(self.conan.current_folder, CONANFILE), content)
self.conan.run("export . lasote/stable")
reg_path = self.conan.paths.export(ConanFileReference.loads('openssl/2.0.1@lasote/stable'))
self.assertEqual(sorted(os.listdir(reg_path)),
['CMakeLists.txt', CONANFILE, CONAN_MANIFEST,
'helloHello0.h'])
# Now exports being a list instead a tuple
content = """
from conans import ConanFile
class OpenSSLConan(ConanFile):
name = "openssl"
version = "2.0.1"
exports = ['*.txt', '*.h']
"""
save(os.path.join(self.conan.current_folder, CONANFILE), content)
self.conan.run("export . lasote/stable")
reg_path = self.conan.paths.export(ConanFileReference.loads('openssl/2.0.1@lasote/stable'))
self.assertEqual(sorted(os.listdir(reg_path)),
['CMakeLists.txt', CONANFILE, CONAN_MANIFEST, 'helloHello0.h'])
def test_export_the_same_code(self):
file_list = self._create_packages_and_builds()
# Export the same conans
conan2 = TestClient(self.conan.base_folder)
files2 = cpp_hello_conan_files("Hello0", "0.1")
conan2.save(files2)
conan2.run("export . lasote/stable")
reg_path2 = conan2.paths.export(self.conan_ref)
digest2 = FileTreeManifest.load(conan2.paths.export(self.conan_ref))
self.assertNotIn('A new Conan version was exported', conan2.user_io.out)
self.assertNotIn('Cleaning the old builds ...', conan2.user_io.out)
self.assertNotIn('Cleaning the old packs ...', conan2.user_io.out)
self.assertNotIn('All the previous packs were cleaned', conan2.user_io.out)
self.assertIn('%s: A new conanfile.py version was exported' % str(self.conan_ref),
self.conan.user_io.out)
self.assertIn('%s: Folder: %s' % (str(self.conan_ref), reg_path2), self.conan.user_io.out)
self.assertTrue(os.path.exists(reg_path2))
for name in list(files2.keys()):
self.assertTrue(os.path.exists(os.path.join(reg_path2, name)))
expected_sums = {'hello.cpp': '4f005274b2fdb25e6113b69774dac184',
'main.cpp': '0479f3c223c9a656a718f3148e044124',
'CMakeLists.txt': '52546396c42f16be3daf72ecf7ab7143',
'conanfile.py': '355949fbf0b4fc32b8f1c5a338dfe1ae',
'executable': '68b329da9893e34099c7d8ad5cb9c940',
'helloHello0.h': '9448df034392fc8781a47dd03ae71bdd'}
self.assertEqual(expected_sums, digest2.file_sums)
for f in file_list:
self.assertTrue(os.path.exists(f))
def test_export_a_new_version(self):
self._create_packages_and_builds()
# Export an update of the same conans
conan2 = TestClient(self.conan.base_folder)
files2 = cpp_hello_conan_files("Hello0", "0.1")
files2[CONANFILE] = "# insert comment\n %s" % files2[CONANFILE]
conan2.save(files2)
conan2.run("export . lasote/stable")
reg_path3 = conan2.paths.export(self.conan_ref)
digest3 = FileTreeManifest.load(conan2.paths.export(self.conan_ref))
self.assertIn('%s: A new conanfile.py version was exported' % str(self.conan_ref),
self.conan.user_io.out)
self.assertIn('%s: Folder: %s' % (str(self.conan_ref), reg_path3), self.conan.user_io.out)
self.assertTrue(os.path.exists(reg_path3))
for name in list(files2.keys()):
self.assertTrue(os.path.exists(os.path.join(reg_path3, name)))
expected_sums = {'hello.cpp': '4f005274b2fdb25e6113b69774dac184',
'main.cpp': '0479f3c223c9a656a718f3148e044124',
'CMakeLists.txt': '52546396c42f16be3daf72ecf7ab7143',
'conanfile.py': 'ad17cf00b3142728b03ac37782b9acd9',
'executable': '68b329da9893e34099c7d8ad5cb9c940',
'helloHello0.h': '9448df034392fc8781a47dd03ae71bdd'}
self.assertEqual(expected_sums, digest3.file_sums)
# for f in file_list:
# self.assertFalse(os.path.exists(f))
def _create_packages_and_builds(self):
reg_builds = self.conan.paths.builds(self.conan_ref)
reg_packs = self.conan.paths.packages(self.conan_ref)
folders = [os.path.join(reg_builds, '342525g4f52f35f'),
os.path.join(reg_builds, 'ew9o8asdf908asdf80'),
os.path.join(reg_packs, '342525g4f52f35f'),
os.path.join(reg_packs, 'ew9o8asdf908asdf80')]
file_list = []
for f in folders:
for name, content in {'file1.h': 'asddfasdf', 'file1.dll': 'asddfasdf'}.items():
file_path = os.path.join(f, name)
save(file_path, content)
file_list.append(file_path)
return file_list
|
{
"content_hash": "2a5ac7d2b0291363e976857919217698",
"timestamp": "",
"source": "github",
"line_count": 422,
"max_line_length": 99,
"avg_line_length": 43.02132701421801,
"alnum_prop": 0.6109611677223905,
"repo_name": "birsoyo/conan",
"id": "f837466e7d88e73d96b0ac52376f8b4a75d3f906",
"size": "18155",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "conans/test/command/export_test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1100"
},
{
"name": "Groovy",
"bytes": "6251"
},
{
"name": "Python",
"bytes": "3101477"
},
{
"name": "Shell",
"bytes": "1864"
}
],
"symlink_target": ""
}
|
import mock
from rally.benchmark.context import secgroup
from tests.unit import fakes
from tests.unit import test
class SecGroupContextTestCase(test.TestCase):
def setUp(self):
super(SecGroupContextTestCase, self).setUp()
self.users = 2
task = {"uuid": "foo_task_id"}
self.secgroup_name = secgroup.SSH_GROUP_NAME + "_foo"
self.ctx_with_secgroup = {
"users": [
{
"tenant_id": "uuid1",
"endpoint": "endpoint",
"secgroup": {"id": "secgroup_id", "name": "secgroup"}
}
] * self.users,
"admin": {"tenant_id": "uuid2", "endpoint": "admin_endpoint"},
"tenants": {"uuid1": {"id": "uuid1", "name": "uuid1"}},
"task": task
}
self.ctx_without_secgroup = {
"users": [{"tenant_id": "uuid1",
"endpoint": "endpoint"},
{"tenant_id": "uuid1",
"endpoint": "endpoint"}],
"admin": {"tenant_id": "uuid2", "endpoint": "admin_endpoint"},
"tenants": {"uuid1": {"id": "uuid1", "name": "uuid1"}},
"task": task
}
@mock.patch("rally.benchmark.context.secgroup.osclients.Clients")
def test__prepare_open_secgroup(self, mock_osclients):
fake_nova = fakes.FakeNovaClient()
self.assertEqual(len(fake_nova.security_groups.list()), 1)
mock_cl = mock.MagicMock()
mock_cl.nova.return_value = fake_nova
mock_osclients.return_value = mock_cl
ret = secgroup._prepare_open_secgroup("endpoint", self.secgroup_name)
self.assertEqual(self.secgroup_name, ret["name"])
self.assertEqual(2, len(fake_nova.security_groups.list()))
self.assertIn(
self.secgroup_name,
[sg.name for sg in fake_nova.security_groups.list()])
# run prep again, check that another security group is not created
secgroup._prepare_open_secgroup("endpoint", self.secgroup_name)
self.assertEqual(2, len(fake_nova.security_groups.list()))
@mock.patch("rally.benchmark.context.secgroup.osclients.Clients")
def test__prepare_open_secgroup_rules(self, mock_osclients):
fake_nova = fakes.FakeNovaClient()
# NOTE(hughsaunders) Default security group is precreated
self.assertEqual(1, len(fake_nova.security_groups.list()))
mock_cl = mock.MagicMock()
mock_cl.nova.return_value = fake_nova
mock_osclients.return_value = mock_cl
secgroup._prepare_open_secgroup("endpoint", self.secgroup_name)
self.assertEqual(2, len(fake_nova.security_groups.list()))
rally_open = fake_nova.security_groups.find(self.secgroup_name)
self.assertEqual(3, len(rally_open.rules))
# run prep again, check that extra rules are not created
secgroup._prepare_open_secgroup("endpoint", self.secgroup_name)
rally_open = fake_nova.security_groups.find(self.secgroup_name)
self.assertEqual(3, len(rally_open.rules))
@mock.patch("rally.benchmark.context.secgroup.osclients.Clients")
@mock.patch("rally.benchmark.context.secgroup._prepare_open_secgroup")
@mock.patch("rally.benchmark.wrappers.network.wrap")
def test_secgroup_setup_cleanup_with_secgroup_supported(
self, mock_network_wrap, mock_prepare_open_secgroup,
mock_osclients):
mock_network_wrapper = mock.MagicMock()
mock_network_wrapper.supports_security_group.return_value = (
True, "")
mock_network_wrap.return_value = mock_network_wrapper
mock_prepare_open_secgroup.return_value = {
"name": "secgroup",
"id": "secgroup_id"}
mock_osclients.return_value = mock.MagicMock()
secgrp_ctx = secgroup.AllowSSH(self.ctx_without_secgroup)
secgrp_ctx.setup()
self.assertEqual(self.ctx_with_secgroup, secgrp_ctx.context)
secgrp_ctx.cleanup()
self.assertEqual(
[
mock.call("admin_endpoint"),
mock.call("endpoint"),
mock.call().nova(),
mock.call().nova().security_groups.get("secgroup_id"),
mock.call().nova().security_groups.get().delete()
],
mock_osclients.mock_calls)
mock_network_wrap.assert_called_once_with(
mock_osclients.return_value, {})
@mock.patch("rally.benchmark.context.secgroup.osclients.Clients")
@mock.patch("rally.benchmark.wrappers.network.wrap")
def test_secgroup_setup_with_secgroup_unsupported(self,
mock_network_wrap,
mock_osclients):
mock_network_wrapper = mock.MagicMock()
mock_network_wrapper.supports_security_group.return_value = (
False, "Not supported")
mock_network_wrap.return_value = mock_network_wrapper
mock_osclients.return_value = mock.MagicMock()
secgrp_ctx = secgroup.AllowSSH(dict(self.ctx_without_secgroup))
secgrp_ctx.setup()
self.assertEqual(self.ctx_without_secgroup, secgrp_ctx.context)
mock_osclients.assert_called_once_with("admin_endpoint")
mock_network_wrap.assert_called_once_with(
mock_osclients.return_value, {})
|
{
"content_hash": "6b625729f587924f514821e55f3cb198",
"timestamp": "",
"source": "github",
"line_count": 129,
"max_line_length": 77,
"avg_line_length": 41.93023255813954,
"alnum_prop": 0.5986319097799963,
"repo_name": "pandeyop/rally",
"id": "34805b2f5a11bfbda82059048ce0ef58bf9fccc6",
"size": "6039",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/unit/benchmark/context/test_secgroup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "46741"
},
{
"name": "Python",
"bytes": "2053831"
},
{
"name": "Shell",
"bytes": "18078"
}
],
"symlink_target": ""
}
|
"""Tests the filesystem backend store"""
import __builtin__
import errno
import hashlib
import json
import mock
import os
import stat
import StringIO
import uuid
import fixtures
from oslo_utils import units
import six
from glance_store._drivers.filesystem import ChunkedFile
from glance_store._drivers.filesystem import Store
from glance_store import exceptions
from glance_store import location
from glance_store.tests import base
from tests.unit import test_store_capabilities
KB = 1024
class TestStore(base.StoreBaseTest,
test_store_capabilities.TestStoreCapabilitiesChecking):
def setUp(self):
"""Establish a clean test environment."""
super(TestStore, self).setUp()
self.orig_chunksize = Store.READ_CHUNKSIZE
Store.READ_CHUNKSIZE = 10
self.store = Store(self.conf)
self.config(filesystem_store_datadir=self.test_dir,
group="glance_store")
self.store.configure()
def tearDown(self):
"""Clear the test environment."""
super(TestStore, self).tearDown()
ChunkedFile.CHUNKSIZE = self.orig_chunksize
def _create_metadata_json_file(self, metadata):
expected_image_id = str(uuid.uuid4())
jsonfilename = os.path.join(self.test_dir,
"storage_metadata.%s" % expected_image_id)
self.config(filesystem_store_metadata_file=jsonfilename,
group="glance_store")
with open(jsonfilename, 'w') as fptr:
json.dump(metadata, fptr)
def _store_image(self, in_metadata):
expected_image_id = str(uuid.uuid4())
expected_file_size = 10
expected_file_contents = "*" * expected_file_size
image_file = StringIO.StringIO(expected_file_contents)
self.store.FILESYSTEM_STORE_METADATA = in_metadata
return self.store.add(expected_image_id, image_file,
expected_file_size)
def test_get(self):
"""Test a "normal" retrieval of an image in chunks."""
# First add an image...
image_id = str(uuid.uuid4())
file_contents = "chunk00000remainder"
image_file = StringIO.StringIO(file_contents)
loc, size, checksum, _ = self.store.add(image_id,
image_file,
len(file_contents))
# Now read it back...
uri = "file:///%s/%s" % (self.test_dir, image_id)
loc = location.get_location_from_uri(uri, conf=self.conf)
(image_file, image_size) = self.store.get(loc)
expected_data = "chunk00000remainder"
expected_num_chunks = 2
data = ""
num_chunks = 0
for chunk in image_file:
num_chunks += 1
data += chunk
self.assertEqual(expected_data, data)
self.assertEqual(expected_num_chunks, num_chunks)
def test_get_random_access(self):
"""Test a "normal" retrieval of an image in chunks."""
# First add an image...
image_id = str(uuid.uuid4())
file_contents = "chunk00000remainder"
image_file = StringIO.StringIO(file_contents)
loc, size, checksum, _ = self.store.add(image_id,
image_file,
len(file_contents))
# Now read it back...
uri = "file:///%s/%s" % (self.test_dir, image_id)
loc = location.get_location_from_uri(uri, conf=self.conf)
data = ""
for offset in range(len(file_contents)):
(image_file, image_size) = self.store.get(loc,
offset=offset,
chunk_size=1)
for chunk in image_file:
data += chunk
self.assertEqual(data, file_contents)
data = ""
chunk_size = 5
(image_file, image_size) = self.store.get(loc,
offset=chunk_size,
chunk_size=chunk_size)
for chunk in image_file:
data += chunk
self.assertEqual(data, '00000')
self.assertEqual(image_size, chunk_size)
def test_get_non_existing(self):
"""
Test that trying to retrieve a file that doesn't exist
raises an error
"""
loc = location.get_location_from_uri(
"file:///%s/non-existing" % self.test_dir, conf=self.conf)
self.assertRaises(exceptions.NotFound,
self.store.get,
loc)
def test_add(self):
"""Test that we can add an image via the filesystem backend."""
ChunkedFile.CHUNKSIZE = 1024
expected_image_id = str(uuid.uuid4())
expected_file_size = 5 * KB # 5K
expected_file_contents = "*" * expected_file_size
expected_checksum = hashlib.md5(expected_file_contents).hexdigest()
expected_location = "file://%s/%s" % (self.test_dir,
expected_image_id)
image_file = StringIO.StringIO(expected_file_contents)
loc, size, checksum, _ = self.store.add(expected_image_id,
image_file,
expected_file_size)
self.assertEqual(expected_location, loc)
self.assertEqual(expected_file_size, size)
self.assertEqual(expected_checksum, checksum)
uri = "file:///%s/%s" % (self.test_dir, expected_image_id)
loc = location.get_location_from_uri(uri, conf=self.conf)
(new_image_file, new_image_size) = self.store.get(loc)
new_image_contents = ""
new_image_file_size = 0
for chunk in new_image_file:
new_image_file_size += len(chunk)
new_image_contents += chunk
self.assertEqual(expected_file_contents, new_image_contents)
self.assertEqual(expected_file_size, new_image_file_size)
def test_add_check_metadata_with_invalid_mountpoint_location(self):
in_metadata = [{'id': 'abcdefg',
'mountpoint': '/xyz/images'}]
location, size, checksum, metadata = self._store_image(in_metadata)
self.assertEqual({}, metadata)
def test_add_check_metadata_list_with_invalid_mountpoint_locations(self):
in_metadata = [{'id': 'abcdefg', 'mountpoint': '/xyz/images'},
{'id': 'xyz1234', 'mountpoint': '/pqr/images'}]
location, size, checksum, metadata = self._store_image(in_metadata)
self.assertEqual({}, metadata)
def test_add_check_metadata_list_with_valid_mountpoint_locations(self):
in_metadata = [{'id': 'abcdefg', 'mountpoint': '/tmp'},
{'id': 'xyz1234', 'mountpoint': '/xyz'}]
location, size, checksum, metadata = self._store_image(in_metadata)
self.assertEqual(in_metadata[0], metadata)
def test_add_check_metadata_bad_nosuch_file(self):
expected_image_id = str(uuid.uuid4())
jsonfilename = os.path.join(self.test_dir,
"storage_metadata.%s" % expected_image_id)
self.config(filesystem_store_metadata_file=jsonfilename,
group="glance_store")
expected_file_size = 10
expected_file_contents = "*" * expected_file_size
image_file = StringIO.StringIO(expected_file_contents)
location, size, checksum, metadata = self.store.add(expected_image_id,
image_file,
expected_file_size)
self.assertEqual(metadata, {})
def test_add_already_existing(self):
"""
Tests that adding an image with an existing identifier
raises an appropriate exception
"""
ChunkedFile.CHUNKSIZE = 1024
image_id = str(uuid.uuid4())
file_size = 5 * KB # 5K
file_contents = "*" * file_size
image_file = StringIO.StringIO(file_contents)
location, size, checksum, _ = self.store.add(image_id,
image_file,
file_size)
image_file = StringIO.StringIO("nevergonnamakeit")
self.assertRaises(exceptions.Duplicate,
self.store.add,
image_id, image_file, 0)
def _do_test_add_write_failure(self, errno, exception):
ChunkedFile.CHUNKSIZE = 1024
image_id = str(uuid.uuid4())
file_size = 5 * KB # 5K
file_contents = "*" * file_size
path = os.path.join(self.test_dir, image_id)
image_file = StringIO.StringIO(file_contents)
with mock.patch.object(__builtin__, 'open') as popen:
e = IOError()
e.errno = errno
popen.side_effect = e
self.assertRaises(exception,
self.store.add,
image_id, image_file, 0)
self.assertFalse(os.path.exists(path))
def test_add_storage_full(self):
"""
Tests that adding an image without enough space on disk
raises an appropriate exception
"""
self._do_test_add_write_failure(errno.ENOSPC, exceptions.StorageFull)
def test_add_file_too_big(self):
"""
Tests that adding an excessively large image file
raises an appropriate exception
"""
self._do_test_add_write_failure(errno.EFBIG, exceptions.StorageFull)
def test_add_storage_write_denied(self):
"""
Tests that adding an image with insufficient filestore permissions
raises an appropriate exception
"""
self._do_test_add_write_failure(errno.EACCES,
exceptions.StorageWriteDenied)
def test_add_other_failure(self):
"""
Tests that a non-space-related IOError does not raise a
StorageFull exceptions.
"""
self._do_test_add_write_failure(errno.ENOTDIR, IOError)
def test_add_cleanup_on_read_failure(self):
"""
Tests the partial image file is cleaned up after a read
failure.
"""
ChunkedFile.CHUNKSIZE = 1024
image_id = str(uuid.uuid4())
file_size = 5 * KB # 5K
file_contents = "*" * file_size
path = os.path.join(self.test_dir, image_id)
image_file = StringIO.StringIO(file_contents)
def fake_Error(size):
raise AttributeError()
with mock.patch.object(image_file, 'read') as mock_read:
mock_read.side_effect = fake_Error
self.assertRaises(AttributeError,
self.store.add,
image_id, image_file, 0)
self.assertFalse(os.path.exists(path))
def test_delete(self):
"""
Test we can delete an existing image in the filesystem store
"""
# First add an image
image_id = str(uuid.uuid4())
file_size = 5 * KB # 5K
file_contents = "*" * file_size
image_file = StringIO.StringIO(file_contents)
loc, size, checksum, _ = self.store.add(image_id,
image_file,
file_size)
# Now check that we can delete it
uri = "file:///%s/%s" % (self.test_dir, image_id)
loc = location.get_location_from_uri(uri, conf=self.conf)
self.store.delete(loc)
self.assertRaises(exceptions.NotFound, self.store.get, loc)
def test_delete_non_existing(self):
"""
Test that trying to delete a file that doesn't exist
raises an error
"""
loc = location.get_location_from_uri(
"file:///tmp/glance-tests/non-existing", conf=self.conf)
self.assertRaises(exceptions.NotFound,
self.store.delete,
loc)
def test_configure_add_with_multi_datadirs(self):
"""
Tests multiple filesystem specified by filesystem_store_datadirs
are parsed correctly.
"""
store_map = [self.useFixture(fixtures.TempDir()).path,
self.useFixture(fixtures.TempDir()).path]
self.conf.clear_override('filesystem_store_datadir',
group='glance_store')
self.conf.set_override('filesystem_store_datadirs',
[store_map[0] + ":100",
store_map[1] + ":200"],
group='glance_store')
self.store.configure_add()
expected_priority_map = {100: [store_map[0]], 200: [store_map[1]]}
expected_priority_list = [200, 100]
self.assertEqual(self.store.priority_data_map, expected_priority_map)
self.assertEqual(self.store.priority_list, expected_priority_list)
def test_configure_add_with_metadata_file_success(self):
metadata = {'id': 'asdf1234',
'mountpoint': '/tmp'}
self._create_metadata_json_file(metadata)
self.store.configure_add()
self.assertEqual([metadata], self.store.FILESYSTEM_STORE_METADATA)
def test_configure_add_check_metadata_list_of_dicts_success(self):
metadata = [{'id': 'abcdefg', 'mountpoint': '/xyz/images'},
{'id': 'xyz1234', 'mountpoint': '/tmp/'}]
self._create_metadata_json_file(metadata)
self.store.configure_add()
self.assertEqual(metadata, self.store.FILESYSTEM_STORE_METADATA)
def test_configure_add_check_metadata_success_list_val_for_some_key(self):
metadata = {'akey': ['value1', 'value2'], 'id': 'asdf1234',
'mountpoint': '/tmp'}
self._create_metadata_json_file(metadata)
self.store.configure_add()
self.assertEqual([metadata], self.store.FILESYSTEM_STORE_METADATA)
def test_configure_add_check_metadata_bad_data(self):
metadata = {'akey': 10, 'id': 'asdf1234',
'mountpoint': '/tmp'} # only unicode is allowed
self._create_metadata_json_file(metadata)
self.assertRaises(exceptions.BadStoreConfiguration,
self.store.configure_add)
def test_configure_add_check_metadata_with_no_id_or_mountpoint(self):
metadata = {'mountpoint': '/tmp'}
self._create_metadata_json_file(metadata)
self.assertRaises(exceptions.BadStoreConfiguration,
self.store.configure_add)
metadata = {'id': 'asdfg1234'}
self._create_metadata_json_file(metadata)
self.assertRaises(exceptions.BadStoreConfiguration,
self.store.configure_add)
def test_configure_add_check_metadata_id_or_mountpoint_is_not_string(self):
metadata = {'id': 10, 'mountpoint': '/tmp'}
self._create_metadata_json_file(metadata)
self.assertRaises(exceptions.BadStoreConfiguration,
self.store.configure_add)
metadata = {'id': 'asdf1234', 'mountpoint': 12345}
self._create_metadata_json_file(metadata)
self.assertRaises(exceptions.BadStoreConfiguration,
self.store.configure_add)
def test_configure_add_check_metadata_list_with_no_id_or_mountpoint(self):
metadata = [{'id': 'abcdefg', 'mountpoint': '/xyz/images'},
{'mountpoint': '/pqr/images'}]
self._create_metadata_json_file(metadata)
self.assertRaises(exceptions.BadStoreConfiguration,
self.store.configure_add)
metadata = [{'id': 'abcdefg'},
{'id': 'xyz1234', 'mountpoint': '/pqr/images'}]
self._create_metadata_json_file(metadata)
self.assertRaises(exceptions.BadStoreConfiguration,
self.store.configure_add)
def test_add_check_metadata_list_id_or_mountpoint_is_not_string(self):
metadata = [{'id': 'abcdefg', 'mountpoint': '/xyz/images'},
{'id': 1234, 'mountpoint': '/pqr/images'}]
self._create_metadata_json_file(metadata)
self.assertRaises(exceptions.BadStoreConfiguration,
self.store.configure_add)
metadata = [{'id': 'abcdefg', 'mountpoint': 1234},
{'id': 'xyz1234', 'mountpoint': '/pqr/images'}]
self._create_metadata_json_file(metadata)
self.assertRaises(exceptions.BadStoreConfiguration,
self.store.configure_add)
def test_configure_add_same_dir_multiple_times(self):
"""
Tests BadStoreConfiguration exception is raised if same directory
is specified multiple times in filesystem_store_datadirs.
"""
store_map = [self.useFixture(fixtures.TempDir()).path,
self.useFixture(fixtures.TempDir()).path]
self.conf.clear_override('filesystem_store_datadir',
group='glance_store')
self.conf.set_override('filesystem_store_datadirs',
[store_map[0] + ":100",
store_map[1] + ":200",
store_map[0] + ":300"],
group='glance_store')
self.assertRaises(exceptions.BadStoreConfiguration,
self.store.configure_add)
def test_add_with_multiple_dirs(self):
"""Test adding multiple filesystem directories."""
store_map = [self.useFixture(fixtures.TempDir()).path,
self.useFixture(fixtures.TempDir()).path]
self.conf.clear_override('filesystem_store_datadir',
group='glance_store')
self.conf.set_override('filesystem_store_datadirs',
[store_map[0] + ":100",
store_map[1] + ":200"],
group='glance_store')
self.store.configure()
"""Test that we can add an image via the filesystem backend"""
ChunkedFile.CHUNKSIZE = 1024
expected_image_id = str(uuid.uuid4())
expected_file_size = 5 * units.Ki # 5K
expected_file_contents = "*" * expected_file_size
expected_checksum = hashlib.md5(expected_file_contents).hexdigest()
expected_location = "file://%s/%s" % (store_map[1],
expected_image_id)
image_file = six.StringIO(expected_file_contents)
loc, size, checksum, _ = self.store.add(expected_image_id,
image_file,
expected_file_size)
self.assertEqual(expected_location, loc)
self.assertEqual(expected_file_size, size)
self.assertEqual(expected_checksum, checksum)
loc = location.get_location_from_uri(expected_location,
conf=self.conf)
(new_image_file, new_image_size) = self.store.get(loc)
new_image_contents = ""
new_image_file_size = 0
for chunk in new_image_file:
new_image_file_size += len(chunk)
new_image_contents += chunk
self.assertEqual(expected_file_contents, new_image_contents)
self.assertEqual(expected_file_size, new_image_file_size)
def test_add_with_multiple_dirs_storage_full(self):
"""
Test StorageFull exception is raised if no filesystem directory
is found that can store an image.
"""
store_map = [self.useFixture(fixtures.TempDir()).path,
self.useFixture(fixtures.TempDir()).path]
self.conf.clear_override('filesystem_store_datadir',
group='glance_store')
self.conf.set_override('filesystem_store_datadirs',
[store_map[0] + ":100",
store_map[1] + ":200"],
group='glance_store')
self.store.configure_add()
def fake_get_capacity_info(mount_point):
return 0
with mock.patch.object(self.store, '_get_capacity_info') as capacity:
capacity.return_value = 0
ChunkedFile.CHUNKSIZE = 1024
expected_image_id = str(uuid.uuid4())
expected_file_size = 5 * units.Ki # 5K
expected_file_contents = "*" * expected_file_size
image_file = six.StringIO(expected_file_contents)
self.assertRaises(exceptions.StorageFull, self.store.add,
expected_image_id, image_file,
expected_file_size)
def test_configure_add_with_file_perm(self):
"""
Tests filesystem specified by filesystem_store_file_perm
are parsed correctly.
"""
store = self.useFixture(fixtures.TempDir()).path
self.conf.set_override('filesystem_store_datadir', store,
group='glance_store')
self.conf.set_override('filesystem_store_file_perm', 700, # -rwx------
group='glance_store')
self.store.configure_add()
self.assertEqual(self.store.datadir, store)
def test_configure_add_with_unaccessible_file_perm(self):
"""
Tests BadStoreConfiguration exception is raised if an invalid
file permission specified in filesystem_store_file_perm.
"""
store = self.useFixture(fixtures.TempDir()).path
self.conf.set_override('filesystem_store_datadir', store,
group='glance_store')
self.conf.set_override('filesystem_store_file_perm', 7, # -------rwx
group='glance_store')
self.assertRaises(exceptions.BadStoreConfiguration,
self.store.configure_add)
def test_add_with_file_perm_for_group_other_users_access(self):
"""
Test that we can add an image via the filesystem backend with a
required image file permission.
"""
store = self.useFixture(fixtures.TempDir()).path
self.conf.set_override('filesystem_store_datadir', store,
group='glance_store')
self.conf.set_override('filesystem_store_file_perm', 744, # -rwxr--r--
group='glance_store')
# -rwx------
os.chmod(store, 0o700)
self.assertEqual(0o700, stat.S_IMODE(os.stat(store)[stat.ST_MODE]))
self.store.configure_add()
Store.WRITE_CHUNKSIZE = 1024
expected_image_id = str(uuid.uuid4())
expected_file_size = 5 * units.Ki # 5K
expected_file_contents = "*" * expected_file_size
expected_checksum = hashlib.md5(expected_file_contents).hexdigest()
expected_location = "file://%s/%s" % (store,
expected_image_id)
image_file = six.StringIO(expected_file_contents)
location, size, checksum, _ = self.store.add(expected_image_id,
image_file,
expected_file_size)
self.assertEqual(expected_location, location)
self.assertEqual(expected_file_size, size)
self.assertEqual(expected_checksum, checksum)
# -rwx--x--x for store directory
self.assertEqual(0o711, stat.S_IMODE(os.stat(store)[stat.ST_MODE]))
# -rwxr--r-- for image file
mode = os.stat(expected_location[len('file:/'):])[stat.ST_MODE]
perm = int(str(self.conf.glance_store.filesystem_store_file_perm), 8)
self.assertEqual(perm, stat.S_IMODE(mode))
def test_add_with_file_perm_for_owner_users_access(self):
"""
Test that we can add an image via the filesystem backend with a
required image file permission.
"""
store = self.useFixture(fixtures.TempDir()).path
self.conf.set_override('filesystem_store_datadir', store,
group='glance_store')
self.conf.set_override('filesystem_store_file_perm', 600, # -rw-------
group='glance_store')
# -rwx------
os.chmod(store, 0o700)
self.assertEqual(0o700, stat.S_IMODE(os.stat(store)[stat.ST_MODE]))
self.store.configure_add()
Store.WRITE_CHUNKSIZE = 1024
expected_image_id = str(uuid.uuid4())
expected_file_size = 5 * units.Ki # 5K
expected_file_contents = "*" * expected_file_size
expected_checksum = hashlib.md5(expected_file_contents).hexdigest()
expected_location = "file://%s/%s" % (store,
expected_image_id)
image_file = six.StringIO(expected_file_contents)
location, size, checksum, _ = self.store.add(expected_image_id,
image_file,
expected_file_size)
self.assertEqual(expected_location, location)
self.assertEqual(expected_file_size, size)
self.assertEqual(expected_checksum, checksum)
# -rwx------ for store directory
self.assertEqual(0o700, stat.S_IMODE(os.stat(store)[stat.ST_MODE]))
# -rw------- for image file
mode = os.stat(expected_location[len('file:/'):])[stat.ST_MODE]
perm = int(str(self.conf.glance_store.filesystem_store_file_perm), 8)
self.assertEqual(perm, stat.S_IMODE(mode))
|
{
"content_hash": "8ca6b7ab7f0938e896f5330633822393",
"timestamp": "",
"source": "github",
"line_count": 622,
"max_line_length": 79,
"avg_line_length": 41.58038585209003,
"alnum_prop": 0.5597958473494954,
"repo_name": "hmakkapati/glance_store",
"id": "f237a6057560b37dcf04851e8f4ae1643695d23d",
"size": "26499",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/unit/test_filesystem_store.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "995"
},
{
"name": "Python",
"bytes": "416221"
},
{
"name": "Shell",
"bytes": "6514"
}
],
"symlink_target": ""
}
|
import os
from contextlib import contextmanager
from copy import copy
import logging
from peyutil import (assure_dir_exists,
read_as_json,
write_as_json, )
from .ott_schema import HEADER_TO_LINE_PARSER
from .taxon import Taxon
from .tree import TaxonForest
from .util import unlink, OutFile
INP_TAXONOMY_DIRNAME = '__inputs__'
OUTP_TAXONOMY_DIRNAME = '__outputs__'
MISC_DIRNAME = '__misc__'
GEN_MAPPING_FILENAME = '__mapping__.json'
ROOTS_FILENAME = '__roots__.json'
TAXONOMY_FN = 'taxonomy.tsv'
SYNONYMS_FN = 'synonyms.tsv'
ACCUM_DES_FILENAME = '__accum_des__.json'
_LOG = logging.getLogger(__name__)
def get_taxonomies_for_dir(taxon_dir):
inps_dir = os.path.join(taxon_dir, INP_TAXONOMY_DIRNAME)
resource_ids = []
for name in os.listdir(inps_dir):
fp = os.path.join(inps_dir, name)
if os.path.isdir(fp) and os.path.isfile(os.path.join(fp, ROOTS_FILENAME)):
resource_ids.append(name)
return resource_ids
def get_roots_for_subset(tax_dir, misc_tax_dir):
return _read_json_and_coerce_to_otttaxon(tax_dir, misc_tax_dir, ROOTS_FILENAME)
def get_accum_des_for_subset(tax_dir, misc_tax_dir):
return _read_json_and_coerce_to_otttaxon(tax_dir, misc_tax_dir, ACCUM_DES_FILENAME)
def _read_json_and_coerce_to_otttaxon(tax_dir, misc_tax_dir, fn):
r = {}
for td in [tax_dir, misc_tax_dir]:
rf = os.path.join(td, fn)
if os.path.exists(rf):
rd = read_as_json(rf)
for k, v in rd.items():
try:
k = int(k)
except:
pass
r[k] = Taxon(d=v)
return r
# noinspection PyProtectedMember
class TaxonomySliceCache(object):
def __init__(self):
self._ck_to_obj = {}
def get(self, key):
assert isinstance(key, tuple) and len(key) == 3
return self._ck_to_obj.get(key)
def __setitem__(self, key, vttrs):
assert isinstance(key, tuple) and len(key) == 3
old_val = self._ck_to_obj.get(key)
if old_val is not None and old_val is not vttrs:
assert False, 'should not be creating a new object for a cached taxdi!'
self._ck_to_obj[key] = vttrs
def __getitem__(self, ck):
return self._ck_to_obj[ck]
def __delitem__(self, ck):
obj = self._ck_to_obj.get(ck)
if obj:
del self._ck_to_obj[ck]
obj._flush()
def try_del(self, ck):
try:
if ck in self._ck_to_obj:
self.__delitem__(ck)
except:
_LOG.exception("caught and suppressed removal of key")
if ck in self._ck_to_obj:
del self._ck_to_obj[ck]
def flush(self):
kv = [(k, v) for k, v in self._ck_to_obj.items()]
self._ck_to_obj = {}
_ex = None
for k, v in kv:
try:
v._flush()
except Exception as x:
_LOG.exception('exception in flushing')
_ex = x
if _ex is not None:
raise _ex
# noinspection PyMethodMayBeStatic
def get_taxon_partition(self, res, fragment):
return get_taxon_partition(res, fragment)
def clear_without_flush(self, ck):
if ck in self._ck_to_obj:
del self._ck_to_obj[ck]
TAX_SLICE_CACHE = TaxonomySliceCache()
@contextmanager
def use_tax_partitions():
yield TAX_SLICE_CACHE
TAX_SLICE_CACHE.flush()
class PartitionedTaxDirBase(object):
def __init__(self, res, fragment):
self.fragment = fragment
self.res = res
self.src_id = res.id
self.scaffold_dir = os.path.join(res.partitioned_filepath, fragment)
self.tax_fp_unpartitioned = res.get_taxon_filepath_for_part(fragment)
self.tax_fp_misc = res.get_misc_taxon_filepath_for_part(fragment)
self.tax_dir_unpartitioned = res.get_taxon_dir_for_part(fragment)
self.tax_dir_misc = res.get_misc_taxon_dir_for_part(fragment)
self.synonyms_filename = self.res.synonyms_filename
self.cache_key = (self.__class__, self.src_id, self.fragment)
assert TAX_SLICE_CACHE.get(self.cache_key) is None
TAX_SLICE_CACHE[self.cache_key] = self
@property
def input_taxdir(self):
return os.path.split(self.tax_fp)[0]
@property
def input_synonyms_filepath(self):
if self.synonyms_filename:
return os.path.join(self.input_taxdir, self.synonyms_filename)
return None
@property
def output_synonyms_filepath(self):
if not self.synonyms_filename:
return None
pd = self.tax_dir_misc if self._subdirname_to_tp_roots else self.tax_dir_unpartitioned
return os.path.join(pd, self.synonyms_filename)
def scaffold_tax_subdir_names(self):
"""Returns a list of subdirectory names for self.scaffold_dir with __misc__ suppressed"""
if not os.path.isdir(self.scaffold_dir):
return []
n = []
for x in os.listdir(self.scaffold_dir):
if x == INP_TAXONOMY_DIRNAME or x == MISC_DIRNAME:
continue
if os.path.isdir(os.path.join(self.scaffold_dir, x)):
n.append(x)
return n
class Synonym(object):
def __init__(self, valid_tax_id, name, syn_type=None, syn_id=None):
if syn_type is None:
syn_type = 'synonym'
self.valid_tax_id = valid_tax_id
self.name = name
self.syn_type = syn_type
self.syn_id = syn_id
def to_serializable_dict(self):
d = {'valid_tax_id': self.valid_tax_id,
'name': self.name
}
sis = ', syn_id={}'.format(self.syn_id)
if self.syn_id:
d['synonym_id'] = self.syn_id
if self.syn_type != 'synonym':
d['synonym'] = self.syn_type
return d
def __repr__(self):
sis = ', syn_id={}'.format(self.syn_id) if self.syn_id else ''
tis = ', syn_type={}'.format(repr(self.syn_type)) if self.syn_type != 'synonym' else ''
m = 'Synonym({}, {}{}{})'
return m.format(self.valid_tax_id, repr(self.name), tis, sis)
def __str__(self):
return self.__repr__()
def __hash__(self):
return hash((self.syn_id, self.valid_tax_id, self.name, self.syn_type))
def __eq__(self, other):
if isinstance(other, Synonym):
return self.valid_tax_id == other.valid_tax_id \
and self.name == other.name \
and self.syn_type == other.syn_type \
and self.syn_id == self.syn_id
_VALID_SYN_TYPES = {'acronym', 'ambiguous synonym',
'authority', 'basionym',
'blast name',
'common name',
'equivalent name',
'genbank common name',
'genbank synonym',
'homotypic synonym', 'heterotypic synonym',
'includes', 'misapplied name', 'invalid',
'misnomer',
'misspelling',
'orthographia',
'proparte synonym',
'synonym',
'type material', 'unavailable',
}
IGNORE_SYN_TYPES = {'acronym',
'authority',
'blast name',
'common name',
'genbank common name',
'genbank synonym',
'type material',
}
IGNORE_COMMON_NAME_SYN_TYPES = {'common name', 'genbank common name', }
class SynonymInterpreter(object):
def __init__(self, header):
if header.endswith('\n'):
header = header[:-1]
self.fields = [i.strip() for i in header.split('\t|\t') if i.strip()]
assert 'uid' in self.fields
self._uid_ind = self.fields.index('uid')
self._name_ind = self.fields.index('name')
self._type_ind = self.fields.index('type')
def interpret(self, uid, syn_id_line_tuple):
syn_id, line = syn_id_line_tuple
sl = line.split('\t|\t')
suid = sl[self._uid_ind]
name = sl[self._name_ind].strip()
syn_type = sl[self._type_ind].strip().lower()
if syn_type not in _VALID_SYN_TYPES:
m = 'synonym_type "{}" not recognized in for ({}, "{}")'
raise ValueError(m.format(syn_type, uid, name))
assert uid == int(suid)
return Synonym(valid_tax_id=uid, name=name, syn_type=syn_type, syn_id=syn_id)
# noinspection PyProtectedMember
class LightTaxonomyHolder(object):
_DATT = ['_des_in_other_slices',
'_id_order',
'_id_to_child_set',
'_id_to_el',
'_id_to_line',
'_syn_by_id',
'_roots',
]
def __init__(self, fragment):
self.fragment = fragment
self._id_order = []
self._id_to_line = {} # id -> line
self._id_to_child_set = {} # id -> set of child IDs
self._id_to_el = {}
self._roots = {}
self._des_in_other_slices = {}
self._syn_by_id = {} # accepted_id -> list of synonym lines
self._parsed_syn_by_id = None
self.taxon_header = None
self.syn_header = None
self.treat_syn_as_taxa = False
self._populated = False
self._has_unread_tax_inp = False
self._has_moved_taxa = False # true when taxa have been moved to another partition
@property
def synonyms_by_id(self):
return copy(self._syn_by_id)
def parsed_synonyms_by_id(self, ignored_syn_types=None):
if self._parsed_syn_by_id is None:
p = {}
if self._syn_by_id:
si = SynonymInterpreter(self.syn_header)
for uid, line_stub_list in self._syn_by_id.items():
ps = set()
for i in line_stub_list:
syn = si.interpret(uid, i)
st = syn.syn_type
if ignored_syn_types is None or st not in ignored_syn_types:
ps.add(syn)
if ps:
p[uid] = ps
self._parsed_syn_by_id = p
return copy(self._parsed_syn_by_id)
def _del_data(self):
for el in LightTaxonomyHolder._DATT:
setattr(self, el, None)
self._populated = False
def add_taxon(self, uid, par_id, line):
old = self._id_to_line.get(uid)
assert old is None or old == line
self._id_to_line[uid] = line
self._id_order.append(uid)
self._id_to_child_set.setdefault(par_id, set()).add(uid)
add_taxon_from_higher_tax_part = add_taxon
def contained_ids(self):
c = set()
if self._id_to_child_set:
c.update(self._id_to_child_set.keys())
if self._id_to_line:
c.update(self._id_to_line.keys())
return c
def _add_root(self, uid, taxon):
self._roots[uid] = taxon.to_serializable_dict()
def line_to_taxon(self, line=None, uid=None):
if line is None:
line = self._id_to_line[uid]
return Taxon(line, line_parser=HEADER_TO_LINE_PARSER[self.taxon_header])
def _transfer_line(self, uid, dest_part,
as_root=False): # type (int, LightTaxonomyHolder, bool) -> None
line = self._id_to_line[uid]
taxon = self.line_to_taxon(line)
if as_root:
dest_part._add_root(uid, taxon)
d = taxon.to_serializable_dict()
d['fragment'] = dest_part.fragment
self._des_in_other_slices[uid] = d
dest_part._id_to_line[uid] = line
del self._id_to_line[uid]
def _transfer_subtree(self, par_id, dest_part,
as_root=False): # type (int, LightTaxonomyHolder) -> None
self._has_moved_taxa = True
taxon = self.line_to_taxon(uid=par_id)
if as_root:
dest_part._add_root(par_id, taxon)
d = taxon.to_serializable_dict()
d['fragment'] = dest_part.fragment
self._des_in_other_slices[par_id] = d
self._transfer_subtree_rec(par_id, dest_part)
def _transfer_subtree_rec(self, par_id, dest_part): # type (int, LightTaxonomyHolder) -> None
assert self is not dest_part
assert self.fragment != dest_part.fragment
child_set = self._id_to_child_set[par_id]
self._id_to_el[par_id] = dest_part
line = self._id_to_line.get(par_id)
if line is not None:
dest_part._id_to_line[par_id] = line
del self._id_to_line[par_id]
del self._id_to_child_set[par_id]
dest_part._id_to_child_set.setdefault(par_id, set()).update(child_set)
for child_id in child_set:
self._id_to_el[child_id] = dest_part
if child_id in self._id_to_child_set:
self._transfer_subtree_rec(child_id, dest_part)
else:
line = self._id_to_line.get(child_id)
if line:
dest_part.add_taxon(child_id, par_id, line)
del self._id_to_line[child_id]
def move_matched_synonyms(self, dest_tax_part): # type: (PartitioningLightTaxHolder) -> None
sk = set(self._syn_by_id.keys())
sk.intersection_update(dest_tax_part.contained_ids())
for s in sk:
sd = self._syn_by_id[s]
for pair in sd:
dest_tax_part.add_synonym(s, pair[0], pair[1])
del self._syn_by_id[s]
def move_from_self_to_new_part(self, other): # type: (PartitioningLightTaxHolder) -> None
self._has_moved_taxa = True
if other._has_unread_tax_inp:
other._read_inputs(False)
cids = set(self._id_to_child_set.keys())
lth_frag_to_root_id_set = {}
for root_id, dest_tax_part in other._root_to_lth.items():
lth_frag_to_root_id_set.setdefault(dest_tax_part.fragment, set()).add(root_id)
for dest_tax_part in other._root_to_lth.values():
tpids = set(dest_tax_part.contained_ids())
tpids.update(lth_frag_to_root_id_set[dest_tax_part.fragment])
common = cids.intersection(tpids)
if common:
if dest_tax_part._has_unread_tax_inp:
dest_tax_part._read_inputs(False)
for com_id in common:
if com_id in self._id_to_child_set:
m = "Transferring {} from {} to {}"
_LOG.info(m.format(com_id, self.fragment, dest_tax_part.fragment))
self._transfer_subtree(com_id, dest_tax_part)
self.move_matched_synonyms(dest_tax_part)
dest_tax_part._populated = True
cids = set(self._id_to_child_set.keys())
def add_synonym(self, accept_id, syn_id, line):
if self.treat_syn_as_taxa:
# CoL uses the taxonomy file for synonyms.
assert syn_id is not None
self.add_taxon(syn_id, None, line)
else:
self._syn_by_id.setdefault(accept_id, []).append((syn_id, line))
# noinspection PyProtectedMember
class PartitioningLightTaxHolder(LightTaxonomyHolder):
def __init__(self, fragment):
ls = fragment.split('/')
if len(ls) > 1:
assert ls[-2] != ls[-1]
LightTaxonomyHolder.__init__(self, fragment)
self._subdirname_to_tp_roots = {}
self._misc_part = LightTaxonomyHolder(os.path.join(fragment, MISC_DIRNAME))
self._roots_for_sub = set()
self._root_to_lth = {}
self._during_parse_root_to_par = {}
def read_taxon_line(self, uid, par_id, line):
if par_id:
try:
par_id = int(par_id)
except:
pass
self._id_to_child_set.setdefault(par_id, set()).add(uid)
if uid in self._id_to_line:
raise ValueError("Repeated uid {} in line {}".format(uid, line))
self._id_to_line[uid] = line
if uid in self._roots_for_sub:
self._during_parse_root_to_par[uid] = par_id
def sub_tax_parts(self, include_misc=True):
ret = [i for i in self._root_to_lth.values()]
if include_misc:
# noinspection PyTypeChecker
ret.append(self._misc_part)
return ret
def _finish_partition_after_parse(self):
"""On entry _id_to_el will be set for the root elements (and some of their
children), but taxa processed before their ancestors may have been missed.
_id_to_child_list and _id_to_line are only filled for these
"""
for tp in self.sub_tax_parts(include_misc=False):
if tp._has_unread_tax_inp:
tp._read_inputs(False)
for uid, par_id in self._during_parse_root_to_par.items():
match_el = self._root_to_lth[uid]
if uid in self._id_to_child_set:
self._transfer_subtree(uid, match_el, as_root=True)
elif uid in self._id_to_line:
self._transfer_line(uid, match_el, as_root=True)
pc = self._id_to_child_set.get(par_id)
if pc:
pc.remove(uid)
self._id_to_el[uid] = match_el
assert not self._misc_part._id_to_child_set
assert not self._misc_part._id_to_line
assert not self._misc_part._id_to_el
# Move all data to misc, but make a copy of th id to el that we'l
self._move_data_to_empty_misc()
# _partition_synonyms that have now moved to the misc part
to_del = set()
for accept_id, i_l_list in self._misc_part._syn_by_id.items():
match_el = self._misc_part._id_to_el.get(accept_id)
if match_el is not None:
for syn_id, line in i_l_list:
match_el.add_synonym(accept_id, syn_id, line)
to_del.add(accept_id)
for i in to_del:
del self._misc_part._syn_by_id[i]
def _read_inputs(self, do_part_if_reading=True):
raise NotImplementedError("_read_input pure virtual in PartitioningLightTaxHolder")
def move_from_misc_to_new_part(self, other):
self._has_moved_taxa = True
if not self._populated:
self._read_inputs()
if not self._misc_part._populated:
self._move_data_to_empty_misc()
return self._misc_part.move_from_self_to_new_part(other)
def _move_data_to_empty_misc(self):
assert not self._misc_part._populated
m = self._misc_part
for a in LightTaxonomyHolder._DATT:
setattr(m, a, getattr(self, a))
setattr(self, a, None)
self._copy_shared_fields(m)
m._populated = True
def _copy_shared_fields(self, other):
other.taxon_header = self.taxon_header
other.syn_header = self.syn_header
other.treat_syn_as_taxa = self.treat_syn_as_taxa
# noinspection PyProtectedMember
class TaxonPartition(PartitionedTaxDirBase, PartitioningLightTaxHolder):
def __init__(self, res, fragment):
PartitioningLightTaxHolder.__init__(self, fragment)
PartitionedTaxDirBase.__init__(self, res, fragment)
self.treat_syn_as_taxa = self.synonyms_filename is None
self._read_from_fs = False
self._read_from_partitioning_scratch = False
self._read_from_misc = False
self._fs_is_partitioned = None
self._has_flushed = False
self._external_inp_fp = None
@property
def write_taxon_header(self):
from taxalotl.ott_schema import INP_FLAGGED_OTT_TAXONOMY_HEADER, FULL_OTT_HEADER
from taxalotl.parsing.ott import OTTaxonomyWrapper
return FULL_OTT_HEADER if isinstance(self.res, OTTaxonomyWrapper) else INP_FLAGGED_OTT_TAXONOMY_HEADER
@property
def external_input_fp(self):
return self._external_inp_fp
@external_input_fp.setter
def external_input_fp(self, value):
self._external_inp_fp = value
def _diagnose_state_of_fs(self):
if os.path.exists(self.tax_fp_misc):
self._fs_is_partitioned = True
elif os.path.exists(self.tax_fp_unpartitioned):
self._fs_is_partitioned = False
else:
self._fs_is_unpartitioned = None
def taxa_files_exist_for_a_frag(self, frag):
if os.path.exists(self.res.get_taxon_filepath_for_part(frag)):
return True
return os.path.exists(self.res.get_misc_taxon_dir_for_part(frag))
def do_partition(self, list_of_subdirname_and_roots):
if self._subdirname_to_tp_roots:
raise ValueError("do_partition called twice for {}".format(self.fragment))
if not self._populated:
self._diagnose_state_of_fs()
if (self._fs_is_partitioned is None) and (not self._external_inp_fp):
m = "Taxa files not found for {} and TaxonPartition is empty"
_LOG.info(m.format(self.fragment))
cur_sub_names = self.scaffold_tax_subdir_names()
do_part_if_reading = True
having_inp_to_read = set()
if cur_sub_names:
req_fulfilled = True
some_part_found = False
for x in list_of_subdirname_and_roots:
subname = x[0]
if subname in cur_sub_names:
subfrag = os.path.join(self.fragment, subname)
if self.taxa_files_exist_for_a_frag(subfrag):
_LOG.info("previous content for {}".format(subfrag))
some_part_found = True
having_inp_to_read.add(subname)
else:
_LOG.warning("no previous taxonomic content for {}".format(subfrag))
req_fulfilled = False
else:
_LOG.warning("no previous subdir for {}".format(subname))
req_fulfilled = False
if some_part_found:
do_part_if_reading = False
quant = 'All' if req_fulfilled else 'Some'
m = "{} subdir partitions found for {}. No more partitioning will be done!"
_LOG.warning(m.format(quant, self.fragment))
for subname, subroot in list_of_subdirname_and_roots:
subfrag = os.path.join(self.fragment, subname)
subtp = get_taxon_partition(self.res, subfrag)
if subname in having_inp_to_read:
subtp._has_unread_tax_inp = True
self._roots_for_sub.update(subroot)
for r in subroot:
self._root_to_lth[r] = subtp
self._subdirname_to_tp_roots[subname] = (subtp, subroot)
if self._populated:
self._partition_from_in_mem()
else:
self._read_inputs(do_part_if_reading)
def _partition_from_in_mem(self):
_LOG.info("_partition_from_in_mem for fragment \"{}\"".format(self.fragment))
if self._misc_part._populated:
m = "_partition_from_in_mem called for {}, but misc already has {}"
raise ValueError(m.format(self.fragment, self.contained_ids()))
self._has_moved_taxa = True
for sub_tp, subroot in self._subdirname_to_tp_roots.values():
if sub_tp._has_unread_tax_inp:
sub_tp._read_inputs(False)
x = self._id_to_child_set
for r in subroot:
if r in x:
self._transfer_subtree(r, sub_tp, as_root=True)
elif r in self._id_to_line:
self._transfer_line(r, sub_tp, as_root=True)
else:
pass
self.move_matched_synonyms(sub_tp)
self._copy_shared_fields(sub_tp)
sub_tp._populated = True
self._move_data_to_empty_misc()
def _debug_validity_check(self):
self.read_inputs_for_read_only()
_LOG.debug('{} roots = {}'.format(self.fragment, self._roots))
id_to_par = {}
errs = []
warnings = []
for par_id, cs in self._id_to_child_set.items():
for c in cs:
id_to_par[c] = par_id
known_id_set = set(self._id_to_line.keys())
roots_set = set(self._roots.keys())
for uid in self._id_to_line.keys():
par_id = id_to_par.get(uid)
if not par_id:
if uid not in self._roots:
m = 'ID {} does not have a parent in this slice, but is not listed in the roots'
if self.fragment == 'Life':
warnings.append(m.format(uid))
else:
errs.append(m.format(uid))
roots_set.add(uid)
_LOG.debug('{} elements in self._id_to_line'.format(len(self._id_to_line)))
for uid in self._syn_by_id.keys():
if uid not in self._id_to_line:
m = 'synonyms ID for {}, in syn_by_id but not in id_to_line'.format(uid)
errs.append(m)
known_id_set.update(self._roots.keys())
for k in self._roots.keys():
if k not in self._id_to_line:
m = 'root ID {}, but roots not in id_to_line'.format(k)
errs.append(m)
for k, v in self._des_in_other_slices.items():
if k in self._id_to_line:
m = 'ID {} flagged as being in another slice, but it is still in id_to_line'.format(
k)
errs.append(m)
if v.get('par_id'):
if v.get('par_id') not in self._id_to_line:
m = 'slice does not hold parent {} of id {} which is flagged as being in another slice'
m = m.format(v.get('par_id'), k)
warnings.append(m)
if warnings:
m = '{} warning(s): {}'.format(len(warnings), '\n'.join(warnings))
_LOG.warning(m)
if errs:
m = '{} error(s): {}'.format(len(errs), '\n'.join(errs))
raise ValueError(m)
return roots_set, known_id_set
def _debug_check_subtree_ids(self, root_id_set, all_id_set):
self.read_inputs_for_read_only()
self_ids_set = set()
to_deal_with = set(root_id_set)
unrecognized_set = set()
while to_deal_with:
ntdw = set()
for r in to_deal_with:
cs = self._id_to_child_set.get(r)
if cs is None:
if r not in self._id_to_line:
unrecognized_set.add(r)
elif cs:
self_ids_set.add(r)
self_ids_set.update(cs)
ntdw.update(cs)
to_deal_with = ntdw
missed_ids = self_ids_set - all_id_set
extra_ids = all_id_set - self_ids_set
errs = []
# _LOG.debug('root_id_set = {}'.format(root_id_set))
# _LOG.debug('len(self._id_to_child_set) = {} len(self_ids_set) = {} '.format(
# len(self._id_to_child_set), len(self_ids_set)))
if unrecognized_set:
x = list(unrecognized_set)
x.sort()
mind = len(x) if len(x) < 100 else 100
m = 'IDs not known to {} read from {}: {}'
errs.append(m.format(self.fragment, self.tax_fp, x[:mind]))
if missed_ids:
x = list(missed_ids)
x.sort()
mind = len(x) if len(x) < 100 else 100
m = 'IDs expected in subtree according to {} read from {}, but not found: {}'
errs.append(m.format(self.fragment, self.tax_fp, x[:mind]))
if extra_ids:
x = list(extra_ids)
x.sort()
mind = len(x) if len(x) < 100 else 100
m = 'IDs included in subtree, but not expected by {} read from {}: {}'
errs.append(m.format(self.fragment, self.tax_fp, x[:mind]))
if errs:
m = '{} error(s): {}'.format(len(errs), '\n'.join(errs))
raise ValueError(m)
def read_inputs_for_read_only(self):
# Only to be used for accessors
if not self._read_from_fs:
assert not self._populated
self._read_inputs(do_part_if_reading=False)
def get_root_ids(self):
return set(self._roots.keys())
def get_id_to_ott_taxon(self):
id_to_obj = {}
lp = HEADER_TO_LINE_PARSER[self.taxon_header]
for line in self._id_to_line.values():
obj = Taxon(line, line_parser=lp)
oid = obj.id
assert oid not in id_to_obj
id_to_obj[oid] = obj
return id_to_obj
def get_taxa_as_forest(self):
return TaxonForest(id_to_taxon=self.get_id_to_ott_taxon(), taxon_partition=self)
def active_tax_dir(self):
if self._populated:
return os.path.split(self.tax_fp)[0]
raise NotImplementedError('active_tax_dir on unpopulated')
def _read_inputs(self, do_part_if_reading=True):
self._has_unread_tax_inp = False
if self._external_inp_fp:
self._read_from_partitioning_scratch = True
self.tax_fp = self._external_inp_fp
else:
if os.path.exists(self.tax_fp_misc):
self._read_from_partitioning_scratch = True
self.tax_fp = self.tax_fp_misc
self._read_from_misc = True
else:
self._read_from_partitioning_scratch = True
self.tax_fp = self.tax_fp_unpartitioned
self._read_from_misc = False
try:
self.res.partition_parsing_fn(self)
read_roots = self._read_roots()
self._roots.update(read_roots)
self._des_in_other_slices.update(self.read_acccumulated_des())
self._read_from_fs = True
if do_part_if_reading:
self._has_moved_taxa = True
self._finish_partition_after_parse()
for el in self.sub_tax_parts():
self._copy_shared_fields(el)
el._populated = True
self._populated = True
except:
self._read_from_fs = False
self._read_from_misc = None
self._read_from_partitioning_scratch = False
raise
def _read_roots(self):
return get_roots_for_subset(self.tax_dir_unpartitioned, self.tax_dir_misc)
def read_acccumulated_des(self):
return get_accum_des_for_subset(self.tax_dir_unpartitioned, self.tax_dir_misc)
def _flush(self):
if self._has_flushed:
_LOG.info("duplicate flush of TaxonPartition for {} ignored.".format(self.fragment))
return
if not self._has_moved_taxa:
if self._read_from_fs:
_LOG.info("Flush of unaltered TaxonPartition for {} ignored".format(self.fragment))
return
_LOG.info("flushing TaxonPartition for {}".format(self.fragment))
self.write_if_needed()
if self._read_from_misc is False and self._read_from_partitioning_scratch:
tr = [self.tax_fp_unpartitioned]
if self.output_synonyms_filepath:
tr.append(self.output_synonyms_filepath)
tr.append(os.path.join(self.tax_dir_unpartitioned, ACCUM_DES_FILENAME))
for f in tr:
if os.path.exists(f):
try:
unlink(f)
except:
_LOG.exception("could not remove {}".format(f))
self._has_flushed = True
TAX_SLICE_CACHE.try_del(self.cache_key)
self._del_data()
def write_if_needed(self):
if not self._populated:
_LOG.info("write not needed for {} not populated".format(self.fragment))
return False
if self._subdirname_to_tp_roots:
# _LOG.debug("write from misc for {}".format(self.fragment))
dh = self._misc_part
dest = self.tax_fp_misc
out_dir = self.tax_dir_misc
else:
# _LOG.debug("write from self for {}".format(self.fragment))
dh = self
dest = self.tax_fp_unpartitioned
out_dir = self.tax_dir_unpartitioned
roots_file = os.path.join(out_dir, ROOTS_FILENAME)
if not dh._id_to_line:
_LOG.debug("write not needed for {} no records".format(self.fragment))
syn_id_order = []
else:
syn_id_order = _write_d_as_tsv(self.write_taxon_header, dh._id_to_line, dh._id_order, dest)
if not dh._roots:
_LOG.debug('No root ids need to be written to "{}"'.format(roots_file))
else:
_LOG.debug('Writing {} root_ids to "{}"'.format(len(dh._roots), roots_file))
write_taxon_json(dh._roots, roots_file)
syndest = self.output_synonyms_filepath
if syndest is not None:
_write_syn_d_as_tsv(self.syn_header, dh._syn_by_id, syn_id_order, syndest)
if dh._des_in_other_slices:
write_taxon_json(dh._des_in_other_slices, os.path.join(out_dir, ACCUM_DES_FILENAME))
return True
def write_taxon_json(obj, filepath):
out_dir = os.path.split(filepath)[0]
if out_dir:
assure_dir_exists(out_dir)
dtw = {}
for k, v in obj.items():
if isinstance(v, Taxon):
dtw[k] = v.to_serializable_dict()
else:
dtw[k] = v
with OutFile(filepath) as outs:
write_as_json(dtw, outs, indent=1)
def get_taxon_partition(res, fragment):
ck = (TaxonPartition, res.id, fragment)
c = TAX_SLICE_CACHE.get(ck)
if c is not None:
return c
return TaxonPartition(res, fragment)
def _write_d_as_tsv(header, dict_to_write, id_order, dest_path):
if not dict_to_write:
return
ret = []
pd = os.path.split(dest_path)[0]
assure_dir_exists(pd)
_LOG.info('Writing {} tax records to "{}"'.format(len(dict_to_write), dest_path))
with OutFile(dest_path) as outp:
outp.write(header)
for i in id_order:
el = dict_to_write.get(i)
if el is not None:
ret.append(i)
outp.write(el)
oset = frozenset(ret)
for key, line in dict_to_write.items():
if key not in oset:
ret.append(key)
outp.write(line)
return ret
def _write_syn_d_as_tsv(header, dict_to_write, id_order, dest_path):
ltw = []
for i in id_order:
synlist = dict_to_write.get(i)
if synlist is not None:
for p in synlist:
ltw.append(p[1])
oset = frozenset(id_order)
if dict_to_write:
for key, synlist in dict_to_write.items():
if key not in oset:
for syn_pair in synlist:
ltw.append(syn_pair[1])
if not ltw:
return
x = len(ltw)
pd = os.path.split(dest_path)[0]
assure_dir_exists(pd)
_LOG.info('Writing {} syn. records to "{}"'.format(x, dest_path))
with OutFile(dest_path) as outp:
outp.write(header)
for line in ltw:
outp.write(line)
|
{
"content_hash": "045130b1d330276408d10851066e8ee2",
"timestamp": "",
"source": "github",
"line_count": 913,
"max_line_length": 110,
"avg_line_length": 38.606790799561885,
"alnum_prop": 0.548031093962778,
"repo_name": "mtholder/taxalotl",
"id": "3775f4332c5b5590a2c3fdfbb398b6e76594245c",
"size": "35270",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "taxalotl/tax_partition.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "380257"
},
{
"name": "Shell",
"bytes": "1597"
}
],
"symlink_target": ""
}
|
import collections
import sys
import mock
from neutron_lib import constants
from oslo_config import cfg
from neutron.agent.linux import bridge_lib
from neutron.agent.linux import ip_lib
from neutron.agent.linux import utils
from neutron.common import exceptions
from neutron.plugins.common import constants as p_const
from neutron.plugins.ml2.drivers.agent import _agent_manager_base as amb
from neutron.plugins.ml2.drivers.linuxbridge.agent.common \
import constants as lconst
from neutron.plugins.ml2.drivers.linuxbridge.agent \
import linuxbridge_neutron_agent
from neutron.tests import base
LOCAL_IP = '192.168.0.33'
LOCAL_IPV6 = '2001:db8:1::33'
VXLAN_GROUPV6 = 'ff05::/120'
PORT_1 = 'abcdef01-12ddssdfds-fdsfsd'
DEVICE_1 = 'tapabcdef01-12'
NETWORK_ID = '57653b20-ed5b-4ed0-a31d-06f84e3fd909'
BRIDGE_MAPPING_VALUE = 'br-eth2'
BRIDGE_MAPPINGS = {'physnet0': BRIDGE_MAPPING_VALUE}
INTERFACE_MAPPINGS = {'physnet1': 'eth1'}
FAKE_DEFAULT_DEV = mock.Mock()
FAKE_DEFAULT_DEV.name = 'eth1'
PORT_DATA = {
"port_id": PORT_1,
"device": DEVICE_1
}
class FakeIpLinkCommand(object):
def set_up(self):
pass
class FakeIpDevice(object):
def __init__(self):
self.link = FakeIpLinkCommand()
def disable_ipv6(self):
pass
def get_linuxbridge_manager(bridge_mappings, interface_mappings):
with mock.patch.object(ip_lib.IPWrapper, 'get_device_by_ip',
return_value=FAKE_DEFAULT_DEV),\
mock.patch.object(ip_lib, 'device_exists', return_value=True),\
mock.patch.object(linuxbridge_neutron_agent.LinuxBridgeManager,
'check_vxlan_support'):
cfg.CONF.set_override('local_ip', LOCAL_IP, 'VXLAN')
return linuxbridge_neutron_agent.LinuxBridgeManager(
bridge_mappings, interface_mappings)
class TestLinuxBridge(base.BaseTestCase):
def setUp(self):
super(TestLinuxBridge, self).setUp()
self.linux_bridge = get_linuxbridge_manager(
BRIDGE_MAPPINGS, INTERFACE_MAPPINGS)
def test_ensure_physical_in_bridge_invalid(self):
result = self.linux_bridge.ensure_physical_in_bridge('network_id',
p_const.TYPE_VLAN,
'physnetx',
7)
self.assertFalse(result)
def test_ensure_physical_in_bridge_flat(self):
with mock.patch.object(self.linux_bridge,
'ensure_flat_bridge') as flat_bridge_func:
self.linux_bridge.ensure_physical_in_bridge(
'network_id', p_const.TYPE_FLAT, 'physnet1', None)
self.assertTrue(flat_bridge_func.called)
def test_ensure_physical_in_bridge_vlan(self):
with mock.patch.object(self.linux_bridge,
'ensure_vlan_bridge') as vlan_bridge_func:
self.linux_bridge.ensure_physical_in_bridge(
'network_id', p_const.TYPE_VLAN, 'physnet1', 7)
self.assertTrue(vlan_bridge_func.called)
def test_ensure_physical_in_bridge_vxlan(self):
self.linux_bridge.vxlan_mode = lconst.VXLAN_UCAST
with mock.patch.object(self.linux_bridge,
'ensure_vxlan_bridge') as vxlan_bridge_func:
self.linux_bridge.ensure_physical_in_bridge(
'network_id', 'vxlan', 'physnet1', 7)
self.assertTrue(vxlan_bridge_func.called)
class TestLinuxBridgeManager(base.BaseTestCase):
def setUp(self):
super(TestLinuxBridgeManager, self).setUp()
self.lbm = get_linuxbridge_manager(
BRIDGE_MAPPINGS, INTERFACE_MAPPINGS)
def test_local_ip_validation_with_valid_ip(self):
with mock.patch.object(ip_lib.IPWrapper,
'get_device_by_ip',
return_value=FAKE_DEFAULT_DEV):
self.lbm.local_ip = LOCAL_IP
result = self.lbm.get_local_ip_device()
self.assertEqual(FAKE_DEFAULT_DEV, result)
def test_local_ip_validation_with_invalid_ip(self):
with mock.patch.object(ip_lib.IPWrapper,
'get_device_by_ip',
return_value=None),\
mock.patch.object(sys, 'exit') as exit,\
mock.patch.object(linuxbridge_neutron_agent.LOG,
'error') as log:
self.lbm.local_ip = LOCAL_IP
self.lbm.get_local_ip_device()
self.assertEqual(1, log.call_count)
exit.assert_called_once_with(1)
def _test_vxlan_group_validation(self, bad_local_ip, bad_vxlan_group):
with mock.patch.object(ip_lib.IPWrapper,
'get_device_by_ip',
return_value=FAKE_DEFAULT_DEV),\
mock.patch.object(sys, 'exit') as exit,\
mock.patch.object(linuxbridge_neutron_agent.LOG,
'error') as log:
self.lbm.local_ip = bad_local_ip
cfg.CONF.set_override('vxlan_group', bad_vxlan_group, 'VXLAN')
self.lbm.validate_vxlan_group_with_local_ip()
self.assertEqual(1, log.call_count)
exit.assert_called_once_with(1)
def test_vxlan_group_validation_with_mismatched_local_ip(self):
self._test_vxlan_group_validation(LOCAL_IP, VXLAN_GROUPV6)
def test_vxlan_group_validation_with_unicast_group(self):
self._test_vxlan_group_validation(LOCAL_IP, '240.0.0.0')
def test_vxlan_group_validation_with_invalid_cidr(self):
self._test_vxlan_group_validation(LOCAL_IP, '224.0.0.1/')
def test_vxlan_group_validation_with_v6_unicast_group(self):
self._test_vxlan_group_validation(LOCAL_IPV6, '2001:db8::')
def test_get_existing_bridge_name(self):
phy_net = 'physnet0'
self.assertEqual('br-eth2',
self.lbm.get_existing_bridge_name(phy_net))
phy_net = ''
self.assertIsNone(self.lbm.get_existing_bridge_name(phy_net))
def test_get_bridge_name(self):
nw_id = "123456789101112"
self.assertEqual("brq" + nw_id[0:11],
self.lbm.get_bridge_name(nw_id))
nw_id = ""
self.assertEqual("brq", self.lbm.get_bridge_name(nw_id))
def test_get_subinterface_name_backwards_compatibility(self):
self.assertEqual("abcdefghijklm.1",
self.lbm.get_subinterface_name("abcdefghijklm", "1"))
self.assertEqual("abcdefghijkl.11",
self.lbm.get_subinterface_name("abcdefghijkl", "11"))
self.assertEqual("abcdefghij.1111",
self.lbm.get_subinterface_name("abcdefghij",
"1111"))
def test_get_subinterface_name_advanced(self):
"""Ensure the same hash is used for long interface names.
If the generated vlan device name would be too long, make sure that
everything before the '.' is equal. This might be helpful when
debugging problems.
"""
max_device_name = "abcdefghijklmno"
vlan_dev_name1 = self.lbm.get_subinterface_name(max_device_name, "1")
vlan_dev_name2 = self.lbm.get_subinterface_name(max_device_name,
"1111")
self.assertEqual(vlan_dev_name1.partition(".")[0],
vlan_dev_name2.partition(".")[0])
def test_get_tap_device_name(self):
if_id = "123456789101112"
self.assertEqual(constants.TAP_DEVICE_PREFIX + if_id[0:11],
self.lbm.get_tap_device_name(if_id))
if_id = ""
self.assertEqual(constants.TAP_DEVICE_PREFIX,
self.lbm.get_tap_device_name(if_id))
def test_get_vxlan_device_name(self):
vn_id = p_const.MAX_VXLAN_VNI
self.assertEqual("vxlan-" + str(vn_id),
self.lbm.get_vxlan_device_name(vn_id))
self.assertIsNone(self.lbm.get_vxlan_device_name(vn_id + 1))
def test_get_vxlan_group(self):
cfg.CONF.set_override('vxlan_group', '239.1.2.3/24', 'VXLAN')
vn_id = p_const.MAX_VXLAN_VNI
self.assertEqual('239.1.2.255', self.lbm.get_vxlan_group(vn_id))
vn_id = 256
self.assertEqual('239.1.2.0', self.lbm.get_vxlan_group(vn_id))
vn_id = 257
self.assertEqual('239.1.2.1', self.lbm.get_vxlan_group(vn_id))
def test_get_vxlan_group_with_ipv6(self):
cfg.CONF.set_override('local_ip', LOCAL_IPV6, 'VXLAN')
self.lbm.local_ip = LOCAL_IPV6
cfg.CONF.set_override('vxlan_group', VXLAN_GROUPV6, 'VXLAN')
vn_id = p_const.MAX_VXLAN_VNI
self.assertEqual('ff05::ff', self.lbm.get_vxlan_group(vn_id))
vn_id = 256
self.assertEqual('ff05::', self.lbm.get_vxlan_group(vn_id))
vn_id = 257
self.assertEqual('ff05::1', self.lbm.get_vxlan_group(vn_id))
def test_get_deletable_bridges(self):
br_list = ["br-int", "brq1", "brq2", "brq-user"]
expected = set(br_list[1:3])
lbm = get_linuxbridge_manager(
bridge_mappings={"physnet0": "brq-user"}, interface_mappings={})
with mock.patch.object(
bridge_lib, 'get_bridge_names', return_value=br_list):
self.assertEqual(expected, lbm.get_deletable_bridges())
def test_get_tap_devices_count(self):
with mock.patch.object(
bridge_lib.BridgeDevice, 'get_interfaces') as get_ifs_fn:
get_ifs_fn.return_value = ['tap2101', 'eth0.100', 'vxlan-1000']
self.assertEqual(1, self.lbm.get_tap_devices_count('br0'))
def test_get_interface_details(self):
with mock.patch.object(ip_lib.IpAddrCommand, 'list') as list_fn,\
mock.patch.object(ip_lib.IpRouteCommand,
'get_gateway') as getgw_fn:
gwdict = dict(gateway='1.1.1.1')
getgw_fn.return_value = gwdict
ipdict = dict(cidr='1.1.1.1/24',
broadcast='1.1.1.255',
scope='global',
ip_version=4,
dynamic=False)
list_fn.return_value = ipdict
ret = self.lbm.get_interface_details("eth0")
self.assertTrue(list_fn.called)
self.assertTrue(getgw_fn.called)
self.assertEqual(ret, (ipdict, gwdict))
def test_ensure_flat_bridge(self):
with mock.patch.object(ip_lib.IpAddrCommand, 'list') as list_fn,\
mock.patch.object(ip_lib.IpRouteCommand,
'get_gateway') as getgw_fn:
gwdict = dict(gateway='1.1.1.1')
getgw_fn.return_value = gwdict
ipdict = dict(cidr='1.1.1.1/24',
broadcast='1.1.1.255',
scope='global',
ip_version=4,
dynamic=False)
list_fn.return_value = ipdict
with mock.patch.object(self.lbm, 'ensure_bridge') as ens:
self.assertEqual(
"eth0",
self.lbm.ensure_flat_bridge("123", None, "eth0"))
self.assertTrue(list_fn.called)
self.assertTrue(getgw_fn.called)
ens.assert_called_once_with("brq123", "eth0",
ipdict, gwdict)
def test_ensure_flat_bridge_with_existed_brq(self):
with mock.patch.object(self.lbm, 'ensure_bridge') as ens:
ens.return_value = "br-eth2"
self.assertEqual("br-eth2",
self.lbm.ensure_flat_bridge("123",
"br-eth2",
None))
ens.assert_called_with("br-eth2")
def test_ensure_vlan_bridge(self):
with mock.patch.object(self.lbm, 'ensure_vlan') as ens_vl_fn,\
mock.patch.object(self.lbm, 'ensure_bridge') as ens,\
mock.patch.object(self.lbm,
'get_interface_details') as get_int_det_fn:
ens_vl_fn.return_value = "eth0.1"
get_int_det_fn.return_value = (None, None)
self.assertEqual("eth0.1",
self.lbm.ensure_vlan_bridge("123",
None,
"eth0",
"1"))
ens.assert_called_with("brq123", "eth0.1", None, None)
get_int_det_fn.return_value = ("ips", "gateway")
self.assertEqual("eth0.1",
self.lbm.ensure_vlan_bridge("123",
None,
"eth0",
"1"))
ens.assert_called_with("brq123", "eth0.1", "ips", "gateway")
def test_ensure_vlan_bridge_with_existed_brq(self):
with mock.patch.object(self.lbm, 'ensure_vlan') as ens_vl_fn,\
mock.patch.object(self.lbm, 'ensure_bridge') as ens:
ens_vl_fn.return_value = None
ens.return_value = "br-eth2"
self.assertEqual("br-eth2",
self.lbm.ensure_vlan_bridge("123",
"br-eth2",
None,
None))
ens.assert_called_with("br-eth2")
def test_ensure_local_bridge(self):
with mock.patch.object(self.lbm, 'ensure_bridge') as ens_fn:
self.lbm.ensure_local_bridge("54321", None)
ens_fn.assert_called_once_with("brq54321")
def test_ensure_local_bridge_with_existed_brq(self):
with mock.patch.object(self.lbm, 'ensure_bridge') as ens_fn:
ens_fn.return_value = "br-eth2"
self.lbm.ensure_local_bridge("54321", 'br-eth2')
ens_fn.assert_called_once_with("br-eth2")
def test_ensure_vlan(self):
with mock.patch.object(ip_lib, 'device_exists') as de_fn:
de_fn.return_value = True
self.assertEqual("eth0.1", self.lbm.ensure_vlan("eth0", "1"))
de_fn.return_value = False
vlan_dev = FakeIpDevice()
with mock.patch.object(vlan_dev, 'disable_ipv6') as dv6_fn,\
mock.patch.object(self.lbm.ip, 'add_vlan',
return_value=vlan_dev) as add_vlan_fn:
retval = self.lbm.ensure_vlan("eth0", "1")
self.assertEqual("eth0.1", retval)
add_vlan_fn.assert_called_with('eth0.1', 'eth0', '1')
dv6_fn.assert_called_once_with()
def test_ensure_vxlan(self, expected_proxy=False):
seg_id = "12345678"
self.lbm.local_int = 'eth0'
self.lbm.vxlan_mode = lconst.VXLAN_MCAST
with mock.patch.object(ip_lib, 'device_exists') as de_fn:
de_fn.return_value = True
self.assertEqual("vxlan-" + seg_id, self.lbm.ensure_vxlan(seg_id))
de_fn.return_value = False
vxlan_dev = FakeIpDevice()
with mock.patch.object(vxlan_dev, 'disable_ipv6') as dv6_fn,\
mock.patch.object(self.lbm.ip, 'add_vxlan',
return_value=vxlan_dev) as add_vxlan_fn:
retval = self.lbm.ensure_vxlan(seg_id)
self.assertEqual("vxlan-" + seg_id, retval)
add_vxlan_fn.assert_called_with("vxlan-" + seg_id, seg_id,
group="224.0.0.1",
dev=self.lbm.local_int)
dv6_fn.assert_called_once_with()
cfg.CONF.set_override('l2_population', 'True', 'VXLAN')
self.assertEqual("vxlan-" + seg_id,
self.lbm.ensure_vxlan(seg_id))
add_vxlan_fn.assert_called_with("vxlan-" + seg_id, seg_id,
group="224.0.0.1",
dev=self.lbm.local_int,
proxy=expected_proxy)
def test_ensure_vxlan_arp_responder_enabled(self):
cfg.CONF.set_override('arp_responder', True, 'VXLAN')
self.test_ensure_vxlan(expected_proxy=True)
def test_update_interface_ip_details(self):
gwdict = dict(gateway='1.1.1.1',
metric=50)
ipdict = dict(cidr='1.1.1.1/24',
broadcast='1.1.1.255',
scope='global',
ip_version=4,
dynamic=False)
with mock.patch.object(ip_lib.IpAddrCommand, 'add') as add_fn,\
mock.patch.object(ip_lib.IpAddrCommand, 'delete') as del_fn:
self.lbm.update_interface_ip_details("br0", "eth0",
[ipdict], None)
self.assertTrue(add_fn.called)
self.assertTrue(del_fn.called)
with mock.patch.object(ip_lib.IpRouteCommand,
'add_gateway') as addgw_fn,\
mock.patch.object(ip_lib.IpRouteCommand,
'delete_gateway') as delgw_fn:
self.lbm.update_interface_ip_details("br0", "eth0",
None, gwdict)
self.assertTrue(addgw_fn.called)
self.assertTrue(delgw_fn.called)
def test_bridge_exists_and_ensure_up(self):
ip_lib_mock = mock.Mock()
with mock.patch.object(ip_lib, 'IPDevice', return_value=ip_lib_mock):
# device exists
self.assertTrue(self.lbm._bridge_exists_and_ensure_up("br0"))
self.assertTrue(ip_lib_mock.link.set_up.called)
# device doesn't exists
ip_lib_mock.link.set_up.side_effect = RuntimeError
self.assertFalse(self.lbm._bridge_exists_and_ensure_up("br0"))
def test_ensure_bridge(self):
bridge_device = mock.Mock()
bridge_device_old = mock.Mock()
with mock.patch.object(self.lbm,
'_bridge_exists_and_ensure_up') as de_fn,\
mock.patch.object(bridge_lib, "BridgeDevice",
return_value=bridge_device) as br_fn,\
mock.patch.object(self.lbm,
'update_interface_ip_details') as upd_fn,\
mock.patch.object(bridge_lib, 'is_bridged_interface'),\
mock.patch.object(bridge_lib.BridgeDevice,
'get_interface_bridge') as get_if_br_fn:
de_fn.return_value = False
br_fn.addbr.return_value = bridge_device
bridge_device.setfd.return_value = False
bridge_device.disable_stp.return_value = False
bridge_device.disable_ipv6.return_value = False
bridge_device.link.set_up.return_value = False
self.assertEqual("br0", self.lbm.ensure_bridge("br0", None))
bridge_device.owns_interface.return_value = False
self.lbm.ensure_bridge("br0", "eth0")
upd_fn.assert_called_with("br0", "eth0", None, None)
bridge_device.owns_interface.assert_called_with("eth0")
self.lbm.ensure_bridge("br0", "eth0", "ips", "gateway")
upd_fn.assert_called_with("br0", "eth0", "ips", "gateway")
bridge_device.owns_interface.assert_called_with("eth0")
de_fn.return_value = True
bridge_device.delif.side_effect = Exception()
self.lbm.ensure_bridge("br0", "eth0")
bridge_device.owns_interface.assert_called_with("eth0")
de_fn.return_value = True
bridge_device.owns_interface.return_value = False
get_if_br_fn.return_value = bridge_device_old
bridge_device.addif.reset_mock()
self.lbm.ensure_bridge("br0", "eth0")
bridge_device_old.delif.assert_called_once_with('eth0')
bridge_device.addif.assert_called_once_with('eth0')
def test_ensure_physical_in_bridge(self):
self.assertFalse(
self.lbm.ensure_physical_in_bridge("123", p_const.TYPE_VLAN,
"phys", "1")
)
with mock.patch.object(self.lbm, "ensure_flat_bridge") as flbr_fn:
self.assertTrue(
self.lbm.ensure_physical_in_bridge("123", p_const.TYPE_FLAT,
"physnet1", None)
)
self.assertTrue(flbr_fn.called)
with mock.patch.object(self.lbm, "ensure_vlan_bridge") as vlbr_fn:
self.assertTrue(
self.lbm.ensure_physical_in_bridge("123", p_const.TYPE_VLAN,
"physnet1", "1")
)
self.assertTrue(vlbr_fn.called)
with mock.patch.object(self.lbm, "ensure_vxlan_bridge") as vlbr_fn:
self.lbm.vxlan_mode = lconst.VXLAN_MCAST
self.assertTrue(
self.lbm.ensure_physical_in_bridge("123", p_const.TYPE_VXLAN,
"physnet1", "1")
)
self.assertTrue(vlbr_fn.called)
def test_ensure_physical_in_bridge_with_existed_brq(self):
with mock.patch.object(linuxbridge_neutron_agent.LOG, 'error') as log:
self.lbm.ensure_physical_in_bridge("123", p_const.TYPE_FLAT,
"physnet9", "1")
self.assertEqual(1, log.call_count)
@mock.patch.object(ip_lib, "device_exists", return_value=False)
def test_add_tap_interface_with_interface_disappearing(self, exists):
with mock.patch.object(self.lbm, "_add_tap_interface",
side_effect=RuntimeError("No such dev")):
self.assertFalse(self.lbm.add_tap_interface("123",
p_const.TYPE_VLAN,
"physnet1", None,
"tap1", "foo"))
@mock.patch.object(ip_lib, "device_exists", return_value=True)
def test_add_tap_interface_with_other_error(self, exists):
with mock.patch.object(self.lbm, "_add_tap_interface",
side_effect=RuntimeError("No more fuel")):
self.assertRaises(RuntimeError, self.lbm.add_tap_interface, "123",
p_const.TYPE_VLAN, "physnet1", None, "tap1",
"foo")
def test_add_tap_interface_owner_other(self):
with mock.patch.object(ip_lib, "device_exists"):
with mock.patch.object(self.lbm, "ensure_local_bridge"):
self.assertTrue(self.lbm.add_tap_interface("123",
p_const.TYPE_LOCAL,
"physnet1", None,
"tap1", "foo"))
def _test_add_tap_interface(self, dev_owner_prefix):
with mock.patch.object(ip_lib, "device_exists") as de_fn:
de_fn.return_value = False
self.assertFalse(
self.lbm.add_tap_interface("123", p_const.TYPE_VLAN,
"physnet1", "1", "tap1",
dev_owner_prefix))
de_fn.return_value = True
bridge_device = mock.Mock()
with mock.patch.object(self.lbm, "ensure_local_bridge") as en_fn,\
mock.patch.object(bridge_lib, "BridgeDevice",
return_value=bridge_device), \
mock.patch.object(bridge_lib.BridgeDevice,
"get_interface_bridge") as get_br:
bridge_device.addif.retun_value = False
get_br.return_value = True
self.assertTrue(self.lbm.add_tap_interface("123",
p_const.TYPE_LOCAL,
"physnet1", None,
"tap1",
dev_owner_prefix))
en_fn.assert_called_with("123", "brq123")
self.lbm.bridge_mappings = {"physnet1": "brq999"}
self.assertTrue(self.lbm.add_tap_interface("123",
p_const.TYPE_LOCAL,
"physnet1", None,
"tap1",
dev_owner_prefix))
en_fn.assert_called_with("123", "brq999")
get_br.return_value = False
bridge_device.addif.retun_value = True
self.assertFalse(self.lbm.add_tap_interface("123",
p_const.TYPE_LOCAL,
"physnet1", None,
"tap1",
dev_owner_prefix))
with mock.patch.object(self.lbm,
"ensure_physical_in_bridge") as ens_fn,\
mock.patch.object(self.lbm,
"ensure_tap_mtu") as en_mtu_fn,\
mock.patch.object(bridge_lib.BridgeDevice,
"get_interface_bridge") as get_br:
ens_fn.return_value = False
self.assertFalse(self.lbm.add_tap_interface("123",
p_const.TYPE_VLAN,
"physnet1", "1",
"tap1",
dev_owner_prefix))
ens_fn.return_value = "eth0.1"
get_br.return_value = "brq123"
self.lbm.add_tap_interface("123", p_const.TYPE_VLAN,
"physnet1", "1", "tap1",
dev_owner_prefix)
en_mtu_fn.assert_called_once_with("tap1", "eth0.1")
bridge_device.addif.assert_called_once_with("tap1")
def test_add_tap_interface_owner_network(self):
self._test_add_tap_interface(constants.DEVICE_OWNER_NETWORK_PREFIX)
def test_add_tap_interface_owner_neutron(self):
self._test_add_tap_interface(constants.DEVICE_OWNER_NEUTRON_PREFIX)
def test_plug_interface(self):
segment = amb.NetworkSegment(p_const.TYPE_VLAN, "physnet-1", "1")
with mock.patch.object(self.lbm, "add_tap_interface") as add_tap:
self.lbm.plug_interface("123", segment, "tap234",
constants.DEVICE_OWNER_NETWORK_PREFIX)
add_tap.assert_called_with("123", p_const.TYPE_VLAN, "physnet-1",
"1", "tap234",
constants.DEVICE_OWNER_NETWORK_PREFIX)
def test_delete_bridge(self):
with mock.patch.object(ip_lib.IPDevice, "exists") as de_fn,\
mock.patch.object(ip_lib, "IpLinkCommand") as link_cmd,\
mock.patch.object(bridge_lib.BridgeDevice,
"get_interfaces") as getif_fn,\
mock.patch.object(self.lbm, "remove_interface"),\
mock.patch.object(self.lbm,
"get_interface_details") as if_det_fn,\
mock.patch.object(self.lbm,
"update_interface_ip_details") as updif_fn,\
mock.patch.object(self.lbm, "delete_interface") as delif_fn:
de_fn.return_value = False
self.lbm.delete_bridge("br0")
self.assertFalse(getif_fn.called)
de_fn.return_value = True
getif_fn.return_value = ["eth0", "eth1", "vxlan-1002"]
if_det_fn.return_value = ("ips", "gateway")
link_cmd.set_down.return_value = False
self.lbm.delete_bridge("br0")
updif_fn.assert_called_with("eth1", "br0", "ips", "gateway")
delif_fn.assert_called_with("vxlan-1002")
def test_delete_bridge_not_exist(self):
self.lbm.interface_mappings.update({})
bridge_device = mock.Mock()
with mock.patch.object(bridge_lib, "BridgeDevice",
return_value=bridge_device):
bridge_device.exists.side_effect = [True, False]
bridge_device.get_interfaces.return_value = []
bridge_device.link.set_down.side_effect = RuntimeError
self.lbm.delete_bridge("br0")
self.assertEqual(2, bridge_device.exists.call_count)
bridge_device.exists.side_effect = [True, True]
self.assertRaises(RuntimeError, self.lbm.delete_bridge, "br0")
def test_delete_bridge_with_ip(self):
bridge_device = mock.Mock()
with mock.patch.object(ip_lib, "device_exists") as de_fn,\
mock.patch.object(self.lbm, "remove_interface"),\
mock.patch.object(self.lbm,
"get_interface_details") as if_det_fn,\
mock.patch.object(self.lbm,
"update_interface_ip_details") as updif_fn,\
mock.patch.object(self.lbm, "delete_interface") as del_interface,\
mock.patch.object(bridge_lib, "BridgeDevice",
return_value=bridge_device):
de_fn.return_value = True
bridge_device.get_interfaces.return_value = ["eth0", "eth1.1"]
if_det_fn.return_value = ("ips", "gateway")
bridge_device.link.set_down.return_value = False
self.lbm.delete_bridge("br0")
updif_fn.assert_called_with("eth1.1", "br0", "ips", "gateway")
self.assertFalse(del_interface.called)
def test_delete_bridge_no_ip(self):
bridge_device = mock.Mock()
with mock.patch.object(ip_lib, "device_exists") as de_fn,\
mock.patch.object(self.lbm, "remove_interface"),\
mock.patch.object(self.lbm,
"get_interface_details") as if_det_fn,\
mock.patch.object(self.lbm,
"update_interface_ip_details") as updif_fn,\
mock.patch.object(self.lbm, "delete_interface") as del_interface,\
mock.patch.object(bridge_lib, "BridgeDevice",
return_value=bridge_device):
de_fn.return_value = True
bridge_device.get_interfaces.return_value = ["eth0", "eth1.1"]
bridge_device.link.set_down.return_value = False
if_det_fn.return_value = ([], None)
self.lbm.delete_bridge("br0")
del_interface.assert_called_with("eth1.1")
self.assertFalse(updif_fn.called)
def test_delete_bridge_no_int_mappings(self):
lbm = get_linuxbridge_manager(
bridge_mappings={}, interface_mappings={})
with mock.patch.object(ip_lib.IPDevice, "exists") as de_fn,\
mock.patch.object(ip_lib, "IpLinkCommand") as link_cmd,\
mock.patch.object(bridge_lib.BridgeDevice,
"get_interfaces") as getif_fn,\
mock.patch.object(lbm, "remove_interface"),\
mock.patch.object(lbm, "delete_interface") as del_interface:
de_fn.return_value = False
lbm.delete_bridge("br0")
self.assertFalse(getif_fn.called)
de_fn.return_value = True
getif_fn.return_value = ["vxlan-1002"]
link_cmd.set_down.return_value = False
lbm.delete_bridge("br0")
del_interface.assert_called_with("vxlan-1002")
def test_delete_bridge_with_physical_vlan(self):
self.lbm.interface_mappings.update({"physnet2": "eth1.4000"})
bridge_device = mock.Mock()
with mock.patch.object(ip_lib, "device_exists") as de_fn,\
mock.patch.object(self.lbm, "remove_interface"),\
mock.patch.object(self.lbm, "get_interface_details") as if_det_fn,\
mock.patch.object(self.lbm, "delete_interface") as del_int,\
mock.patch.object(bridge_lib, "BridgeDevice",
return_value=bridge_device):
de_fn.return_value = True
bridge_device.get_interfaces.return_value = ["eth1.1", "eth1.4000"]
if_det_fn.return_value = ([], None)
bridge_device.link.set_down.return_value = False
self.lbm.delete_bridge("br0")
del_int.assert_called_once_with("eth1.1")
def test_remove_interface(self):
with mock.patch.object(ip_lib.IPDevice, "exists") as de_fn,\
mock.patch.object(bridge_lib,
'is_bridged_interface') as isdev_fn,\
mock.patch.object(bridge_lib.BridgeDevice,
"delif") as delif_fn:
de_fn.return_value = False
self.assertFalse(self.lbm.remove_interface("br0", "eth0"))
self.assertFalse(isdev_fn.called)
de_fn.return_value = True
isdev_fn.return_value = False
self.assertTrue(self.lbm.remove_interface("br0", "eth0"))
isdev_fn.return_value = True
delif_fn.return_value = True
self.assertFalse(self.lbm.remove_interface("br0", "eth0"))
delif_fn.return_value = False
self.assertTrue(self.lbm.remove_interface("br0", "eth0"))
def test_delete_interface(self):
with mock.patch.object(ip_lib.IPDevice, "exists") as de_fn,\
mock.patch.object(ip_lib.IpLinkCommand, "set_down") as down_fn,\
mock.patch.object(ip_lib.IpLinkCommand, "delete") as delete_fn:
de_fn.return_value = False
self.lbm.delete_interface("eth1.1")
self.assertFalse(down_fn.called)
self.assertFalse(delete_fn.called)
de_fn.return_value = True
self.lbm.delete_interface("eth1.1")
self.assertTrue(down_fn.called)
self.assertTrue(delete_fn.called)
def _check_vxlan_support(self, expected, vxlan_ucast_supported,
vxlan_mcast_supported):
with mock.patch.object(self.lbm,
'vxlan_ucast_supported',
return_value=vxlan_ucast_supported),\
mock.patch.object(self.lbm,
'vxlan_mcast_supported',
return_value=vxlan_mcast_supported):
if expected == lconst.VXLAN_NONE:
self.assertRaises(exceptions.VxlanNetworkUnsupported,
self.lbm.check_vxlan_support)
self.assertEqual(expected, self.lbm.vxlan_mode)
else:
self.lbm.check_vxlan_support()
self.assertEqual(expected, self.lbm.vxlan_mode)
def test_check_vxlan_support(self):
self._check_vxlan_support(expected=lconst.VXLAN_UCAST,
vxlan_ucast_supported=True,
vxlan_mcast_supported=True)
self._check_vxlan_support(expected=lconst.VXLAN_MCAST,
vxlan_ucast_supported=False,
vxlan_mcast_supported=True)
self._check_vxlan_support(expected=lconst.VXLAN_NONE,
vxlan_ucast_supported=False,
vxlan_mcast_supported=False)
self._check_vxlan_support(expected=lconst.VXLAN_NONE,
vxlan_ucast_supported=False,
vxlan_mcast_supported=False)
def _check_vxlan_ucast_supported(
self, expected, l2_population, iproute_arg_supported, fdb_append):
cfg.CONF.set_override('l2_population', l2_population, 'VXLAN')
with mock.patch.object(ip_lib, 'device_exists', return_value=False),\
mock.patch.object(ip_lib, 'vxlan_in_use', return_value=False),\
mock.patch.object(self.lbm,
'delete_interface',
return_value=None),\
mock.patch.object(self.lbm,
'ensure_vxlan',
return_value=None),\
mock.patch.object(
utils,
'execute',
side_effect=None if fdb_append else RuntimeError()),\
mock.patch.object(ip_lib,
'iproute_arg_supported',
return_value=iproute_arg_supported):
self.assertEqual(expected, self.lbm.vxlan_ucast_supported())
def test_vxlan_ucast_supported(self):
self._check_vxlan_ucast_supported(
expected=False,
l2_population=False, iproute_arg_supported=True, fdb_append=True)
self._check_vxlan_ucast_supported(
expected=False,
l2_population=True, iproute_arg_supported=False, fdb_append=True)
self._check_vxlan_ucast_supported(
expected=False,
l2_population=True, iproute_arg_supported=True, fdb_append=False)
self._check_vxlan_ucast_supported(
expected=True,
l2_population=True, iproute_arg_supported=True, fdb_append=True)
def _check_vxlan_mcast_supported(
self, expected, vxlan_group, iproute_arg_supported):
cfg.CONF.set_override('vxlan_group', vxlan_group, 'VXLAN')
with mock.patch.object(
ip_lib, 'iproute_arg_supported',
return_value=iproute_arg_supported):
self.assertEqual(expected, self.lbm.vxlan_mcast_supported())
def test_vxlan_mcast_supported(self):
self._check_vxlan_mcast_supported(
expected=False,
vxlan_group='',
iproute_arg_supported=True)
self._check_vxlan_mcast_supported(
expected=False,
vxlan_group='224.0.0.1',
iproute_arg_supported=False)
self._check_vxlan_mcast_supported(
expected=True,
vxlan_group='224.0.0.1',
iproute_arg_supported=True)
def _test_ensure_port_admin_state(self, admin_state):
port_id = 'fake_id'
with mock.patch.object(ip_lib, 'IPDevice') as dev_mock:
self.lbm.ensure_port_admin_state(port_id, admin_state)
tap_name = self.lbm.get_tap_device_name(port_id)
self.assertEqual(admin_state,
dev_mock(tap_name).link.set_up.called)
self.assertNotEqual(admin_state,
dev_mock(tap_name).link.set_down.called)
def test_ensure_port_admin_state_up(self):
self._test_ensure_port_admin_state(True)
def test_ensure_port_admin_state_down(self):
self._test_ensure_port_admin_state(False)
def test_get_agent_id_bridge_mappings(self):
lbm = get_linuxbridge_manager(BRIDGE_MAPPINGS, INTERFACE_MAPPINGS)
with mock.patch.object(utils,
"get_interface_mac",
return_value='16:63:69:10:a0:59') as mock_gim:
agent_id = lbm.get_agent_id()
self.assertEqual("lb16636910a059", agent_id)
mock_gim.assert_called_with(BRIDGE_MAPPING_VALUE)
def test_get_agent_id_no_bridge_mappings(self):
devices_mock = [
mock.MagicMock(),
mock.MagicMock()
]
devices_mock[0].name = "eth1"
devices_mock[1].name = "eth2"
bridge_mappings = {}
lbm = get_linuxbridge_manager(bridge_mappings, INTERFACE_MAPPINGS)
with mock.patch.object(ip_lib.IPWrapper,
'get_devices',
return_value=devices_mock), \
mock.patch.object(
utils,
"get_interface_mac",
return_value='16:63:69:10:a0:59') as mock_gim:
agent_id = lbm.get_agent_id()
self.assertEqual("lb16636910a059", agent_id)
mock_gim.assert_called_with("eth1")
class TestLinuxBridgeRpcCallbacks(base.BaseTestCase):
def setUp(self):
super(TestLinuxBridgeRpcCallbacks, self).setUp()
class FakeLBAgent(object):
def __init__(self):
self.agent_id = 1
self.mgr = get_linuxbridge_manager(
BRIDGE_MAPPINGS, INTERFACE_MAPPINGS)
self.mgr.vxlan_mode = lconst.VXLAN_UCAST
self.network_ports = collections.defaultdict(list)
self.lb_rpc = linuxbridge_neutron_agent.LinuxBridgeRpcCallbacks(
object(),
FakeLBAgent(),
object()
)
segment = mock.Mock()
segment.network_type = 'vxlan'
segment.segmentation_id = 1
self.lb_rpc.network_map['net_id'] = segment
def test_network_delete(self):
mock_net = mock.Mock()
mock_net.physical_network = None
self.lb_rpc.network_map = {NETWORK_ID: mock_net}
with mock.patch.object(self.lb_rpc.agent.mgr,
"get_bridge_name") as get_br_fn,\
mock.patch.object(self.lb_rpc.agent.mgr,
"delete_bridge") as del_fn:
get_br_fn.return_value = "br0"
self.lb_rpc.network_delete("anycontext", network_id=NETWORK_ID)
get_br_fn.assert_called_with(NETWORK_ID)
del_fn.assert_called_with("br0")
def test_port_update(self):
port = {'id': PORT_1}
self.lb_rpc.port_update(context=None, port=port)
self.assertEqual(set([DEVICE_1]), self.lb_rpc.updated_devices)
def test_network_update(self):
updated_network = {'id': NETWORK_ID}
self.lb_rpc.agent.network_ports = {
NETWORK_ID: [PORT_DATA]
}
self.lb_rpc.network_update(context=None, network=updated_network)
self.assertEqual(set([DEVICE_1]), self.lb_rpc.updated_devices)
def test_network_delete_with_existed_brq(self):
mock_net = mock.Mock()
mock_net.physical_network = 'physnet0'
self.lb_rpc.network_map = {'123': mock_net}
with mock.patch.object(linuxbridge_neutron_agent.LOG, 'info') as log,\
mock.patch.object(self.lb_rpc.agent.mgr,
"delete_bridge") as del_fn:
self.lb_rpc.network_delete("anycontext", network_id="123")
self.assertEqual(0, del_fn.call_count)
self.assertEqual(1, log.call_count)
def test_fdb_add(self):
fdb_entries = {'net_id':
{'ports':
{'agent_ip': [constants.FLOODING_ENTRY,
['port_mac', 'port_ip']]},
'network_type': 'vxlan',
'segment_id': 1}}
with mock.patch.object(utils, 'execute',
return_value='') as execute_fn, \
mock.patch.object(ip_lib.IpNeighCommand, 'add',
return_value='') as add_fn:
self.lb_rpc.fdb_add(None, fdb_entries)
expected = [
mock.call(['bridge', 'fdb', 'show', 'dev', 'vxlan-1'],
run_as_root=True),
mock.call(['bridge', 'fdb', 'add',
constants.FLOODING_ENTRY[0],
'dev', 'vxlan-1', 'dst', 'agent_ip'],
run_as_root=True,
check_exit_code=False),
mock.call(['bridge', 'fdb', 'replace', 'port_mac', 'dev',
'vxlan-1', 'dst', 'agent_ip'],
run_as_root=True,
check_exit_code=False),
]
execute_fn.assert_has_calls(expected)
add_fn.assert_called_with('port_ip', 'port_mac')
def test_fdb_ignore(self):
fdb_entries = {'net_id':
{'ports':
{LOCAL_IP: [constants.FLOODING_ENTRY,
['port_mac', 'port_ip']]},
'network_type': 'vxlan',
'segment_id': 1}}
with mock.patch.object(utils, 'execute',
return_value='') as execute_fn:
self.lb_rpc.fdb_add(None, fdb_entries)
self.lb_rpc.fdb_remove(None, fdb_entries)
self.assertFalse(execute_fn.called)
fdb_entries = {'other_net_id':
{'ports':
{'192.168.0.67': [constants.FLOODING_ENTRY,
['port_mac', 'port_ip']]},
'network_type': 'vxlan',
'segment_id': 1}}
with mock.patch.object(utils, 'execute',
return_value='') as execute_fn:
self.lb_rpc.fdb_add(None, fdb_entries)
self.lb_rpc.fdb_remove(None, fdb_entries)
self.assertFalse(execute_fn.called)
def test_fdb_remove(self):
fdb_entries = {'net_id':
{'ports':
{'agent_ip': [constants.FLOODING_ENTRY,
['port_mac', 'port_ip']]},
'network_type': 'vxlan',
'segment_id': 1}}
with mock.patch.object(utils, 'execute',
return_value='') as execute_fn, \
mock.patch.object(ip_lib.IpNeighCommand, 'delete',
return_value='') as del_fn:
self.lb_rpc.fdb_remove(None, fdb_entries)
expected = [
mock.call(['bridge', 'fdb', 'del',
constants.FLOODING_ENTRY[0],
'dev', 'vxlan-1', 'dst', 'agent_ip'],
run_as_root=True,
check_exit_code=False),
mock.call(['bridge', 'fdb', 'del', 'port_mac',
'dev', 'vxlan-1', 'dst', 'agent_ip'],
run_as_root=True,
check_exit_code=False),
]
execute_fn.assert_has_calls(expected)
del_fn.assert_called_with('port_ip', 'port_mac')
def test_fdb_update_chg_ip(self):
fdb_entries = {'chg_ip':
{'net_id':
{'agent_ip':
{'before': [['port_mac', 'port_ip_1']],
'after': [['port_mac', 'port_ip_2']]}}}}
with mock.patch.object(ip_lib.IpNeighCommand, 'add',
return_value='') as add_fn, \
mock.patch.object(ip_lib.IpNeighCommand, 'delete',
return_value='') as del_fn:
self.lb_rpc.fdb_update(None, fdb_entries)
del_fn.assert_called_with('port_ip_1', 'port_mac')
add_fn.assert_called_with('port_ip_2', 'port_mac')
def test_fdb_update_chg_ip_empty_lists(self):
fdb_entries = {'chg_ip': {'net_id': {'agent_ip': {}}}}
self.lb_rpc.fdb_update(None, fdb_entries)
|
{
"content_hash": "b234995247f6074084a52e00b85b29bd",
"timestamp": "",
"source": "github",
"line_count": 1034,
"max_line_length": 83,
"avg_line_length": 46.80947775628627,
"alnum_prop": 0.5171380756595938,
"repo_name": "bigswitch/neutron",
"id": "fe6228d42866a5ade573e67db2325253c49fc48e",
"size": "49019",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "neutron/tests/unit/plugins/ml2/drivers/linuxbridge/agent/test_linuxbridge_neutron_agent.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "1047"
},
{
"name": "Python",
"bytes": "8468247"
},
{
"name": "Shell",
"bytes": "14648"
}
],
"symlink_target": ""
}
|
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
SECRET_KEY = 'u%fk(_dcx=zl_wz@5u#tw9sw*h58848luyf7jzbi^u%_%vbi_v'
DEBUG = True
ALLOWED_HOSTS = ['*']
WSGI_APPLICATION = 'example.wsgi.application'
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'celery',
'statsy',
'tests',
'example'
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'example.urls'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'example/static')
STATICFILES_FINDERS = (
"django.contrib.staticfiles.finders.FileSystemFinder",
"django.contrib.staticfiles.finders.AppDirectoriesFinder"
)
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.contrib.auth.context_processors.auth',
'django.template.context_processors.csrf',
],
'debug': DEBUG
},
},
]
CELERY_APP = 'example.celery_app.app'
BROKER_URL = 'redis://localhost:6379/0'
STATSY_ASYNC = False
try:
from local_settings import *
except ImportError:
pass
|
{
"content_hash": "9d0ed588ccb1db5ec9600b7ce8cc3820",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 69,
"avg_line_length": 23.658227848101266,
"alnum_prop": 0.6575708935259497,
"repo_name": "fata1ex/django-statsy",
"id": "812d19ffc84bccc78c0ea836e01dccbfd31054f5",
"size": "1886",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "example/settings.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1082"
},
{
"name": "HTML",
"bytes": "7718"
},
{
"name": "JavaScript",
"bytes": "812491"
},
{
"name": "Python",
"bytes": "53631"
}
],
"symlink_target": ""
}
|
class Solution:
# @param candidates, a list of integers
# @param target, integer
# @return a list of lists of integers
def combinationSum2(self, candidates, target):
candidates.sort()
res = []
cand = []
self.combination_sum(candidates, target, cand, res)
return res
def combination_sum(self, candidates, target, cand, res):
if target == 0:
res.append(cand[:])
elif target < 0:
return
else:
for i, c in enumerate(candidates):
if i == 0:
prev = c
elif prev == c:
continue
else:
prev = c
cand.append(c)
self.combination_sum(candidates[i + 1:], target - c, cand, res)
cand.pop()
|
{
"content_hash": "ee3ec61a0f2a3b05d7cf49409b0c0862",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 79,
"avg_line_length": 31.555555555555557,
"alnum_prop": 0.4788732394366197,
"repo_name": "shichao-an/leetcode-python",
"id": "a6709849dd2eb4b17b4cf74d0a4e66c18a13a765",
"size": "852",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "combination_sum_ii/solution.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "228552"
},
{
"name": "Shell",
"bytes": "353"
}
],
"symlink_target": ""
}
|
"""
Module for editing date/time settings on macOS
.. versionadded:: 2016.3.0
"""
from datetime import datetime
import salt.utils.mac_utils
import salt.utils.platform
from salt.exceptions import SaltInvocationError
__virtualname__ = "timezone"
def __virtual__():
"""
Only for macOS
"""
if not salt.utils.platform.is_darwin():
return (
False,
"The mac_timezone module could not be loaded: "
"module only works on macOS systems.",
)
return __virtualname__
def _get_date_time_format(dt_string):
"""
Function that detects the date/time format for the string passed.
:param str dt_string:
A date/time string
:return: The format of the passed dt_string
:rtype: str
:raises: SaltInvocationError on Invalid Date/Time string
"""
valid_formats = [
"%H:%M",
"%H:%M:%S",
"%m:%d:%y",
"%m:%d:%Y",
"%m/%d/%y",
"%m/%d/%Y",
]
for dt_format in valid_formats:
try:
datetime.strptime(dt_string, dt_format)
return dt_format
except ValueError:
continue
msg = "Invalid Date/Time Format: {}".format(dt_string)
raise SaltInvocationError(msg)
def get_date():
"""
Displays the current date
:return: the system date
:rtype: str
CLI Example:
.. code-block:: bash
salt '*' timezone.get_date
"""
ret = salt.utils.mac_utils.execute_return_result("systemsetup -getdate")
return salt.utils.mac_utils.parse_return(ret)
def set_date(date):
"""
Set the current month, day, and year
:param str date: The date to set. Valid date formats are:
- %m:%d:%y
- %m:%d:%Y
- %m/%d/%y
- %m/%d/%Y
:return: True if successful, False if not
:rtype: bool
:raises: SaltInvocationError on Invalid Date format
:raises: CommandExecutionError on failure
CLI Example:
.. code-block:: bash
salt '*' timezone.set_date 1/13/2016
"""
date_format = _get_date_time_format(date)
dt_obj = datetime.strptime(date, date_format)
cmd = "systemsetup -setdate {}".format(dt_obj.strftime("%m:%d:%Y"))
return salt.utils.mac_utils.execute_return_success(cmd)
def get_time():
"""
Get the current system time.
:return: The current time in 24 hour format
:rtype: str
CLI Example:
.. code-block:: bash
salt '*' timezone.get_time
"""
ret = salt.utils.mac_utils.execute_return_result("systemsetup -gettime")
return salt.utils.mac_utils.parse_return(ret)
def set_time(time):
"""
Sets the current time. Must be in 24 hour format.
:param str time: The time to set in 24 hour format. The value must be
double quoted. ie: '"17:46"'
:return: True if successful, False if not
:rtype: bool
:raises: SaltInvocationError on Invalid Time format
:raises: CommandExecutionError on failure
CLI Example:
.. code-block:: bash
salt '*' timezone.set_time '"17:34"'
"""
# time must be double quoted '"17:46"'
time_format = _get_date_time_format(time)
dt_obj = datetime.strptime(time, time_format)
cmd = "systemsetup -settime {}".format(dt_obj.strftime("%H:%M:%S"))
return salt.utils.mac_utils.execute_return_success(cmd)
def get_zone():
"""
Displays the current time zone
:return: The current time zone
:rtype: str
CLI Example:
.. code-block:: bash
salt '*' timezone.get_zone
"""
ret = salt.utils.mac_utils.execute_return_result("systemsetup -gettimezone")
return salt.utils.mac_utils.parse_return(ret)
def get_zonecode():
"""
Displays the current time zone abbreviated code
:return: The current time zone code
:rtype: str
CLI Example:
.. code-block:: bash
salt '*' timezone.get_zonecode
"""
return salt.utils.mac_utils.execute_return_result("date +%Z")
def get_offset():
"""
Displays the current time zone offset
:return: The current time zone offset
:rtype: str
CLI Example:
.. code-block:: bash
salt '*' timezone.get_offset
"""
return salt.utils.mac_utils.execute_return_result("date +%z")
def list_zones():
"""
Displays a list of available time zones. Use this list when setting a
time zone using ``timezone.set_zone``
:return: a list of time zones
:rtype: list
CLI Example:
.. code-block:: bash
salt '*' timezone.list_zones
"""
ret = salt.utils.mac_utils.execute_return_result("systemsetup -listtimezones")
zones = salt.utils.mac_utils.parse_return(ret)
return [x.strip() for x in zones.splitlines()]
def set_zone(time_zone):
"""
Set the local time zone. Use ``timezone.list_zones`` to list valid time_zone
arguments
:param str time_zone: The time zone to apply
:return: True if successful, False if not
:rtype: bool
:raises: SaltInvocationError on Invalid Timezone
:raises: CommandExecutionError on failure
CLI Example:
.. code-block:: bash
salt '*' timezone.set_zone America/Denver
"""
if time_zone not in list_zones():
raise SaltInvocationError("Invalid Timezone: {}".format(time_zone))
salt.utils.mac_utils.execute_return_success(
"systemsetup -settimezone {}".format(time_zone)
)
return time_zone in get_zone()
def zone_compare(time_zone):
"""
Compares the given timezone name with the system timezone name.
:return: True if they are the same, False if not
:rtype: bool
CLI Example:
.. code-block:: bash
salt '*' timezone.zone_compare America/Boise
"""
return time_zone == get_zone()
def get_using_network_time():
"""
Display whether network time is on or off
:return: True if network time is on, False if off
:rtype: bool
CLI Example:
.. code-block:: bash
salt '*' timezone.get_using_network_time
"""
ret = salt.utils.mac_utils.execute_return_result("systemsetup -getusingnetworktime")
return (
salt.utils.mac_utils.validate_enabled(salt.utils.mac_utils.parse_return(ret))
== "on"
)
def set_using_network_time(enable):
"""
Set whether network time is on or off.
:param enable: True to enable, False to disable. Can also use 'on' or 'off'
:type: str bool
:return: True if successful, False if not
:rtype: bool
:raises: CommandExecutionError on failure
CLI Example:
.. code-block:: bash
salt '*' timezone.set_using_network_time True
"""
state = salt.utils.mac_utils.validate_enabled(enable)
cmd = "systemsetup -setusingnetworktime {}".format(state)
salt.utils.mac_utils.execute_return_success(cmd)
return state == salt.utils.mac_utils.validate_enabled(get_using_network_time())
def get_time_server():
"""
Display the currently set network time server.
:return: the network time server
:rtype: str
CLI Example:
.. code-block:: bash
salt '*' timezone.get_time_server
"""
ret = salt.utils.mac_utils.execute_return_result(
"systemsetup -getnetworktimeserver"
)
return salt.utils.mac_utils.parse_return(ret)
def set_time_server(time_server="time.apple.com"):
"""
Designates a network time server. Enter the IP address or DNS name for the
network time server.
:param time_server: IP or DNS name of the network time server. If nothing
is passed the time server will be set to the macOS default of
'time.apple.com'
:type: str
:return: True if successful, False if not
:rtype: bool
:raises: CommandExecutionError on failure
CLI Example:
.. code-block:: bash
salt '*' timezone.set_time_server time.acme.com
"""
cmd = "systemsetup -setnetworktimeserver {}".format(time_server)
salt.utils.mac_utils.execute_return_success(cmd)
return time_server in get_time_server()
def get_hwclock():
"""
Get current hardware clock setting (UTC or localtime)
CLI Example:
.. code-block:: bash
salt '*' timezone.get_hwclock
"""
# Need to search for a way to figure it out ...
return False
def set_hwclock(clock):
"""
Sets the hardware clock to be either UTC or localtime
CLI Example:
.. code-block:: bash
salt '*' timezone.set_hwclock UTC
"""
# Need to search for a way to figure it out ...
return False
|
{
"content_hash": "e311869c3e46273847f822992ae4db68",
"timestamp": "",
"source": "github",
"line_count": 383,
"max_line_length": 88,
"avg_line_length": 22.287206266318538,
"alnum_prop": 0.6260543580131209,
"repo_name": "saltstack/salt",
"id": "5703ab08784eac6435a42ddf56fa210fda8e665f",
"size": "8536",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "salt/modules/mac_timezone.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "14911"
},
{
"name": "C",
"bytes": "1571"
},
{
"name": "Cython",
"bytes": "1458"
},
{
"name": "Dockerfile",
"bytes": "184"
},
{
"name": "Groovy",
"bytes": "12318"
},
{
"name": "HCL",
"bytes": "257"
},
{
"name": "HTML",
"bytes": "8031"
},
{
"name": "Jinja",
"bytes": "45598"
},
{
"name": "Makefile",
"bytes": "713"
},
{
"name": "NSIS",
"bytes": "76572"
},
{
"name": "PowerShell",
"bytes": "75891"
},
{
"name": "Python",
"bytes": "41444811"
},
{
"name": "Rich Text Format",
"bytes": "6242"
},
{
"name": "Roff",
"bytes": "191"
},
{
"name": "Ruby",
"bytes": "961"
},
{
"name": "SaltStack",
"bytes": "35856"
},
{
"name": "Scheme",
"bytes": "895"
},
{
"name": "Scilab",
"bytes": "1147"
},
{
"name": "Shell",
"bytes": "524917"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('services', '0013_auto_20151117_1205'),
]
operations = [
migrations.AlterField(
model_name='story',
name='date',
field=models.DateField(auto_now_add=True, db_index=True),
),
]
|
{
"content_hash": "2878186df79d53f3dca3d70467d9377d",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 69,
"avg_line_length": 21.944444444444443,
"alnum_prop": 0.5949367088607594,
"repo_name": "vitorfs/woid",
"id": "5ad43dc6d1b21956412c3fda14905ece8df7dd11",
"size": "419",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "woid/apps/services/migrations/0014_auto_20160715_1202.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "14252"
},
{
"name": "HTML",
"bytes": "13482"
},
{
"name": "JavaScript",
"bytes": "863"
},
{
"name": "Python",
"bytes": "50476"
}
],
"symlink_target": ""
}
|
import heapq
import logging
import threading
import time
try:
from UserDict import DictMixin
except ImportError:
from collections import Mapping as DictMixin
from .token_bucket import Bucket
from six.moves import queue as Queue
logger = logging.getLogger('scheduler')
try:
cmp
except NameError:
cmp = lambda x, y: (x > y) - (x < y)
class AtomInt(object):
__value__ = 0
__mutex__ = threading.RLock()
@classmethod
def get_value(cls):
cls.__mutex__.acquire()
cls.__value__ = cls.__value__ + 1
value = cls.__value__
cls.__mutex__.release()
return value
class InQueueTask(DictMixin):
__slots__ = ('taskid', 'priority', 'exetime', 'sequence')
__getitem__ = lambda *x: getattr(*x)
__setitem__ = lambda *x: setattr(*x)
__iter__ = lambda self: iter(self.__slots__)
__len__ = lambda self: len(self.__slots__)
keys = lambda self: self.__slots__
def __init__(self, taskid, priority=0, exetime=0):
self.taskid = taskid
self.priority = priority
self.exetime = exetime
self.sequence = AtomInt.get_value()
def __cmp__(self, other):
if self.exetime == 0 and other.exetime == 0:
diff = -cmp(self.priority, other.priority)
else:
diff = cmp(self.exetime, other.exetime)
# compare in-queue sequence number finally if two element has the same
# priority or exetime
return diff if diff != 0 else cmp(self.sequence, other.sequence)
def __lt__(self, other):
return self.__cmp__(other) < 0
class PriorityTaskQueue(Queue.Queue):
'''
TaskQueue
Same taskid items will been merged
'''
def _init(self, maxsize):
self.queue = []
self.queue_dict = dict()
def _qsize(self, len=len):
return len(self.queue_dict)
def _put(self, item, heappush=heapq.heappush):
if item.taskid in self.queue_dict:
task = self.queue_dict[item.taskid]
changed = False
if item < task:
changed = True
task.priority = max(item.priority, task.priority)
task.exetime = min(item.exetime, task.exetime)
if changed:
self._resort()
else:
heappush(self.queue, item)
self.queue_dict[item.taskid] = item
def _get(self, heappop=heapq.heappop):
while self.queue:
item = heappop(self.queue)
if item.taskid is None:
continue
self.queue_dict.pop(item.taskid, None)
return item
return None
@property
def top(self):
while self.queue and self.queue[0].taskid is None:
heapq.heappop(self.queue)
if self.queue:
return self.queue[0]
return None
def _resort(self):
heapq.heapify(self.queue)
def __contains__(self, taskid):
return taskid in self.queue_dict
def __getitem__(self, taskid):
return self.queue_dict[taskid]
def __setitem__(self, taskid, item):
assert item.taskid == taskid
self.put(item)
def __delitem__(self, taskid):
self.queue_dict.pop(taskid).taskid = None
class TaskQueue(object):
'''
task queue for scheduler, have a priority queue and a time queue for delayed tasks
'''
processing_timeout = 10 * 60
def __init__(self, rate=0, burst=0):
self.mutex = threading.RLock()
self.priority_queue = PriorityTaskQueue()
self.time_queue = PriorityTaskQueue()
self.processing = PriorityTaskQueue()
self.bucket = Bucket(rate=rate, burst=burst)
@property
def rate(self):
return self.bucket.rate
@rate.setter
def rate(self, value):
self.bucket.rate = value
@property
def burst(self):
return self.bucket.burst
@burst.setter
def burst(self, value):
self.bucket.burst = value
def check_update(self):
'''
Check time queue and processing queue
put tasks to priority queue when execute time arrived or process timeout
'''
self._check_time_queue()
self._check_processing()
def _check_time_queue(self):
now = time.time()
self.mutex.acquire()
while self.time_queue.qsize() and self.time_queue.top and self.time_queue.top.exetime < now:
task = self.time_queue.get_nowait() # type: InQueueTask
task.exetime = 0
self.priority_queue.put(task)
self.mutex.release()
def _check_processing(self):
now = time.time()
self.mutex.acquire()
while self.processing.qsize() and self.processing.top and self.processing.top.exetime < now:
task = self.processing.get_nowait()
if task.taskid is None:
continue
task.exetime = 0
self.priority_queue.put(task)
logger.info("processing: retry %s", task.taskid)
self.mutex.release()
def put(self, taskid, priority=0, exetime=0):
"""
Put a task into task queue
when use heap sort, if we put tasks(with the same priority and exetime=0) into queue,
the queue is not a strict FIFO queue, but more like a FILO stack.
It is very possible that when there are continuous big flow, the speed of select is
slower than request, resulting in priority-queue accumulation in short time.
In this scenario, the tasks more earlier entering the priority-queue will not get
processed until the request flow becomes small.
Thus, we store a global atom self increasing value into task.sequence which represent
the task enqueue sequence. When the comparison of exetime and priority have no
difference, we compare task.sequence to ensure that the entire queue is ordered.
"""
now = time.time()
task = InQueueTask(taskid, priority, exetime)
self.mutex.acquire()
if taskid in self.priority_queue:
self.priority_queue.put(task)
elif taskid in self.time_queue:
self.time_queue.put(task)
elif taskid in self.processing and self.processing[taskid].taskid:
# force update a processing task is not allowed as there are so many
# problems may happen
pass
else:
if exetime and exetime > now:
self.time_queue.put(task)
else:
task.exetime = 0
self.priority_queue.put(task)
self.mutex.release()
def get(self):
'''Get a task from queue when bucket available'''
if self.bucket.get() < 1:
return None
now = time.time()
self.mutex.acquire()
try:
task = self.priority_queue.get_nowait()
self.bucket.desc()
except Queue.Empty:
self.mutex.release()
return None
task.exetime = now + self.processing_timeout
self.processing.put(task)
self.mutex.release()
return task.taskid
def done(self, taskid):
'''Mark task done'''
if taskid in self.processing:
self.mutex.acquire()
if taskid in self.processing:
del self.processing[taskid]
self.mutex.release()
return True
return False
def delete(self, taskid):
if taskid not in self:
return False
if taskid in self.priority_queue:
self.mutex.acquire()
del self.priority_queue[taskid]
self.mutex.release()
elif taskid in self.time_queue:
self.mutex.acquire()
del self.time_queue[taskid]
self.mutex.release()
elif taskid in self.processing:
self.done(taskid)
return True
def size(self):
return self.priority_queue.qsize() + self.time_queue.qsize() + self.processing.qsize()
def is_processing(self, taskid):
'''
return True if taskid is in processing
'''
return taskid in self.processing and self.processing[taskid].taskid
def __len__(self):
return self.size()
def __contains__(self, taskid):
if taskid in self.priority_queue or taskid in self.time_queue:
return True
if taskid in self.processing and self.processing[taskid].taskid:
return True
return False
if __name__ == '__main__':
task_queue = TaskQueue()
task_queue.processing_timeout = 0.1
task_queue.put('a3', 3, time.time() + 0.1)
task_queue.put('a1', 1)
task_queue.put('a2', 2)
assert task_queue.get() == 'a2'
time.sleep(0.1)
task_queue._check_time_queue()
assert task_queue.get() == 'a3'
assert task_queue.get() == 'a1'
task_queue._check_processing()
assert task_queue.get() == 'a2'
assert len(task_queue) == 0
|
{
"content_hash": "31148efd3b5b645acc470e529b4f1155",
"timestamp": "",
"source": "github",
"line_count": 295,
"max_line_length": 100,
"avg_line_length": 30.45084745762712,
"alnum_prop": 0.5874429477902705,
"repo_name": "wangjun/pyspider",
"id": "a6d02e3a5101c8bd34121eb22ec51d9f68b3cc24",
"size": "9168",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "pyspider/scheduler/task_queue.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "24952"
},
{
"name": "HTML",
"bytes": "24503"
},
{
"name": "JavaScript",
"bytes": "50223"
},
{
"name": "Python",
"bytes": "456565"
}
],
"symlink_target": ""
}
|
from base import BaseDataStore
class StorageManager(BaseDataStore):
def __init__(self, get_cache_size=0, execute_cache_size=0):
super(StorageManager, self).__init__(get_cache_size, execute_cache_size)
self.inputs = []
self.outputs = []
def iteritems(self):
for _, ds, _ in self.outputs:
for op, v in ds.iteritems():
yield op, v
def set_input(self, cls, ds):
self.inputs.append((cls, ds))
def set_output(self, cls, ds, autosave=False):
self.outputs.append((cls, ds, autosave))
def set_input_output(self, cls, ds, autosave=False):
self.set_input(cls, ds)
self.set_output(cls, ds, autosave=autosave)
def _execute(self, operation):
operation = self._get_operation(operation)
in_ds = self._get_input_store(operation)
if in_ds is None: raise ValueError("input store not found for operation %s" % operation)
if in_ds.execute_cache is None:
res = operation.apply(self)
else:
res = in_ds.execute_cache.get(operation)
if res is None:
res = operation.apply(self)
in_ds.execute_cache.set(operation, res)
out_ds, autosave = self._get_output_store(operation)
if autosave:
out_ds[operation] = res
return res
def __contains__(self, operation):
operation = self._get_operation(operation)
out_ds, autosave = self._get_output_store(operation)
if out_ds is None: return False # raise ValueError("output store not found for operation %s" % operation)
return operation in out_ds
def _get_store(self, operation, list):
for elem in list:
if isinstance(operation, elem[0]): return elem
def _get_input_store(self, operation):
res = self._get_store(operation, self.inputs)
if res is not None: return res[1]
def _get_output_store(self, operation):
res = self._get_store(operation, self.outputs)
if res is not None:
return res[1:]
else:
return None, False
def _get(self, operation):
operation = self._get_operation(operation)
out_ds, autosave = self._get_output_store(operation)
if out_ds is None: raise ValueError("input store not found for operation %s" % operation)
return out_ds[operation]
def save(self, operation, value):
operation = self._get_operation(operation)
out_ds, autosave = self._get_output_store(operation)
if out_ds is None: raise ValueError("output store not found for operation %s" % operation)
return out_ds.save(operation, value)
|
{
"content_hash": "e4b7d77e6a38664f72f012ace2e74453",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 114,
"avg_line_length": 35.5921052631579,
"alnum_prop": 0.6118299445471349,
"repo_name": "elsonidoq/fito",
"id": "a4dd208896bdb6a8482915bd723250673f0c404b",
"size": "2705",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fito/data_store/store_manager.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "148390"
}
],
"symlink_target": ""
}
|
from django.contrib import admin
from apps.shop.models import Order, OrderLine
class OrderAdmin(admin.ModelAdmin):
model = Order
list_display = ('__str__', 'price', 'quantity', 'order_line')
class OrderInline(admin.TabularInline):
model = Order
extra = 0
readonly_fields = ['__str__']
fields = ['__str__', 'price', 'quantity']
class OrderLineAdmin(admin.ModelAdmin):
model = OrderLine
list_display = ('__str__', 'user', 'datetime', 'paid')
list_filter = ['paid']
search_fields = ['user__first_name', 'user__last_name', 'user__username', 'user__ntnu_username']
inlines = [OrderInline]
admin.site.register(Order, OrderAdmin)
admin.site.register(OrderLine, OrderLineAdmin)
|
{
"content_hash": "4d62aa48ba77ca4775a0b221acdba0da",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 100,
"avg_line_length": 25.857142857142858,
"alnum_prop": 0.6602209944751382,
"repo_name": "dotKom/onlineweb4",
"id": "d1b6cefbd4752f588567ff40b4211182e8e2ce50",
"size": "724",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "apps/shop/admin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "71414"
},
{
"name": "HTML",
"bytes": "463894"
},
{
"name": "JavaScript",
"bytes": "745404"
},
{
"name": "Python",
"bytes": "925584"
},
{
"name": "Shell",
"bytes": "3130"
},
{
"name": "Standard ML",
"bytes": "1088"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import random
import string
import tasks.competition.messages as msg
from core.task import on_message, on_start, on_timeout
from fsa import build_automaton
from tasks.competition.base import BaseTask
def random_string(length):
return random_string_from(length, string.ascii_uppercase)
def random_string_from(length, subset):
return "".join(random.choice(subset) for _ in range(length))
class TaskSet0(BaseTask):
_task_nr = 0
_SWITCH_TO_LONGER_DESCRIPTION = 1000
_SWITCH_TO_NEXT_SUBTASK = 10
_subtask_nr = -1
def __init__(self, world=None):
super(TaskSet0, self).__init__(world=world, max_time=3000)
@on_start()
def give_instructions(self, event):
length_of_description = 1 * (self._task_nr > self._SWITCH_TO_LONGER_DESCRIPTION) + 1
prev_subtask = self._subtask_nr
self._subtask_nr = self._task_nr // self._SWITCH_TO_NEXT_SUBTASK
if prev_subtask != self._subtask_nr:
self._description = random_string(length_of_description)
self._automaton = build_automaton(self._description, "and")
is_correct = random.choice([True, False])
verification_length = random.randint(length_of_description, length_of_description * 3 - 1)
if is_correct:
verify = self._automaton.get_correct_string(verification_length)
else:
verify = self._automaton.get_wrong_string(verification_length)
self.answer = "true" if is_correct else "false"
self.give_away_message = 'Wrong. The right answer is: {}.'.format(self.answer)
self.set_message("description: {}; verify: {}.".format(self._description, verify))
self._task_nr = self._task_nr + 1
@on_message(r'\.')
def check_response(self, event):
# check if given answer matches
if event.is_message(self.answer, '.'):
# if the message sent by the learner equals the teacher's
# expected answer followed by a period, reward the learner.
self.set_result(True, random.choice(msg.congratulations))
else:
# If the learner said anything else, it fails the task.
self.fail_learner()
@on_timeout()
def on_timeout(self, event):
# if the learner has not produced any plausible answer by the max_time
# allowed, fail the learner sending appropriate feedback.
self.fail_learner()
def fail_learner(self):
# fail the learner sending a random fail feedback message
self.set_result(False, self.give_away_message)
class TaskSetBase(BaseTask):
@staticmethod
def get_task(max_length_of_description, max_nr_of_groups, max_length_of_verify, description_type,
not_portion=None, subset_size=None, without_anything=None):
'''
TODO: generovat incorrect z distribuce correct
TODO: pridat minimalni delku
'''
not_portion = not_portion or 0
subset_size = subset_size or 13
if not description_type == "and" and not description_type == "or":
print("Unknown description_type: {}".format(description_type))
return
if subset_size > 13:
print("Subset size cannot be higher than 13.")
return
nr_of_groups = random.randint(1, max_nr_of_groups)
if not_portion > 0:
alphabet = string.ascii_uppercase
normal_subset = "".join(random.choice(alphabet) for _ in range(subset_size))
reduced_alphabet = "".join([char for char in alphabet if char not in normal_subset])
not_subset = "".join(random.choice(reduced_alphabet) for _ in range(subset_size))
descriptions = []
if without_anything:
has_anything = False
else:
has_anything = random.choice([True, False])
if description_type == "or" and not without_anything:
if random.random() < 0.05:
has_anything = True
nr_of_groups = 0
else:
has_anything = False
for _ in range(nr_of_groups):
length_of_description = random.randint(1, max_length_of_description)
if not_portion > 0:
if random.random() < not_portion:
description = "not " + random_string_from(length_of_description, not_subset)
has_anything = True
else:
description = random_string_from(length_of_description, normal_subset)
else:
description = random_string(length_of_description)
descriptions.append(description)
if has_anything:
descriptions.append("anything")
descriptions = list(set(descriptions))
automaton = build_automaton(" ".join(descriptions), description_type)
type_connection = " {} ".format(description_type)
complete_description = type_connection.join(descriptions)
is_correct = random.choice([True, False])
verify_length = random.randint(1, max_length_of_verify)
if is_correct:
verify = automaton.get_correct_string(verify_length)
else:
verify = automaton.get_wrong_string(verify_length, 0)
return (is_correct, "description: {}; verify: {}.".format(complete_description, verify))
def __init__(self, world=None):
super(TaskSetBase, self).__init__(world=world, max_time=3000)
self.max_length_of_description = None
self.max_nr_of_groups = None
self.max_length_of_verify = None
self.description_type = None
self.not_portion = None
self.subset_size = None
self.without_anything = None
@on_start()
def give_instructions(self, event):
if not self.max_length_of_description or not self.max_nr_of_groups or not self.max_length_of_verify or not self.description_type:
raise AttributeError("Some of the TaskSet attributes are not set!")
is_correct, task = TaskSetBase.get_task(self.max_length_of_description, self.max_nr_of_groups, self.max_length_of_verify,
self.description_type, self.not_portion, self.subset_size, self.without_anything)
self.answer = "true" if is_correct else "false"
self.give_away_message = 'Wrong. The right answer is: {}.'.format(self.answer)
self.set_message(task)
@on_message(r'\.')
def check_response(self, event):
# check if given answer matches
if event.is_message(self.answer, '.'):
# if the message sent by the learner equals the teacher's
# expected answer followed by a period, reward the learner.
self.set_result(True, random.choice(msg.congratulations))
else:
# If the learner said anything else, it fails the task.
self.fail_learner()
@on_timeout()
def on_timeout(self, event):
# if the learner has not produced any plausible answer by the max_time
# allowed, fail the learner sending appropriate feedback.
self.fail_learner()
def fail_learner(self):
# fail the learner sending a random fail feedback message
self.set_result(False, self.give_away_message)
class TaskSet1(TaskSetBase):
def __init__(self, world=None):
super(TaskSet1, self).__init__(world=world)
self.max_length_of_description = 3
self.max_nr_of_groups = 1
self.max_length_of_verify = 10
self.description_type = "and"
self.without_anything = True
class TaskSet2(TaskSetBase):
def __init__(self, world=None):
super(TaskSet2, self).__init__(world=world)
self.max_length_of_description = 3
self.max_nr_of_groups = 3
self.max_length_of_verify = 10
self.description_type = "or"
class TaskSet3(TaskSetBase):
def __init__(self, world=None):
super(TaskSet3, self).__init__(world=world)
self.max_length_of_description = 3
self.max_nr_of_groups = 3
self.max_length_of_verify = 30
self.description_type = "and"
class TaskSet4(TaskSetBase):
def __init__(self, world=None):
super(TaskSet4, self).__init__(world=world)
self.max_length_of_description = 3
self.max_nr_of_groups = 3
self.max_length_of_verify = 30
self.description_type = "and"
self.not_portion = 0.5
class TaskSet5(TaskSetBase):
def __init__(self, world=None):
super(TaskSet5, self).__init__(world=world)
self.world = world
self.task_set_list = [TaskSet1, TaskSet2, TaskSet3, TaskSet4]
@on_start()
def give_instructions(self, event):
actual_task_set = random.choice(self.task_set_list)()
self.max_length_of_description = actual_task_set.max_length_of_description
self.max_nr_of_groups = actual_task_set.max_nr_of_groups
self.max_length_of_verify = actual_task_set.max_length_of_verify
self.description_type = actual_task_set.description_type
self.not_portion = actual_task_set.not_portion
self.subset_size = actual_task_set.subset_size
self.without_anything = actual_task_set.without_anything
return super(TaskSet5, self).give_instructions(event)
|
{
"content_hash": "cfee5d57afea32d615cd542370788be0",
"timestamp": "",
"source": "github",
"line_count": 246,
"max_line_length": 137,
"avg_line_length": 38.3780487804878,
"alnum_prop": 0.6260989301980723,
"repo_name": "general-ai-challenge/Round1",
"id": "bfc1094cf422d6c2890e35387c1b4ea9c5addc43",
"size": "9644",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/tasks/challenge/round1/challenge_mini.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "6718"
},
{
"name": "C++",
"bytes": "1541"
},
{
"name": "Lua",
"bytes": "1476"
},
{
"name": "Makefile",
"bytes": "7464"
},
{
"name": "Python",
"bytes": "471372"
},
{
"name": "Shell",
"bytes": "1112"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from gge_proxy_manager.models import Kingdom
from lib.core.api2object.castle import import_castle_economy
from lib.socket.response import Response
from django.utils.timezone import now, timedelta
from datetime import datetime
import logging
logger = logging.getLogger(__name__)
fifteen_minutes = timedelta(minutes=15)
class CastleEconomyMiddleware():
@staticmethod
def inbound(context, message):
player = context.get_player()
if not player:
return context, message
if not message.type == 'in' or not message.command == 'dcl':
# maybe we request it
got_castle_economy = context.session_get("got_castle_economy")
request_castle_economy = context.session_get("request_castle_economy")
if request_castle_economy:
# already requested
pass
elif not got_castle_economy or (
isinstance(got_castle_economy, datetime) and got_castle_economy < (now() - fifteen_minutes)):
request_castle_economy = context.session_get("request_castle_economy")
if not request_castle_economy:
# add response
context.session_set("request_castle_economy", True)
response = Response(command='fwd_srv', data={
"cmd": "dcl",
"data": {
"CD": player.gge_id
}
})
logger.info("Request economy player=%s last_economy=%s", player.name, got_castle_economy)
context.add_response(response)
return context, message
"""
request_castle_economy
got_castle_economy
"""
context.session_set("request_castle_economy", False)
context.session_set("got_castle_economy", now())
game = context.get_game()
if not game:
return context, message
data = message.get_data()
economy_data = data.get("C", {})
for in_kingdom in economy_data:
kingdom = Kingdom.objects.get(game=game, kid=in_kingdom.get("KID"))
for economy_by_castle in in_kingdom.get("AI", []):
import_castle_economy(economy_by_castle, kingdom)
logger.info("Economy imported player=%s castle_id=%d", player.name, economy_by_castle.get('AID', 0))
return context, message
|
{
"content_hash": "0b5836071e7d9aea401466bee498d0f7",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 116,
"avg_line_length": 36.88235294117647,
"alnum_prop": 0.580542264752791,
"repo_name": "mrcrgl/gge-storage",
"id": "49df6278c46179d397b390b04c35969c9f2e85b5",
"size": "2508",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/socket/middleware/castle_economy.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "10602"
},
{
"name": "HTML",
"bytes": "113866"
},
{
"name": "JavaScript",
"bytes": "73567"
},
{
"name": "Python",
"bytes": "331496"
}
],
"symlink_target": ""
}
|
import logging
import traceback
import uuid
from copy import deepcopy
from datetime import timedelta
from io import BytesIO
from typing import Dict, List, Optional
import bmds
import pandas as pd
import reversion
from bmds.bmds3.batch import BmdsSessionBatch
from bmds.bmds3.recommender.recommender import RecommenderSettings
from bmds.bmds3.types.sessions import VersionSchema
from bmds.constants import Dtype
from django.conf import settings
from django.core.cache import cache
from django.core.exceptions import ValidationError
from django.db import DataError, models
from django.urls import reverse
from django.utils.text import slugify
from django.utils.timezone import now
from ..common.utils import random_string
from . import executor, tasks, validators
from .reporting.cache import DocxReportCache, ExcelReportCache
from .reporting.excel import dataset_df, params_df, summary_df
from .schema import AnalysisOutput, AnalysisSessionSchema
logger = logging.getLogger(__name__)
def get_deletion_date(current_deletion_date=None):
date = now() + timedelta(days=settings.DAYS_TO_KEEP_ANALYSES)
if current_deletion_date:
return max(current_deletion_date, date)
return date
@reversion.register()
class Analysis(models.Model):
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
password = models.CharField(max_length=12, default=random_string, editable=False)
inputs = models.JSONField(default=dict)
outputs = models.JSONField(default=dict, blank=True)
errors = models.JSONField(default=dict, blank=True)
created = models.DateTimeField(auto_now_add=True)
started = models.DateTimeField(null=True, blank=True)
ended = models.DateTimeField(null=True, blank=True)
deletion_date = models.DateTimeField(null=True, blank=True, default=get_deletion_date)
class Meta:
verbose_name_plural = "Analyses"
ordering = ("-created",)
get_latest_by = ("created",)
def __str__(self):
return str(self.inputs.get("analysis_name", self.id))
def save(self, *args, **kwargs):
try:
super().save(*args, **kwargs)
except DataError:
self.reset_execution()
self.errors = ["An error occurred saving content. Please contact the developers."]
super().save(*args, **kwargs)
finally:
DocxReportCache(analysis=self).delete()
ExcelReportCache(analysis=self).delete()
@property
def slug(self) -> str:
if "analysis_name" in self.inputs:
return slugify(self.inputs["analysis_name"])
else:
return str(self.id)
def name(self) -> str:
return self.inputs.get("analysis_name", "BMDS Analysis")
def get_absolute_url(self):
return reverse("analysis", args=(str(self.id),))
def get_api_url(self):
return reverse("api:analysis-detail", args=(str(self.id),))
def get_api_patch_inputs_url(self):
return reverse("api:analysis-patch-inputs", args=(str(self.id),))
def get_api_execute_url(self):
return reverse("api:analysis-execute", args=(str(self.id),))
def get_api_execute_reset_url(self):
return reverse("api:analysis-execute-reset", args=(str(self.id),))
def get_edit_url(self):
return reverse("analysis_edit", args=(str(self.id), self.password))
def get_renew_url(self):
return reverse("analysis_renew", args=(str(self.id), self.password))
def get_delete_url(self):
return reverse("analysis_delete", args=(str(self.id), self.password))
def get_excel_url(self):
return reverse("api:analysis-excel", args=(str(self.id),))
def get_word_url(self):
return reverse("api:analysis-word", args=(str(self.id),))
def inputs_valid(self) -> bool:
try:
validators.validate_input(self.inputs)
except ValidationError:
return False
return True
@property
def is_executing(self) -> bool:
return self.started is not None and self.ended is None
@property
def is_finished(self) -> bool:
return self.ended and len(self.outputs) > 0 or len(self.errors) > 0
@property
def has_errors(self):
return len(self.errors) > 0
@classmethod
def delete_old_analyses(cls):
qs = cls.objects.filter(deletion_date=now())
logger.info(f"Removing {qs.count()} old BMDS analysis")
qs.delete()
@classmethod
def maybe_hanging(cls, queryset):
"""
Return a queryset of analyses which started at least an hour ago but have not yet ended.
"""
return queryset.filter(started__lt=now() - timedelta(hours=1), ended__isnull=True)
def get_session(self, index: int) -> executor.AnalysisSession:
if not self.is_finished or self.has_errors:
raise ValueError("Session cannot be returned")
return executor.AnalysisSession.deserialize(deepcopy(self.outputs["outputs"][index]))
def get_sessions(self) -> List[executor.AnalysisSession]:
if not self.is_finished or self.has_errors:
raise ValueError("Session cannot be returned")
return [
executor.AnalysisSession.deserialize(output)
for output in deepcopy(self.outputs["outputs"])
]
def to_batch(self) -> BmdsSessionBatch:
# convert List[executor.AnalysisSession] to List[bmds.BmdsSession]
items = []
for session in self.get_sessions():
if session.frequentist:
items.append(session.frequentist)
if session.bayesian:
items.append(session.bayesian)
return BmdsSessionBatch(sessions=items)
def to_df(self) -> dict[str, pd.DataFrame]:
# exit early if we don't have data for a report
if not self.is_finished or self.has_errors:
return {
"error": pd.Series(
data=["Analysis not finished or error occurred - cannot create report"],
name="Status",
).to_frame(),
}
return {
"summary": summary_df(self),
"datasets": dataset_df(self),
"parameters": params_df(self),
}
def to_excel(self) -> BytesIO:
f = BytesIO()
writer = pd.ExcelWriter(f)
data = self.to_df()
for name, df in data.items():
df.to_excel(writer, sheet_name=name, index=False)
writer.save()
return f
def update_selection(self, selection: validators.AnalysisSelectedSchema):
"""Given a new selection data schema; update outputs and save instance
Args:
selection (validators.AnalysisSelectedSchema): The selection to update
"""
for idx, output in enumerate(self.outputs["outputs"]):
if (
output["dataset_index"] == selection.dataset_index
and output["option_index"] == selection.option_index
):
session = self.get_session(idx)
session.frequentist.selected = selection.selected.deserialize(session)
self.outputs["outputs"][idx] = session.to_dict()
self.save()
break
def try_execute(self):
try:
self.execute()
except Exception:
err = traceback.format_exc()
self.handle_execution_error(err)
def try_run_session(
self, inputs: Dict, dataset_index: int, option_index: int
) -> AnalysisSessionSchema:
try:
return executor.AnalysisSession.run(inputs, dataset_index, option_index)
except Exception:
exception = AnalysisSessionSchema(
dataset_index=dataset_index, option_index=option_index, error=traceback.format_exc()
)
logger.error(f"{self.id}: {exception}")
return exception
def start_execute(self):
# update model to indicate execution scheduled
self.started = now()
self.ended = None
self.save()
# add to analysis queue...
tasks.try_execute.delay(str(self.id))
def execute(self):
# update start time to actual time started
self.started = now()
# build combinations based on enabled datasets
combinations = []
for dataset_index in range(len(self.inputs["datasets"])):
for option_index in range(len(self.inputs["options"])):
if self.inputs["dataset_options"][dataset_index]["enabled"]:
combinations.append((dataset_index, option_index))
outputs: list[AnalysisSessionSchema] = [
self.try_run_session(self.inputs, dataset_index, option_index)
for dataset_index, option_index in combinations
]
bmds_python_version = None
for output in outputs:
if output.frequentist is not None:
bmds_python_version = output.frequentist["version"]
break
if output.bayesian is not None:
bmds_python_version = output.bayesian["version"]
break
obj = AnalysisOutput(
analysis_id=str(self.id),
analysis_schema_version="1.0",
bmds_server_version=settings.COMMIT.sha,
bmds_python_version=bmds_python_version,
outputs=[output.dict() for output in outputs],
)
self.outputs = obj.dict()
self.errors = [output.error for output in outputs if output.error]
self.ended = now()
self.deletion_date = get_deletion_date()
self.save()
def reset_execution(self):
"""
Update all modeling results and execution fields to a state where the analysis
has not yet been executed.
"""
self.started = None
self.ended = None
self.outputs = {}
self.errors = {}
def handle_execution_error(self, err):
self.errors = err
self.ended = now()
self.deletion_date = None # don't delete; save for troubleshooting
self.save()
def default_input(self) -> Dict:
return {
"bmds_version": bmds.constants.BMDS330,
"dataset_type": Dtype.DICHOTOMOUS,
"datasets": [],
"models": {},
"dataset_options": [],
"options": [],
"recommender": RecommenderSettings.build_default().dict(),
}
def renew(self):
self.deletion_date = get_deletion_date(self.deletion_date)
def get_bmds_version(self) -> Optional[VersionSchema]:
if not self.is_finished or self.has_errors:
return None
return AnalysisOutput.parse_obj(self.outputs).bmds_python_version
@property
def deletion_date_str(self) -> Optional[str]:
if self.deletion_date is None:
return None
return self.deletion_date.strftime("%B %d, %Y")
@property
def days_until_deletion(self) -> Optional[int]:
if self.deletion_date is None:
return None
return (self.deletion_date - now()).days
class ContentType(models.IntegerChoices):
HOMEPAGE = 1
@reversion.register()
class Content(models.Model):
content_type = models.PositiveIntegerField(choices=ContentType.choices, unique=True)
subject = models.CharField(max_length=128)
content = models.JSONField(null=False)
created = models.DateTimeField(auto_now_add=True)
last_updated = models.DateTimeField(auto_now=True)
class Meta:
ordering = ("-created",)
def __str__(self) -> str:
return self.subject
def save(self, *args, **kwargs):
super().save(*args, **kwargs)
self.update_cache()
def update_cache(self) -> Dict:
key = self.cache_name(self.content_type)
cache.set(key, self.content, 3600) # cache for an hour
return self.content
@classmethod
def cache_name(cls, content_type: ContentType) -> str:
return f"{cls._meta.db_table}-{content_type}"
@classmethod
def get_cached_content(cls, content_type: ContentType) -> Dict:
key = cls.cache_name(content_type)
content = cache.get(key)
if content is None:
obj = cls.objects.get(content_type=content_type)
content = obj.update_cache()
return content
|
{
"content_hash": "2dd1a58dbcb58e67eac67a6d3a7d6899",
"timestamp": "",
"source": "github",
"line_count": 359,
"max_line_length": 100,
"avg_line_length": 34.53481894150418,
"alnum_prop": 0.6229230521051783,
"repo_name": "shapiromatron/bmds-server",
"id": "a31e96ec9ab554177d17aac0cd96322e478155af",
"size": "12398",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "bmds_server/analysis/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1738"
},
{
"name": "CSS",
"bytes": "888619"
},
{
"name": "Dockerfile",
"bytes": "1993"
},
{
"name": "HTML",
"bytes": "44101"
},
{
"name": "JavaScript",
"bytes": "282276"
},
{
"name": "Jupyter Notebook",
"bytes": "93852"
},
{
"name": "Makefile",
"bytes": "2703"
},
{
"name": "Python",
"bytes": "168194"
},
{
"name": "Shell",
"bytes": "8719"
}
],
"symlink_target": ""
}
|
"""SE2017 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
from django.contrib.auth import views as auth_views
from django.views.generic.base import TemplateView
from django.contrib.auth.models import User
from rest_framework import routers, serializers, viewsets
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = [
url(r'^', include('home.urls'), name='home'),
url(r'^login/', auth_views.login, {'template_name': 'Login/login.html'}, name='login'),
url(r'^logout/$', auth_views.logout, {'template_name': 'Login/login.html'}, name='logout'),
url(r'^admin/', admin.site.urls),
url(r'^student/', include('students.urls'), name='students'),
url(r'^faculty/', include('faculty.urls'), name='faculty'),
url(r'^api/',include('home.urls')),
]
if settings.DEBUG is True:
urlpatterns += static(settings.MEDIA_URL,document_root=settings.MEDIA_ROOT)
|
{
"content_hash": "57e0c86fe8584cccbe1fd0dd72da75e3",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 92,
"avg_line_length": 42.13513513513514,
"alnum_prop": 0.7126363053239256,
"repo_name": "SriHarshaGajavalli/SE2017",
"id": "02ac44363eb34f9d1c9c582468af7b919c83f294",
"size": "2046",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "SE2017/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "508067"
},
{
"name": "HTML",
"bytes": "1063419"
},
{
"name": "JavaScript",
"bytes": "1215944"
},
{
"name": "PowerShell",
"bytes": "471"
},
{
"name": "Python",
"bytes": "107100"
},
{
"name": "Ruby",
"bytes": "2373"
}
],
"symlink_target": ""
}
|
from flask import url_for
from flask_admin.consts import ICON_TYPE_GLYPH
from flask_babelex import lazy_gettext
from wtforms import ValidationError
from tests.fixture import run_as_admin
from tests.object_faker import object_faker
from psi.app.const import DIRECT_PO_TYPE_KEY, PO_ISSUED_STATUS_KEY, \
RECEIVING_DRAFT_STATUS_KEY, RECEIVING_COMPLETE_STATUS_KEY
from tests.base_test_case import BaseTestCase
class TestReceivingAdmin(BaseTestCase):
def test_delete_complete_receiving_not_allowed(self):
def test_logic():
from psi.app.models import Receiving, EnumValues
from psi.app.views import ReceivingAdmin
from psi.app.service import Info
receiving = Receiving()
complete_status = EnumValues.get(RECEIVING_COMPLETE_STATUS_KEY)
receiving.status = complete_status
db_session = Info.get_db().session
receiving_admin = ReceivingAdmin(Receiving, db_session, name=lazy_gettext("Receiving"),
category=lazy_gettext('Purchase'), menu_icon_type=ICON_TYPE_GLYPH,
menu_icon_value='glyphicon-import')
self.assertRaises(ValidationError, receiving_admin.on_model_delete, receiving)
run_as_admin(self.test_client, test_logic)
def test_receiving_pages(self):
from psi.app.models.enum_values import EnumValues
from psi.app.utils import db_util
def test_logic():
type = EnumValues.get(DIRECT_PO_TYPE_KEY)
status = EnumValues.get(PO_ISSUED_STATUS_KEY)
receiving_status = EnumValues.get(RECEIVING_DRAFT_STATUS_KEY)
date = object_faker.faker.date_time_this_year()
po = object_faker.purchase_order(number_of_line=2, type=type, status=status)
db_util.save_objects_commit(po)
remark = object_faker.faker.text(max_nb_chars=50)
list_expect = [receiving_status.display, date.strftime("%Y-%m-%d"), remark,
po.supplier.name, po.order_date.strftime("%Y-%m-%d"),
po.remark]
edit_expect = [receiving_status.display, date.strftime("%Y-%m-%d"), remark,]
# Crete new receiving
self.assertPageRendered(method=self.test_client.post,
data=dict(purchase_order=po.id,
status=receiving_status.id,
create_lines='y',
date=date, remark=remark),
endpoint=self.create_endpoint(view='receiving'),
expect_contents=list_expect)
self.assertPageRendered(expect_contents=edit_expect,
endpoint=self.edit_endpoint(view='receiving'))
# Edit existing receiving
new_remark = object_faker.faker.text(max_nb_chars=50)
new_receive_date = object_faker.faker.date_time_this_year()
complete_status = EnumValues.get(RECEIVING_COMPLETE_STATUS_KEY)
new_expected = [complete_status.display, new_receive_date.strftime("%Y-%m-%d"),
new_remark, po.supplier.name, po.order_date.strftime("%Y-%m-%d"),
po.remark]
self.assertPageRendered(method=self.test_client.post,
endpoint=self.edit_endpoint(view='receiving'),
data=dict(date=new_receive_date,
status=complete_status.id,
remark=new_remark),
expect_contents=new_expected)
# Detail page
self.assertPageRendered(method=self.test_client.get,
endpoint=self.details_endpoint(view='receiving'),
expect_contents=new_expected)
run_as_admin(self.test_client, test_logic)
def test_delete_completed_receiving_not_allowed(self):
from psi.app.models.enum_values import EnumValues
from psi.app.utils import db_util
def test_logic():
type = EnumValues.get(DIRECT_PO_TYPE_KEY)
status = EnumValues.get(PO_ISSUED_STATUS_KEY)
draft_status = EnumValues.get(RECEIVING_DRAFT_STATUS_KEY)
date = object_faker.faker.date_time_this_year()
po = object_faker.purchase_order(number_of_line=2, type=type,
status=status)
db_util.save_objects_commit(po)
remark = object_faker.faker.text(max_nb_chars=50)
# Crete new receiving
self.assertPageRendered(method=self.test_client.post,
data=dict(purchase_order=po.id,
status=draft_status.id,
create_lines='y',
date=date, remark=remark),
endpoint=self.create_endpoint(view='receiving'),)
# Change status to complete
new_remark = object_faker.faker.text(max_nb_chars=50)
new_receive_date = object_faker.faker.date_time_this_year()
complete_status = EnumValues.get(RECEIVING_COMPLETE_STATUS_KEY)
self.assertPageRendered(method=self.test_client.post,
endpoint=self.edit_endpoint(view='receiving'),
data=dict(date=new_receive_date,
status=complete_status.id,
remark=new_remark),)
# Should not delete existing receiving with complete status
endpoint = url_for('receiving.delete_view', id='1')
data = dict(url=url_for('receiving.index_view'), id='1')
rv = self.assertPageRendered(method=self.test_client.post,
endpoint=endpoint, data=data)
self.assertIn(complete_status.display.encode('utf-8'), rv.data)
self.assertIn(new_receive_date.strftime("%Y-%m-%d").encode('utf-8'), rv.data)
self.assertIn(new_remark.encode('utf-8'), rv.data)
self.assertIn(po.supplier.name.encode('utf-8'), rv.data)
self.assertIn(po.order_date.strftime("%Y-%m-%d").encode('utf-8'),rv.data)
self.assertIn(po.remark.encode('utf-8'), rv.data)
self.assertIn(b'You are not allowed to delete this object', rv.data)
run_as_admin(self.test_client, test_logic)
|
{
"content_hash": "86a771089e3a5c552edeb1a328434ca2",
"timestamp": "",
"source": "github",
"line_count": 127,
"max_line_length": 111,
"avg_line_length": 53.61417322834646,
"alnum_prop": 0.5514759876633867,
"repo_name": "betterlife/flask-psi",
"id": "8ec9982c8a9093468889acab4720352982a6cfdd",
"size": "6809",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/views/receiving_test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "8807"
},
{
"name": "HTML",
"bytes": "24618"
},
{
"name": "JavaScript",
"bytes": "12365"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "202168"
},
{
"name": "Shell",
"bytes": "726"
}
],
"symlink_target": ""
}
|
"""
VGGの学習済み重みを使わない
"""
from keras import backend as K
from keras.models import Sequential, Model, model_from_json
from keras.layers import Dense, Activation, Reshape, Flatten, Dropout, TimeDistributed, Input, merge, GaussianNoise, BatchNormalization
from keras.layers import LSTM
from keras.layers import Convolution2D, Deconvolution2D, MaxPooling2D, UpSampling2D
from keras.optimizers import Adam
from keras.objectives import categorical_crossentropy
from keras.regularizers import l2
from keras.models import model_from_yaml
from keras.utils import np_utils
from keras.initializations import normal, zero
from keras.callbacks import LearningRateScheduler, ModelCheckpoint
from keras.applications.vgg16 import VGG16
import pickle
import time
import numpy as np
import pickle
import matplotlib.pyplot as plt
from PIL import Image
from data_utils import *
from settings import *
def batch_generator(datapath='traindata_split.dump', batchsize=128, step=128, new_shape=[64,64]):
with open(datapath, mode='rb') as f:
data = pickle.load(f)
image = data['image']
label = data['label']
numData = image.shape[0]
idx = 0
# print(depthcolor.shape)
# print(np.max(depth))
K = np.max(image)
print(K)
while True:
if idx == 0:
perm1 = np.arange(batchsize * step)
np.random.shuffle(perm1)
x,y = np.random.randint(64, size=2)
image_crop = image[:,y:y+256,x:x+256].astype(np.float32)/255
label_crop = label[:,y:y+256,x:x+256].astype(np.float32)/255
batchx = image_crop[perm1[idx:idx + batchsize]]
batchx = np.transpose(batchx, [0, 3, 1, 2])
batchy = label_crop[perm1[idx:idx + batchsize]]
batchy = np.transpose(batchy, [0, 3, 1, 2])
# print(batchx1.shape)
# print(batchx2.shape)
# print(batchy.shape)
yield batchx, batchy
if idx + batchsize >= batchsize * step:
idx = 0
elif idx + batchsize >= image_crop.shape[0]:
idx = 0
else:
idx += batchsize
# parameters
threshold = 0.1
EPOCH = 10
BATCHSIZE = 8
NUM_DATA = 1747
size = [256,256]
num_batches = int(NUM_DATA / BATCHSIZE)
# load model
loadpath = DATA_DIR + 'weight/fc1e0'
f = open(loadpath+'.json', 'r')
json_string = f.read()
f.close()
train_model = model_from_json(json_string)
train_model.load_weights(loadpath+'_W.hdf5')
datapath = DATA_DIR + 'patches/traindata.pkl'
gen = batch_generator(datapath=datapath, batchsize=BATCHSIZE, step=num_batches, new_shape=size)
for epoch in range(EPOCH):
testdata = next(gen)
y = train_model.predict(testdata[0]) # [8, 5, 256, 256]
y = np.transpose(y, [0,2,3,1])
y = np.mean(y, axis=3)
print(y.shape)
y = np.minimum(1, y)
y = np.maximum(0, y)
image = testdata[0]
image = np.transpose(image, [0,2,3,1])
merge = image + y[:,:,:,np.newaxis]
merge = np.minimum(1, merge)
merge = np.maximum(0, merge)
image = (image*255).astype(np.uint8)
y = (y * 255).astype(np.uint8)
merge = (merge * 255).astype(np.uint8)
for i in range(BATCHSIZE):
plt.subplot(1,3,1)
plt.imshow(image[i])
plt.subplot(1,3,2)
plt.imshow(y[i])
plt.gray()
plt.subplot(1,3,3)
plt.imshow(merge[i])
plt.show()
# for i in range(8):
# plt.subplot(4,6, 3*i+1)
# plt.tick_params(axis='both', which='both', bottom='off', top='off', left='off', right='off', labelbottom='off',
# labelleft='off')
# depthcolor = testdata[0][0][i]
# # print(depthcolor.shape)
# depthcolor = np.transpose(depthcolor, [1,2,0])
# # print(depthcolor.shape, depthcolor.dtype)
# plt.imshow(depthcolor)
# plt.subplot(4,6,3*i+2)
# plt.tick_params(axis='both', which='both', bottom='off', top='off', left='off', right='off', labelbottom='off',
# labelleft='off')
# predict = y3[i]
# # print(predict.shape, predict.dtype)
# plt.imshow(predict)
# plt.subplot(4,6,3*i+3)
# plt.tick_params(axis='both', which='both', bottom='off', top='off', left='off', right='off', labelbottom='off',
# labelleft='off')
# groundtruth = testdata[1][i]
# label = np.zeros(size, dtype=np.uint8)
# for k in range(21):
# label[groundtruth[k] == 1] = k + 1
# # groundtruth = np.transpose(groundtruth, [1,2,0])
# # groundtruth = np.sum(groundtruth, axis=-1)
#
# # print(groundtruth.shape, groundtruth.dtype)
#
# plt.imshow(label)
#
# num = number_padded = '{0:04d}'.format(epoch)
# savepath = 'result/' + num + 'rand_f6e32.png'
# plt.savefig(savepath, dpi=300)
# # plt.show()
# plt.cla
|
{
"content_hash": "7f959d9cd100bca281235517da9adc15",
"timestamp": "",
"source": "github",
"line_count": 155,
"max_line_length": 135,
"avg_line_length": 31.206451612903226,
"alnum_prop": 0.6092619392185239,
"repo_name": "EggInTheShell/TodoCounting",
"id": "970941e0007bf5fe898b998ddf454c37e074a1e2",
"size": "4907",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "evaluate_fcn.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "69035"
}
],
"symlink_target": ""
}
|
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting model 'ProductAtLocation'
db.delete_table(u'locations_productatlocation')
# Adding M2M table for field products on 'SQLLocation'
db.create_table(u'locations_sqllocation_products', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('sqllocation', models.ForeignKey(orm[u'locations.sqllocation'], null=False)),
('sqlproduct', models.ForeignKey(orm[u'products.sqlproduct'], null=False))
))
db.create_unique(u'locations_sqllocation_products', ['sqllocation_id', 'sqlproduct_id'])
def backwards(self, orm):
# Adding model 'ProductAtLocation'
db.create_table(u'locations_productatlocation', (
('product', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['products.SQLProduct'])),
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('location', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['locations.SQLLocation'])),
))
db.send_create_signal(u'locations', ['ProductAtLocation'])
# Removing M2M table for field products on 'SQLLocation'
db.delete_table('locations_sqllocation_products')
models = {
u'locations.sqllocation': {
'Meta': {'unique_together': "(('domain', 'site_code'),)", 'object_name': 'SQLLocation'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'domain': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'external_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_archived': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'latitude': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '10'}),
u'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
u'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'location_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100', 'db_index': 'True'}),
'location_type': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'longitude': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '10'}),
'metadata': ('json_field.fields.JSONField', [], {'default': '{}'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': u"orm['locations.SQLLocation']"}),
'products': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['products.SQLProduct']", 'null': 'True', 'symmetrical': 'False'}),
u'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'site_code': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'supply_point_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'unique': 'True', 'null': 'True', 'db_index': 'True'}),
u'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
u'products.sqlproduct': {
'Meta': {'object_name': 'SQLProduct'},
'category': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'null': 'True'}),
'code': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'null': 'True'}),
'cost': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '5'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'null': 'True'}),
'domain': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_archived': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'product_data': ('json_field.fields.JSONField', [], {'default': '{}'}),
'product_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100', 'db_index': 'True'}),
'program_id': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'null': 'True'}),
'units': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'null': 'True'})
}
}
complete_apps = ['locations']
|
{
"content_hash": "403700cb3e4688962641f364fec00cd6",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 161,
"avg_line_length": 70.55696202531645,
"alnum_prop": 0.5739146035163258,
"repo_name": "puttarajubr/commcare-hq",
"id": "2667bf3c10e4fbe5d05d7ae6b0b30ffb9ba73b86",
"size": "5592",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "corehq/apps/locations/migrations/0005_auto__del_productatlocation.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ActionScript",
"bytes": "15950"
},
{
"name": "CSS",
"bytes": "581878"
},
{
"name": "HTML",
"bytes": "2790361"
},
{
"name": "JavaScript",
"bytes": "2572023"
},
{
"name": "Makefile",
"bytes": "3999"
},
{
"name": "Python",
"bytes": "11275678"
},
{
"name": "Shell",
"bytes": "23890"
}
],
"symlink_target": ""
}
|
from testframework.testcases.domain_mock import DomainMockingSuccessfulTestCase
from testframework.testcases.rest import RESTBasedSuccessfulTestCase
class FullLayerHarnessTestCase(RESTBasedSuccessfulTestCase,
DomainMockingSuccessfulTestCase):
pass
|
{
"content_hash": "338c7cd7829a009ec331b956bce543fa",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 79,
"avg_line_length": 40.42857142857143,
"alnum_prop": 0.8021201413427562,
"repo_name": "hsnlab/escape",
"id": "117bb8c2a8fbd822523da17f0d43734509fad121",
"size": "857",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "test/testframework/testcases/harness.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "381"
},
{
"name": "C",
"bytes": "9773701"
},
{
"name": "C++",
"bytes": "1144774"
},
{
"name": "Dockerfile",
"bytes": "4497"
},
{
"name": "HTML",
"bytes": "423218"
},
{
"name": "JavaScript",
"bytes": "9048"
},
{
"name": "Makefile",
"bytes": "121260"
},
{
"name": "Objective-C",
"bytes": "2964"
},
{
"name": "Python",
"bytes": "2856844"
},
{
"name": "Roff",
"bytes": "80820"
},
{
"name": "Shell",
"bytes": "190566"
}
],
"symlink_target": ""
}
|
import train as train
import time, random
import scipy.sparse
import pycrfsuite as crf
import helper
from nltk.tokenize import sent_tokenize, word_tokenize
from nltk.tag import pos_tag
import random
import text2num as t2n
def trainModel(holdback=-1):
## extract features
trainer = crf.Trainer(verbose=True)
for xseq, yseq in zip(trainX, trainY):
trainer.append(xseq, yseq, group = 0)
for xseq, yseq in zip(testX, testY):
trainer.append(xseq, yseq, group = 1)
trainer.set_params({
'c1': 2.0, # coefficient for L1 penalty
'c2': 1e-3, # coefficient for L2 penalty
# include transitions that are possible, but not observed
'max_iterations': 250, # stop earlier
'feature.possible_transitions': True,
'feature.possible_states': True,
})
trainer.train(trained_model, holdback)
return trainer
def predict():
tagger = crf.Tagger()
tagger.open(trained_model)
predictedY = []
confidences = []
confidences_beam = []
for xseq in testX:
yseq = tagger.tag(xseq)
predictedY.append(yseq)
confidences.append([tagger.marginal(yseq[i],i) for i in range(len(yseq))])
confidences_beam.append([ [tagger.marginal(tag, i) for tag in train.int2tags] for i in range(len(yseq))])
return predictedY, testY, confidences, confidences_beam, tagger.info()
def predict(article, trained_model):
tagger = crf.Tagger()
tagger.open(trained_model)
xseq = articleFeatureExtract(article)
yseq = tagger.tag(xseq)
confidences = [tagger.marginal(yseq[i],i) for i in range(len(yseq))]
confidences_beam = [ [tagger.marginal(tag, i) for tag in train.int2tags] for i in range(len(yseq))]
return yseq, confidences
def featureExtract(data, identifier, prev_n = 4, next_n = 4):
features = []
labels = []
int2tags = ["TAG"] + train.int2tags
for index in range(len(data)):
article = data[index][0]
article_labels = [int2tags[t] for t in data[index][1]]
article_features = articleFeatureExtract(article, prev_n, next_n)
features.append(article_features)
labels.append(article_labels)
return features, labels
def articleFeatureExtract(article, prev_n = 4, next_n = 4):
article_features = []
title_features = {}
labels = []
# if '.' in article:
# title = article[:article.index('.')]
# for i in range(len(title)):
# t = title[i]
# tf = {}
# tf[t] = 1
# title_features[t] = 1
for token_ind in range(len(article)):
token = article[token_ind]
context = {}
for i in range(max(0, token_ind - prev_n), min(token_ind + next_n, len(article))):
context_token = article[i]
context[context_token] =1
token_features = {}
token_features["context"] = context
# token_features["title"] = title_features
token_features["token"] = token
token_features[token] = 1
token_features["other"] = helper.getOtherFeatures(token)
article_features.append(token_features)
return article_features
if __name__ == '__main__':
##SCRIPT
print "reload helper"
reload(helper)
helper.load_constants()
print "end load helper"
retrain = True
if retrain:
num_blocks = 1
## num_blocks = 5
training_file = "../data/tagged_data/EMA/train.tag"
dev_file = "../data/tagged_data/EMA/dev.tag"
test_file = "../data/tagged_data/EMA/test.tag"
trained_model = "trained_model_crf.EMA.p"
print "load files"
train_data, train_identifier = train.load_data(training_file)
test_data, test_identifier = train.load_data(dev_file)
print "End load files"
prev_n = 2
next_n = 2
print "Start Feature extract on train set"
trainX, trainY = featureExtract(train_data,train_identifier, prev_n, next_n )
print "Done Feature extract on train set"
#trainX, trainY = featureExtract(dev_data, prev_n, next_n)
print "Start Feature extract on test set"
testX, testY = featureExtract(test_data, test_identifier, prev_n, next_n)
print "Done Feature extract on test set"
#testX, testY = featureExtract(train_data[split_index:], prev_n, next_n)
trainer = trainModel(1)
|
{
"content_hash": "44218c425184f03c3749ff3146c614ce",
"timestamp": "",
"source": "github",
"line_count": 134,
"max_line_length": 116,
"avg_line_length": 33.14179104477612,
"alnum_prop": 0.6163026345417699,
"repo_name": "adi-sharma/RLIE_A3C",
"id": "9dc134541857ecc6d1729b5b9c092d216a248840",
"size": "4441",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "code/train_crf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "33565"
},
{
"name": "OpenEdge ABL",
"bytes": "15916440"
},
{
"name": "Python",
"bytes": "161250"
},
{
"name": "Roff",
"bytes": "861498223"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Category',
fields=[
('id', models.AutoField(verbose_name='ID', auto_created=True, serialize=False, primary_key=True)),
('name', models.CharField(max_length=32, help_text='The name of the category')),
('slug', models.SlugField(blank=True, null=True)),
],
options={
'verbose_name_plural': 'Categories',
},
bases=(models.Model,),
),
]
|
{
"content_hash": "e7102003b4ce1365cf09bb06019c9dd9",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 114,
"avg_line_length": 28.708333333333332,
"alnum_prop": 0.5457184325108854,
"repo_name": "c-rhodes/hack2014",
"id": "e9ad478f42899df145cebc0ade2f219f4be04e69",
"size": "713",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hack2014/category/migrations/0001_initial.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "104715"
},
{
"name": "JavaScript",
"bytes": "2341"
},
{
"name": "Python",
"bytes": "46831"
},
{
"name": "Shell",
"bytes": "5099"
}
],
"symlink_target": ""
}
|
"""Segment Gettext PO, XLIFF and TMX localization files at the sentence level.
See: http://docs.translatehouse.org/projects/translate-toolkit/en/latest/commands/posegment.html
for examples and usage instructions.
"""
from translate.storage import factory
from translate.lang import factory as lang_factory
class segment:
def __init__(self, sourcelang, targetlang, stripspaces=True, onlyaligned=False):
self.sourcelang = sourcelang
self.targetlang = targetlang
self.stripspaces = stripspaces
self.onlyaligned = onlyaligned
def segmentunit(self, unit):
if unit.isheader() or unit.hasplural():
return [unit]
sourcesegments = self.sourcelang.sentences(unit.source, strip=self.stripspaces)
targetsegments = self.targetlang.sentences(unit.target, strip=self.stripspaces)
if unit.istranslated() and (len(sourcesegments) != len(targetsegments)):
if not self.onlyaligned:
return [unit]
else:
return None
# We could do more here to check if the lengths correspond more or less,
# certain quality checks are passed, etc. But for now this is a good
# start.
units = []
for i in range(len(sourcesegments)):
newunit = unit.copy()
newunit.source = sourcesegments[i]
if not unit.istranslated():
newunit.target = ""
else:
newunit.target = targetsegments[i]
units.append(newunit)
return units
def convertstore(self, fromstore):
tostore = type(fromstore)()
for unit in fromstore.units:
newunits = self.segmentunit(unit)
if newunits:
for newunit in newunits:
tostore.addunit(newunit)
return tostore
def segmentfile(inputfile, outputfile, templatefile, sourcelanguage="en", targetlanguage=None, stripspaces=True, onlyaligned=False):
"""reads in inputfile, segments it then, writes to outputfile"""
# note that templatefile is not used, but it is required by the converter...
inputstore = factory.getobject(inputfile)
if inputstore.isempty():
return 0
sourcelang = lang_factory.getlanguage(sourcelanguage)
targetlang = lang_factory.getlanguage(targetlanguage)
convertor = segment(sourcelang, targetlang, stripspaces=stripspaces, onlyaligned=onlyaligned)
outputstore = convertor.convertstore(inputstore)
outputfile.write(str(outputstore))
return 1
def main():
from translate.convert import convert
formats = {"po": ("po", segmentfile), "xlf": ("xlf", segmentfile), "tmx": ("tmx", segmentfile)}
parser = convert.ConvertOptionParser(formats, usepots=True, description=__doc__)
parser.add_option("-l", "--language", dest="targetlanguage", default=None,
help="the target language code", metavar="LANG")
parser.add_option("", "--source-language", dest="sourcelanguage", default=None,
help="the source language code (default 'en')", metavar="LANG")
parser.passthrough.append("sourcelanguage")
parser.passthrough.append("targetlanguage")
parser.add_option("", "--keepspaces", dest="stripspaces", action="store_false",
default=True, help="Disable automatic stripping of whitespace")
parser.passthrough.append("stripspaces")
parser.add_option("", "--only-aligned", dest="onlyaligned", action="store_true",
default=False, help="Removes units where sentence number does not correspond")
parser.passthrough.append("onlyaligned")
parser.run()
if __name__ == '__main__':
main()
|
{
"content_hash": "0c2e159907711bf7750bcfe83f5343c8",
"timestamp": "",
"source": "github",
"line_count": 87,
"max_line_length": 132,
"avg_line_length": 42.195402298850574,
"alnum_prop": 0.6635794061563607,
"repo_name": "DESHRAJ/fjord",
"id": "e3b1cc72a39ccb104c2646d19fa8d30ba7382879",
"size": "4450",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "vendor/packages/translate-toolkit/translate/tools/posegment.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "168457"
},
{
"name": "JavaScript",
"bytes": "299449"
},
{
"name": "Makefile",
"bytes": "4594"
},
{
"name": "Python",
"bytes": "709245"
},
{
"name": "Shell",
"bytes": "13991"
}
],
"symlink_target": ""
}
|
"""
UnionMatcher.py
Implements a matcher that matches a set of sub-matchers.
"""
from Matcher import Matcher
class UnionMatcher(Matcher):
"""
A matcher that matches a set of sub-matchers.
"""
def __init__(self, submatchers):
self._submatchers = submatchers
maxlength = max(submatchers, key=lambda x: x._buffer_length)._buffer_length
super(UnionMatcher, self).__init__(buffer_length = maxlength)
def __or__(self, other):
"""
Special version to cope with chains of unions.
>>> m = Matcher('dog') | Matcher('cat') | Matcher('mouse')
>>> len(m._submatchers)
3
>>> list(m("the dog chases the cat chases the mouse".split()))
['the', 'dog', dog{'token': 'dog'}, 'chases', 'the', 'cat', cat{'token': 'cat'}, 'chases', 'the', 'mouse', mouse{'token': 'mouse'}]
"""
submatchers = self._submatchers + [other]
return UnionMatcher(submatchers)
def _match(self, buf):
"""
Matches all the submatchers in parallel.
>>> m1 = Matcher("dog", "animal")
>>> m2 = Matcher("cat", "animal")
>>> m = UnionMatcher([m1,m2])
>>> list(m("the dog chases the cat".split()))
['the', 'dog', animal{'token': 'dog'}, 'chases', 'the', 'cat', animal{'token': 'cat'}]
"""
results = []
for m in self._submatchers:
results += m._match(buf)
return results
# Doctest magic
if __name__ == '__main__':
import doctest
doctest.testmod()
|
{
"content_hash": "840bd63b9ed5206a57f0b5889bf6d05e",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 135,
"avg_line_length": 27.80392156862745,
"alnum_prop": 0.5994358251057827,
"repo_name": "jgeldart/stolat",
"id": "3e1e78863df22ee00da8c7551802104bf1ba712b",
"size": "1418",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/stolat/UnionMatcher.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "21042"
}
],
"symlink_target": ""
}
|
from __future__ import print_function
from bcc import BPF, USDT
from time import sleep, strftime
import argparse
import re
import signal
import sys
import traceback
debug = False
class Probe(object):
def __init__(self, pattern, kernel_stack, user_stack, use_regex=False,
pid=None, per_pid=False):
"""Init a new probe.
Init the probe from the pattern provided by the user. The supported
patterns mimic the 'trace' and 'argdist' tools, but are simpler because
we don't have to distinguish between probes and retprobes.
func -- probe a kernel function
lib:func -- probe a user-space function in the library 'lib'
p::func -- same thing as 'func'
p:lib:func -- same thing as 'lib:func'
t:cat:event -- probe a kernel tracepoint
u:lib:probe -- probe a USDT tracepoint
"""
self.kernel_stack = kernel_stack
self.user_stack = user_stack
parts = pattern.split(':')
if len(parts) == 1:
parts = ["p", "", parts[0]]
elif len(parts) == 2:
parts = ["p", parts[0], parts[1]]
elif len(parts) == 3:
if parts[0] == "t":
parts = ["t", "", "%s:%s" % tuple(parts[1:])]
if parts[0] not in ["p", "t", "u"]:
raise Exception("Type must be 'p', 't', or 'u', but got %s" %
parts[0])
else:
raise Exception("Too many ':'-separated components in pattern %s" %
pattern)
(self.type, self.library, self.pattern) = parts
if not use_regex:
self.pattern = self.pattern.replace('*', '.*')
self.pattern = '^' + self.pattern + '$'
if (self.type == "p" and self.library) or self.type == "u":
libpath = BPF.find_library(self.library)
if libpath is None:
# This might be an executable (e.g. 'bash')
libpath = BPF.find_exe(self.library)
if libpath is None or len(libpath) == 0:
raise Exception("unable to find library %s" % self.library)
self.library = libpath
self.pid = pid
self.per_pid = per_pid
self.matched = 0
def is_kernel_probe(self):
return self.type == "t" or (self.type == "p" and self.library == "")
def attach(self):
if self.type == "p":
if self.library:
self.bpf.attach_uprobe(name=self.library,
sym_re=self.pattern,
fn_name="trace_count",
pid=self.pid or -1)
self.matched = self.bpf.num_open_uprobes()
else:
self.bpf.attach_kprobe(event_re=self.pattern,
fn_name="trace_count")
self.matched = self.bpf.num_open_kprobes()
elif self.type == "t":
self.bpf.attach_tracepoint(tp_re=self.pattern,
fn_name="trace_count")
self.matched = self.bpf.num_open_tracepoints()
elif self.type == "u":
pass # Nothing to do -- attach already happened in `load`
if self.matched == 0:
raise Exception("No functions matched by pattern %s" %
self.pattern)
def load(self):
ctx_name = "ctx"
stack_trace = ""
if self.user_stack:
stack_trace += """
key.user_stack_id = stack_traces.get_stackid(
%s, BPF_F_REUSE_STACKID | BPF_F_USER_STACK
);""" % (ctx_name)
else:
stack_trace += "key.user_stack_id = -1;"
if self.kernel_stack:
stack_trace += """
key.kernel_stack_id = stack_traces.get_stackid(
%s, BPF_F_REUSE_STACKID
);""" % (ctx_name)
else:
stack_trace += "key.kernel_stack_id = -1;"
trace_count_text = """
int trace_count(void *ctx) {
FILTER
struct key_t key = {};
key.tgid = GET_TGID;
STORE_COMM
%s
counts.increment(key);
return 0;
}
"""
trace_count_text = trace_count_text % (stack_trace)
bpf_text = """#include <uapi/linux/ptrace.h>
#include <linux/sched.h>
struct key_t {
// no pid (thread ID) so that we do not needlessly split this key
u32 tgid;
int kernel_stack_id;
int user_stack_id;
char name[TASK_COMM_LEN];
};
BPF_HASH(counts, struct key_t);
BPF_STACK_TRACE(stack_traces, 1024);
"""
# We really mean the tgid from the kernel's perspective, which is in
# the top 32 bits of bpf_get_current_pid_tgid().
if self.is_kernel_probe() and self.pid:
trace_count_text = trace_count_text.replace('FILTER',
('u32 pid; pid = bpf_get_current_pid_tgid() >> 32; ' +
'if (pid != %d) { return 0; }') % (self.pid))
else:
trace_count_text = trace_count_text.replace('FILTER', '')
# We need per-pid statistics when tracing a user-space process, because
# the meaning of the symbols depends on the pid. We also need them if
# per-pid statistics were requested with -P, or for user stacks.
if self.per_pid or not self.is_kernel_probe() or self.user_stack:
trace_count_text = trace_count_text.replace('GET_TGID',
'bpf_get_current_pid_tgid() >> 32')
trace_count_text = trace_count_text.replace('STORE_COMM',
'bpf_get_current_comm(&key.name, sizeof(key.name));')
else:
# kernel stacks only. skip splitting on PID so these aggregate
# together, and don't store the process name.
trace_count_text = trace_count_text.replace(
'GET_TGID', '0xffffffff')
trace_count_text = trace_count_text.replace('STORE_COMM', '')
self.usdt = None
if self.type == "u":
self.usdt = USDT(path=self.library, pid=self.pid)
for probe in self.usdt.enumerate_probes():
if not self.pid and (probe.bin_path != self.library):
continue
if re.match(self.pattern, probe.name):
# This hack is required because the bpf_usdt_readarg
# functions generated need different function names for
# each attached probe. If we just stick to trace_count,
# we'd get multiple bpf_usdt_readarg helpers with the same
# name when enabling more than one USDT probe.
new_func = "trace_count_%d" % self.matched
bpf_text += trace_count_text.replace(
"trace_count", new_func)
self.usdt.enable_probe(probe.name, new_func)
self.matched += 1
if debug:
print(self.usdt.get_text())
else:
bpf_text += trace_count_text
if debug:
print(bpf_text)
self.bpf = BPF(text=bpf_text,
usdt_contexts=[self.usdt] if self.usdt else [])
class Tool(object):
def __init__(self):
examples = """examples:
./stackcount submit_bio # count kernel stack traces for submit_bio
./stackcount -d ip_output # include a user/kernel stack delimiter
./stackcount -s ip_output # show symbol offsets
./stackcount -sv ip_output # show offsets and raw addresses (verbose)
./stackcount 'tcp_send*' # count stacks for funcs matching tcp_send*
./stackcount -r '^tcp_send.*' # same as above, using regular expressions
./stackcount -Ti 5 ip_output # output every 5 seconds, with timestamps
./stackcount -p 185 ip_output # count ip_output stacks for PID 185 only
./stackcount -p 185 c:malloc # count stacks for malloc in PID 185
./stackcount t:sched:sched_fork # count stacks for sched_fork tracepoint
./stackcount -p 185 u:node:* # count stacks for all USDT probes in node
./stackcount -K t:sched:sched_switch # kernel stacks only
./stackcount -U t:sched:sched_switch # user stacks only
"""
parser = argparse.ArgumentParser(
description="Count events and their stack traces",
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog=examples)
parser.add_argument("-p", "--pid", type=int,
help="trace this PID only")
parser.add_argument("-i", "--interval",
help="summary interval, seconds")
parser.add_argument("-D", "--duration",
help="total duration of trace, seconds")
parser.add_argument("-T", "--timestamp", action="store_true",
help="include timestamp on output")
parser.add_argument("-r", "--regexp", action="store_true",
help="use regular expressions. Default is \"*\" wildcards only.")
parser.add_argument("-s", "--offset", action="store_true",
help="show address offsets")
parser.add_argument("-P", "--perpid", action="store_true",
help="display stacks separately for each process")
parser.add_argument("-K", "--kernel-stacks-only",
action="store_true", help="kernel stack only", default=False)
parser.add_argument("-U", "--user-stacks-only",
action="store_true", help="user stack only", default=False)
parser.add_argument("-v", "--verbose", action="store_true",
help="show raw addresses")
parser.add_argument("-d", "--delimited", action="store_true",
help="insert delimiter between kernel/user stacks")
parser.add_argument("-f", "--folded", action="store_true",
help="output folded format")
parser.add_argument("--debug", action="store_true",
help="print BPF program before starting (for debugging purposes)")
parser.add_argument("pattern",
help="search expression for events")
self.args = parser.parse_args()
global debug
debug = self.args.debug
if self.args.duration and not self.args.interval:
self.args.interval = self.args.duration
if not self.args.interval:
self.args.interval = 99999999
if self.args.kernel_stacks_only and self.args.user_stacks_only:
print("ERROR: -K and -U are mutually exclusive. If you want " +
"both stacks, that is the default.")
exit()
if not self.args.kernel_stacks_only and not self.args.user_stacks_only:
self.kernel_stack = True
self.user_stack = True
else:
self.kernel_stack = self.args.kernel_stacks_only
self.user_stack = self.args.user_stacks_only
self.probe = Probe(self.args.pattern,
self.kernel_stack, self.user_stack,
self.args.regexp, self.args.pid, self.args.perpid)
self.need_delimiter = self.args.delimited and not (
self.args.kernel_stacks_only or self.args.user_stacks_only)
def _print_kframe(self, addr):
print(" ", end="")
if self.args.verbose:
print("%-16x " % addr, end="")
if self.args.offset:
print("%s" % self.probe.bpf.ksym(addr, show_offset=True))
else:
print("%s" % self.probe.bpf.ksym(addr))
def _print_uframe(self, addr, pid):
print(" ", end="")
if self.args.verbose:
print("%-16x " % addr, end="")
if self.args.offset:
print("%s" % self.probe.bpf.sym(addr, pid, show_offset=True))
else:
print("%s" % self.probe.bpf.sym(addr, pid))
@staticmethod
def _signal_ignore(signal, frame):
print()
def _print_comm(self, comm, pid):
print(" %s [%d]" % (comm, pid))
def run(self):
self.probe.load()
self.probe.attach()
if not self.args.folded:
print("Tracing %d functions for \"%s\"... Hit Ctrl-C to end." %
(self.probe.matched, self.args.pattern))
b = self.probe.bpf
exiting = 0 if self.args.interval else 1
seconds = 0
while True:
try:
sleep(int(self.args.interval))
seconds += int(self.args.interval)
except KeyboardInterrupt:
exiting = 1
# as cleanup can take many seconds, trap Ctrl-C:
signal.signal(signal.SIGINT, Tool._signal_ignore)
if self.args.duration and seconds >= int(self.args.duration):
exiting = 1
if not self.args.folded:
print()
if self.args.timestamp:
print("%-8s\n" % strftime("%H:%M:%S"), end="")
counts = self.probe.bpf["counts"]
stack_traces = self.probe.bpf["stack_traces"]
self.comm_cache = {}
for k, v in sorted(counts.items(),
key=lambda counts: counts[1].value):
user_stack = [] if k.user_stack_id < 0 else \
stack_traces.walk(k.user_stack_id)
kernel_stack = [] if k.kernel_stack_id < 0 else \
stack_traces.walk(k.kernel_stack_id)
if self.args.folded:
# print folded stack output
user_stack = list(user_stack)
kernel_stack = list(kernel_stack)
line = [k.name.decode('utf-8', 'replace')] + \
[b.sym(addr, k.tgid) for addr in
reversed(user_stack)] + \
(self.need_delimiter and ["-"] or []) + \
[b.ksym(addr) for addr in reversed(kernel_stack)]
print("%s %d" % (";".join(line), v.value))
else:
# print multi-line stack output
for addr in kernel_stack:
self._print_kframe(addr)
if self.need_delimiter:
print(" --")
for addr in user_stack:
self._print_uframe(addr, k.tgid)
if not self.args.pid and k.tgid != 0xffffffff:
self._print_comm(k.name, k.tgid)
print(" %d\n" % v.value)
counts.clear()
if exiting:
if not self.args.folded:
print("Detaching...")
exit()
if __name__ == "__main__":
try:
Tool().run()
except Exception:
if debug:
traceback.print_exc()
elif sys.exc_info()[0] is not SystemExit:
print(sys.exc_info()[1])
|
{
"content_hash": "ed48ee2ced9fcc707c6072938239120b",
"timestamp": "",
"source": "github",
"line_count": 356,
"max_line_length": 79,
"avg_line_length": 42.32865168539326,
"alnum_prop": 0.5247859844714314,
"repo_name": "mcaleavya/bcc",
"id": "9dfc06f116cf5cf14f1ec1011024fc4b4c54770a",
"size": "15702",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tools/stackcount.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "321768"
},
{
"name": "C++",
"bytes": "920975"
},
{
"name": "CMake",
"bytes": "38841"
},
{
"name": "HTML",
"bytes": "2979"
},
{
"name": "LLVM",
"bytes": "4379"
},
{
"name": "Limbo",
"bytes": "6069"
},
{
"name": "Lua",
"bytes": "298149"
},
{
"name": "Makefile",
"bytes": "1481"
},
{
"name": "P4",
"bytes": "9242"
},
{
"name": "Python",
"bytes": "1206933"
},
{
"name": "Shell",
"bytes": "17023"
},
{
"name": "Yacc",
"bytes": "19817"
}
],
"symlink_target": ""
}
|
from airflow.contrib.hooks.gcs_hook import GoogleCloudStorageHook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
from airflow.version import version
class GoogleCloudStorageCreateBucketOperator(BaseOperator):
"""
Creates a new bucket. Google Cloud Storage uses a flat namespace,
so you can't create a bucket with a name that is already in use.
.. seealso::
For more information, see Bucket Naming Guidelines:
https://cloud.google.com/storage/docs/bucketnaming.html#requirements
:param bucket_name: The name of the bucket. (templated)
:type bucket_name: str
:param resource: An optional dict with parameters for creating the bucket.
For information on available parameters, see Cloud Storage API doc:
https://cloud.google.com/storage/docs/json_api/v1/buckets/insert
:type resource: dict
:param storage_class: This defines how objects in the bucket are stored
and determines the SLA and the cost of storage (templated). Values include
- ``MULTI_REGIONAL``
- ``REGIONAL``
- ``STANDARD``
- ``NEARLINE``
- ``COLDLINE``.
If this value is not specified when the bucket is
created, it will default to STANDARD.
:type storage_class: str
:param location: The location of the bucket. (templated)
Object data for objects in the bucket resides in physical storage
within this region. Defaults to US.
.. seealso:: https://developers.google.com/storage/docs/bucket-locations
:type location: str
:param project_id: The ID of the GCP Project. (templated)
:type project_id: str
:param labels: User-provided labels, in key/value pairs.
:type labels: dict
:param google_cloud_storage_conn_id: The connection ID to use when
connecting to Google cloud storage.
:type google_cloud_storage_conn_id: str
:param delegate_to: The account to impersonate, if any.
For this to work, the service account making the request must
have domain-wide delegation enabled.
:type delegate_to: str
:Example::
The following Operator would create a new bucket ``test-bucket``
with ``MULTI_REGIONAL`` storage class in ``EU`` region ::
CreateBucket = GoogleCloudStorageCreateBucketOperator(
task_id='CreateNewBucket',
bucket_name='test-bucket',
storage_class='MULTI_REGIONAL',
location='EU',
labels={'env': 'dev', 'team': 'airflow'},
google_cloud_storage_conn_id='airflow-service-account'
)
"""
template_fields = ('bucket_name', 'storage_class',
'location', 'project_id')
ui_color = '#f0eee4'
@apply_defaults
def __init__(self,
bucket_name,
resource=None,
storage_class='MULTI_REGIONAL',
location='US',
project_id=None,
labels=None,
google_cloud_storage_conn_id='google_cloud_default',
delegate_to=None,
*args,
**kwargs):
super().__init__(*args, **kwargs)
self.bucket_name = bucket_name
self.resource = resource
self.storage_class = storage_class
self.location = location
self.project_id = project_id
self.labels = labels
self.google_cloud_storage_conn_id = google_cloud_storage_conn_id
self.delegate_to = delegate_to
def execute(self, context):
if self.labels is not None:
self.labels.update(
{'airflow-version': 'v' + version.replace('.', '-').replace('+', '-')}
)
hook = GoogleCloudStorageHook(
google_cloud_storage_conn_id=self.google_cloud_storage_conn_id,
delegate_to=self.delegate_to
)
hook.create_bucket(bucket_name=self.bucket_name,
resource=self.resource,
storage_class=self.storage_class,
location=self.location,
project_id=self.project_id,
labels=self.labels)
|
{
"content_hash": "6b9d4da0f84004170c654d77c75c2bcf",
"timestamp": "",
"source": "github",
"line_count": 110,
"max_line_length": 86,
"avg_line_length": 38.945454545454545,
"alnum_prop": 0.6057422969187675,
"repo_name": "r39132/airflow",
"id": "5b430f4304f9497b0fc0a25ffca77ccc676817a1",
"size": "5096",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "airflow/contrib/operators/gcs_operator.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "12126"
},
{
"name": "Dockerfile",
"bytes": "4111"
},
{
"name": "HTML",
"bytes": "128531"
},
{
"name": "JavaScript",
"bytes": "22118"
},
{
"name": "Mako",
"bytes": "1284"
},
{
"name": "Python",
"bytes": "5928206"
},
{
"name": "Shell",
"bytes": "41869"
}
],
"symlink_target": ""
}
|
survey_data['name'].value_counts()[:8]
|
{
"content_hash": "6d513bee8abfbe9c746b122aa75e46a5",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 38,
"avg_line_length": 38,
"alnum_prop": 0.6842105263157895,
"repo_name": "jorisvandenbossche/DS-python-data-analysis",
"id": "db2e83815d798498173da5b26ee25cda229ee311",
"size": "38",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "notebooks/_solutions/case2_observations30.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "12189"
},
{
"name": "Jupyter Notebook",
"bytes": "1637109"
},
{
"name": "Less",
"bytes": "78481"
},
{
"name": "Python",
"bytes": "56700"
},
{
"name": "SCSS",
"bytes": "79489"
},
{
"name": "Shell",
"bytes": "1787"
}
],
"symlink_target": ""
}
|
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import collections
import contextlib
import multiprocessing
import os
import subprocess
from pants.backend.jvm.tasks.jvm_task import JvmTask
from pants.base.exceptions import TaskError
from pants.binaries import binary_util
from pants.util.dirutil import safe_mkdir, safe_walk
Jvmdoc = collections.namedtuple('Jvmdoc', ['tool_name', 'product_type'])
class JvmdocGen(JvmTask):
@classmethod
def jvmdoc(cls):
"""Subclasses should return their Jvmdoc configuration."""
raise NotImplementedError()
@classmethod
def register_options(cls, register):
super(JvmdocGen, cls).register_options(register)
tool_name = cls.jvmdoc().tool_name
register('--include-codegen', default=False, action='store_true',
fingerprint=True,
help='Create {0} for generated code.'.format(tool_name))
register('--transitive', default=True, action='store_true',
fingerprint=True,
help='Create {0} for the transitive closure of internal targets reachable from the '
'roots specified on the command line.'.format(tool_name))
register('--combined', default=False, action='store_true',
fingerprint=True,
help='Generate {0} for all targets combined, instead of each target '
'individually.'.format(tool_name))
register('--open', default=False, action='store_true',
help='Open the generated {0} in a browser (implies --combined).'.format(tool_name))
register('--ignore-failure', default=False, action='store_true',
fingerprint=True,
help='Do not consider {0} errors to be build errors.'.format(tool_name))
# TODO(John Sirois): This supports the JarPublish task and is an abstraction leak.
# It allows folks doing a local-publish to skip an expensive and un-needed step.
# Remove this flag and instead support conditional requirements being registered against
# the round manager. This may require incremental or windowed flag parsing that happens bit by
# bit as tasks are recursively prepared vs. the current all-at once style.
register('--skip', default=False, action='store_true',
fingerprint=True,
help='Skip {0} generation.'.format(tool_name))
@classmethod
def product_types(cls):
return [cls.jvmdoc().product_type]
@classmethod
def prepare(cls, options, round_manager):
super(JvmdocGen, cls).prepare(options, round_manager)
# TODO(John Sirois): this is a fake requirement in order to force compile run before this
# goal. Introduce a RuntimeClasspath product for JvmCompile and PrepareResources to populate
# and depend on that.
# See: https://github.com/pantsbuild/pants/issues/310
round_manager.require_data('classes_by_target')
def __init__(self, *args, **kwargs):
super(JvmdocGen, self).__init__(*args, **kwargs)
options = self.get_options()
self._include_codegen = options.include_codegen
self.transitive = options.transitive
self.open = options.open
self.combined = self.open or options.combined
self.ignore_failure = options.ignore_failure
self.skip = options.skip
def generate_doc(self, language_predicate, create_jvmdoc_command):
"""
Generate an execute method given a language predicate and command to create documentation
language_predicate: a function that accepts a target and returns True if the target is of that
language
create_jvmdoc_command: (classpath, directory, *targets) -> command (string) that will generate
documentation documentation for targets
"""
if self.skip:
return
catalog = self.context.products.isrequired(self.jvmdoc().product_type)
if catalog and self.combined:
raise TaskError(
'Cannot provide {} target mappings for combined output'.format(self.jvmdoc().product_type))
def docable(tgt):
return language_predicate(tgt) and (self._include_codegen or not tgt.is_codegen)
targets = self.context.targets(predicate=docable)
if not targets:
return
with self.invalidated(targets) as invalidation_check:
safe_mkdir(self.workdir)
classpath = self.classpath(targets)
def find_jvmdoc_targets():
invalid_targets = set()
for vt in invalidation_check.invalid_vts:
invalid_targets.update(vt.targets)
if self.transitive:
return invalid_targets
else:
return set(invalid_targets).intersection(set(self.context.target_roots))
jvmdoc_targets = list(find_jvmdoc_targets())
if self.combined:
self._generate_combined(classpath, jvmdoc_targets, create_jvmdoc_command)
else:
self._generate_individual(classpath, jvmdoc_targets, create_jvmdoc_command)
if catalog:
for target in targets:
gendir = self._gendir(target)
jvmdocs = []
for root, dirs, files in safe_walk(gendir):
jvmdocs.extend(os.path.relpath(os.path.join(root, f), gendir) for f in files)
self.context.products.get(self.jvmdoc().product_type).add(target, gendir, jvmdocs)
def _generate_combined(self, classpath, targets, create_jvmdoc_command):
gendir = os.path.join(self.workdir, 'combined')
if targets:
safe_mkdir(gendir, clean=True)
command = create_jvmdoc_command(classpath, gendir, *targets)
if command:
self.context.log.debug("Running create_jvmdoc in {} with {}".format(gendir, " ".join(command)))
result, gendir = create_jvmdoc(command, gendir)
self._handle_create_jvmdoc_result(targets, result, command)
if self.open:
binary_util.ui_open(os.path.join(gendir, 'index.html'))
def _generate_individual(self, classpath, targets, create_jvmdoc_command):
jobs = {}
for target in targets:
gendir = self._gendir(target)
command = create_jvmdoc_command(classpath, gendir, target)
if command:
jobs[gendir] = (target, command)
if jobs:
with contextlib.closing(
multiprocessing.Pool(processes=min(len(jobs), multiprocessing.cpu_count()))) as pool:
# map would be a preferable api here but fails after the 1st batch with an internal:
# ...
# File "...src/python/pants/backend/core/tasks/jar_create.py", line 170, in javadocjar
# pool.map(createjar, jobs)
# File "...lib/python2.6/multiprocessing/pool.py", line 148, in map
# return self.map_async(func, iterable, chunksize).get()
# File "...lib/python2.6/multiprocessing/pool.py", line 422, in get
# raise self._value
# NameError: global name 'self' is not defined
futures = []
self.context.log.debug("Begin multiprocessing section; output may be misordered or garbled")
try:
for gendir, (target, command) in jobs.items():
self.context.log.debug("Running create_jvmdoc in {} with {}"
.format(gendir, " ".join(command)))
futures.append(pool.apply_async(create_jvmdoc, args=(command, gendir)))
for future in futures:
result, gendir = future.get()
target, command = jobs[gendir]
self._handle_create_jvmdoc_result([target], result, command)
finally:
# In the event of an exception, we want to call terminate() because otherwise
# we get errors on exit when multiprocessing tries to do it, because what
# is dead may never die.
pool.terminate()
self.context.log.debug("End multiprocessing section")
def _handle_create_jvmdoc_result(self, targets, result, command):
if result != 0:
targetlist = ", ".join(map(str, targets))
message = 'Failed to process {} for {} [{}]: {}'.format(
self.jvmdoc().tool_name, targetlist, result, command)
if self.ignore_failure:
self.context.log.warn(message)
else:
raise TaskError(message)
def _gendir(self, target):
return os.path.join(self.workdir, target.id)
def create_jvmdoc(command, gendir):
try:
safe_mkdir(gendir, clean=True)
process = subprocess.Popen(command)
result = process.wait()
return result, gendir
except OSError:
return 1, gendir
|
{
"content_hash": "0b67b2b44926b309a8602bbc3cfe4a3f",
"timestamp": "",
"source": "github",
"line_count": 210,
"max_line_length": 103,
"avg_line_length": 40.34285714285714,
"alnum_prop": 0.6612370160528801,
"repo_name": "sameerparekh/pants",
"id": "697f911a705dd7c1b9dfe5eef20df2cfc874c3ab",
"size": "8619",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/python/pants/backend/jvm/tasks/jvmdoc_gen.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "767"
},
{
"name": "CSS",
"bytes": "11442"
},
{
"name": "GAP",
"bytes": "2459"
},
{
"name": "Go",
"bytes": "1437"
},
{
"name": "HTML",
"bytes": "70150"
},
{
"name": "Java",
"bytes": "308102"
},
{
"name": "JavaScript",
"bytes": "25075"
},
{
"name": "Protocol Buffer",
"bytes": "3783"
},
{
"name": "Python",
"bytes": "3862954"
},
{
"name": "Scala",
"bytes": "85437"
},
{
"name": "Shell",
"bytes": "49265"
},
{
"name": "Thrift",
"bytes": "2858"
}
],
"symlink_target": ""
}
|
"""Functional test case that utilizes httplib2 against the API server"""
import datetime
import hashlib
import json
import tempfile
import httplib2
from glance.openstack.common import timeutils
from glance.tests import functional
from glance.tests.utils import skip_if_disabled, minimal_headers
FIVE_KB = 5 * 1024
FIVE_GB = 5 * 1024 * 1024 * 1024
class TestApi(functional.FunctionalTest):
"""Functional tests using httplib2 against the API server"""
@skip_if_disabled
def test_get_head_simple_post(self):
"""
We test the following sequential series of actions:
0. GET /images
- Verify no public images
1. GET /images/detail
- Verify no public images
2. POST /images with public image named Image1
and no custom properties
- Verify 201 returned
3. HEAD image
- Verify HTTP headers have correct information we just added
4. GET image
- Verify all information on image we just added is correct
5. GET /images
- Verify the image we just added is returned
6. GET /images/detail
- Verify the image we just added is returned
7. PUT image with custom properties of "distro" and "arch"
- Verify 200 returned
8. GET image
- Verify updated information about image was stored
9. PUT image
- Remove a previously existing property.
10. PUT image
- Add a previously deleted property.
"""
self.cleanup()
self.start_servers(**self.__dict__.copy())
# 0. GET /images
# Verify no public images
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
self.assertEqual(content, '{"images": []}')
# 1. GET /images/detail
# Verify no public images
path = "http://%s:%d/v1/images/detail" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
self.assertEqual(content, '{"images": []}')
# 2. POST /images with public image named Image1
# attribute and no custom properties. Verify a 200 OK is returned
image_data = "*" * FIVE_KB
headers = minimal_headers('Image1')
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'POST', headers=headers,
body=image_data)
self.assertEqual(response.status, 201)
data = json.loads(content)
image_id = data['image']['id']
self.assertEqual(data['image']['checksum'],
hashlib.md5(image_data).hexdigest())
self.assertEqual(data['image']['size'], FIVE_KB)
self.assertEqual(data['image']['name'], "Image1")
self.assertEqual(data['image']['is_public'], True)
# 3. HEAD image
# Verify image found now
path = "http://%s:%d/v1/images/%s" % ("127.0.0.1", self.api_port,
image_id)
http = httplib2.Http()
response, content = http.request(path, 'HEAD')
self.assertEqual(response.status, 200)
self.assertEqual(response['x-image-meta-name'], "Image1")
# 4. GET image
# Verify all information on image we just added is correct
path = "http://%s:%d/v1/images/%s" % ("127.0.0.1", self.api_port,
image_id)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
expected_image_headers = {
'x-image-meta-id': image_id,
'x-image-meta-name': 'Image1',
'x-image-meta-is_public': 'True',
'x-image-meta-status': 'active',
'x-image-meta-disk_format': 'raw',
'x-image-meta-container_format': 'ovf',
'x-image-meta-size': str(FIVE_KB)}
expected_std_headers = {
'content-length': str(FIVE_KB),
'content-type': 'application/octet-stream'}
for expected_key, expected_value in expected_image_headers.items():
self.assertEqual(response[expected_key], expected_value,
"For key '%s' expected header value '%s'. Got '%s'"
% (expected_key, expected_value,
response[expected_key]))
for expected_key, expected_value in expected_std_headers.items():
self.assertEqual(response[expected_key], expected_value,
"For key '%s' expected header value '%s'. Got '%s'"
% (expected_key,
expected_value,
response[expected_key]))
self.assertEqual(content, "*" * FIVE_KB)
self.assertEqual(hashlib.md5(content).hexdigest(),
hashlib.md5("*" * FIVE_KB).hexdigest())
# 5. GET /images
# Verify no public images
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
expected_result = {"images": [
{"container_format": "ovf",
"disk_format": "raw",
"id": image_id,
"name": "Image1",
"checksum": "c2e5db72bd7fd153f53ede5da5a06de3",
"size": 5120}]}
self.assertEqual(json.loads(content), expected_result)
# 6. GET /images/detail
# Verify image and all its metadata
path = "http://%s:%d/v1/images/detail" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
expected_image = {
"status": "active",
"name": "Image1",
"deleted": False,
"container_format": "ovf",
"disk_format": "raw",
"id": image_id,
"is_public": True,
"deleted_at": None,
"properties": {},
"size": 5120}
image = json.loads(content)
for expected_key, expected_value in expected_image.items():
self.assertEqual(expected_value, image['images'][0][expected_key],
"For key '%s' expected header value '%s'. Got '%s'"
% (expected_key,
expected_value,
image['images'][0][expected_key]))
# 7. PUT image with custom properties of "distro" and "arch"
# Verify 200 returned
headers = {'X-Image-Meta-Property-Distro': 'Ubuntu',
'X-Image-Meta-Property-Arch': 'x86_64'}
path = "http://%s:%d/v1/images/%s" % ("127.0.0.1", self.api_port,
image_id)
http = httplib2.Http()
response, content = http.request(path, 'PUT', headers=headers)
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(data['image']['properties']['arch'], "x86_64")
self.assertEqual(data['image']['properties']['distro'], "Ubuntu")
# 8. GET /images/detail
# Verify image and all its metadata
path = "http://%s:%d/v1/images/detail" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
expected_image = {
"status": "active",
"name": "Image1",
"deleted": False,
"container_format": "ovf",
"disk_format": "raw",
"id": image_id,
"is_public": True,
"deleted_at": None,
"properties": {'distro': 'Ubuntu', 'arch': 'x86_64'},
"size": 5120}
image = json.loads(content)
for expected_key, expected_value in expected_image.items():
self.assertEqual(expected_value, image['images'][0][expected_key],
"For key '%s' expected header value '%s'. Got '%s'"
% (expected_key,
expected_value,
image['images'][0][expected_key]))
# 9. PUT image and remove a previously existing property.
headers = {'X-Image-Meta-Property-Arch': 'x86_64'}
path = "http://%s:%d/v1/images/%s" % ("127.0.0.1", self.api_port,
image_id)
http = httplib2.Http()
response, content = http.request(path, 'PUT', headers=headers)
self.assertEqual(response.status, 200)
path = "http://%s:%d/v1/images/detail" % ("127.0.0.1", self.api_port)
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)['images'][0]
self.assertEqual(len(data['properties']), 1)
self.assertEqual(data['properties']['arch'], "x86_64")
# 10. PUT image and add a previously deleted property.
headers = {'X-Image-Meta-Property-Distro': 'Ubuntu',
'X-Image-Meta-Property-Arch': 'x86_64'}
path = "http://%s:%d/v1/images/%s" % ("127.0.0.1", self.api_port,
image_id)
http = httplib2.Http()
response, content = http.request(path, 'PUT', headers=headers)
self.assertEqual(response.status, 200)
data = json.loads(content)
path = "http://%s:%d/v1/images/detail" % ("127.0.0.1", self.api_port)
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)['images'][0]
self.assertEqual(len(data['properties']), 2)
self.assertEqual(data['properties']['arch'], "x86_64")
self.assertEqual(data['properties']['distro'], "Ubuntu")
self.assertNotEqual(data['created_at'], data['updated_at'])
# DELETE image
path = "http://%s:%d/v1/images/%s" % ("127.0.0.1", self.api_port,
image_id)
http = httplib2.Http()
response, content = http.request(path, 'DELETE')
self.assertEqual(response.status, 200)
self.stop_servers()
@skip_if_disabled
def test_queued_process_flow(self):
"""
We test the process flow where a user registers an image
with Glance but does not immediately upload an image file.
Later, the user uploads an image file using a PUT operation.
We track the changing of image status throughout this process.
0. GET /images
- Verify no public images
1. POST /images with public image named Image1 with no location
attribute and no image data.
- Verify 201 returned
2. GET /images
- Verify one public image
3. HEAD image
- Verify image now in queued status
4. PUT image with image data
- Verify 200 returned
5. HEAD images
- Verify image now in active status
6. GET /images
- Verify one public image
"""
self.cleanup()
self.start_servers(**self.__dict__.copy())
# 0. GET /images
# Verify no public images
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
self.assertEqual(content, '{"images": []}')
# 1. POST /images with public image named Image1
# with no location or image data
headers = minimal_headers('Image1')
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
data = json.loads(content)
self.assertEqual(data['image']['checksum'], None)
self.assertEqual(data['image']['size'], 0)
self.assertEqual(data['image']['container_format'], 'ovf')
self.assertEqual(data['image']['disk_format'], 'raw')
self.assertEqual(data['image']['name'], "Image1")
self.assertEqual(data['image']['is_public'], True)
image_id = data['image']['id']
# 2. GET /images
# Verify 1 public image
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(data['images'][0]['id'], image_id)
self.assertEqual(data['images'][0]['checksum'], None)
self.assertEqual(data['images'][0]['size'], 0)
self.assertEqual(data['images'][0]['container_format'], 'ovf')
self.assertEqual(data['images'][0]['disk_format'], 'raw')
self.assertEqual(data['images'][0]['name'], "Image1")
# 3. HEAD /images
# Verify status is in queued
path = "http://%s:%d/v1/images/%s" % ("127.0.0.1", self.api_port,
image_id)
http = httplib2.Http()
response, content = http.request(path, 'HEAD')
self.assertEqual(response.status, 200)
self.assertEqual(response['x-image-meta-name'], "Image1")
self.assertEqual(response['x-image-meta-status'], "queued")
self.assertEqual(response['x-image-meta-size'], '0')
self.assertEqual(response['x-image-meta-id'], image_id)
# 4. PUT image with image data, verify 200 returned
image_data = "*" * FIVE_KB
headers = {'Content-Type': 'application/octet-stream'}
path = "http://%s:%d/v1/images/%s" % ("127.0.0.1", self.api_port,
image_id)
http = httplib2.Http()
response, content = http.request(path, 'PUT', headers=headers,
body=image_data)
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(data['image']['checksum'],
hashlib.md5(image_data).hexdigest())
self.assertEqual(data['image']['size'], FIVE_KB)
self.assertEqual(data['image']['name'], "Image1")
self.assertEqual(data['image']['is_public'], True)
# 5. HEAD /images
# Verify status is in active
path = "http://%s:%d/v1/images/%s" % ("127.0.0.1", self.api_port,
image_id)
http = httplib2.Http()
response, content = http.request(path, 'HEAD')
self.assertEqual(response.status, 200)
self.assertEqual(response['x-image-meta-name'], "Image1")
self.assertEqual(response['x-image-meta-status'], "active")
# 6. GET /images
# Verify 1 public image still...
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(data['images'][0]['checksum'],
hashlib.md5(image_data).hexdigest())
self.assertEqual(data['images'][0]['id'], image_id)
self.assertEqual(data['images'][0]['size'], FIVE_KB)
self.assertEqual(data['images'][0]['container_format'], 'ovf')
self.assertEqual(data['images'][0]['disk_format'], 'raw')
self.assertEqual(data['images'][0]['name'], "Image1")
# DELETE image
path = "http://%s:%d/v1/images/%s" % ("127.0.0.1", self.api_port,
image_id)
http = httplib2.Http()
response, content = http.request(path, 'DELETE')
self.assertEqual(response.status, 200)
self.stop_servers()
@skip_if_disabled
def test_size_greater_2G_mysql(self):
"""
A test against the actual datastore backend for the registry
to ensure that the image size property is not truncated.
:see https://bugs.launchpad.net/glance/+bug/739433
"""
self.cleanup()
self.start_servers(**self.__dict__.copy())
# 1. POST /images with public image named Image1
# attribute and a size of 5G. Use the HTTP engine with an
# X-Image-Meta-Location attribute to make Glance forego
# "adding" the image data.
# Verify a 201 OK is returned
headers = {'Content-Type': 'application/octet-stream',
'X-Image-Meta-Location': 'http://example.com/fakeimage',
'X-Image-Meta-Size': str(FIVE_GB),
'X-Image-Meta-Name': 'Image1',
'X-Image-Meta-disk_format': 'raw',
'X-image-Meta-container_format': 'ovf',
'X-Image-Meta-Is-Public': 'True'}
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
# 2. HEAD /images
# Verify image size is what was passed in, and not truncated
path = response.get('location')
http = httplib2.Http()
response, content = http.request(path, 'HEAD')
self.assertEqual(response.status, 200)
self.assertEqual(response['x-image-meta-size'], str(FIVE_GB))
self.assertEqual(response['x-image-meta-name'], 'Image1')
self.assertEqual(response['x-image-meta-is_public'], 'True')
self.stop_servers()
@skip_if_disabled
def test_v1_not_enabled(self):
self.cleanup()
self.api_server.enable_v1_api = False
self.start_servers(**self.__dict__.copy())
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 300)
self.stop_servers()
@skip_if_disabled
def test_v1_enabled(self):
self.cleanup()
self.api_server.enable_v1_api = True
self.start_servers(**self.__dict__.copy())
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
self.stop_servers()
@skip_if_disabled
def test_zero_initial_size(self):
"""
A test to ensure that an image with size explicitly set to zero
has status that immediately transitions to active.
"""
self.cleanup()
self.start_servers(**self.__dict__.copy())
# 1. POST /images with public image named Image1
# attribute and a size of zero.
# Verify a 201 OK is returned
headers = {'Content-Type': 'application/octet-stream',
'X-Image-Meta-Size': '0',
'X-Image-Meta-Name': 'Image1',
'X-Image-Meta-disk_format': 'raw',
'X-image-Meta-container_format': 'ovf',
'X-Image-Meta-Is-Public': 'True'}
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
# 2. HEAD image-location
# Verify image size is zero and the status is active
path = response.get('location')
http = httplib2.Http()
response, content = http.request(path, 'HEAD')
self.assertEqual(response.status, 200)
self.assertEqual(response['x-image-meta-size'], '0')
self.assertEqual(response['x-image-meta-status'], 'active')
# 3. GET image-location
# Verify image content is empty
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
self.assertEqual(len(content), 0)
self.stop_servers()
@skip_if_disabled
def test_traceback_not_consumed(self):
"""
A test that errors coming from the POST API do not
get consumed and print the actual error message, and
not something like <traceback object at 0x1918d40>
:see https://bugs.launchpad.net/glance/+bug/755912
"""
self.cleanup()
self.start_servers(**self.__dict__.copy())
# POST /images with binary data, but not setting
# Content-Type to application/octet-stream, verify a
# 400 returned and that the error is readable.
with tempfile.NamedTemporaryFile() as test_data_file:
test_data_file.write("XXX")
test_data_file.flush()
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
headers = minimal_headers('Image1')
headers['Content-Type'] = 'not octet-stream'
response, content = http.request(path, 'POST',
body=test_data_file.name,
headers=headers)
self.assertEqual(response.status, 400)
expected = "Content-Type must be application/octet-stream"
self.assertTrue(expected in content,
"Could not find '%s' in '%s'" % (expected, content))
self.stop_servers()
@skip_if_disabled
def test_filtered_images(self):
"""
Set up four test images and ensure each query param filter works
"""
self.cleanup()
self.start_servers(**self.__dict__.copy())
# 0. GET /images
# Verify no public images
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
self.assertEqual(content, '{"images": []}')
image_ids = []
# 1. POST /images with three public images, and one private image
# with various attributes
headers = {'Content-Type': 'application/octet-stream',
'X-Image-Meta-Name': 'Image1',
'X-Image-Meta-Status': 'active',
'X-Image-Meta-Container-Format': 'ovf',
'X-Image-Meta-Disk-Format': 'vdi',
'X-Image-Meta-Size': '19',
'X-Image-Meta-Is-Public': 'True',
'X-Image-Meta-Protected': 'True',
'X-Image-Meta-Property-pants': 'are on'}
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
data = json.loads(content)
self.assertEqual(data['image']['properties']['pants'], "are on")
self.assertEqual(data['image']['is_public'], True)
image_ids.append(data['image']['id'])
headers = {'Content-Type': 'application/octet-stream',
'X-Image-Meta-Name': 'My Image!',
'X-Image-Meta-Status': 'active',
'X-Image-Meta-Container-Format': 'ovf',
'X-Image-Meta-Disk-Format': 'vhd',
'X-Image-Meta-Size': '20',
'X-Image-Meta-Is-Public': 'True',
'X-Image-Meta-Protected': 'False',
'X-Image-Meta-Property-pants': 'are on'}
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
data = json.loads(content)
self.assertEqual(data['image']['properties']['pants'], "are on")
self.assertEqual(data['image']['is_public'], True)
image_ids.append(data['image']['id'])
headers = {'Content-Type': 'application/octet-stream',
'X-Image-Meta-Name': 'My Image!',
'X-Image-Meta-Status': 'saving',
'X-Image-Meta-Container-Format': 'ami',
'X-Image-Meta-Disk-Format': 'ami',
'X-Image-Meta-Size': '21',
'X-Image-Meta-Is-Public': 'True',
'X-Image-Meta-Protected': 'False',
'X-Image-Meta-Property-pants': 'are off'}
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
data = json.loads(content)
self.assertEqual(data['image']['properties']['pants'], "are off")
self.assertEqual(data['image']['is_public'], True)
image_ids.append(data['image']['id'])
headers = {'Content-Type': 'application/octet-stream',
'X-Image-Meta-Name': 'My Private Image',
'X-Image-Meta-Status': 'active',
'X-Image-Meta-Container-Format': 'ami',
'X-Image-Meta-Disk-Format': 'ami',
'X-Image-Meta-Size': '22',
'X-Image-Meta-Is-Public': 'False',
'X-Image-Meta-Protected': 'False'}
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
data = json.loads(content)
self.assertEqual(data['image']['is_public'], False)
image_ids.append(data['image']['id'])
# 2. GET /images
# Verify three public images
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 3)
# 3. GET /images with name filter
# Verify correct images returned with name
params = "name=My%20Image!"
path = "http://%s:%d/v1/images?%s" % (
"127.0.0.1", self.api_port, params)
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 2)
for image in data['images']:
self.assertEqual(image['name'], "My Image!")
# 4. GET /images with status filter
# Verify correct images returned with status
params = "status=queued"
path = "http://%s:%d/v1/images/detail?%s" % (
"127.0.0.1", self.api_port, params)
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 3)
for image in data['images']:
self.assertEqual(image['status'], "queued")
params = "status=active"
path = "http://%s:%d/v1/images/detail?%s" % (
"127.0.0.1", self.api_port, params)
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 0)
# 5. GET /images with container_format filter
# Verify correct images returned with container_format
params = "container_format=ovf"
path = "http://%s:%d/v1/images?%s" % (
"127.0.0.1", self.api_port, params)
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 2)
for image in data['images']:
self.assertEqual(image['container_format'], "ovf")
# 6. GET /images with disk_format filter
# Verify correct images returned with disk_format
params = "disk_format=vdi"
path = "http://%s:%d/v1/images?%s" % (
"127.0.0.1", self.api_port, params)
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 1)
for image in data['images']:
self.assertEqual(image['disk_format'], "vdi")
# 7. GET /images with size_max filter
# Verify correct images returned with size <= expected
params = "size_max=20"
path = "http://%s:%d/v1/images?%s" % (
"127.0.0.1", self.api_port, params)
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 2)
for image in data['images']:
self.assertTrue(image['size'] <= 20)
# 8. GET /images with size_min filter
# Verify correct images returned with size >= expected
params = "size_min=20"
path = "http://%s:%d/v1/images?%s" % (
"127.0.0.1", self.api_port, params)
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 2)
for image in data['images']:
self.assertTrue(image['size'] >= 20)
# 9. Get /images with is_public=None filter
# Verify correct images returned with property
# Bug lp:803656 Support is_public in filtering
params = "is_public=None"
path = "http://%s:%d/v1/images?%s" % (
"127.0.0.1", self.api_port, params)
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 4)
# 10. Get /images with is_public=False filter
# Verify correct images returned with property
# Bug lp:803656 Support is_public in filtering
params = "is_public=False"
path = "http://%s:%d/v1/images?%s" % (
"127.0.0.1", self.api_port, params)
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 1)
for image in data['images']:
self.assertEqual(image['name'], "My Private Image")
# 11. Get /images with is_public=True filter
# Verify correct images returned with property
# Bug lp:803656 Support is_public in filtering
params = "is_public=True"
path = "http://%s:%d/v1/images?%s" % (
"127.0.0.1", self.api_port, params)
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 3)
for image in data['images']:
self.assertNotEqual(image['name'], "My Private Image")
# 12. Get /images with protected=False filter
# Verify correct images returned with property
params = "protected=False"
path = "http://%s:%d/v1/images?%s" % (
"127.0.0.1", self.api_port, params)
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 2)
for image in data['images']:
self.assertNotEqual(image['name'], "Image1")
# 13. Get /images with protected=True filter
# Verify correct images returned with property
params = "protected=True"
path = "http://%s:%d/v1/images?%s" % (
"127.0.0.1", self.api_port, params)
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 1)
for image in data['images']:
self.assertEqual(image['name'], "Image1")
# 14. GET /images with property filter
# Verify correct images returned with property
params = "property-pants=are%20on"
path = "http://%s:%d/v1/images/detail?%s" % (
"127.0.0.1", self.api_port, params)
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 2)
for image in data['images']:
self.assertEqual(image['properties']['pants'], "are on")
# 15. GET /images with property filter and name filter
# Verify correct images returned with property and name
# Make sure you quote the url when using more than one param!
params = "name=My%20Image!&property-pants=are%20on"
path = "http://%s:%d/v1/images/detail?%s" % (
"127.0.0.1", self.api_port, params)
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 1)
for image in data['images']:
self.assertEqual(image['properties']['pants'], "are on")
self.assertEqual(image['name'], "My Image!")
# 16. GET /images with past changes-since filter
yesterday = timeutils.isotime(timeutils.utcnow() -
datetime.timedelta(1))
params = "changes-since=%s" % yesterday
path = "http://%s:%d/v1/images?%s" % ("127.0.0.1", self.api_port,
params)
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 3)
# one timezone west of Greenwich equates to an hour ago
# taking care to pre-urlencode '+' as '%2B', otherwise the timezone
# '+' is wrongly decoded as a space
# TODO(eglynn): investigate '+' --> <SPACE> decoding, an artifact
# of WSGI/webob dispatch?
now = timeutils.utcnow()
hour_ago = now.strftime('%Y-%m-%dT%H:%M:%S%%2B01:00')
params = "changes-since=%s" % hour_ago
path = "http://%s:%d/v1/images?%s" % ("127.0.0.1", self.api_port,
params)
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 3)
# 17. GET /images with future changes-since filter
tomorrow = timeutils.isotime(timeutils.utcnow() +
datetime.timedelta(1))
params = "changes-since=%s" % tomorrow
path = "http://%s:%d/v1/images?%s" % ("127.0.0.1", self.api_port,
params)
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 0)
# one timezone east of Greenwich equates to an hour from now
now = timeutils.utcnow()
hour_hence = now.strftime('%Y-%m-%dT%H:%M:%S-01:00')
params = "changes-since=%s" % hour_hence
path = "http://%s:%d/v1/images?%s" % ("127.0.0.1", self.api_port,
params)
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 0)
# 18. GET /images with size_min filter
# Verify correct images returned with size >= expected
params = "size_min=-1"
path = "http://%s:%d/v1/images?%s" % (
"127.0.0.1", self.api_port, params)
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 400)
self.assertTrue("filter size_min got -1" in content)
# 19. GET /images with size_min filter
# Verify correct images returned with size >= expected
params = "size_max=-1"
path = "http://%s:%d/v1/images?%s" % (
"127.0.0.1", self.api_port, params)
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 400)
self.assertTrue("filter size_max got -1" in content)
# 20. GET /images with size_min filter
# Verify correct images returned with size >= expected
params = "min_ram=-1"
path = "http://%s:%d/v1/images?%s" % (
"127.0.0.1", self.api_port, params)
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 400)
self.assertTrue("Bad value passed to filter min_ram got -1" in content)
# 21. GET /images with size_min filter
# Verify correct images returned with size >= expected
params = "protected=imalittleteapot"
path = "http://%s:%d/v1/images?%s" % (
"127.0.0.1", self.api_port, params)
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 400)
self.assertTrue("protected got imalittleteapot" in content)
# 22. GET /images with size_min filter
# Verify correct images returned with size >= expected
params = "is_public=imalittleteapot"
path = "http://%s:%d/v1/images?%s" % (
"127.0.0.1", self.api_port, params)
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 400)
self.assertTrue("is_public got imalittleteapot" in content)
self.stop_servers()
@skip_if_disabled
def test_limited_images(self):
"""
Ensure marker and limit query params work
"""
self.cleanup()
self.start_servers(**self.__dict__.copy())
# 0. GET /images
# Verify no public images
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
self.assertEqual(content, '{"images": []}')
image_ids = []
# 1. POST /images with three public images with various attributes
headers = minimal_headers('Image1')
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
image_ids.append(json.loads(content)['image']['id'])
headers = minimal_headers('Image2')
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
image_ids.append(json.loads(content)['image']['id'])
headers = minimal_headers('Image3')
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
image_ids.append(json.loads(content)['image']['id'])
# 2. GET /images with all images
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
images = json.loads(content)['images']
self.assertEqual(len(images), 3)
# 3. GET /images with limit of 2
# Verify only two images were returned
params = "limit=2"
path = "http://%s:%d/v1/images?%s" % (
"127.0.0.1", self.api_port, params)
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)['images']
self.assertEqual(len(data), 2)
self.assertEqual(data[0]['id'], images[0]['id'])
self.assertEqual(data[1]['id'], images[1]['id'])
# 4. GET /images with marker
# Verify only two images were returned
params = "marker=%s" % images[0]['id']
path = "http://%s:%d/v1/images?%s" % (
"127.0.0.1", self.api_port, params)
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)['images']
self.assertEqual(len(data), 2)
self.assertEqual(data[0]['id'], images[1]['id'])
self.assertEqual(data[1]['id'], images[2]['id'])
# 5. GET /images with marker and limit
# Verify only one image was returned with the correct id
params = "limit=1&marker=%s" % images[1]['id']
path = "http://%s:%d/v1/images?%s" % (
"127.0.0.1", self.api_port, params)
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)['images']
self.assertEqual(len(data), 1)
self.assertEqual(data[0]['id'], images[2]['id'])
# 6. GET /images/detail with marker and limit
# Verify only one image was returned with the correct id
params = "limit=1&marker=%s" % images[1]['id']
path = "http://%s:%d/v1/images?%s" % (
"127.0.0.1", self.api_port, params)
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)['images']
self.assertEqual(len(data), 1)
self.assertEqual(data[0]['id'], images[2]['id'])
# DELETE images
for image_id in image_ids:
path = "http://%s:%d/v1/images/%s" % ("127.0.0.1", self.api_port,
image_id)
http = httplib2.Http()
response, content = http.request(path, 'DELETE')
self.assertEqual(response.status, 200)
self.stop_servers()
@skip_if_disabled
def test_ordered_images(self):
"""
Set up three test images and ensure each query param filter works
"""
self.cleanup()
self.start_servers(**self.__dict__.copy())
# 0. GET /images
# Verify no public images
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
self.assertEqual(content, '{"images": []}')
# 1. POST /images with three public images with various attributes
image_ids = []
headers = {'Content-Type': 'application/octet-stream',
'X-Image-Meta-Name': 'Image1',
'X-Image-Meta-Status': 'active',
'X-Image-Meta-Container-Format': 'ovf',
'X-Image-Meta-Disk-Format': 'vdi',
'X-Image-Meta-Size': '19',
'X-Image-Meta-Is-Public': 'True'}
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
image_ids.append(json.loads(content)['image']['id'])
headers = {'Content-Type': 'application/octet-stream',
'X-Image-Meta-Name': 'ASDF',
'X-Image-Meta-Status': 'active',
'X-Image-Meta-Container-Format': 'bare',
'X-Image-Meta-Disk-Format': 'iso',
'X-Image-Meta-Size': '2',
'X-Image-Meta-Is-Public': 'True'}
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
image_ids.append(json.loads(content)['image']['id'])
headers = {'Content-Type': 'application/octet-stream',
'X-Image-Meta-Name': 'XYZ',
'X-Image-Meta-Status': 'saving',
'X-Image-Meta-Container-Format': 'ami',
'X-Image-Meta-Disk-Format': 'ami',
'X-Image-Meta-Size': '5',
'X-Image-Meta-Is-Public': 'True'}
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
image_ids.append(json.loads(content)['image']['id'])
# 2. GET /images with no query params
# Verify three public images sorted by created_at desc
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 3)
self.assertEqual(data['images'][0]['id'], image_ids[2])
self.assertEqual(data['images'][1]['id'], image_ids[1])
self.assertEqual(data['images'][2]['id'], image_ids[0])
# 3. GET /images sorted by name asc
params = 'sort_key=name&sort_dir=asc'
path = "http://%s:%d/v1/images?%s" % ("127.0.0.1", self.api_port,
params)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 3)
self.assertEqual(data['images'][0]['id'], image_ids[1])
self.assertEqual(data['images'][1]['id'], image_ids[0])
self.assertEqual(data['images'][2]['id'], image_ids[2])
# 4. GET /images sorted by size desc
params = 'sort_key=size&sort_dir=desc'
path = "http://%s:%d/v1/images?%s" % ("127.0.0.1", self.api_port,
params)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 3)
self.assertEqual(data['images'][0]['id'], image_ids[0])
self.assertEqual(data['images'][1]['id'], image_ids[2])
self.assertEqual(data['images'][2]['id'], image_ids[1])
# 5. GET /images sorted by size desc with a marker
params = 'sort_key=size&sort_dir=desc&marker=%s' % image_ids[0]
path = "http://%s:%d/v1/images?%s" % ("127.0.0.1", self.api_port,
params)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 2)
self.assertEqual(data['images'][0]['id'], image_ids[2])
self.assertEqual(data['images'][1]['id'], image_ids[1])
# 6. GET /images sorted by name asc with a marker
params = 'sort_key=name&sort_dir=asc&marker=%s' % image_ids[2]
path = "http://%s:%d/v1/images?%s" % ("127.0.0.1", self.api_port,
params)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 0)
# DELETE images
for image_id in image_ids:
path = "http://%s:%d/v1/images/%s" % ("127.0.0.1", self.api_port,
image_id)
http = httplib2.Http()
response, content = http.request(path, 'DELETE')
self.assertEqual(response.status, 200)
self.stop_servers()
@skip_if_disabled
def test_duplicate_image_upload(self):
"""
Upload initial image, then attempt to upload duplicate image
"""
self.cleanup()
self.start_servers(**self.__dict__.copy())
# 0. GET /images
# Verify no public images
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
self.assertEqual(content, '{"images": []}')
# 1. POST /images with public image named Image1
headers = {'Content-Type': 'application/octet-stream',
'X-Image-Meta-Name': 'Image1',
'X-Image-Meta-Status': 'active',
'X-Image-Meta-Container-Format': 'ovf',
'X-Image-Meta-Disk-Format': 'vdi',
'X-Image-Meta-Size': '19',
'X-Image-Meta-Is-Public': 'True'}
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
image = json.loads(content)['image']
# 2. POST /images with public image named Image1, and ID: 1
headers = {'Content-Type': 'application/octet-stream',
'X-Image-Meta-Name': 'Image1 Update',
'X-Image-Meta-Status': 'active',
'X-Image-Meta-Container-Format': 'ovf',
'X-Image-Meta-Disk-Format': 'vdi',
'X-Image-Meta-Size': '19',
'X-Image-Meta-Id': image['id'],
'X-Image-Meta-Is-Public': 'True'}
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 409)
self.stop_servers()
@skip_if_disabled
def test_delete_not_existing(self):
"""
We test the following:
0. GET /images/1
- Verify 404
1. DELETE /images/1
- Verify 404
"""
self.cleanup()
self.start_servers(**self.__dict__.copy())
api_port = self.api_port
registry_port = self.registry_port
# 0. GET /images
# Verify no public images
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
self.assertEqual(content, '{"images": []}')
# 1. DELETE /images/1
# Verify 404 returned
path = "http://%s:%d/v1/images/1" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'DELETE')
self.assertEqual(response.status, 404)
self.stop_servers()
@skip_if_disabled
def test_unsupported_default_store(self):
"""
We test that a mis-configured default_store causes the API server
to fail to start.
"""
self.cleanup()
self.default_store = 'shouldnotexist'
# ensure failure exit code is available to assert on
# -- on slower machines this needs a few seconds or
# the unit test will fail
self.api_server.server_control_options += ' --await-child=3'
# ensure that the API server fails to launch
self.start_server(self.api_server,
expect_launch=False,
expected_exitcode=255,
**self.__dict__.copy())
def _do_test_post_image_content_bad_format(self, format):
"""
We test that missing container/disk format fails with 400 "Bad Request"
:see https://bugs.launchpad.net/glance/+bug/933702
"""
self.cleanup()
self.start_servers(**self.__dict__.copy())
# Verify no public images
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
images = json.loads(content)['images']
self.assertEqual(len(images), 0)
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
# POST /images without given format being specified
headers = minimal_headers('Image1')
headers['X-Image-Meta-' + format] = 'bad_value'
with tempfile.NamedTemporaryFile() as test_data_file:
test_data_file.write("XXX")
test_data_file.flush()
http = httplib2.Http()
response, content = http.request(path, 'POST',
headers=headers,
body=test_data_file.name)
self.assertEqual(response.status, 400)
type = format.replace('_format', '')
expected = "Invalid %s format 'bad_value' for image" % type
self.assertTrue(expected in content,
"Could not find '%s' in '%s'" % (expected, content))
# make sure the image was not created
# Verify no public images
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
images = json.loads(content)['images']
self.assertEqual(len(images), 0)
self.stop_servers()
@skip_if_disabled
def test_post_image_content_bad_container_format(self):
self._do_test_post_image_content_bad_format('container_format')
@skip_if_disabled
def test_post_image_content_bad_disk_format(self):
self._do_test_post_image_content_bad_format('disk_format')
def _do_test_put_image_content_missing_format(self, format):
"""
We test that missing container/disk format only fails with
400 "Bad Request" when the image content is PUT (i.e. not
on the original POST of a queued image).
:see https://bugs.launchpad.net/glance/+bug/937216
"""
self.cleanup()
self.start_servers(**self.__dict__.copy())
# POST queued image
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
headers = {
'X-Image-Meta-Name': 'Image1',
'X-Image-Meta-Is-Public': 'True',
}
http = httplib2.Http()
response, content = http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
data = json.loads(content)
image_id = data['image']['id']
print data
# PUT image content images without given format being specified
path = ("http://%s:%d/v1/images/%s" %
("127.0.0.1", self.api_port, image_id))
headers = minimal_headers('Image1')
del headers['X-Image-Meta-' + format]
with tempfile.NamedTemporaryFile() as test_data_file:
test_data_file.write("XXX")
test_data_file.flush()
http = httplib2.Http()
response, content = http.request(path, 'PUT',
headers=headers,
body=test_data_file.name)
self.assertEqual(response.status, 400)
type = format.replace('_format', '')
expected = "Invalid %s format 'None' for image" % type
self.assertTrue(expected in content,
"Could not find '%s' in '%s'" % (expected, content))
self.stop_servers()
@skip_if_disabled
def test_put_image_content_bad_container_format(self):
self._do_test_put_image_content_missing_format('container_format')
@skip_if_disabled
def test_put_image_content_bad_disk_format(self):
self._do_test_put_image_content_missing_format('disk_format')
@skip_if_disabled
def test_ownership(self):
self.cleanup()
self.api_server.deployment_flavor = 'fakeauth'
self.registry_server.deployment_flavor = 'fakeauth'
self.start_servers(**self.__dict__.copy())
# Add an image with admin privileges and ensure the owner
# can be set to something other than what was used to authenticate
auth_headers = {
'X-Auth-Token': 'user1:tenant1:admin',
}
create_headers = {
'X-Image-Meta-Name': 'MyImage',
'X-Image-Meta-disk_format': 'raw',
'X-Image-Meta-container_format': 'ovf',
'X-Image-Meta-Is-Public': 'True',
'X-Image-Meta-Owner': 'tenant2',
}
create_headers.update(auth_headers)
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'POST', headers=create_headers)
self.assertEqual(response.status, 201)
data = json.loads(content)
image_id = data['image']['id']
path = ("http://%s:%d/v1/images/%s" %
("127.0.0.1", self.api_port, image_id))
http = httplib2.Http()
response, content = http.request(path, 'HEAD', headers=auth_headers)
self.assertEqual(response.status, 200)
self.assertEqual('tenant2', response['x-image-meta-owner'])
# Now add an image without admin privileges and ensure the owner
# cannot be set to something other than what was used to authenticate
auth_headers = {
'X-Auth-Token': 'user1:tenant1:role1',
}
create_headers.update(auth_headers)
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'POST', headers=create_headers)
self.assertEqual(response.status, 201)
data = json.loads(content)
image_id = data['image']['id']
# We have to be admin to see the owner
auth_headers = {
'X-Auth-Token': 'user1:tenant1:admin',
}
create_headers.update(auth_headers)
path = ("http://%s:%d/v1/images/%s" %
("127.0.0.1", self.api_port, image_id))
http = httplib2.Http()
response, content = http.request(path, 'HEAD', headers=auth_headers)
self.assertEqual(response.status, 200)
self.assertEqual('tenant1', response['x-image-meta-owner'])
# Make sure the non-privileged user can't update their owner either
update_headers = {
'X-Image-Meta-Name': 'MyImage2',
'X-Image-Meta-Owner': 'tenant2',
'X-Auth-Token': 'user1:tenant1:role1',
}
path = ("http://%s:%d/v1/images/%s" %
("127.0.0.1", self.api_port, image_id))
http = httplib2.Http()
response, content = http.request(path, 'PUT', headers=update_headers)
self.assertEqual(response.status, 200)
# We have to be admin to see the owner
auth_headers = {
'X-Auth-Token': 'user1:tenant1:admin',
}
path = ("http://%s:%d/v1/images/%s" %
("127.0.0.1", self.api_port, image_id))
http = httplib2.Http()
response, content = http.request(path, 'HEAD', headers=auth_headers)
self.assertEqual(response.status, 200)
self.assertEqual('tenant1', response['x-image-meta-owner'])
# An admin user should be able to update the owner
auth_headers = {
'X-Auth-Token': 'user1:tenant3:admin',
}
update_headers = {
'X-Image-Meta-Name': 'MyImage2',
'X-Image-Meta-Owner': 'tenant2',
}
update_headers.update(auth_headers)
path = ("http://%s:%d/v1/images/%s" %
("127.0.0.1", self.api_port, image_id))
http = httplib2.Http()
response, content = http.request(path, 'PUT', headers=update_headers)
self.assertEqual(response.status, 200)
path = ("http://%s:%d/v1/images/%s" %
("127.0.0.1", self.api_port, image_id))
http = httplib2.Http()
response, content = http.request(path, 'HEAD', headers=auth_headers)
self.assertEqual(response.status, 200)
self.assertEqual('tenant2', response['x-image-meta-owner'])
self.stop_servers()
|
{
"content_hash": "7b4a61bbb2254b26a84ea73a3cf9d312",
"timestamp": "",
"source": "github",
"line_count": 1436,
"max_line_length": 79,
"avg_line_length": 42.213091922005574,
"alnum_prop": 0.5584644824969481,
"repo_name": "tylertian/Openstack",
"id": "f0b607754e5bf0c7ed04e7d12d8ee489aad0bfed",
"size": "61293",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "openstack F/glance/glance/tests/functional/v1/test_api.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "239919"
},
{
"name": "JavaScript",
"bytes": "156942"
},
{
"name": "Python",
"bytes": "16949418"
},
{
"name": "Shell",
"bytes": "96743"
}
],
"symlink_target": ""
}
|
import nose
# asr modules
import pyspell.check as check
# Tests
def execution_test():
check.get_parser()
|
{
"content_hash": "8425a013a138ef8e579e990bd53572ca",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 29,
"avg_line_length": 12.444444444444445,
"alnum_prop": 0.7142857142857143,
"repo_name": "MartinThoma/pyspell",
"id": "9368d354a822f4a4af95cd0fe1066237b0ee6250",
"size": "159",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/check_test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "7121"
},
{
"name": "Python",
"bytes": "28222"
},
{
"name": "Shell",
"bytes": "6712"
}
],
"symlink_target": ""
}
|
from case import Case
class Case10_1_1(Case):
DESCRIPTION = """Send text message with payload of length 65536 auto-fragmented with <b>autoFragmentSize = 1300</b>."""
EXPECTATION = """Receive echo'ed text message (with payload as sent and transmitted frame counts as expected). Clean close with normal code."""
def onOpen(self):
self.payload = "*" * 65536
self.p.autoFragmentSize = 1300
self.expected[Case.OK] = [("message", self.payload, False)]
self.expectedClose = {"closedByMe": True, "closeCode": [self.p.CLOSE_STATUS_CODE_NORMAL], "requireClean": True}
self.p.sendMessage(self.payload)
self.p.killAfter(10)
def onConnectionLost(self, failedByMe):
Case.onConnectionLost(self, failedByMe)
frames_expected = {}
frames_expected[0] = len(self.payload) / self.p.autoFragmentSize
frames_expected[1] = 1 if len(self.payload) % self.p.autoFragmentSize > 0 else 0
frames_got = {}
frames_got[0] = self.p.txFrameStats[0]
frames_got[1] = self.p.txFrameStats[1]
if frames_expected == frames_got:
pass
else:
self.behavior = Case.FAILED
self.result = "Frames transmitted %s does not match what we expected %s." % (str(frames_got), str(frames_expected))
|
{
"content_hash": "8eb63cac7f23c13518087cc05fe2ee7f",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 146,
"avg_line_length": 44,
"alnum_prop": 0.6661442006269592,
"repo_name": "mikelikespie/AutobahnTestSuite",
"id": "53e9eb613050af6c0eee95fb5dafaf13c4825535",
"size": "2042",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "autobahntestsuite/autobahntestsuite/case/case10_1_1.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "2806"
},
{
"name": "Python",
"bytes": "345307"
}
],
"symlink_target": ""
}
|
import toml
import yaml
class Configuration(dict):
def __init__(self, **kwargs):
super().__init__(kwargs)
for required_arg in ["custom_order", "explicit_checks", "exclude_checks"]:
if required_arg not in self:
self[required_arg] = None
@classmethod
def from_config_file(cls, filename):
try:
config = toml.load(filename)
except toml.TomlDecodeError:
# Try yaml
config = yaml.safe_load(open(filename))
if not isinstance(config, dict):
raise Exception(f"Can't understand config file {filename}.")
return cls(**config)
def maybe_override(self, other):
for key, value in other.items():
if value is not None:
self[key] = value
|
{
"content_hash": "5d93da10935cbf8129edc9dffa0f7be8",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 82,
"avg_line_length": 29.703703703703702,
"alnum_prop": 0.571072319201995,
"repo_name": "moyogo/fontbakery",
"id": "671c3c0cf0632ec0461f811e9a8faf20214fcc4b",
"size": "802",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "Lib/fontbakery/configuration.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "7520"
},
{
"name": "PureBasic",
"bytes": "22896"
},
{
"name": "Python",
"bytes": "1020988"
},
{
"name": "Shell",
"bytes": "2457"
}
],
"symlink_target": ""
}
|
import os
import warnings
from typing import Any, Callable, Iterable, Optional, overload
import prestodb
from prestodb.exceptions import DatabaseError
from prestodb.transaction import IsolationLevel
from airflow import AirflowException
from airflow.configuration import conf
from airflow.hooks.dbapi import DbApiHook
from airflow.models import Connection
class PrestoException(Exception):
"""Presto exception"""
def _boolify(value):
if isinstance(value, bool):
return value
if isinstance(value, str):
if value.lower() == 'false':
return False
elif value.lower() == 'true':
return True
return value
class PrestoHook(DbApiHook):
"""
Interact with Presto through prestodb.
>>> ph = PrestoHook()
>>> sql = "SELECT count(1) AS num FROM airflow.static_babynames"
>>> ph.get_records(sql)
[[340698]]
"""
conn_name_attr = 'presto_conn_id'
default_conn_name = 'presto_default'
conn_type = 'presto'
hook_name = 'Presto'
def get_conn(self) -> Connection:
"""Returns a connection object"""
db = self.get_connection(self.presto_conn_id) # type: ignore[attr-defined]
extra = db.extra_dejson
auth = None
if db.password and extra.get('auth') == 'kerberos':
raise AirflowException("Kerberos authorization doesn't support password.")
elif db.password:
auth = prestodb.auth.BasicAuthentication(db.login, db.password)
elif extra.get('auth') == 'kerberos':
auth = prestodb.auth.KerberosAuthentication(
config=extra.get('kerberos__config', os.environ.get('KRB5_CONFIG')),
service_name=extra.get('kerberos__service_name'),
mutual_authentication=_boolify(extra.get('kerberos__mutual_authentication', False)),
force_preemptive=_boolify(extra.get('kerberos__force_preemptive', False)),
hostname_override=extra.get('kerberos__hostname_override'),
sanitize_mutual_error_response=_boolify(
extra.get('kerberos__sanitize_mutual_error_response', True)
),
principal=extra.get('kerberos__principal', conf.get('kerberos', 'principal')),
delegate=_boolify(extra.get('kerberos__delegate', False)),
ca_bundle=extra.get('kerberos__ca_bundle'),
)
presto_conn = prestodb.dbapi.connect(
host=db.host,
port=db.port,
user=db.login,
source=db.extra_dejson.get('source', 'airflow'),
http_scheme=db.extra_dejson.get('protocol', 'http'),
catalog=db.extra_dejson.get('catalog', 'hive'),
schema=db.schema,
auth=auth,
isolation_level=self.get_isolation_level(), # type: ignore[func-returns-value]
)
if extra.get('verify') is not None:
# Unfortunately verify parameter is available via public API.
# The PR is merged in the presto library, but has not been released.
# See: https://github.com/prestosql/presto-python-client/pull/31
presto_conn._http_session.verify = _boolify(extra['verify'])
return presto_conn
def get_isolation_level(self) -> Any:
"""Returns an isolation level"""
db = self.get_connection(self.presto_conn_id) # type: ignore[attr-defined]
isolation_level = db.extra_dejson.get('isolation_level', 'AUTOCOMMIT').upper()
return getattr(IsolationLevel, isolation_level, IsolationLevel.AUTOCOMMIT)
@staticmethod
def _strip_sql(sql: str) -> str:
return sql.strip().rstrip(';')
@overload
def get_records(self, sql: str = "", parameters: Optional[dict] = None):
"""Get a set of records from Presto
:param sql: SQL statement to be executed.
:param parameters: The parameters to render the SQL query with.
"""
@overload
def get_records(self, sql: str = "", parameters: Optional[dict] = None, hql: str = ""):
""":sphinx-autoapi-skip:"""
def get_records(self, sql: str = "", parameters: Optional[dict] = None, hql: str = ""):
""":sphinx-autoapi-skip:"""
if hql:
warnings.warn(
"The hql parameter has been deprecated. You should pass the sql parameter.",
DeprecationWarning,
stacklevel=2,
)
sql = hql
try:
return super().get_records(self._strip_sql(sql), parameters)
except DatabaseError as e:
raise PrestoException(e)
@overload
def get_first(self, sql: str = "", parameters: Optional[dict] = None) -> Any:
"""Returns only the first row, regardless of how many rows the query returns.
:param sql: SQL statement to be executed.
:param parameters: The parameters to render the SQL query with.
"""
@overload
def get_first(self, sql: str = "", parameters: Optional[dict] = None, hql: str = "") -> Any:
""":sphinx-autoapi-skip:"""
def get_first(self, sql: str = "", parameters: Optional[dict] = None, hql: str = "") -> Any:
""":sphinx-autoapi-skip:"""
if hql:
warnings.warn(
"The hql parameter has been deprecated. You should pass the sql parameter.",
DeprecationWarning,
stacklevel=2,
)
sql = hql
try:
return super().get_first(self._strip_sql(sql), parameters)
except DatabaseError as e:
raise PrestoException(e)
@overload
def get_pandas_df(self, sql: str = "", parameters=None, **kwargs):
"""Get a pandas dataframe from a sql query.
:param sql: SQL statement to be executed.
:param parameters: The parameters to render the SQL query with.
"""
@overload
def get_pandas_df(self, sql: str = "", parameters=None, hql: str = "", **kwargs):
""":sphinx-autoapi-skip:"""
def get_pandas_df(self, sql: str = "", parameters=None, hql: str = "", **kwargs):
""":sphinx-autoapi-skip:"""
if hql:
warnings.warn(
"The hql parameter has been deprecated. You should pass the sql parameter.",
DeprecationWarning,
stacklevel=2,
)
sql = hql
import pandas
cursor = self.get_cursor()
try:
cursor.execute(self._strip_sql(sql), parameters)
data = cursor.fetchall()
except DatabaseError as e:
raise PrestoException(e)
column_descriptions = cursor.description
if data:
df = pandas.DataFrame(data, **kwargs)
df.columns = [c[0] for c in column_descriptions]
else:
df = pandas.DataFrame(**kwargs)
return df
@overload
def run(
self,
sql: str = "",
autocommit: bool = False,
parameters: Optional[dict] = None,
handler: Optional[Callable] = None,
) -> None:
"""Execute the statement against Presto. Can be used to create views."""
@overload
def run(
self,
sql: str = "",
autocommit: bool = False,
parameters: Optional[dict] = None,
handler: Optional[Callable] = None,
hql: str = "",
) -> None:
""":sphinx-autoapi-skip:"""
def run(
self,
sql: str = "",
autocommit: bool = False,
parameters: Optional[dict] = None,
handler: Optional[Callable] = None,
hql: str = "",
) -> None:
""":sphinx-autoapi-skip:"""
if hql:
warnings.warn(
"The hql parameter has been deprecated. You should pass the sql parameter.",
DeprecationWarning,
stacklevel=2,
)
sql = hql
return super().run(sql=self._strip_sql(sql), parameters=parameters, handler=handler)
def insert_rows(
self,
table: str,
rows: Iterable[tuple],
target_fields: Optional[Iterable[str]] = None,
commit_every: int = 0,
replace: bool = False,
**kwargs,
) -> None:
"""
A generic way to insert a set of tuples into a table.
:param table: Name of the target table
:param rows: The rows to insert into the table
:param target_fields: The names of the columns to fill in the table
:param commit_every: The maximum number of rows to insert in one
transaction. Set to 0 to insert all rows in one transaction.
:param replace: Whether to replace instead of insert
"""
if self.get_isolation_level() == IsolationLevel.AUTOCOMMIT:
self.log.info(
'Transactions are not enable in presto connection. '
'Please use the isolation_level property to enable it. '
'Falling back to insert all rows in one transaction.'
)
commit_every = 0
super().insert_rows(table, rows, target_fields, commit_every)
|
{
"content_hash": "1590cff8dfa3f6f8ea530f6253105d1a",
"timestamp": "",
"source": "github",
"line_count": 257,
"max_line_length": 100,
"avg_line_length": 35.57587548638132,
"alnum_prop": 0.5809909220168434,
"repo_name": "bolkedebruin/airflow",
"id": "419b571c9f92b1ac294a8606fd13688afb17d599",
"size": "9930",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "airflow/providers/presto/hooks/presto.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "25286"
},
{
"name": "Dockerfile",
"bytes": "40459"
},
{
"name": "HCL",
"bytes": "3786"
},
{
"name": "HTML",
"bytes": "157840"
},
{
"name": "JavaScript",
"bytes": "167972"
},
{
"name": "Jinja",
"bytes": "33382"
},
{
"name": "Jupyter Notebook",
"bytes": "2933"
},
{
"name": "Mako",
"bytes": "1339"
},
{
"name": "Python",
"bytes": "19287942"
},
{
"name": "Shell",
"bytes": "645244"
},
{
"name": "TypeScript",
"bytes": "173854"
}
],
"symlink_target": ""
}
|
from PIL import Image
import StringIO
import threading
import urllib2
import time
import requests
# Lock to protect the cache for write access.
_global_lock = threading.Lock()
# The cache.
_cache = {
# url : [contents, lock, last_timestamp]
}
MAX_TIME = 0.1 # ms
def webcam_cache(webcam_func):
def _webcam_caller(req, url, *args):
return _wrapper_webcam(req, url, webcam_func, *args)
return _webcam_caller
@webcam_cache
def _webcam(req, url):
req.content_type = "image/jpg"
req.headers_out["Cache-Control"] = "no-cache"
req.headers_out["Pragma-directive"] = "no-cache"
req.headers_out["Cache-directive"] = "no-cache"
req.headers_out["Pragma"] = "no-cache"
req.headers_out["Expires"] = "0"
try:
content = requests.get(url).content
except:
content = "ERROR"
req.content_type = "text"
return content
@webcam_cache
def _webcam_rotate(req, url, degrees = 90):
req.content_type = "image/jpg"
req.headers_out["Cache-Control"] = "no-cache"
req.headers_out["Pragma-directive"] = "no-cache"
req.headers_out["Cache-directive"] = "no-cache"
req.headers_out["Pragma"] = "no-cache"
req.headers_out["Expires"] = "0"
# return str(degrees)
try:
sio_in = StringIO.StringIO(urllib2.urlopen(url).read())
img = Image.open(sio_in)
img2 = img.rotate(degrees)
sio_out = StringIO.StringIO()
img2.save(sio_out, 'jpeg')
content = sio_out.getvalue()
except:
req.content_type = "text"
content = "ERROR"
return content
@webcam_cache
def _webcam_external(req, url):
req.content_type = "image/jpg"
req.headers_out["Cache-Control"] = "no-cache"
req.headers_out["Pragma-directive"] = "no-cache"
req.headers_out["Cache-directive"] = "no-cache"
req.headers_out["Pragma"] = "no-cache"
req.headers_out["Expires"] = "0"
# proxy = urllib2.ProxyHandler({'http': ''})
proxy = urllib2.ProxyHandler({})
opener = urllib2.build_opener(proxy)
try:
content = opener.open(url).read()
except:
content = "ERROR"
req.content_type = "text"
return content
def _wrapper_webcam(req, url, webcam_func, *args):
req.content_type = "image/jpg"
req.headers_out["Cache-Control"] = "no-cache"
req.headers_out["Pragma-directive"] = "no-cache"
req.headers_out["Cache-directive"] = "no-cache"
req.headers_out["Pragma"] = "no-cache"
req.headers_out["Expires"] = "0"
# Check whether our URL is in the cache
if url not in _cache:
# We are going to add the entry, we lock globally.
with _global_lock:
if url not in _cache:
contents = webcam_func(req, url, *args) # Problem: _fake_webcam is the slow func.
_cache[url] = [contents, threading.Lock(), time.time()]
return contents
# Our cache entry does exist. We access it directly. We don't really mind
# if we access the previous value because of threading issues.
cache_entry = _cache[url]
contents, lock, last_timestamp = cache_entry
elapsed = time.time() - last_timestamp
if elapsed > MAX_TIME:
# Our entry has expired. We need to update it. We'll only lock our entry.
with lock:
if time.time() - last_timestamp > MAX_TIME:
contents = webcam_func(req, url, *args)
cache_entry[0] = contents
last_timestamp = time.time()
cache_entry[2] = last_timestamp
return contents
def fpga1(req, *args, **kwargs):
return _webcam_rotate(req, "http://192.168.3.72/image.jpg", 180)
def pld1(req, *args, **kwargs):
return _webcam(req, "http://192.168.0.62/cgi-bin/video.jpg")
def pld2(req, *args, **kwargs):
return _webcam(req, "http://192.168.0.64/cgi-bin/video.jpg?size=2")
def robot2(req, *args, **kwargs):
return _webcam(req, "http://192.168.0.81/img/snapshot.cgi?size=2")
|
{
"content_hash": "18c6d9ac15a6f1915f0b9207c6a79ce8",
"timestamp": "",
"source": "github",
"line_count": 134,
"max_line_length": 98,
"avg_line_length": 29.64179104477612,
"alnum_prop": 0.6160624370594159,
"repo_name": "porduna/weblabdeusto",
"id": "d103d972d93f185234d0d2766707c00a23f9634d",
"size": "3972",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tools/cams/proxied.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "ASP",
"bytes": "4785"
},
{
"name": "ActionScript",
"bytes": "8508"
},
{
"name": "Batchfile",
"bytes": "7753"
},
{
"name": "C",
"bytes": "19456"
},
{
"name": "C#",
"bytes": "315160"
},
{
"name": "C++",
"bytes": "9547"
},
{
"name": "CSS",
"bytes": "203478"
},
{
"name": "CoffeeScript",
"bytes": "39146"
},
{
"name": "Go",
"bytes": "7076"
},
{
"name": "HTML",
"bytes": "610251"
},
{
"name": "Java",
"bytes": "856300"
},
{
"name": "JavaScript",
"bytes": "1538963"
},
{
"name": "Makefile",
"bytes": "24995"
},
{
"name": "Mako",
"bytes": "1236"
},
{
"name": "PHP",
"bytes": "159985"
},
{
"name": "Python",
"bytes": "3780070"
},
{
"name": "Shell",
"bytes": "7880"
},
{
"name": "Smarty",
"bytes": "40320"
},
{
"name": "TSQL",
"bytes": "717"
},
{
"name": "VHDL",
"bytes": "5874"
}
],
"symlink_target": ""
}
|
# This code is so you can run the samples without installing the package
import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
#
testinfo = "s, t 1, s, t 2.1, s, t 3.2, s, t 4.1, s, q"
tags = "Layer, Waves3D, Flip"
from cocos.director import director
from cocos.actions import Flip, Waves3D
from cocos.sprite import Sprite
from cocos.layer import Layer, ColorLayer
from cocos.scene import Scene
class SpriteLayer ( Layer ):
def __init__( self ):
super( SpriteLayer, self ).__init__()
sprite1 = Sprite( 'grossini.png' )
sprite2 = Sprite( 'grossinis_sister1.png')
sprite3 = Sprite( 'grossinis_sister2.png')
sprite1.position = (400,240)
sprite2.position = (300,240)
sprite3.position = (500,240)
self.add( sprite1 )
self.add( sprite2 )
self.add( sprite3 )
description = """
A scaled-down ColorLayer and three sprites, the scene at fist waves and
then flips trough the use of Waves3D and Flip actions over the holder Layer
"""
def main():
print description
director.init( resizable=True )
main_scene = Scene()
red = ColorLayer(255,0,0,128)
sprite = SpriteLayer()
red.scale = 0.75
main_scene.add( red, z=0 )
main_scene.add( sprite, z=1 )
sprite.do( Waves3D(duration=2) + Flip(duration=2) )
director.run (main_scene)
if __name__ == '__main__':
main()
|
{
"content_hash": "03dad26174d9bc6ad9ac2b46f0c5d962",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 76,
"avg_line_length": 25.94736842105263,
"alnum_prop": 0.6159567275185936,
"repo_name": "shadowmint/nwidget",
"id": "3f1f046820b3efa31681670b96adc0f0bb38817e",
"size": "1533",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/cocos2d-0.5.5/test/test_grid_effect_in_layer.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "11298"
},
{
"name": "JavaScript",
"bytes": "17394"
},
{
"name": "PHP",
"bytes": "2190"
},
{
"name": "Python",
"bytes": "9815941"
},
{
"name": "Shell",
"bytes": "10521"
}
],
"symlink_target": ""
}
|
import sys
import weaver.client as client
# create graph from file
coord_id = 0
c = client.Client(client._CLIENT_ID, coord_id)
nodes = dict()
tx_id = c.begin_tx()
for i in range(6):
nodes[i] = c.create_node(tx_id)
edge_id = c.create_edge(tx_id, nodes[0], nodes[1])
c.set_edge_property(tx_id, nodes[0], edge_id, 'weight', '6')
edge_id = c.create_edge(tx_id, nodes[0], nodes[2])
c.set_edge_property(tx_id, nodes[0], edge_id, 'weight', '5')
edge_id = c.create_edge(tx_id, nodes[1], nodes[3])
c.set_edge_property(tx_id, nodes[1], edge_id, 'weight', '6')
edge_id = c.create_edge(tx_id, nodes[1], nodes[4])
c.set_edge_property(tx_id, nodes[1], edge_id, 'weight', '7')
edge_id = c.create_edge(tx_id, nodes[2], nodes[4])
c.set_edge_property(tx_id, nodes[2], edge_id, 'weight', '6')
edge_id = c.create_edge(tx_id, nodes[3], nodes[2])
c.set_edge_property(tx_id, nodes[3], edge_id, 'weight', '6')
edge_id = c.create_edge(tx_id, nodes[3], nodes[5])
c.set_edge_property(tx_id, nodes[3], edge_id, 'weight', '8')
edge_id = c.create_edge(tx_id, nodes[4], nodes[5])
c.set_edge_property(tx_id, nodes[4], edge_id, 'weight', '6')
c.end_tx(tx_id)
dp = client.DijkstraParams(src_handle=nodes[0], dst_handle=nodes[5], edge_weight_name="weight", is_widest_path=False)
prog_args = [(nodes[0], dp)]
response = c.run_dijkstra_program(prog_args)
print "shortest path response was cost " + str(response.cost)
assert(response.cost == 17)
prog_args[0][1].is_widest_path = True
response = c.run_dijkstra_program(prog_args)
print "widest path response was cost " + str(response.cost)
assert(response.cost == 6)
|
{
"content_hash": "996a9b6cb81982eacef25d3da0a31c68",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 117,
"avg_line_length": 31.333333333333332,
"alnum_prop": 0.6683354192740926,
"repo_name": "sid17/weaver",
"id": "fb27dae3061f8187618f50749dc205749942e89c",
"size": "2006",
"binary": false,
"copies": "3",
"ref": "refs/heads/neural",
"path": "tests/python/correctness/dijkstra_basic_test.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "24691"
},
{
"name": "C++",
"bytes": "1002379"
},
{
"name": "Python",
"bytes": "155834"
},
{
"name": "Shell",
"bytes": "22112"
}
],
"symlink_target": ""
}
|
import sys, traceback
import os
import shutil
import requests
import time
import grequests
import itertools
from itertools import product
import multiprocessing as mp
from multiprocessing import Pool
import logging
import subprocess
from subprocess import check_call
import codecs
from xml.etree import ElementTree as ET
argvs = sys.argv
argc = len(argvs)
sailfish_dname = '/data/sailfish_index'
sailfish_0_9_dname = '/data/sailfish_0.9_index'
bowtie2_dname = '/data/bowtie2_index'
tophat_dname = '/data/tophat_index'
hisat2_dname = '/data/hisat2_index'
loc_dname = '/galaxy-central/tool-data'
if (argc != 4):
print 'Usage: # python %s index_list_mrna index_list_tophat2 index_list_hisat2' % argvs[0]
quit()
def read_input(args):
f = open(argvs[args])
ret_list = []
for i in f.readlines():
i = i.strip()
if len(i) < 1:
continue
ret_list = ret_list + [i]
f.close
return ret_list
def create_loc_file(index_list, loc_name, dirname):
f = codecs.open(loc_name, "w", "utf-8")
for item in index_list:
index_id = item.split(',')[0]
index_name = item.split(',')[1]
if "sailfish" in dirname:
index_dir = dirname + '/' + index_id
else:
index_dir = dirname + '/' + index_id + '/' + index_id
str_loc = '%s : %s : %s : %s' % (index_id,index_id,index_name,index_dir)
print str_loc
f.write('%s\t%s\t%s\t%s\n' % (index_id,index_id,index_name,index_dir))
f.close()
def add_tool_data_table_conf(tree, name, locname):
root_elm = tree.getroot()
add_node = ET.Element('table', name=name, comment_char='#')
snode_col = ET.Element('columns')
snode_col.text = 'value, dbkey, name, path'
snode_file = ET.Element('file', path='/galaxy-central/tool-data/' + locname)
add_node.append(snode_col)
add_node.append(snode_file)
root_elm.append(add_node)
print root_elm.getchildren()[len(root_elm)-1].attrib
print root_elm.getchildren()[len(root_elm)-1].getchildren()
tree.write('/galaxy-central/config/tool_data_table_conf.xml')
def main():
try:
input_index_list = []
input_index_list = read_input(1)
print 'length of index_list: ' + str(len(input_index_list))
print ':::::::::::::::::::::::::::::::::::::::::::'
print '>>>>>>>>>>>>>>>>> create sailfish_index.loc...'
os.chdir(loc_dname)
create_loc_file(input_index_list, "sailfish_index.loc", sailfish_dname)
print '>>>>>>>>>>>>>>>>> add sailfish index-node to tool_data_table_conf.xml...'
os.chdir('/galaxy-central/config')
tree = ET.parse('tool_data_table_conf.xml')
add_tool_data_table_conf(tree, 'sailfish_custom_indexes', 'sailfish_index.loc')
print ':::::::::::::::::::::::::::::::::::::::::::'
print '>>>>>>>>>>>>>>>>> create sailfish_0.9_index.loc...'
os.chdir(loc_dname)
create_loc_file(input_index_list, "sailfish_0.9_index.loc", sailfish_0_9_dname)
print '>>>>>>>>>>>>>>>>> add sailfish_0.9 index-node to tool_data_table_conf.xml...'
os.chdir('/galaxy-central/config')
tree = ET.parse('tool_data_table_conf.xml')
add_tool_data_table_conf(tree, 'sailfish_0.9_indexes', 'sailfish_0.9_index.loc')
print ':::::::::::::::::::::::::::::::::::::::::::'
print '>>>>>>>>>>>>>>>>> create bowtie2_indices.loc...'
os.chdir(loc_dname)
create_loc_file(input_index_list, "bowtie2_indices.loc", bowtie2_dname)
print '>>>>>>>>>>>>>>>>> add bowtie2 index-node to tool_data_table_conf.xml...'
os.chdir('/galaxy-central/config')
tree = ET.parse('tool_data_table_conf.xml')
bowtie2_node = 0
for e in tree.getiterator():
if e.get('name') == 'bowtie2_indexes':
bowtie2_node = 1
if bowtie2_node == 0:
add_tool_data_table_conf(tree, 'bowtie2_indexes', 'bowtie2_indices.loc')
else:
print 'bowtie2 index-node already created.'
input_index_list = read_input(2)
print ':::::::::::::::::::::::::::::::::::::::::::'
print '>>>>>>>>>>>>>>>>> create tophat_indices.loc...'
os.chdir(loc_dname)
create_loc_file(input_index_list, "tophat_indices.loc", tophat_dname)
print '>>>>>>>>>>>>>>>>> add tophat index-node to tool_data_table_conf.xml...'
os.chdir('/galaxy-central/config')
tree = ET.parse('tool_data_table_conf.xml')
add_tool_data_table_conf(tree, 'tophat2_indexes', 'tophat_indices.loc')
input_index_list = read_input(3)
print ':::::::::::::::::::::::::::::::::::::::::::'
print '>>>>>>>>>>>>>>>>> create hisat2_index.loc...'
os.chdir(loc_dname)
create_loc_file(input_index_list, "hisat2_index.loc", hisat2_dname)
print '>>>>>>>>>>>>>>>>> add hisat2 index-node to tool_data_table_conf.xml...'
os.chdir('/galaxy-central/config')
tree = ET.parse('tool_data_table_conf.xml')
add_tool_data_table_conf(tree, 'hisat2_indexes', 'hisat2_index.loc')
print ':::::::::::::::::::::::::::::::::::::::::::'
print '>>>>>>>>>>>>>>>>> script ended :)'
except:
info = sys.exc_info()
tbinfo = traceback.format_tb( info[2] )
print 'Error Info...'.ljust( 80, '=' )
for tbi in tbinfo:
print tbi
print ' %s' % str( info[1] )
print '\n'.rjust( 85, '=' )
sys.exit(1)
if __name__ == '__main__':
main()
|
{
"content_hash": "a11a13e4a4caccf669a3ef3cc99e05d4",
"timestamp": "",
"source": "github",
"line_count": 156,
"max_line_length": 94,
"avg_line_length": 35.955128205128204,
"alnum_prop": 0.5498306293456944,
"repo_name": "myoshimura080822/galaxy_in_docker_custom_bit_wf",
"id": "4dc4fb541ecb5aaa975c4718866da9e08ef469bc",
"size": "5633",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup_scripts/setting_tools_index.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Perl",
"bytes": "14589"
},
{
"name": "Python",
"bytes": "113525"
},
{
"name": "R",
"bytes": "1097"
},
{
"name": "Shell",
"bytes": "1135"
}
],
"symlink_target": ""
}
|
__all__ = ['instagram_download']
from ..common import *
def instagram_download(url, output_dir = '.', merge = True, info_only = False):
html = get_html(url)
id = r1(r'instagram.com/p/([^/]+)/', html)
description = r1(r'<meta property="og:description" content="([^"]*)"', html)
title = description + " [" + id + "]"
url = r1(r'<meta property="og:video" content="([^"]*)"', html)
type, ext, size = url_info(url)
print_info(site_info, title, type, size)
if not info_only:
download_urls([url], title, ext, size, output_dir, merge = merge)
site_info = "Instagram.com"
download = instagram_download
download_playlist = playlist_not_supported('instagram')
|
{
"content_hash": "225c96525f7fd6adb4cbadbeb1596c03",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 80,
"avg_line_length": 35.1,
"alnum_prop": 0.6125356125356125,
"repo_name": "kzganesan/you-get",
"id": "6071dfd0c27376584cea894c94fa6b78c14dac58",
"size": "725",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "src/you_get/extractor/instagram.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "610"
},
{
"name": "Python",
"bytes": "188217"
}
],
"symlink_target": ""
}
|
import ctypes, random, unittest, sys
from django.contrib.gis.geos import *
from django.contrib.gis.geos.base import gdal, numpy, GEOSBase
from django.contrib.gis.geos.libgeos import GEOS_PREPARE
from django.contrib.gis.geometry.test_data import TestDataMixin
class GEOSTest(unittest.TestCase, TestDataMixin):
@property
def null_srid(self):
"""
Returns the proper null SRID depending on the GEOS version.
See the comments in `test15_srid` for more details.
"""
info = geos_version_info()
if info['version'] == '3.0.0' and info['release_candidate']:
return -1
else:
return None
def test00_base(self):
"Tests out the GEOSBase class."
# Testing out GEOSBase class, which provides a `ptr` property
# that abstracts out access to underlying C pointers.
class FakeGeom1(GEOSBase):
pass
# This one only accepts pointers to floats
c_float_p = ctypes.POINTER(ctypes.c_float)
class FakeGeom2(GEOSBase):
ptr_type = c_float_p
# Default ptr_type is `c_void_p`.
fg1 = FakeGeom1()
# Default ptr_type is C float pointer
fg2 = FakeGeom2()
# These assignments are OK -- None is allowed because
# it's equivalent to the NULL pointer.
fg1.ptr = ctypes.c_void_p()
fg1.ptr = None
fg2.ptr = c_float_p(ctypes.c_float(5.23))
fg2.ptr = None
# Because pointers have been set to NULL, an exception should be
# raised when we try to access it. Raising an exception is
# preferrable to a segmentation fault that commonly occurs when
# a C method is given a NULL memory reference.
for fg in (fg1, fg2):
# Equivalent to `fg.ptr`
self.assertRaises(GEOSException, fg._get_ptr)
# Anything that is either not None or the acceptable pointer type will
# result in a TypeError when trying to assign it to the `ptr` property.
# Thus, memmory addresses (integers) and pointers of the incorrect type
# (in `bad_ptrs`) will not be allowed.
bad_ptrs = (5, ctypes.c_char_p('foobar'))
for bad_ptr in bad_ptrs:
# Equivalent to `fg.ptr = bad_ptr`
self.assertRaises(TypeError, fg1._set_ptr, bad_ptr)
self.assertRaises(TypeError, fg2._set_ptr, bad_ptr)
def test01a_wkt(self):
"Testing WKT output."
for g in self.geometries.wkt_out:
geom = fromstr(g.wkt)
self.assertEqual(g.ewkt, geom.wkt)
def test01b_hex(self):
"Testing HEX output."
for g in self.geometries.hex_wkt:
geom = fromstr(g.wkt)
self.assertEqual(g.hex, geom.hex)
def test01b_hexewkb(self):
"Testing (HEX)EWKB output."
from binascii import a2b_hex
# For testing HEX(EWKB).
ogc_hex = '01010000000000000000000000000000000000F03F'
# `SELECT ST_AsHEXEWKB(ST_GeomFromText('POINT(0 1)', 4326));`
hexewkb_2d = '0101000020E61000000000000000000000000000000000F03F'
# `SELECT ST_AsHEXEWKB(ST_GeomFromEWKT('SRID=4326;POINT(0 1 2)'));`
hexewkb_3d = '01010000A0E61000000000000000000000000000000000F03F0000000000000040'
pnt_2d = Point(0, 1, srid=4326)
pnt_3d = Point(0, 1, 2, srid=4326)
# OGC-compliant HEX will not have SRID nor Z value.
self.assertEqual(ogc_hex, pnt_2d.hex)
self.assertEqual(ogc_hex, pnt_3d.hex)
# HEXEWKB should be appropriate for its dimension -- have to use an
# a WKBWriter w/dimension set accordingly, else GEOS will insert
# garbage into 3D coordinate if there is none. Also, GEOS has a
# a bug in versions prior to 3.1 that puts the X coordinate in
# place of Z; an exception should be raised on those versions.
self.assertEqual(hexewkb_2d, pnt_2d.hexewkb)
if GEOS_PREPARE:
self.assertEqual(hexewkb_3d, pnt_3d.hexewkb)
self.assertEqual(True, GEOSGeometry(hexewkb_3d).hasz)
else:
try:
hexewkb = pnt_3d.hexewkb
except GEOSException:
pass
else:
self.fail('Should have raised GEOSException.')
# Same for EWKB.
self.assertEqual(buffer(a2b_hex(hexewkb_2d)), pnt_2d.ewkb)
if GEOS_PREPARE:
self.assertEqual(buffer(a2b_hex(hexewkb_3d)), pnt_3d.ewkb)
else:
try:
ewkb = pnt_3d.ewkb
except GEOSException:
pass
else:
self.fail('Should have raised GEOSException')
# Redundant sanity check.
self.assertEqual(4326, GEOSGeometry(hexewkb_2d).srid)
def test01c_kml(self):
"Testing KML output."
for tg in self.geometries.wkt_out:
geom = fromstr(tg.wkt)
kml = getattr(tg, 'kml', False)
if kml: self.assertEqual(kml, geom.kml)
def test01d_errors(self):
"Testing the Error handlers."
# string-based
print "\nBEGIN - expecting GEOS_ERROR; safe to ignore.\n"
for err in self.geometries.errors:
try:
g = fromstr(err.wkt)
except (GEOSException, ValueError):
pass
# Bad WKB
self.assertRaises(GEOSException, GEOSGeometry, buffer('0'))
print "\nEND - expecting GEOS_ERROR; safe to ignore.\n"
class NotAGeometry(object):
pass
# Some other object
self.assertRaises(TypeError, GEOSGeometry, NotAGeometry())
# None
self.assertRaises(TypeError, GEOSGeometry, None)
def test01e_wkb(self):
"Testing WKB output."
from binascii import b2a_hex
for g in self.geometries.hex_wkt:
geom = fromstr(g.wkt)
wkb = geom.wkb
self.assertEqual(b2a_hex(wkb).upper(), g.hex)
def test01f_create_hex(self):
"Testing creation from HEX."
for g in self.geometries.hex_wkt:
geom_h = GEOSGeometry(g.hex)
# we need to do this so decimal places get normalised
geom_t = fromstr(g.wkt)
self.assertEqual(geom_t.wkt, geom_h.wkt)
def test01g_create_wkb(self):
"Testing creation from WKB."
from binascii import a2b_hex
for g in self.geometries.hex_wkt:
wkb = buffer(a2b_hex(g.hex))
geom_h = GEOSGeometry(wkb)
# we need to do this so decimal places get normalised
geom_t = fromstr(g.wkt)
self.assertEqual(geom_t.wkt, geom_h.wkt)
def test01h_ewkt(self):
"Testing EWKT."
srid = 32140
for p in self.geometries.polygons:
ewkt = 'SRID=%d;%s' % (srid, p.wkt)
poly = fromstr(ewkt)
self.assertEqual(srid, poly.srid)
self.assertEqual(srid, poly.shell.srid)
self.assertEqual(srid, fromstr(poly.ewkt).srid) # Checking export
def test01i_json(self):
"Testing GeoJSON input/output (via GDAL)."
if not gdal or not gdal.GEOJSON: return
for g in self.geometries.json_geoms:
geom = GEOSGeometry(g.wkt)
if not hasattr(g, 'not_equal'):
self.assertEqual(g.json, geom.json)
self.assertEqual(g.json, geom.geojson)
self.assertEqual(GEOSGeometry(g.wkt), GEOSGeometry(geom.json))
def test01k_fromfile(self):
"Testing the fromfile() factory."
from StringIO import StringIO
ref_pnt = GEOSGeometry('POINT(5 23)')
wkt_f = StringIO()
wkt_f.write(ref_pnt.wkt)
wkb_f = StringIO()
wkb_f.write(str(ref_pnt.wkb))
# Other tests use `fromfile()` on string filenames so those
# aren't tested here.
for fh in (wkt_f, wkb_f):
fh.seek(0)
pnt = fromfile(fh)
self.assertEqual(ref_pnt, pnt)
def test01k_eq(self):
"Testing equivalence."
p = fromstr('POINT(5 23)')
self.assertEqual(p, p.wkt)
self.assertNotEqual(p, 'foo')
ls = fromstr('LINESTRING(0 0, 1 1, 5 5)')
self.assertEqual(ls, ls.wkt)
self.assertNotEqual(p, 'bar')
# Error shouldn't be raise on equivalence testing with
# an invalid type.
for g in (p, ls):
self.assertNotEqual(g, None)
self.assertNotEqual(g, {'foo' : 'bar'})
self.assertNotEqual(g, False)
def test02a_points(self):
"Testing Point objects."
prev = fromstr('POINT(0 0)')
for p in self.geometries.points:
# Creating the point from the WKT
pnt = fromstr(p.wkt)
self.assertEqual(pnt.geom_type, 'Point')
self.assertEqual(pnt.geom_typeid, 0)
self.assertEqual(p.x, pnt.x)
self.assertEqual(p.y, pnt.y)
self.assertEqual(True, pnt == fromstr(p.wkt))
self.assertEqual(False, pnt == prev)
# Making sure that the point's X, Y components are what we expect
self.assertAlmostEqual(p.x, pnt.tuple[0], 9)
self.assertAlmostEqual(p.y, pnt.tuple[1], 9)
# Testing the third dimension, and getting the tuple arguments
if hasattr(p, 'z'):
self.assertEqual(True, pnt.hasz)
self.assertEqual(p.z, pnt.z)
self.assertEqual(p.z, pnt.tuple[2], 9)
tup_args = (p.x, p.y, p.z)
set_tup1 = (2.71, 3.14, 5.23)
set_tup2 = (5.23, 2.71, 3.14)
else:
self.assertEqual(False, pnt.hasz)
self.assertEqual(None, pnt.z)
tup_args = (p.x, p.y)
set_tup1 = (2.71, 3.14)
set_tup2 = (3.14, 2.71)
# Centroid operation on point should be point itself
self.assertEqual(p.centroid, pnt.centroid.tuple)
# Now testing the different constructors
pnt2 = Point(tup_args) # e.g., Point((1, 2))
pnt3 = Point(*tup_args) # e.g., Point(1, 2)
self.assertEqual(True, pnt == pnt2)
self.assertEqual(True, pnt == pnt3)
# Now testing setting the x and y
pnt.y = 3.14
pnt.x = 2.71
self.assertEqual(3.14, pnt.y)
self.assertEqual(2.71, pnt.x)
# Setting via the tuple/coords property
pnt.tuple = set_tup1
self.assertEqual(set_tup1, pnt.tuple)
pnt.coords = set_tup2
self.assertEqual(set_tup2, pnt.coords)
prev = pnt # setting the previous geometry
def test02b_multipoints(self):
"Testing MultiPoint objects."
for mp in self.geometries.multipoints:
mpnt = fromstr(mp.wkt)
self.assertEqual(mpnt.geom_type, 'MultiPoint')
self.assertEqual(mpnt.geom_typeid, 4)
self.assertAlmostEqual(mp.centroid[0], mpnt.centroid.tuple[0], 9)
self.assertAlmostEqual(mp.centroid[1], mpnt.centroid.tuple[1], 9)
self.assertRaises(GEOSIndexError, mpnt.__getitem__, len(mpnt))
self.assertEqual(mp.centroid, mpnt.centroid.tuple)
self.assertEqual(mp.coords, tuple(m.tuple for m in mpnt))
for p in mpnt:
self.assertEqual(p.geom_type, 'Point')
self.assertEqual(p.geom_typeid, 0)
self.assertEqual(p.empty, False)
self.assertEqual(p.valid, True)
def test03a_linestring(self):
"Testing LineString objects."
prev = fromstr('POINT(0 0)')
for l in self.geometries.linestrings:
ls = fromstr(l.wkt)
self.assertEqual(ls.geom_type, 'LineString')
self.assertEqual(ls.geom_typeid, 1)
self.assertEqual(ls.empty, False)
self.assertEqual(ls.ring, False)
if hasattr(l, 'centroid'):
self.assertEqual(l.centroid, ls.centroid.tuple)
if hasattr(l, 'tup'):
self.assertEqual(l.tup, ls.tuple)
self.assertEqual(True, ls == fromstr(l.wkt))
self.assertEqual(False, ls == prev)
self.assertRaises(GEOSIndexError, ls.__getitem__, len(ls))
prev = ls
# Creating a LineString from a tuple, list, and numpy array
self.assertEqual(ls, LineString(ls.tuple)) # tuple
self.assertEqual(ls, LineString(*ls.tuple)) # as individual arguments
self.assertEqual(ls, LineString([list(tup) for tup in ls.tuple])) # as list
self.assertEqual(ls.wkt, LineString(*tuple(Point(tup) for tup in ls.tuple)).wkt) # Point individual arguments
if numpy: self.assertEqual(ls, LineString(numpy.array(ls.tuple))) # as numpy array
def test03b_multilinestring(self):
"Testing MultiLineString objects."
prev = fromstr('POINT(0 0)')
for l in self.geometries.multilinestrings:
ml = fromstr(l.wkt)
self.assertEqual(ml.geom_type, 'MultiLineString')
self.assertEqual(ml.geom_typeid, 5)
self.assertAlmostEqual(l.centroid[0], ml.centroid.x, 9)
self.assertAlmostEqual(l.centroid[1], ml.centroid.y, 9)
self.assertEqual(True, ml == fromstr(l.wkt))
self.assertEqual(False, ml == prev)
prev = ml
for ls in ml:
self.assertEqual(ls.geom_type, 'LineString')
self.assertEqual(ls.geom_typeid, 1)
self.assertEqual(ls.empty, False)
self.assertRaises(GEOSIndexError, ml.__getitem__, len(ml))
self.assertEqual(ml.wkt, MultiLineString(*tuple(s.clone() for s in ml)).wkt)
self.assertEqual(ml, MultiLineString(*tuple(LineString(s.tuple) for s in ml)))
def test04_linearring(self):
"Testing LinearRing objects."
for rr in self.geometries.linearrings:
lr = fromstr(rr.wkt)
self.assertEqual(lr.geom_type, 'LinearRing')
self.assertEqual(lr.geom_typeid, 2)
self.assertEqual(rr.n_p, len(lr))
self.assertEqual(True, lr.valid)
self.assertEqual(False, lr.empty)
# Creating a LinearRing from a tuple, list, and numpy array
self.assertEqual(lr, LinearRing(lr.tuple))
self.assertEqual(lr, LinearRing(*lr.tuple))
self.assertEqual(lr, LinearRing([list(tup) for tup in lr.tuple]))
if numpy: self.assertEqual(lr, LinearRing(numpy.array(lr.tuple)))
def test05a_polygons(self):
"Testing Polygon objects."
# Testing `from_bbox` class method
bbox = (-180, -90, 180, 90)
p = Polygon.from_bbox( bbox )
self.assertEqual(bbox, p.extent)
prev = fromstr('POINT(0 0)')
for p in self.geometries.polygons:
# Creating the Polygon, testing its properties.
poly = fromstr(p.wkt)
self.assertEqual(poly.geom_type, 'Polygon')
self.assertEqual(poly.geom_typeid, 3)
self.assertEqual(poly.empty, False)
self.assertEqual(poly.ring, False)
self.assertEqual(p.n_i, poly.num_interior_rings)
self.assertEqual(p.n_i + 1, len(poly)) # Testing __len__
self.assertEqual(p.n_p, poly.num_points)
# Area & Centroid
self.assertAlmostEqual(p.area, poly.area, 9)
self.assertAlmostEqual(p.centroid[0], poly.centroid.tuple[0], 9)
self.assertAlmostEqual(p.centroid[1], poly.centroid.tuple[1], 9)
# Testing the geometry equivalence
self.assertEqual(True, poly == fromstr(p.wkt))
self.assertEqual(False, poly == prev) # Should not be equal to previous geometry
self.assertEqual(True, poly != prev)
# Testing the exterior ring
ring = poly.exterior_ring
self.assertEqual(ring.geom_type, 'LinearRing')
self.assertEqual(ring.geom_typeid, 2)
if p.ext_ring_cs:
self.assertEqual(p.ext_ring_cs, ring.tuple)
self.assertEqual(p.ext_ring_cs, poly[0].tuple) # Testing __getitem__
# Testing __getitem__ and __setitem__ on invalid indices
self.assertRaises(GEOSIndexError, poly.__getitem__, len(poly))
self.assertRaises(GEOSIndexError, poly.__setitem__, len(poly), False)
self.assertRaises(GEOSIndexError, poly.__getitem__, -1 * len(poly) - 1)
# Testing __iter__
for r in poly:
self.assertEqual(r.geom_type, 'LinearRing')
self.assertEqual(r.geom_typeid, 2)
# Testing polygon construction.
self.assertRaises(TypeError, Polygon.__init__, 0, [1, 2, 3])
self.assertRaises(TypeError, Polygon.__init__, 'foo')
# Polygon(shell, (hole1, ... holeN))
rings = tuple(r for r in poly)
self.assertEqual(poly, Polygon(rings[0], rings[1:]))
# Polygon(shell_tuple, hole_tuple1, ... , hole_tupleN)
ring_tuples = tuple(r.tuple for r in poly)
self.assertEqual(poly, Polygon(*ring_tuples))
# Constructing with tuples of LinearRings.
self.assertEqual(poly.wkt, Polygon(*tuple(r for r in poly)).wkt)
self.assertEqual(poly.wkt, Polygon(*tuple(LinearRing(r.tuple) for r in poly)).wkt)
def test05b_multipolygons(self):
"Testing MultiPolygon objects."
print "\nBEGIN - expecting GEOS_NOTICE; safe to ignore.\n"
prev = fromstr('POINT (0 0)')
for mp in self.geometries.multipolygons:
mpoly = fromstr(mp.wkt)
self.assertEqual(mpoly.geom_type, 'MultiPolygon')
self.assertEqual(mpoly.geom_typeid, 6)
self.assertEqual(mp.valid, mpoly.valid)
if mp.valid:
self.assertEqual(mp.num_geom, mpoly.num_geom)
self.assertEqual(mp.n_p, mpoly.num_coords)
self.assertEqual(mp.num_geom, len(mpoly))
self.assertRaises(GEOSIndexError, mpoly.__getitem__, len(mpoly))
for p in mpoly:
self.assertEqual(p.geom_type, 'Polygon')
self.assertEqual(p.geom_typeid, 3)
self.assertEqual(p.valid, True)
self.assertEqual(mpoly.wkt, MultiPolygon(*tuple(poly.clone() for poly in mpoly)).wkt)
print "\nEND - expecting GEOS_NOTICE; safe to ignore.\n"
def test06a_memory_hijinks(self):
"Testing Geometry __del__() on rings and polygons."
#### Memory issues with rings and polygons
# These tests are needed to ensure sanity with writable geometries.
# Getting a polygon with interior rings, and pulling out the interior rings
poly = fromstr(self.geometries.polygons[1].wkt)
ring1 = poly[0]
ring2 = poly[1]
# These deletes should be 'harmless' since they are done on child geometries
del ring1
del ring2
ring1 = poly[0]
ring2 = poly[1]
# Deleting the polygon
del poly
# Access to these rings is OK since they are clones.
s1, s2 = str(ring1), str(ring2)
def test08_coord_seq(self):
"Testing Coordinate Sequence objects."
for p in self.geometries.polygons:
if p.ext_ring_cs:
# Constructing the polygon and getting the coordinate sequence
poly = fromstr(p.wkt)
cs = poly.exterior_ring.coord_seq
self.assertEqual(p.ext_ring_cs, cs.tuple) # done in the Polygon test too.
self.assertEqual(len(p.ext_ring_cs), len(cs)) # Making sure __len__ works
# Checks __getitem__ and __setitem__
for i in xrange(len(p.ext_ring_cs)):
c1 = p.ext_ring_cs[i] # Expected value
c2 = cs[i] # Value from coordseq
self.assertEqual(c1, c2)
# Constructing the test value to set the coordinate sequence with
if len(c1) == 2: tset = (5, 23)
else: tset = (5, 23, 8)
cs[i] = tset
# Making sure every set point matches what we expect
for j in range(len(tset)):
cs[i] = tset
self.assertEqual(tset[j], cs[i][j])
def test09_relate_pattern(self):
"Testing relate() and relate_pattern()."
g = fromstr('POINT (0 0)')
self.assertRaises(GEOSException, g.relate_pattern, 0, 'invalid pattern, yo')
for rg in self.geometries.relate_geoms:
a = fromstr(rg.wkt_a)
b = fromstr(rg.wkt_b)
self.assertEqual(rg.result, a.relate_pattern(b, rg.pattern))
self.assertEqual(rg.pattern, a.relate(b))
def test10_intersection(self):
"Testing intersects() and intersection()."
for i in xrange(len(self.geometries.topology_geoms)):
a = fromstr(self.geometries.topology_geoms[i].wkt_a)
b = fromstr(self.geometries.topology_geoms[i].wkt_b)
i1 = fromstr(self.geometries.intersect_geoms[i].wkt)
self.assertEqual(True, a.intersects(b))
i2 = a.intersection(b)
self.assertEqual(i1, i2)
self.assertEqual(i1, a & b) # __and__ is intersection operator
a &= b # testing __iand__
self.assertEqual(i1, a)
def test11_union(self):
"Testing union()."
for i in xrange(len(self.geometries.topology_geoms)):
a = fromstr(self.geometries.topology_geoms[i].wkt_a)
b = fromstr(self.geometries.topology_geoms[i].wkt_b)
u1 = fromstr(self.geometries.union_geoms[i].wkt)
u2 = a.union(b)
self.assertEqual(u1, u2)
self.assertEqual(u1, a | b) # __or__ is union operator
a |= b # testing __ior__
self.assertEqual(u1, a)
def test12_difference(self):
"Testing difference()."
for i in xrange(len(self.geometries.topology_geoms)):
a = fromstr(self.geometries.topology_geoms[i].wkt_a)
b = fromstr(self.geometries.topology_geoms[i].wkt_b)
d1 = fromstr(self.geometries.diff_geoms[i].wkt)
d2 = a.difference(b)
self.assertEqual(d1, d2)
self.assertEqual(d1, a - b) # __sub__ is difference operator
a -= b # testing __isub__
self.assertEqual(d1, a)
def test13_symdifference(self):
"Testing sym_difference()."
for i in xrange(len(self.geometries.topology_geoms)):
a = fromstr(self.geometries.topology_geoms[i].wkt_a)
b = fromstr(self.geometries.topology_geoms[i].wkt_b)
d1 = fromstr(self.geometries.sdiff_geoms[i].wkt)
d2 = a.sym_difference(b)
self.assertEqual(d1, d2)
self.assertEqual(d1, a ^ b) # __xor__ is symmetric difference operator
a ^= b # testing __ixor__
self.assertEqual(d1, a)
def test14_buffer(self):
"Testing buffer()."
for bg in self.geometries.buffer_geoms:
g = fromstr(bg.wkt)
# The buffer we expect
exp_buf = fromstr(bg.buffer_wkt)
quadsegs = bg.quadsegs
width = bg.width
# Can't use a floating-point for the number of quadsegs.
self.assertRaises(ctypes.ArgumentError, g.buffer, width, float(quadsegs))
# Constructing our buffer
buf = g.buffer(width, quadsegs)
self.assertEqual(exp_buf.num_coords, buf.num_coords)
self.assertEqual(len(exp_buf), len(buf))
# Now assuring that each point in the buffer is almost equal
for j in xrange(len(exp_buf)):
exp_ring = exp_buf[j]
buf_ring = buf[j]
self.assertEqual(len(exp_ring), len(buf_ring))
for k in xrange(len(exp_ring)):
# Asserting the X, Y of each point are almost equal (due to floating point imprecision)
self.assertAlmostEqual(exp_ring[k][0], buf_ring[k][0], 9)
self.assertAlmostEqual(exp_ring[k][1], buf_ring[k][1], 9)
def test15_srid(self):
"Testing the SRID property and keyword."
# Testing SRID keyword on Point
pnt = Point(5, 23, srid=4326)
self.assertEqual(4326, pnt.srid)
pnt.srid = 3084
self.assertEqual(3084, pnt.srid)
self.assertRaises(ctypes.ArgumentError, pnt.set_srid, '4326')
# Testing SRID keyword on fromstr(), and on Polygon rings.
poly = fromstr(self.geometries.polygons[1].wkt, srid=4269)
self.assertEqual(4269, poly.srid)
for ring in poly: self.assertEqual(4269, ring.srid)
poly.srid = 4326
self.assertEqual(4326, poly.shell.srid)
# Testing SRID keyword on GeometryCollection
gc = GeometryCollection(Point(5, 23), LineString((0, 0), (1.5, 1.5), (3, 3)), srid=32021)
self.assertEqual(32021, gc.srid)
for i in range(len(gc)): self.assertEqual(32021, gc[i].srid)
# GEOS may get the SRID from HEXEWKB
# 'POINT(5 23)' at SRID=4326 in hex form -- obtained from PostGIS
# using `SELECT GeomFromText('POINT (5 23)', 4326);`.
hex = '0101000020E610000000000000000014400000000000003740'
p1 = fromstr(hex)
self.assertEqual(4326, p1.srid)
# In GEOS 3.0.0rc1-4 when the EWKB and/or HEXEWKB is exported,
# the SRID information is lost and set to -1 -- this is not a
# problem on the 3.0.0 version (another reason to upgrade).
exp_srid = self.null_srid
p2 = fromstr(p1.hex)
self.assertEqual(exp_srid, p2.srid)
p3 = fromstr(p1.hex, srid=-1) # -1 is intended.
self.assertEqual(-1, p3.srid)
def test16_mutable_geometries(self):
"Testing the mutability of Polygons and Geometry Collections."
### Testing the mutability of Polygons ###
for p in self.geometries.polygons:
poly = fromstr(p.wkt)
# Should only be able to use __setitem__ with LinearRing geometries.
self.assertRaises(TypeError, poly.__setitem__, 0, LineString((1, 1), (2, 2)))
# Constructing the new shell by adding 500 to every point in the old shell.
shell_tup = poly.shell.tuple
new_coords = []
for point in shell_tup: new_coords.append((point[0] + 500., point[1] + 500.))
new_shell = LinearRing(*tuple(new_coords))
# Assigning polygon's exterior ring w/the new shell
poly.exterior_ring = new_shell
s = str(new_shell) # new shell is still accessible
self.assertEqual(poly.exterior_ring, new_shell)
self.assertEqual(poly[0], new_shell)
### Testing the mutability of Geometry Collections
for tg in self.geometries.multipoints:
mp = fromstr(tg.wkt)
for i in range(len(mp)):
# Creating a random point.
pnt = mp[i]
new = Point(random.randint(1, 100), random.randint(1, 100))
# Testing the assignment
mp[i] = new
s = str(new) # what was used for the assignment is still accessible
self.assertEqual(mp[i], new)
self.assertEqual(mp[i].wkt, new.wkt)
self.assertNotEqual(pnt, mp[i])
# MultiPolygons involve much more memory management because each
# Polygon w/in the collection has its own rings.
for tg in self.geometries.multipolygons:
mpoly = fromstr(tg.wkt)
for i in xrange(len(mpoly)):
poly = mpoly[i]
old_poly = mpoly[i]
# Offsetting the each ring in the polygon by 500.
for j in xrange(len(poly)):
r = poly[j]
for k in xrange(len(r)): r[k] = (r[k][0] + 500., r[k][1] + 500.)
poly[j] = r
self.assertNotEqual(mpoly[i], poly)
# Testing the assignment
mpoly[i] = poly
s = str(poly) # Still accessible
self.assertEqual(mpoly[i], poly)
self.assertNotEqual(mpoly[i], old_poly)
# Extreme (!!) __setitem__ -- no longer works, have to detect
# in the first object that __setitem__ is called in the subsequent
# objects -- maybe mpoly[0, 0, 0] = (3.14, 2.71)?
#mpoly[0][0][0] = (3.14, 2.71)
#self.assertEqual((3.14, 2.71), mpoly[0][0][0])
# Doing it more slowly..
#self.assertEqual((3.14, 2.71), mpoly[0].shell[0])
#del mpoly
def test17_threed(self):
"Testing three-dimensional geometries."
# Testing a 3D Point
pnt = Point(2, 3, 8)
self.assertEqual((2.,3.,8.), pnt.coords)
self.assertRaises(TypeError, pnt.set_coords, (1.,2.))
pnt.coords = (1.,2.,3.)
self.assertEqual((1.,2.,3.), pnt.coords)
# Testing a 3D LineString
ls = LineString((2., 3., 8.), (50., 250., -117.))
self.assertEqual(((2.,3.,8.), (50.,250.,-117.)), ls.tuple)
self.assertRaises(TypeError, ls.__setitem__, 0, (1.,2.))
ls[0] = (1.,2.,3.)
self.assertEqual((1.,2.,3.), ls[0])
def test18_distance(self):
"Testing the distance() function."
# Distance to self should be 0.
pnt = Point(0, 0)
self.assertEqual(0.0, pnt.distance(Point(0, 0)))
# Distance should be 1
self.assertEqual(1.0, pnt.distance(Point(0, 1)))
# Distance should be ~ sqrt(2)
self.assertAlmostEqual(1.41421356237, pnt.distance(Point(1, 1)), 11)
# Distances are from the closest vertex in each geometry --
# should be 3 (distance from (2, 2) to (5, 2)).
ls1 = LineString((0, 0), (1, 1), (2, 2))
ls2 = LineString((5, 2), (6, 1), (7, 0))
self.assertEqual(3, ls1.distance(ls2))
def test19_length(self):
"Testing the length property."
# Points have 0 length.
pnt = Point(0, 0)
self.assertEqual(0.0, pnt.length)
# Should be ~ sqrt(2)
ls = LineString((0, 0), (1, 1))
self.assertAlmostEqual(1.41421356237, ls.length, 11)
# Should be circumfrence of Polygon
poly = Polygon(LinearRing((0, 0), (0, 1), (1, 1), (1, 0), (0, 0)))
self.assertEqual(4.0, poly.length)
# Should be sum of each element's length in collection.
mpoly = MultiPolygon(poly.clone(), poly)
self.assertEqual(8.0, mpoly.length)
def test20a_emptyCollections(self):
"Testing empty geometries and collections."
gc1 = GeometryCollection([])
gc2 = fromstr('GEOMETRYCOLLECTION EMPTY')
pnt = fromstr('POINT EMPTY')
ls = fromstr('LINESTRING EMPTY')
poly = fromstr('POLYGON EMPTY')
mls = fromstr('MULTILINESTRING EMPTY')
mpoly1 = fromstr('MULTIPOLYGON EMPTY')
mpoly2 = MultiPolygon(())
for g in [gc1, gc2, pnt, ls, poly, mls, mpoly1, mpoly2]:
self.assertEqual(True, g.empty)
# Testing len() and num_geom.
if isinstance(g, Polygon):
self.assertEqual(1, len(g)) # Has one empty linear ring
self.assertEqual(1, g.num_geom)
self.assertEqual(0, len(g[0]))
elif isinstance(g, (Point, LineString)):
self.assertEqual(1, g.num_geom)
self.assertEqual(0, len(g))
else:
self.assertEqual(0, g.num_geom)
self.assertEqual(0, len(g))
# Testing __getitem__ (doesn't work on Point or Polygon)
if isinstance(g, Point):
self.assertRaises(GEOSIndexError, g.get_x)
elif isinstance(g, Polygon):
lr = g.shell
self.assertEqual('LINEARRING EMPTY', lr.wkt)
self.assertEqual(0, len(lr))
self.assertEqual(True, lr.empty)
self.assertRaises(GEOSIndexError, lr.__getitem__, 0)
else:
self.assertRaises(GEOSIndexError, g.__getitem__, 0)
def test20b_collections_of_collections(self):
"Testing GeometryCollection handling of other collections."
# Creating a GeometryCollection WKT string composed of other
# collections and polygons.
coll = [mp.wkt for mp in self.geometries.multipolygons if mp.valid]
coll.extend([mls.wkt for mls in self.geometries.multilinestrings])
coll.extend([p.wkt for p in self.geometries.polygons])
coll.extend([mp.wkt for mp in self.geometries.multipoints])
gc_wkt = 'GEOMETRYCOLLECTION(%s)' % ','.join(coll)
# Should construct ok from WKT
gc1 = GEOSGeometry(gc_wkt)
# Should also construct ok from individual geometry arguments.
gc2 = GeometryCollection(*tuple(g for g in gc1))
# And, they should be equal.
self.assertEqual(gc1, gc2)
def test21_test_gdal(self):
"Testing `ogr` and `srs` properties."
if not gdal.HAS_GDAL: return
g1 = fromstr('POINT(5 23)')
self.assertEqual(True, isinstance(g1.ogr, gdal.OGRGeometry))
self.assertEqual(g1.srs, None)
g2 = fromstr('LINESTRING(0 0, 5 5, 23 23)', srid=4326)
self.assertEqual(True, isinstance(g2.ogr, gdal.OGRGeometry))
self.assertEqual(True, isinstance(g2.srs, gdal.SpatialReference))
self.assertEqual(g2.hex, g2.ogr.hex)
self.assertEqual('WGS 84', g2.srs.name)
def test22_copy(self):
"Testing use with the Python `copy` module."
import copy
poly = GEOSGeometry('POLYGON((0 0, 0 23, 23 23, 23 0, 0 0), (5 5, 5 10, 10 10, 10 5, 5 5))')
cpy1 = copy.copy(poly)
cpy2 = copy.deepcopy(poly)
self.assertNotEqual(poly._ptr, cpy1._ptr)
self.assertNotEqual(poly._ptr, cpy2._ptr)
def test23_transform(self):
"Testing `transform` method."
if not gdal.HAS_GDAL: return
orig = GEOSGeometry('POINT (-104.609 38.255)', 4326)
trans = GEOSGeometry('POINT (992385.4472045 481455.4944650)', 2774)
# Using a srid, a SpatialReference object, and a CoordTransform object
# for transformations.
t1, t2, t3 = orig.clone(), orig.clone(), orig.clone()
t1.transform(trans.srid)
t2.transform(gdal.SpatialReference('EPSG:2774'))
ct = gdal.CoordTransform(gdal.SpatialReference('WGS84'), gdal.SpatialReference(2774))
t3.transform(ct)
# Testing use of the `clone` keyword.
k1 = orig.clone()
k2 = k1.transform(trans.srid, clone=True)
self.assertEqual(k1, orig)
self.assertNotEqual(k1, k2)
prec = 3
for p in (t1, t2, t3, k2):
self.assertAlmostEqual(trans.x, p.x, prec)
self.assertAlmostEqual(trans.y, p.y, prec)
def test23_transform_noop(self):
""" Testing `transform` method (SRID match) """
# transform() should no-op if source & dest SRIDs match,
# regardless of whether GDAL is available.
if gdal.HAS_GDAL:
g = GEOSGeometry('POINT (-104.609 38.255)', 4326)
gt = g.tuple
g.transform(4326)
self.assertEqual(g.tuple, gt)
self.assertEqual(g.srid, 4326)
g = GEOSGeometry('POINT (-104.609 38.255)', 4326)
g1 = g.transform(4326, clone=True)
self.assertEqual(g1.tuple, g.tuple)
self.assertEqual(g1.srid, 4326)
self.assertTrue(g1 is not g, "Clone didn't happen")
old_has_gdal = gdal.HAS_GDAL
try:
gdal.HAS_GDAL = False
g = GEOSGeometry('POINT (-104.609 38.255)', 4326)
gt = g.tuple
g.transform(4326)
self.assertEqual(g.tuple, gt)
self.assertEqual(g.srid, 4326)
g = GEOSGeometry('POINT (-104.609 38.255)', 4326)
g1 = g.transform(4326, clone=True)
self.assertEqual(g1.tuple, g.tuple)
self.assertEqual(g1.srid, 4326)
self.assertTrue(g1 is not g, "Clone didn't happen")
finally:
gdal.HAS_GDAL = old_has_gdal
def test23_transform_nosrid(self):
""" Testing `transform` method (no SRID) """
# raise a warning if SRID <0/None
import warnings
print "\nBEGIN - expecting Warnings; safe to ignore.\n"
# test for do-nothing behaviour.
try:
# Keeping line-noise down by only printing the relevant
# warnings once.
warnings.simplefilter('once', UserWarning)
warnings.simplefilter('once', FutureWarning)
g = GEOSGeometry('POINT (-104.609 38.255)', srid=None)
g.transform(2774)
self.assertEqual(g.tuple, (-104.609, 38.255))
self.assertEqual(g.srid, None)
g = GEOSGeometry('POINT (-104.609 38.255)', srid=None)
g1 = g.transform(2774, clone=True)
self.assertTrue(g1 is None)
g = GEOSGeometry('POINT (-104.609 38.255)', srid=-1)
g.transform(2774)
self.assertEqual(g.tuple, (-104.609, 38.255))
self.assertEqual(g.srid, -1)
g = GEOSGeometry('POINT (-104.609 38.255)', srid=-1)
g1 = g.transform(2774, clone=True)
self.assertTrue(g1 is None)
finally:
warnings.simplefilter('default', UserWarning)
warnings.simplefilter('default', FutureWarning)
print "\nEND - expecting Warnings; safe to ignore.\n"
# test warning is raised
try:
warnings.simplefilter('error', FutureWarning)
warnings.simplefilter('ignore', UserWarning)
g = GEOSGeometry('POINT (-104.609 38.255)', srid=None)
self.assertRaises(FutureWarning, g.transform, 2774)
g = GEOSGeometry('POINT (-104.609 38.255)', srid=None)
self.assertRaises(FutureWarning, g.transform, 2774, clone=True)
g = GEOSGeometry('POINT (-104.609 38.255)', srid=-1)
self.assertRaises(FutureWarning, g.transform, 2774)
g = GEOSGeometry('POINT (-104.609 38.255)', srid=-1)
self.assertRaises(FutureWarning, g.transform, 2774, clone=True)
finally:
warnings.simplefilter('default', FutureWarning)
warnings.simplefilter('default', UserWarning)
def test23_transform_nogdal(self):
""" Testing `transform` method (GDAL not available) """
old_has_gdal = gdal.HAS_GDAL
try:
gdal.HAS_GDAL = False
g = GEOSGeometry('POINT (-104.609 38.255)', 4326)
self.assertRaises(GEOSException, g.transform, 2774)
g = GEOSGeometry('POINT (-104.609 38.255)', 4326)
self.assertRaises(GEOSException, g.transform, 2774, clone=True)
finally:
gdal.HAS_GDAL = old_has_gdal
def test24_extent(self):
"Testing `extent` method."
# The xmin, ymin, xmax, ymax of the MultiPoint should be returned.
mp = MultiPoint(Point(5, 23), Point(0, 0), Point(10, 50))
self.assertEqual((0.0, 0.0, 10.0, 50.0), mp.extent)
pnt = Point(5.23, 17.8)
# Extent of points is just the point itself repeated.
self.assertEqual((5.23, 17.8, 5.23, 17.8), pnt.extent)
# Testing on the 'real world' Polygon.
poly = fromstr(self.geometries.polygons[3].wkt)
ring = poly.shell
x, y = ring.x, ring.y
xmin, ymin = min(x), min(y)
xmax, ymax = max(x), max(y)
self.assertEqual((xmin, ymin, xmax, ymax), poly.extent)
def test25_pickle(self):
"Testing pickling and unpickling support."
# Using both pickle and cPickle -- just 'cause.
import pickle, cPickle
# Creating a list of test geometries for pickling,
# and setting the SRID on some of them.
def get_geoms(lst, srid=None):
return [GEOSGeometry(tg.wkt, srid) for tg in lst]
tgeoms = get_geoms(self.geometries.points)
tgeoms.extend(get_geoms(self.geometries.multilinestrings, 4326))
tgeoms.extend(get_geoms(self.geometries.polygons, 3084))
tgeoms.extend(get_geoms(self.geometries.multipolygons, 900913))
# The SRID won't be exported in GEOS 3.0 release candidates.
no_srid = self.null_srid == -1
for geom in tgeoms:
s1, s2 = cPickle.dumps(geom), pickle.dumps(geom)
g1, g2 = cPickle.loads(s1), pickle.loads(s2)
for tmpg in (g1, g2):
self.assertEqual(geom, tmpg)
if not no_srid: self.assertEqual(geom.srid, tmpg.srid)
def test26_prepared(self):
"Testing PreparedGeometry support."
if not GEOS_PREPARE: return
# Creating a simple multipolygon and getting a prepared version.
mpoly = GEOSGeometry('MULTIPOLYGON(((0 0,0 5,5 5,5 0,0 0)),((5 5,5 10,10 10,10 5,5 5)))')
prep = mpoly.prepared
# A set of test points.
pnts = [Point(5, 5), Point(7.5, 7.5), Point(2.5, 7.5)]
covers = [True, True, False] # No `covers` op for regular GEOS geoms.
for pnt, c in zip(pnts, covers):
# Results should be the same (but faster)
self.assertEqual(mpoly.contains(pnt), prep.contains(pnt))
self.assertEqual(mpoly.intersects(pnt), prep.intersects(pnt))
self.assertEqual(c, prep.covers(pnt))
def test26_line_merge(self):
"Testing line merge support"
ref_geoms = (fromstr('LINESTRING(1 1, 1 1, 3 3)'),
fromstr('MULTILINESTRING((1 1, 3 3), (3 3, 4 2))'),
)
ref_merged = (fromstr('LINESTRING(1 1, 3 3)'),
fromstr('LINESTRING (1 1, 3 3, 4 2)'),
)
for geom, merged in zip(ref_geoms, ref_merged):
self.assertEqual(merged, geom.merged)
def test27_valid_reason(self):
"Testing IsValidReason support"
# Skipping tests if GEOS < v3.1.
if not GEOS_PREPARE: return
g = GEOSGeometry("POINT(0 0)")
self.assertTrue(g.valid)
self.assertTrue(isinstance(g.valid_reason, basestring))
self.assertEqual(g.valid_reason, "Valid Geometry")
print "\nBEGIN - expecting GEOS_NOTICE; safe to ignore.\n"
g = GEOSGeometry("LINESTRING(0 0, 0 0)")
self.assertTrue(not g.valid)
self.assertTrue(isinstance(g.valid_reason, basestring))
self.assertTrue(g.valid_reason.startswith("Too few points in geometry component"))
print "\nEND - expecting GEOS_NOTICE; safe to ignore.\n"
def suite():
s = unittest.TestSuite()
s.addTest(unittest.makeSuite(GEOSTest))
return s
def run(verbosity=2):
unittest.TextTestRunner(verbosity=verbosity).run(suite())
|
{
"content_hash": "945f6ebb241485dd6d7ded53381e5a67",
"timestamp": "",
"source": "github",
"line_count": 1055,
"max_line_length": 121,
"avg_line_length": 40.913744075829385,
"alnum_prop": 0.5779353164674266,
"repo_name": "skevy/django",
"id": "4620a9fb528913d2fd90d2562eebb0964c0cdeb3",
"size": "43164",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "django/contrib/gis/geos/tests/test_geos.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "98723"
},
{
"name": "Python",
"bytes": "6925512"
},
{
"name": "Shell",
"bytes": "4009"
}
],
"symlink_target": ""
}
|
class EtcdTime(object):
"""
An object holding a time value
"""
def __init__(self, hour, minute, second):
self.hour = hour
self.minute = minute
self.second = second
|
{
"content_hash": "b984831da2aafbef116e5eff9362ade7",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 45,
"avg_line_length": 25.5,
"alnum_prop": 0.5637254901960784,
"repo_name": "box/etcdb",
"id": "450ef71234aff4bd8d1e78fd594ad8888ebb3acf",
"size": "204",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "etcdb/etcdtime.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "2961"
},
{
"name": "Pascal",
"bytes": "43"
},
{
"name": "Puppet",
"bytes": "2986"
},
{
"name": "Python",
"bytes": "226169"
},
{
"name": "Ruby",
"bytes": "1610"
},
{
"name": "Shell",
"bytes": "750"
}
],
"symlink_target": ""
}
|
import mock
from neutron.agent.common import config as agent_config
from neutron.agent.l3 import router_info
from neutron.agent.linux import ip_lib
from neutron.common import constants as l3_constants
from neutron.common import exceptions as n_exc
from neutron.openstack.common import uuidutils
from neutron.tests import base
_uuid = uuidutils.generate_uuid
class TestRouterInfo(base.BaseTestCase):
def setUp(self):
super(TestRouterInfo, self).setUp()
conf = agent_config.setup_conf()
conf.use_namespaces = True
self.ip_cls_p = mock.patch('neutron.agent.linux.ip_lib.IPWrapper')
ip_cls = self.ip_cls_p.start()
self.mock_ip = mock.MagicMock()
ip_cls.return_value = self.mock_ip
self.ri_kwargs = {'agent_conf': conf,
'interface_driver': mock.sentinel.interface_driver}
def _check_agent_method_called(self, calls):
self.mock_ip.netns.execute.assert_has_calls(
[mock.call(call, check_exit_code=False) for call in calls],
any_order=True)
def test_routing_table_update(self):
ri = router_info.RouterInfo(_uuid(), {}, **self.ri_kwargs)
ri.router = {}
fake_route1 = {'destination': '135.207.0.0/16',
'nexthop': '1.2.3.4'}
fake_route2 = {'destination': '135.207.111.111/32',
'nexthop': '1.2.3.4'}
ri._update_routing_table('replace', fake_route1)
expected = [['ip', 'route', 'replace', 'to', '135.207.0.0/16',
'via', '1.2.3.4']]
self._check_agent_method_called(expected)
ri._update_routing_table('delete', fake_route1)
expected = [['ip', 'route', 'delete', 'to', '135.207.0.0/16',
'via', '1.2.3.4']]
self._check_agent_method_called(expected)
ri._update_routing_table('replace', fake_route2)
expected = [['ip', 'route', 'replace', 'to', '135.207.111.111/32',
'via', '1.2.3.4']]
self._check_agent_method_called(expected)
ri._update_routing_table('delete', fake_route2)
expected = [['ip', 'route', 'delete', 'to', '135.207.111.111/32',
'via', '1.2.3.4']]
self._check_agent_method_called(expected)
def test_routes_updated(self):
ri = router_info.RouterInfo(_uuid(), {}, **self.ri_kwargs)
ri.router = {}
fake_old_routes = []
fake_new_routes = [{'destination': "110.100.31.0/24",
'nexthop': "10.100.10.30"},
{'destination': "110.100.30.0/24",
'nexthop': "10.100.10.30"}]
ri.routes = fake_old_routes
ri.router['routes'] = fake_new_routes
ri.routes_updated()
expected = [['ip', 'route', 'replace', 'to', '110.100.30.0/24',
'via', '10.100.10.30'],
['ip', 'route', 'replace', 'to', '110.100.31.0/24',
'via', '10.100.10.30']]
self._check_agent_method_called(expected)
fake_new_routes = [{'destination': "110.100.30.0/24",
'nexthop': "10.100.10.30"}]
ri.router['routes'] = fake_new_routes
ri.routes_updated()
expected = [['ip', 'route', 'delete', 'to', '110.100.31.0/24',
'via', '10.100.10.30']]
self._check_agent_method_called(expected)
fake_new_routes = []
ri.router['routes'] = fake_new_routes
ri.routes_updated()
expected = [['ip', 'route', 'delete', 'to', '110.100.30.0/24',
'via', '10.100.10.30']]
self._check_agent_method_called(expected)
class BasicRouterTestCaseFramework(base.BaseTestCase):
def _create_router(self, router=None, **kwargs):
if not router:
router = mock.MagicMock()
self.agent_conf = mock.Mock()
# NOTE The use_namespaces config will soon be deprecated
self.agent_conf.use_namespaces = True
self.router_id = _uuid()
return router_info.RouterInfo(self.router_id,
router,
self.agent_conf,
mock.sentinel.interface_driver,
**kwargs)
class TestBasicRouterOperations(BasicRouterTestCaseFramework):
def test_get_floating_ips(self):
router = mock.MagicMock()
router.get.return_value = [mock.sentinel.floating_ip]
ri = self._create_router(router)
fips = ri.get_floating_ips()
self.assertEqual([mock.sentinel.floating_ip], fips)
def test_process_floating_ip_nat_rules(self):
ri = self._create_router()
fips = [{'fixed_ip_address': mock.sentinel.ip,
'floating_ip_address': mock.sentinel.fip}]
ri.get_floating_ips = mock.Mock(return_value=fips)
ri.iptables_manager = mock.MagicMock()
ipv4_nat = ri.iptables_manager.ipv4['nat']
ri.floating_forward_rules = mock.Mock(
return_value=[(mock.sentinel.chain, mock.sentinel.rule)])
ri.process_floating_ip_nat_rules()
# Be sure that the rules are cleared first and apply is called last
self.assertEqual(mock.call.clear_rules_by_tag('floating_ip'),
ipv4_nat.mock_calls[0])
self.assertEqual(mock.call.apply(), ri.iptables_manager.mock_calls[-1])
# Be sure that add_rule is called somewhere in the middle
ipv4_nat.add_rule.assert_called_once_with(mock.sentinel.chain,
mock.sentinel.rule,
tag='floating_ip')
def test_process_floating_ip_nat_rules_removed(self):
ri = self._create_router()
ri.get_floating_ips = mock.Mock(return_value=[])
ri.iptables_manager = mock.MagicMock()
ipv4_nat = ri.iptables_manager.ipv4['nat']
ri.process_floating_ip_nat_rules()
# Be sure that the rules are cleared first and apply is called last
self.assertEqual(mock.call.clear_rules_by_tag('floating_ip'),
ipv4_nat.mock_calls[0])
self.assertEqual(mock.call.apply(), ri.iptables_manager.mock_calls[-1])
# Be sure that add_rule is called somewhere in the middle
self.assertFalse(ipv4_nat.add_rule.called)
def _test_add_fip_addr_to_device_error(self, device):
ri = self._create_router()
ip = '15.1.2.3'
result = ri._add_fip_addr_to_device(
{'id': mock.sentinel.id, 'floating_ip_address': ip}, device)
device.addr.add.assert_called_with(ip + '/32')
return result
def test__add_fip_addr_to_device(self):
result = self._test_add_fip_addr_to_device_error(mock.Mock())
self.assertTrue(result)
def test__add_fip_addr_to_device_error(self):
device = mock.Mock()
device.addr.add.side_effect = RuntimeError
result = self._test_add_fip_addr_to_device_error(device)
self.assertFalse(result)
def test_process_snat_dnat_for_fip(self):
ri = self._create_router()
ri.process_floating_ip_nat_rules = mock.Mock(side_effect=Exception)
self.assertRaises(n_exc.FloatingIpSetupException,
ri.process_snat_dnat_for_fip)
ri.process_floating_ip_nat_rules.assert_called_once_with()
def test_put_fips_in_error_state(self):
ri = self._create_router()
ri.router = mock.Mock()
ri.router.get.return_value = [{'id': mock.sentinel.id1},
{'id': mock.sentinel.id2}]
statuses = ri.put_fips_in_error_state()
expected = [{mock.sentinel.id1: l3_constants.FLOATINGIP_STATUS_ERROR,
mock.sentinel.id2: l3_constants.FLOATINGIP_STATUS_ERROR}]
self.assertNotEqual(expected, statuses)
def test_configure_fip_addresses(self):
ri = self._create_router()
ri.process_floating_ip_addresses = mock.Mock(
side_effect=Exception)
self.assertRaises(n_exc.FloatingIpSetupException,
ri.configure_fip_addresses,
mock.sentinel.interface_name)
ri.process_floating_ip_addresses.assert_called_once_with(
mock.sentinel.interface_name)
def test_get_router_cidrs_returns_cidrs(self):
ri = self._create_router()
addresses = ['15.1.2.2/24', '15.1.2.3/32']
device = mock.MagicMock()
device.addr.list.return_value = [{'cidr': addresses[0]},
{'cidr': addresses[1]}]
self.assertEqual(set(addresses), ri.get_router_cidrs(device))
@mock.patch.object(ip_lib, 'IPDevice')
class TestFloatingIpWithMockDevice(BasicRouterTestCaseFramework):
def test_process_floating_ip_addresses_remap(self, IPDevice):
fip_id = _uuid()
fip = {
'id': fip_id, 'port_id': _uuid(),
'floating_ip_address': '15.1.2.3',
'fixed_ip_address': '192.168.0.2'
}
IPDevice.return_value = device = mock.Mock()
device.addr.list.return_value = [{'cidr': '15.1.2.3/32'}]
ri = self._create_router()
ri.get_floating_ips = mock.Mock(return_value=[fip])
fip_statuses = ri.process_floating_ip_addresses(
mock.sentinel.interface_name)
self.assertEqual({fip_id: l3_constants.FLOATINGIP_STATUS_ACTIVE},
fip_statuses)
self.assertFalse(device.addr.add.called)
self.assertFalse(device.addr.delete.called)
def test_process_router_with_disabled_floating_ip(self, IPDevice):
fip_id = _uuid()
fip = {
'id': fip_id, 'port_id': _uuid(),
'floating_ip_address': '15.1.2.3',
'fixed_ip_address': '192.168.0.2'
}
ri = self._create_router()
ri.floating_ips = [fip]
ri.get_floating_ips = mock.Mock(return_value=[])
fip_statuses = ri.process_floating_ip_addresses(
mock.sentinel.interface_name)
self.assertIsNone(fip_statuses.get(fip_id))
def test_process_router_floating_ip_with_device_add_error(self, IPDevice):
IPDevice.return_value = device = mock.Mock(side_effect=RuntimeError)
device.addr.list.return_value = []
fip_id = _uuid()
fip = {
'id': fip_id, 'port_id': _uuid(),
'floating_ip_address': '15.1.2.3',
'fixed_ip_address': '192.168.0.2',
'status': 'DOWN'
}
ri = self._create_router()
ri.add_floating_ip = mock.Mock(
return_value=l3_constants.FLOATINGIP_STATUS_ERROR)
ri.get_floating_ips = mock.Mock(return_value=[fip])
fip_statuses = ri.process_floating_ip_addresses(
mock.sentinel.interface_name)
self.assertEqual({fip_id: l3_constants.FLOATINGIP_STATUS_ERROR},
fip_statuses)
# TODO(mrsmith): refactor for DVR cases
def test_process_floating_ip_addresses_remove(self, IPDevice):
IPDevice.return_value = device = mock.Mock()
device.addr.list.return_value = [{'cidr': '15.1.2.3/32'}]
ri = self._create_router()
ri.remove_floating_ip = mock.Mock()
ri.router.get = mock.Mock(return_value=[])
fip_statuses = ri.process_floating_ip_addresses(
mock.sentinel.interface_name)
self.assertEqual({}, fip_statuses)
ri.remove_floating_ip.assert_called_once_with(device, '15.1.2.3/32')
|
{
"content_hash": "b4ae3301ef8b1163d81772e1675b64ce",
"timestamp": "",
"source": "github",
"line_count": 300,
"max_line_length": 79,
"avg_line_length": 38.74666666666667,
"alnum_prop": 0.573554714384033,
"repo_name": "JioCloud/neutron",
"id": "5e60aa12c8fe54d9869079c7e699393d338eee95",
"size": "12197",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "neutron/tests/unit/agent/l3/test_router_info.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "980"
},
{
"name": "Python",
"bytes": "7396915"
},
{
"name": "Shell",
"bytes": "12807"
}
],
"symlink_target": ""
}
|
import re, string, sys
with open("../stop_words.txt") as f:
stops = set(f.read().split(",") + list(string.ascii_lowercase))
# The "database"
data = {}
# Internal functions of the "server"-side application
def error_state():
return "Something wrong", ["get", "default", None]
# The "server"-side application handlers
def quit_handler(args):
sys.exit("Goodbye cruel world...")
def default_get_handler(args):
rep = "What would you like to do?"
rep += "\n1 - Quit" + "\n2 - Upload file"
links = {"1" : ["post", "execution", None], "2" : ["get", "file_form", None]}
return rep, links
def upload_get_handler(args):
return "Name of file to upload?", ["post", "file"]
def upload_post_handler(args):
def create_data(filename):
if filename in data:
return
word_freqs = {}
print "Trying to open " + filename
with open(filename) as f:
for w in [x.lower() for x in re.split("[^a-zA-Z]+", f.read()) if len(x) > 0 and x.lower() not in stops]:
word_freqs[w] = word_freqs.get(w, 0) + 1
word_freqsl = word_freqs.items()
word_freqsl.sort(lambda x, y: cmp(y[1], x[1]))
data[filename] = word_freqsl
if args == None:
return error_state()
filename = args[0]
try:
create_data(filename)
except:
return error_state()
return word_get_handler([filename, 0])
def word_get_handler(args):
def get_word(filename, word_index):
if word_index < len(data[filename]):
return data[filename][word_index]
else:
return ("no more words", 0)
filename = args[0]; word_index = args[1]
word_info = get_word(filename, word_index)
rep = '\n#{0}: {1} - {2}'.format(word_index+1, word_info[0], word_info[1])
rep += "\n\nWhat would you like to do next?"
rep += "\n1 - Quit" + "\n2 - Upload file"
rep += "\n3 - See next most-frequently occurring word"
links = {"1" : ["post", "execution", None],
"2" : ["get", "file_form", None],
"3" : ["get", "word", [filename, word_index+1]]}
return rep, links
# Handler registration
handlers = {"post_execution" : quit_handler,
"get_default" : default_get_handler,
"get_file_form" : upload_get_handler,
"post_file" : upload_post_handler,
"get_word" : word_get_handler }
# The "server" core
def handle_request(verb, uri, args):
def handler_key(verb, uri):
return verb + "_" + uri
if handler_key(verb, uri) in handlers:
return handlers[handler_key(verb, uri)](args)
else:
return handlers[handler_key("get", "default")](args)
# A very simple client "browser"
def render_and_get_input(state_representation, links):
print state_representation
sys.stdout.flush()
if type(links) is dict: # many possible next states
input = sys.stdin.readline().strip()
if input in links:
return links[input]
else:
return ["get", "default", None]
elif type(links) is list: # only one possible next state
if links[0] == "post": # get "form" data
input = sys.stdin.readline().strip()
links.append([input]) # add the data at the end
return links
else: # get action, don't get user input
return links
else:
return ["get", "default", None]
request = ["get", "default", None]
while True:
# "server"-side computation
state_representation, links = handle_request(*request)
# "client"-side computation
request = render_and_get_input(state_representation, links)
|
{
"content_hash": "8cfc9d831c4781801b774c8145aa22ab",
"timestamp": "",
"source": "github",
"line_count": 109,
"max_line_length": 116,
"avg_line_length": 33.74311926605505,
"alnum_prop": 0.5815660685154975,
"repo_name": "potherca-contrib/exercises-in-programming-style",
"id": "e8554bef807ae1f0e91f4b10c14a9f66c0e81706",
"size": "3701",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "33-restful/tf-33.py",
"mode": "33261",
"license": "mit",
"language": [],
"symlink_target": ""
}
|
"""
Test that variables of floating point types are displayed correctly.
"""
from __future__ import print_function
import AbstractBase
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class DoubleTypesTestCase(AbstractBase.GenericTester):
mydir = AbstractBase.GenericTester.compute_mydir(__file__)
def test_double_type(self):
"""Test that double-type variables are displayed correctly."""
self.build_and_run('double.cpp', set(['double']))
@skipUnlessDarwin
def test_double_type_from_block(self):
"""Test that double-type variables are displayed correctly from a block."""
self.build_and_run('double.cpp', set(['double']), bc=True)
|
{
"content_hash": "57787db0c326e481f8a9eb49ede85edf",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 83,
"avg_line_length": 28.333333333333332,
"alnum_prop": 0.7189542483660131,
"repo_name": "apple/swift-lldb",
"id": "e9704c6efb2651190ad5b5538e96fd87776b6439",
"size": "765",
"binary": false,
"copies": "5",
"ref": "refs/heads/stable",
"path": "packages/Python/lldbsuite/test/types/TestDoubleTypes.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "130449"
},
{
"name": "C",
"bytes": "198536"
},
{
"name": "C++",
"bytes": "27687071"
},
{
"name": "CMake",
"bytes": "172176"
},
{
"name": "DTrace",
"bytes": "334"
},
{
"name": "LLVM",
"bytes": "6106"
},
{
"name": "Makefile",
"bytes": "106804"
},
{
"name": "Objective-C",
"bytes": "106821"
},
{
"name": "Objective-C++",
"bytes": "25658"
},
{
"name": "Perl",
"bytes": "72175"
},
{
"name": "Python",
"bytes": "4680483"
},
{
"name": "Shell",
"bytes": "6573"
},
{
"name": "Swift",
"bytes": "260786"
},
{
"name": "Vim script",
"bytes": "8434"
}
],
"symlink_target": ""
}
|
"""Tests for training.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import glob
import json
import os
import random
import shutil
import tempfile
import time
import numpy as np
from tensorflow.python.estimator import estimator as estimator_lib
from tensorflow.python.estimator import exporter as exporter_lib
from tensorflow.python.estimator import run_config as run_config_lib
from tensorflow.python.estimator import training
from tensorflow.python.estimator.canned import dnn
from tensorflow.python.estimator.canned import prediction_keys
from tensorflow.python.estimator.export import export as export_lib
from tensorflow.python.estimator.inputs import numpy_io
from tensorflow.python.feature_column import feature_column
from tensorflow.python.framework import ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.platform import gfile
from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.summary import summary_iterator
from tensorflow.python.summary.writer import writer_cache
from tensorflow.python.training import basic_session_run_hooks
from tensorflow.python.training import monitored_session
from tensorflow.python.training import server_lib
from tensorflow.python.training import session_run_hook
from tensorflow.python.util import compat
_DEFAULT_EVAL_STEPS = 100
_DEFAULT_EVAL_DELAY_SECS = 120
_DEFAULT_EVAL_THROTTLE_SECS = 600
_DELAY_SECS_PER_WORKER = 5
_GLOBAL_STEP_KEY = ops.GraphKeys.GLOBAL_STEP
_INVALID_INPUT_FN_MSG = '`input_fn` must be callable'
_INVALID_HOOK_MSG = 'All hooks must be `SessionRunHook` instances'
_INVALID_MAX_STEPS_MSG = 'Must specify max_steps > 0'
_INVALID_STEPS_MSG = 'Must specify steps > 0'
_INVALID_NAME_MSG = '`name` must be string'
_INVALID_EVAL_DELAY_SECS_MSG = 'Must specify start_delay_secs >= 0'
_INVALID_EVAL_THROTTLE_SECS_MSG = 'Must specify throttle_secs >= 0'
_INVALID_ESTIMATOR_MSG = '`estimator` must have type `tf.estimator.Estimator`'
_STALE_CHECKPOINT_MSG = 'There was no new checkpoint after the training.'
_INVALID_EXPORTER_MSG = '`exporters` must be an Exporter'
_INVALID_EXPORTER_NAME_TYPE_MSG = 'An Exporter must have a string name'
_DUPLICATE_EXPORTER_NAMES_MSG = '`exporters` must have unique names.'
_NONE_EXPORTER_NAME_MSG = (
'An Exporter cannot have a name that is `None` or empty.')
_INVALID_TRAIN_SPEC_MSG = '`train_spec` must have type `tf.estimator.TrainSpec`'
_INVALID_EVAL_SPEC_MSG = '`eval_spec` must have type `tf.estimator.EvalSpec`'
_INVALID_CONFIG_FOR_STD_SERVER_MSG = 'Could not start server; .*TF_CONFIG'
_INVALID_LOCAL_TASK_WITH_CLUSTER = '`task.type` in TF_CONFIG cannot be `local`'
_INVALID_TASK_TYPE = '`estimator.config` must have task_type set.'
# The message should NOT have 'local' word as part of it. As (?!word) is looking
# ahead, so, the $ (ending) check is required; otherwise, it will match
# partially and return successuful.
_INVALID_TASK_TO_RUN = (
'Task type .* is not supported. Supported task types are ((?!local).)*$')
_INVALID_EMPTY_EVAL_RESULT_ERR = (
'Internal error: `Estimator.evaluate` should never return empty result')
_INVALID_EVAL_RESULT_TYPE_ERR = '`Estimator.evaluate` should return dict.'
_MISSING_GLOBAL_STEP_IN_EVAL_RESULT_ERR = (
'Internal error: `Estimator.evaluate` result should have `global_step`')
_INVALID_EVAL_TASK_ID_ERR = (
'there can only be one `evaluator` task .*with task id 0')
_TF_CONFIG_FOR_CHIEF = {
'cluster': {
run_config_lib.TaskType.CHIEF: ['host0:0'],
run_config_lib.TaskType.PS: ['host1:1', 'host2:2'],
run_config_lib.TaskType.WORKER: ['host3:3', 'host4:4']
},
'task': {
'type': run_config_lib.TaskType.CHIEF,
'index': 0
}
}
_TF_CONFIG_FOR_MASTER = {
'cluster': {
run_config_lib.TaskType.MASTER: ['host0:0'],
run_config_lib.TaskType.PS: ['host1:1', 'host2:2'],
run_config_lib.TaskType.WORKER: ['host3:3', 'host4:4']
},
'task': {
'type': run_config_lib.TaskType.MASTER,
'index': 0
}
}
_TF_CONFIG_FOR_WORKER = {
'cluster': {
run_config_lib.TaskType.CHIEF: ['host0:0'],
run_config_lib.TaskType.PS: ['host1:1', 'host2:2'],
run_config_lib.TaskType.WORKER: ['host3:3', 'host4:4']
},
'task': {
'type': run_config_lib.TaskType.WORKER,
'index': 1
}
}
_TF_CONFIG_FOR_PS = {
'cluster': {
run_config_lib.TaskType.CHIEF: ['host0:0'],
run_config_lib.TaskType.PS: ['host1:1', 'host2:2'],
run_config_lib.TaskType.WORKER: ['host3:3', 'host4:4']
},
'task': {
'type': run_config_lib.TaskType.PS,
'index': 1
}
}
_TF_CONFIG_FOR_EVALUATOR = {
'cluster': {
run_config_lib.TaskType.CHIEF: ['host0:0'],
run_config_lib.TaskType.PS: ['host1:1', 'host2:2'],
run_config_lib.TaskType.WORKER: ['host3:3', 'host4:4']
},
'task': {
'type': run_config_lib.TaskType.EVALUATOR,
'index': 0
}
}
_TF_CONFIG_FOR_GOOGLE = {'environment': 'google'}
class _FakeHook(session_run_hook.SessionRunHook):
"""Fake implementation of `SessionRunHook`."""
class _InvalidHook(object):
"""Invalid hook (not a subclass of `SessionRunHook`)."""
def _create_exporter(name):
class FakeExporter(exporter_lib.Exporter):
def __init__(self, name):
self._name = name
@property
def name(self):
return self._name
def export(self, *args, **kwargs):
del args, kwargs
return FakeExporter(name=name)
def _create_run_config_with_cluster_spec(tf_config):
with test.mock.patch.dict('os.environ', {'TF_CONFIG': json.dumps(tf_config)}):
return run_config_lib.RunConfig()
class TrainSpecTest(test.TestCase):
"""Tests TrainSpec."""
def testRequiredArgumentsSet(self):
"""Tests that no errors are raised when all required arguments are set."""
spec = training.TrainSpec(input_fn=lambda: 1)
self.assertEqual(1, spec.input_fn())
self.assertIsNone(spec.max_steps)
self.assertEqual(0, len(spec.hooks))
def testAllArgumentsSet(self):
"""Tests that no errors are raised when all arguments are set."""
hooks = [_FakeHook()]
spec = training.TrainSpec(input_fn=lambda: 1, max_steps=2, hooks=hooks)
self.assertEqual(1, spec.input_fn())
self.assertEqual(2, spec.max_steps)
self.assertEqual(tuple(hooks), spec.hooks)
def testInvalidInputFn(self):
with self.assertRaisesRegexp(TypeError, _INVALID_INPUT_FN_MSG):
training.TrainSpec(input_fn='invalid')
def testInvalidMaxStep(self):
with self.assertRaisesRegexp(ValueError, _INVALID_MAX_STEPS_MSG):
training.TrainSpec(input_fn=lambda: 1, max_steps=0)
def testInvalidHook(self):
with self.assertRaisesRegexp(TypeError, _INVALID_HOOK_MSG):
training.TrainSpec(input_fn=lambda: 1, hooks=[_InvalidHook()])
class EvalSpecTest(test.TestCase):
"""Tests EvalSpec."""
def testRequiredArgumentsSet(self):
"""Tests that no errors are raised when all required arguments are set."""
spec = training.EvalSpec(input_fn=lambda: 1)
self.assertEqual(1, spec.input_fn())
self.assertEqual(_DEFAULT_EVAL_STEPS, spec.steps)
self.assertIsNone(spec.name)
self.assertEqual(0, len(spec.hooks))
self.assertEqual(0, len(spec.exporters))
self.assertEqual(_DEFAULT_EVAL_DELAY_SECS, spec.start_delay_secs)
self.assertEqual(_DEFAULT_EVAL_THROTTLE_SECS, spec.throttle_secs)
def testAllArgumentsSet(self):
"""Tests that no errors are raised when all arguments are set."""
hooks = [_FakeHook()]
exporter = _create_exporter('a')
spec = training.EvalSpec(
input_fn=lambda: 1,
steps=2,
name='name',
hooks=hooks,
exporters=exporter,
start_delay_secs=3,
throttle_secs=4)
self.assertEqual(1, spec.input_fn())
self.assertEqual(2, spec.steps)
self.assertEqual('name', spec.name)
self.assertEqual(tuple(hooks), spec.hooks)
self.assertEqual((exporter,), spec.exporters)
self.assertEqual(3, spec.start_delay_secs)
self.assertEqual(4, spec.throttle_secs)
def testListOfExporters(self):
"""Tests that no errors are raised with multiple exporters."""
exporters = [_create_exporter('a'), _create_exporter('b')]
spec = training.EvalSpec(input_fn=lambda: 1, exporters=exporters)
self.assertEqual(1, spec.input_fn())
self.assertEqual(tuple(exporters), spec.exporters)
def testInvalidInputFn(self):
with self.assertRaisesRegexp(TypeError, _INVALID_INPUT_FN_MSG):
training.EvalSpec(input_fn='invalid')
def testInvalidMaxStep(self):
with self.assertRaisesRegexp(ValueError, _INVALID_STEPS_MSG):
training.EvalSpec(input_fn=lambda: 1, steps=0)
def testInvalidName(self):
with self.assertRaisesRegexp(TypeError, _INVALID_NAME_MSG):
training.EvalSpec(input_fn=lambda: 1, name=123)
def testInvalidHook(self):
with self.assertRaisesRegexp(TypeError, _INVALID_HOOK_MSG):
training.EvalSpec(input_fn=lambda: 1, hooks=[_InvalidHook()])
def testInvalidDelaySecs(self):
with self.assertRaisesRegexp(ValueError, _INVALID_EVAL_DELAY_SECS_MSG):
training.EvalSpec(input_fn=lambda: 1, start_delay_secs=-1)
def testInvalidThrottleSecs(self):
with self.assertRaisesRegexp(ValueError, _INVALID_EVAL_THROTTLE_SECS_MSG):
training.EvalSpec(input_fn=lambda: 1, throttle_secs=-1)
def testInvalidTypeOfListOfExporters(self):
with self.assertRaisesRegexp(TypeError, _INVALID_EXPORTER_MSG):
training.EvalSpec(
input_fn=lambda: 1, exporters=[_create_exporter('a'),
_FakeHook()])
def testInvalidTypeOfIndividualExporter(self):
with self.assertRaisesRegexp(TypeError, _INVALID_EXPORTER_MSG):
training.EvalSpec(input_fn=lambda: 1, exporters=_FakeHook())
def testInvalidTypeOfExporterName(self):
with self.assertRaisesRegexp(ValueError, _INVALID_EXPORTER_NAME_TYPE_MSG):
training.EvalSpec(input_fn=lambda: 1,
exporters=_create_exporter(name=123))
def testMultipleExportersWithTheSameName(self):
with self.assertRaisesRegexp(ValueError, _DUPLICATE_EXPORTER_NAMES_MSG):
training.EvalSpec(
input_fn=lambda: 1,
exporters=[_create_exporter('a'), _create_exporter('a')])
def testMultipleExportersAndOneWithoutAName(self):
with self.assertRaisesRegexp(ValueError, _NONE_EXPORTER_NAME_MSG):
training.EvalSpec(
input_fn=lambda: 1,
exporters=[_create_exporter('a'),
_create_exporter(None)])
def testSingleExporterWithoutAName(self):
with self.assertRaisesRegexp(ValueError, _NONE_EXPORTER_NAME_MSG):
training.EvalSpec(input_fn=lambda: 1, exporters=_create_exporter(None))
class TrainAndEvaluteTest(test.TestCase):
def _mock_executor_instance(self):
mock_instance = test.mock.Mock()
mock_instance.call_task = {}
def task_fn(name):
def _fn():
mock_instance.call_task[name] = 1
return _fn
mock_instance.run_chief = task_fn('chief')
mock_instance.run_master = task_fn('master')
mock_instance.run_ps = task_fn('ps')
mock_instance.run_evaluator = task_fn('evaluator')
mock_instance.run_worker = task_fn('worker')
mock_instance.run_local = task_fn('local')
return mock_instance
def _test_run_task_in_distributed_training(self, run_config):
mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
mock_est.config = run_config
mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
mock_eval_spec = test.mock.Mock(spec=training.EvalSpec)
with test.mock.patch.object(training, '_TrainingExecutor') as mock_executor:
mock_executor_instance = self._mock_executor_instance()
mock_executor.return_value = mock_executor_instance
training.train_and_evaluate(mock_est, mock_train_spec, mock_eval_spec)
mock_executor.assert_called_with(estimator=mock_est,
train_spec=mock_train_spec,
eval_spec=mock_eval_spec)
return mock_executor_instance
def test_run_chief(self):
mock_executor = self._test_run_task_in_distributed_training(
run_config=_create_run_config_with_cluster_spec(_TF_CONFIG_FOR_CHIEF))
self.assertEqual(1, mock_executor.call_task['chief'])
def test_run_worker(self):
mock_executor = self._test_run_task_in_distributed_training(
run_config=_create_run_config_with_cluster_spec(_TF_CONFIG_FOR_WORKER))
self.assertEqual(1, mock_executor.call_task['worker'])
def test_run_ps(self):
mock_executor = self._test_run_task_in_distributed_training(
run_config=_create_run_config_with_cluster_spec(_TF_CONFIG_FOR_PS))
self.assertEqual(1, mock_executor.call_task['ps'])
def test_run_evaluator(self):
mock_executor = self._test_run_task_in_distributed_training(
run_config=_create_run_config_with_cluster_spec(
_TF_CONFIG_FOR_EVALUATOR))
self.assertEqual(1, mock_executor.call_task['evaluator'])
def test_error_out_if_evaluator_task_id_is_non_zero(self):
tf_config = {
'cluster': {
run_config_lib.TaskType.CHIEF: ['host0:0'],
},
'task': {
'type': run_config_lib.TaskType.EVALUATOR,
'index': 1
}
}
with self.assertRaisesRegexp(ValueError, _INVALID_EVAL_TASK_ID_ERR):
self._test_run_task_in_distributed_training(
run_config=_create_run_config_with_cluster_spec(tf_config))
def test_run_local(self):
mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
mock_est.config = run_config_lib.RunConfig()
mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
mock_eval_spec = test.mock.Mock(spec=training.EvalSpec)
with test.mock.patch.object(training, '_TrainingExecutor') as mock_executor:
mock_executor_instance = self._mock_executor_instance()
mock_executor.return_value = mock_executor_instance
training.train_and_evaluate(mock_est, mock_train_spec, mock_eval_spec)
self.assertEqual(1, mock_executor_instance.call_task['local'])
mock_executor.assert_called_with(estimator=mock_est,
train_spec=mock_train_spec,
eval_spec=mock_eval_spec)
def test_invalid_local_task(self):
tf_config = {
'cluster': {
run_config_lib.TaskType.CHIEF: ['host0:0'],
'local': ['hos1:1'],
},
'task': {
'type': 'local',
'index': 0
}
}
mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
mock_est.config = _create_run_config_with_cluster_spec(tf_config)
mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
mock_eval_spec = test.mock.Mock(spec=training.EvalSpec)
with self.assertRaisesRegexp(ValueError, _INVALID_LOCAL_TASK_WITH_CLUSTER):
training.train_and_evaluate(mock_est, mock_train_spec, mock_eval_spec)
def test_unsupported_task_due_to_missing_run_task(self):
unsupported_task = 'alloc'
tf_config = {
'cluster': {
run_config_lib.TaskType.CHIEF: ['host0:0'],
unsupported_task: ['hos1:1'],
},
'task': {
'type': unsupported_task,
'index': 0
}
}
mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
mock_est.config = _create_run_config_with_cluster_spec(tf_config)
mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
mock_eval_spec = test.mock.Mock(spec=training.EvalSpec)
with test.mock.patch.object(training, '_TrainingExecutor') as mock_executor:
# mock_instance has no run_alloc method.
mock_instance = self._mock_executor_instance()
mock_executor.return_value = mock_instance
with self.assertRaisesRegexp(ValueError, _INVALID_TASK_TO_RUN):
training.train_and_evaluate(mock_est, mock_train_spec, mock_eval_spec)
def test_unsupported_task_due_to_not_callable(self):
unsupported_task = 'alloc'
tf_config = {
'cluster': {
run_config_lib.TaskType.CHIEF: ['host0:0'],
unsupported_task: ['hos1:1'],
},
'task': {
'type': unsupported_task,
'index': 0
}
}
mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
mock_est.config = _create_run_config_with_cluster_spec(tf_config)
mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
mock_eval_spec = test.mock.Mock(spec=training.EvalSpec)
with test.mock.patch.object(training, '_TrainingExecutor') as mock_executor:
mock_instance = self._mock_executor_instance()
mock_instance.run_alloc = 123 # not callable
mock_executor.return_value = mock_instance
with self.assertRaisesRegexp(ValueError, _INVALID_TASK_TO_RUN):
training.train_and_evaluate(mock_est, mock_train_spec, mock_eval_spec)
def test_invalid_estimator(self):
invalid_estimator = object()
mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
mock_eval_spec = test.mock.Mock(spec=training.EvalSpec)
with self.assertRaisesRegexp(TypeError, _INVALID_ESTIMATOR_MSG):
training.train_and_evaluate(invalid_estimator, mock_train_spec,
mock_eval_spec)
def test_invalid_task_type(self):
mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
mock_est.config = test.mock.Mock()
mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
mock_eval_spec = test.mock.Mock(spec=training.EvalSpec)
mock_est.config = test.mock.Mock()
mock_est.config.cluster_spec = {'1': 'dummy'}
mock_est.config.task_type = ''
with self.assertRaisesRegexp(ValueError, _INVALID_TASK_TYPE):
training.train_and_evaluate(mock_est, mock_train_spec, mock_eval_spec)
class TrainingExecutorConstructorTest(test.TestCase):
"""Tests constructor of _TrainingExecutor."""
def testRequiredArgumentsSet(self):
estimator = estimator_lib.Estimator(model_fn=lambda features: features)
train_spec = training.TrainSpec(input_fn=lambda: 1)
eval_spec = training.EvalSpec(input_fn=lambda: 1)
executor = training._TrainingExecutor(estimator, train_spec, eval_spec)
self.assertEqual(estimator, executor.estimator)
def test_invalid_estimator(self):
invalid_estimator = object()
train_spec = training.TrainSpec(input_fn=lambda: 1)
eval_spec = training.EvalSpec(input_fn=lambda: 1)
with self.assertRaisesRegexp(TypeError, _INVALID_ESTIMATOR_MSG):
training._TrainingExecutor(invalid_estimator, train_spec, eval_spec)
def test_invalid_train_spec(self):
estimator = estimator_lib.Estimator(model_fn=lambda features: features)
invalid_train_spec = object()
eval_spec = training.EvalSpec(input_fn=lambda: 1)
with self.assertRaisesRegexp(TypeError, _INVALID_TRAIN_SPEC_MSG):
training._TrainingExecutor(estimator, invalid_train_spec, eval_spec)
def test_invalid_eval_spec(self):
estimator = estimator_lib.Estimator(model_fn=lambda features: features)
train_spec = training.TrainSpec(input_fn=lambda: 1)
invalid_eval_spec = object()
with self.assertRaisesRegexp(TypeError, _INVALID_EVAL_SPEC_MSG):
training._TrainingExecutor(estimator, train_spec, invalid_eval_spec)
class _TrainingExecutorTrainingTest(object):
"""Tests training of _TrainingExecutor."""
def __init__(self, run_config):
self._run_config = run_config
def _run_task(self, executor):
return getattr(executor, 'run_' + self._run_config.task_type)()
@test.mock.patch.object(time, 'sleep')
@test.mock.patch.object(server_lib, 'Server')
def test_train_with_train_spec(self, mock_server, unused_mock_sleep):
mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
mock_est.config = self._run_config
train_spec = training.TrainSpec(
input_fn=lambda: 1, max_steps=2, hooks=[_FakeHook()])
mock_eval_spec = test.mock.Mock(spec=training.EvalSpec)
mock_server_instance = mock_server.return_value
executor = training._TrainingExecutor(mock_est, train_spec, mock_eval_spec)
self._run_task(executor)
mock_server.assert_called_with(
mock_est.config.cluster_spec,
job_name=mock_est.config.task_type,
task_index=mock_est.config.task_id,
config=test.mock.ANY,
start=False)
self.assertTrue(mock_server_instance.start.called)
mock_est.train.assert_called_with(input_fn=train_spec.input_fn,
max_steps=train_spec.max_steps,
hooks=train_spec.hooks,
saving_listeners=test.mock.ANY)
mock_est.evaluate.assert_not_called()
mock_est.export_savedmodel.assert_not_called()
@test.mock.patch.object(time, 'sleep')
@test.mock.patch.object(server_lib, 'Server')
def test_no_server_startup_in_google(self, mock_server, unused_mock_sleep):
mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
mock_est.config = self._run_config
mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
mock_eval_spec = test.mock.Mock(spec=training.EvalSpec)
executor = training._TrainingExecutor(mock_est, mock_train_spec,
mock_eval_spec)
tf_config = {'TF_CONFIG': json.dumps(_TF_CONFIG_FOR_GOOGLE)}
with test.mock.patch.dict('os.environ', tf_config):
self._run_task(executor)
mock_server.assert_not_called()
def test_fail_with_empty_cluster_spec(self):
mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
mock_eval_spec = test.mock.Mock(spec=training.EvalSpec)
mock_est.config = test.mock.PropertyMock(spec=run_config_lib.RunConfig)
mock_est.config.cluster_spec = None
mock_est.config.master = 'grpc://...'
mock_est.config.task_type = 'worker'
mock_est.config.task_id = 2
with self.assertRaisesRegexp(RuntimeError,
_INVALID_CONFIG_FOR_STD_SERVER_MSG):
self._run_task(training._TrainingExecutor(mock_est, mock_train_spec,
mock_eval_spec))
def test_fail_with_empty_master(self):
mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
mock_eval_spec = test.mock.Mock(spec=training.EvalSpec)
mock_est.config = test.mock.PropertyMock(spec=run_config_lib.RunConfig)
mock_est.config.cluster_spec = {'worker': 'dummy'}
mock_est.config.master = ''
mock_est.config.task_type = 'worker'
mock_est.config.task_id = 2
with self.assertRaisesRegexp(RuntimeError,
_INVALID_CONFIG_FOR_STD_SERVER_MSG):
self._run_task(training._TrainingExecutor(mock_est, mock_train_spec,
mock_eval_spec))
def test_fail_with_empty_task_type(self):
mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
mock_eval_spec = test.mock.Mock(spec=training.EvalSpec)
mock_est.config = test.mock.PropertyMock(spec=run_config_lib.RunConfig)
mock_est.config.cluster_spec = {'worker': 'dummy'}
mock_est.config.master = 'grpc://...'
mock_est.config.task_type = ''
mock_est.config.task_id = 2
with self.assertRaisesRegexp(RuntimeError,
_INVALID_CONFIG_FOR_STD_SERVER_MSG):
self._run_task(training._TrainingExecutor(mock_est, mock_train_spec,
mock_eval_spec))
def test_fail_with_none_task_id(self):
mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
mock_eval_spec = test.mock.Mock(spec=training.EvalSpec)
mock_est.config = test.mock.PropertyMock(spec=run_config_lib.RunConfig)
mock_est.config.cluster_spec = {'worker': 'dummy'}
mock_est.config.master = 'grpc://...'
mock_est.config.task_type = 'worker'
mock_est.config.task_id = None
with self.assertRaisesRegexp(RuntimeError,
_INVALID_CONFIG_FOR_STD_SERVER_MSG):
self._run_task(training._TrainingExecutor(mock_est, mock_train_spec,
mock_eval_spec))
class TrainingExecutorRunWorkerTest(_TrainingExecutorTrainingTest,
test.TestCase):
"""Tests run_worker of _TrainingExecutor."""
def __init__(self, methodName='runTest'): # pylint: disable=invalid-name
test.TestCase.__init__(self, methodName)
_TrainingExecutorTrainingTest.__init__(
self,
run_config=_create_run_config_with_cluster_spec(_TF_CONFIG_FOR_WORKER))
@test.mock.patch.object(server_lib, 'Server')
def test_delay_for_worker(self, _):
mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
mock_est.config = self._run_config
mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
mock_eval_spec = test.mock.Mock(spec=training.EvalSpec)
executor = training._TrainingExecutor(mock_est, mock_train_spec,
mock_eval_spec)
expected_secs = (self._run_config.task_id + 1) * _DELAY_SECS_PER_WORKER
with test.mock.patch.object(time, 'sleep') as mock_sleep:
mock_sleep.side_effect = lambda s: self.assertEqual(expected_secs, s)
self._run_task(executor)
self.assertTrue(mock_sleep.called)
class TrainingExecutorRunChiefTest(_TrainingExecutorTrainingTest,
test.TestCase):
"""Tests run_chief of _TrainingExecutor."""
def __init__(self, methodName='runTest'): # pylint: disable=invalid-name
test.TestCase.__init__(self, methodName)
_TrainingExecutorTrainingTest.__init__(
self,
run_config=_create_run_config_with_cluster_spec(_TF_CONFIG_FOR_CHIEF))
@test.mock.patch.object(server_lib, 'Server')
def test_no_delay_for_chief(self, _):
mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
mock_est.config = self._run_config
mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
mock_eval_spec = test.mock.Mock(spec=training.EvalSpec)
executor = training._TrainingExecutor(mock_est, mock_train_spec,
mock_eval_spec)
with test.mock.patch.object(time, 'sleep') as mock_sleep:
self._run_task(executor)
mock_sleep.assert_not_called()
class TrainingExecutorRunMasterTest(test.TestCase):
"""Tests run_chief of _TrainingExecutor."""
def setUp(self):
self._run_config = _create_run_config_with_cluster_spec(
_TF_CONFIG_FOR_MASTER)
@test.mock.patch.object(server_lib, 'Server')
def test_no_delay_for_master(self, _):
mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
mock_est.evaluate = lambda *args, **kw: {ops.GraphKeys.GLOBAL_STEP: 123}
mock_est.config = self._run_config
mock_train_spec = test.mock.Mock(spec=training.TrainSpec, max_steps=123)
mock_eval_spec = test.mock.Mock(spec=training.EvalSpec, exporters=[])
executor = training._TrainingExecutor(mock_est, mock_train_spec,
mock_eval_spec)
with test.mock.patch.object(time, 'sleep') as mock_sleep:
executor.run_master()
mock_sleep.assert_not_called()
@test.mock.patch.object(time, 'sleep')
@test.mock.patch.object(server_lib, 'Server')
def test_train_with_train_spec(self, mock_server, unused_mock_sleep):
mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
mock_est.evaluate = lambda *args, **kw: {ops.GraphKeys.GLOBAL_STEP: 123}
mock_est.config = self._run_config
train_spec = training.TrainSpec(
input_fn=lambda: 1, max_steps=2, hooks=[_FakeHook()])
mock_eval_spec = test.mock.Mock(spec=training.EvalSpec, exporters=[])
mock_server_instance = mock_server.return_value
executor = training._TrainingExecutor(mock_est, train_spec, mock_eval_spec)
executor.run_master()
mock_server.assert_called_with(
mock_est.config.cluster_spec,
job_name=mock_est.config.task_type,
task_index=mock_est.config.task_id,
config=test.mock.ANY,
start=False)
self.assertTrue(mock_server_instance.start.called)
mock_est.train.assert_called_with(input_fn=train_spec.input_fn,
max_steps=train_spec.max_steps,
hooks=train_spec.hooks,
saving_listeners=test.mock.ANY)
mock_est.export_savedmodel.assert_not_called()
@test.mock.patch.object(time, 'sleep')
@test.mock.patch.object(server_lib, 'Server')
def test_no_server_startup_in_google(self, mock_server, unused_mock_sleep):
mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
mock_est.evaluate = lambda *args, **kw: {ops.GraphKeys.GLOBAL_STEP: 123}
mock_est.config = self._run_config
mock_train_spec = test.mock.Mock(spec=training.TrainSpec, max_steps=123)
mock_eval_spec = test.mock.Mock(spec=training.EvalSpec, exporters=[])
executor = training._TrainingExecutor(mock_est, mock_train_spec,
mock_eval_spec)
tf_config = {'TF_CONFIG': json.dumps(_TF_CONFIG_FOR_GOOGLE)}
with test.mock.patch.dict('os.environ', tf_config):
executor.run_master()
mock_server.assert_not_called()
def test_fail_with_empty_cluster_spec(self):
mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
mock_eval_spec = test.mock.Mock(spec=training.EvalSpec)
mock_est.config = test.mock.PropertyMock(spec=run_config_lib.RunConfig)
mock_est.config.cluster_spec = None
mock_est.config.master = 'grpc://...'
mock_est.config.task_type = 'worker'
mock_est.config.task_id = 2
with self.assertRaisesRegexp(RuntimeError,
_INVALID_CONFIG_FOR_STD_SERVER_MSG):
training._TrainingExecutor(
mock_est, mock_train_spec, mock_eval_spec).run_master()
def test_fail_with_empty_master(self):
mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
mock_eval_spec = test.mock.Mock(spec=training.EvalSpec)
mock_est.config = test.mock.PropertyMock(spec=run_config_lib.RunConfig)
mock_est.config.cluster_spec = {'worker': 'dummy'}
mock_est.config.master = ''
mock_est.config.task_type = 'worker'
mock_est.config.task_id = 2
with self.assertRaisesRegexp(RuntimeError,
_INVALID_CONFIG_FOR_STD_SERVER_MSG):
training._TrainingExecutor(
mock_est, mock_train_spec, mock_eval_spec).run_master()
def test_fail_with_empty_task_type(self):
mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
mock_eval_spec = test.mock.Mock(spec=training.EvalSpec)
mock_est.config = test.mock.PropertyMock(spec=run_config_lib.RunConfig)
mock_est.config.cluster_spec = {'worker': 'dummy'}
mock_est.config.master = 'grpc://...'
mock_est.config.task_type = ''
mock_est.config.task_id = 2
with self.assertRaisesRegexp(RuntimeError,
_INVALID_CONFIG_FOR_STD_SERVER_MSG):
training._TrainingExecutor(
mock_est, mock_train_spec, mock_eval_spec).run_master()
def test_fail_with_none_task_id(self):
mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
mock_eval_spec = test.mock.Mock(spec=training.EvalSpec)
mock_est.config = test.mock.PropertyMock(spec=run_config_lib.RunConfig)
mock_est.config.cluster_spec = {'worker': 'dummy'}
mock_est.config.master = 'grpc://...'
mock_est.config.task_type = 'worker'
mock_est.config.task_id = None
with self.assertRaisesRegexp(RuntimeError,
_INVALID_CONFIG_FOR_STD_SERVER_MSG):
training._TrainingExecutor(
mock_est, mock_train_spec, mock_eval_spec).run_master()
@test.mock.patch.object(server_lib, 'Server')
def test_run_master_triggers_evaluate_and_export(self, _):
def estimator_train(saving_listeners, *args, **kwargs):
# There shalt be a saving_listener. Estimator is going to call
# `after_save`.
del args, kwargs
saving_listeners[0].begin()
saving_listeners[0].after_save(session=None, global_step_value=None)
mock_est = test.mock.Mock(
spec=estimator_lib.Estimator, model_dir='path/', train=estimator_train)
mock_est.latest_checkpoint.return_value = 'checkpoint_path/'
mock_est.config = self._run_config
exporter = test.mock.PropertyMock(spec=exporter_lib.Exporter)
exporter.name = 'see_whether_export_is_called'
train_spec = training.TrainSpec(input_fn=lambda: 1, max_steps=300)
eval_spec = training.EvalSpec(
input_fn=lambda: 1, steps=2, exporters=exporter)
eval_result = {_GLOBAL_STEP_KEY: train_spec.max_steps}
mock_est.evaluate.return_value = eval_result
executor = training._TrainingExecutor(mock_est, train_spec, eval_spec)
executor.run_master()
mock_est.evaluate.assert_called_with(
name=eval_spec.name,
input_fn=eval_spec.input_fn,
steps=eval_spec.steps,
checkpoint_path='checkpoint_path/',
hooks=eval_spec.hooks)
self.assertEqual(1, exporter.export.call_count)
exporter.export.assert_called_with(
estimator=mock_est,
export_path=os.path.join('path/', 'export', exporter.name),
checkpoint_path='checkpoint_path/',
eval_result=eval_result,
is_the_final_export=True)
@test.mock.patch.object(basic_session_run_hooks, 'SecondOrStepTimer')
@test.mock.patch.object(server_lib, 'Server')
def test_run_master_throttle_eval(self, _, mock_timer_class):
mock_est = test.mock.Mock(spec=estimator_lib.Estimator, model_dir='path/')
mock_timer = test.mock.Mock()
mock_timer_class.return_value = mock_timer
def estimator_train(saving_listeners, *args, **kwargs):
del args, kwargs
saving_listeners[0].begin()
# Call three times.
mock_timer.should_trigger_for_step.return_value = True
saving_listeners[0].after_save(session=None, global_step_value=None)
mock_timer.should_trigger_for_step.return_value = False
saving_listeners[0].after_save(session=None, global_step_value=None)
mock_timer.should_trigger_for_step.return_value = True
saving_listeners[0].after_save(session=None, global_step_value=None)
mock_est.train = estimator_train
mock_est.latest_checkpoint.side_effect = ['ckpt1', 'ckpt2']
mock_est.config = self._run_config
exporter = test.mock.PropertyMock(spec=exporter_lib.Exporter)
exporter.name = 'see_whether_export_is_called'
train_spec = training.TrainSpec(input_fn=lambda: 1, max_steps=300)
eval_spec = training.EvalSpec(
input_fn=lambda: 1, steps=2, exporters=exporter, throttle_secs=10)
mock_est.evaluate.side_effect = [
{_GLOBAL_STEP_KEY: train_spec.max_steps //2},
{_GLOBAL_STEP_KEY: train_spec.max_steps}
]
executor = training._TrainingExecutor(mock_est, train_spec, eval_spec)
executor.run_master()
self.assertEqual(2, mock_est.evaluate.call_count)
self.assertEqual(2, exporter.export.call_count)
is_final_export_list = [call[1]['is_the_final_export']
for call in exporter.export.call_args_list]
self.assertEqual([False, True], is_final_export_list)
@test.mock.patch.object(basic_session_run_hooks, 'SecondOrStepTimer')
@test.mock.patch.object(server_lib, 'Server')
def test_run_master_throttle_eval_which_skips_final_ckpt(
self, _, mock_timer_class):
mock_est = test.mock.Mock(spec=estimator_lib.Estimator, model_dir='path/')
mock_timer = test.mock.Mock()
mock_timer_class.return_value = mock_timer
def estimator_train(saving_listeners, *args, **kwargs):
del args, kwargs
saving_listeners[0].begin()
# Call two times.
mock_timer.should_trigger_for_step.return_value = True
saving_listeners[0].after_save(session=None, global_step_value=None)
# The final ckpt is skipped by the timer. It will be picked up the final
# export check in the code.
mock_timer.should_trigger_for_step.return_value = False
saving_listeners[0].after_save(session=None, global_step_value=None)
mock_est.train = estimator_train
mock_est.latest_checkpoint.side_effect = ['ckpt1', 'ckpt2']
mock_est.config = self._run_config
exporter = test.mock.PropertyMock(spec=exporter_lib.Exporter)
exporter.name = 'see_whether_export_is_called'
train_spec = training.TrainSpec(input_fn=lambda: 1, max_steps=300)
eval_spec = training.EvalSpec(
input_fn=lambda: 1, steps=2, exporters=exporter, throttle_secs=10)
mock_est.evaluate.side_effect = [
{_GLOBAL_STEP_KEY: train_spec.max_steps //2},
{_GLOBAL_STEP_KEY: train_spec.max_steps}
]
executor = training._TrainingExecutor(mock_est, train_spec, eval_spec)
executor.run_master()
self.assertEqual(2, mock_est.evaluate.call_count)
self.assertEqual(2, exporter.export.call_count)
is_final_export_list = [call[1]['is_the_final_export']
for call in exporter.export.call_args_list]
self.assertEqual([False, True], is_final_export_list)
class TrainingExecutorRunEvaluatorTest(test.TestCase):
"""Tests run_evaluator of _TrainingExecutor."""
def _set_up_mock_est_to_train_and_evaluate_once(self, mock_est,
mock_train_spec):
"""Sets global step in eval result to end the while True eval loop."""
training_max_step = 200
mock_est.evaluate.return_value = {_GLOBAL_STEP_KEY: training_max_step}
mock_train_spec.max_steps = training_max_step
def test_evaluate_with_evaluate_spec(self):
mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
mock_est.latest_checkpoint.return_value = 'latest_it_is'
mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
self._set_up_mock_est_to_train_and_evaluate_once(mock_est, mock_train_spec)
eval_spec = training.EvalSpec(
input_fn=lambda: 1, steps=2, hooks=[_FakeHook()], name='cont_eval',
start_delay_secs=0, throttle_secs=0)
executor = training._TrainingExecutor(mock_est, mock_train_spec, eval_spec)
executor.run_evaluator()
mock_est.evaluate.assert_called_with(
name='cont_eval',
input_fn=eval_spec.input_fn,
steps=eval_spec.steps,
checkpoint_path='latest_it_is',
hooks=eval_spec.hooks)
self.assertFalse(mock_est.train.called)
def test_evaluate_multiple_times(self):
training_max_step = 200
mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
mock_est.model_dir = compat.as_bytes(test.get_temp_dir())
mock_est.evaluate.side_effect = [
{_GLOBAL_STEP_KEY: training_max_step // 2},
{_GLOBAL_STEP_KEY: training_max_step}
]
mock_est.latest_checkpoint.side_effect = ['path_1', 'path_2']
mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
mock_train_spec.max_steps = training_max_step
exporter = test.mock.PropertyMock(spec=exporter_lib.Exporter)
exporter.name = 'see_how_many_times_export_is_called'
mock_est.times_export_was_called = 0
mock_est.times_final_export_was_called = 0
def export(estimator, export_path, checkpoint_path, eval_result,
is_the_final_export):
del export_path, checkpoint_path, eval_result
estimator.times_export_was_called += 1
# final_export is happend at the end.
self.assertEqual(0, estimator.times_final_export_was_called)
if is_the_final_export:
estimator.times_final_export_was_called += 1
exporter.export = export
eval_spec = training.EvalSpec(
input_fn=lambda: 1,
start_delay_secs=0,
throttle_secs=0,
exporters=exporter)
executor = training._TrainingExecutor(mock_est, mock_train_spec, eval_spec)
executor.run_evaluator()
self.assertEqual(2, mock_est.evaluate.call_count)
self.assertEqual(2, mock_est.times_export_was_called)
self.assertEqual(1, mock_est.times_final_export_was_called)
def test_final_export_is_true_in_the_end(self):
training_max_step = 200
mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
mock_est.model_dir = compat.as_bytes(test.get_temp_dir())
mock_est.evaluate.side_effect = [
{_GLOBAL_STEP_KEY: training_max_step // 2},
{_GLOBAL_STEP_KEY: training_max_step}
]
mock_est.latest_checkpoint.side_effect = ['path_1', 'path_2']
mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
mock_train_spec.max_steps = training_max_step
mock_est.times_export_fn_was_called = 0
mock_est.times_the_final_export_was_true = 0
def export(estimator, export_path, checkpoint_path, eval_result,
is_the_final_export):
del export_path, checkpoint_path, eval_result
estimator.times_export_fn_was_called += 1
if is_the_final_export:
estimator.times_the_final_export_was_true += 1
exporter = test.mock.PropertyMock(spec=exporter_lib.Exporter)
exporter.name = 'see_how_many_times_export_is_called'
exporter.export = export
eval_spec = training.EvalSpec(
input_fn=lambda: 1,
start_delay_secs=0,
throttle_secs=0,
exporters=exporter)
executor = training._TrainingExecutor(mock_est, mock_train_spec, eval_spec)
executor.run_evaluator()
self.assertEqual(2, mock_est.evaluate.call_count)
self.assertEqual(2, mock_est.times_export_fn_was_called)
self.assertEqual(1, mock_est.times_the_final_export_was_true)
def test_skip_evaluation_due_to_ckpt(self):
training_max_step = 200
mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
mock_est.evaluate.side_effect = [
{_GLOBAL_STEP_KEY: training_max_step // 2},
{_GLOBAL_STEP_KEY: training_max_step}
]
mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
mock_train_spec.max_steps = training_max_step
self._set_up_mock_est_to_train_and_evaluate_once(mock_est, mock_train_spec)
# First two items are invalid, next two items are same.
mock_est.latest_checkpoint.side_effect = [
None, '', 'same', 'same', 'path_2'
]
eval_spec = training.EvalSpec(
input_fn=lambda: 1, start_delay_secs=0, throttle_secs=0)
executor = training._TrainingExecutor(mock_est, mock_train_spec, eval_spec)
with test.mock.patch.object(logging, 'warning') as mock_log:
executor.run_evaluator()
# Three checkpoint paths are invalid.
self.assertEqual(5, mock_est.latest_checkpoint.call_count)
self.assertEqual(2, mock_est.evaluate.call_count)
# Two warning logs are expected (last warning time is reset after a
# successuful evaluation)
self.assertEqual(2, mock_log.call_count)
def test_sleep_start_delay_secs(self):
training_max_step = 200
start_delay_secs = 123
mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
mock_est.evaluate.return_value = {_GLOBAL_STEP_KEY: training_max_step}
mock_est.model_dir = compat.as_bytes(test.get_temp_dir())
mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
mock_train_spec.max_steps = training_max_step
eval_spec = training.EvalSpec(
input_fn=lambda: 1, steps=2, hooks=[_FakeHook()], name='cont_eval',
start_delay_secs=start_delay_secs, throttle_secs=0)
executor = training._TrainingExecutor(mock_est, mock_train_spec, eval_spec)
with test.mock.patch.object(time, 'sleep') as mock_sleep:
executor.run_evaluator()
mock_sleep.assert_called_with(start_delay_secs)
self.assertTrue(mock_est.evaluate.called)
@test.mock.patch.object(time, 'time')
@test.mock.patch.object(time, 'sleep')
def test_throttle_secs(self, mock_sleep, mock_time):
throttle_secs = 123
operation_secs = 12
mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
self._set_up_mock_est_to_train_and_evaluate_once(mock_est, mock_train_spec)
eval_spec = training.EvalSpec(
input_fn=lambda: 1, start_delay_secs=0, throttle_secs=throttle_secs)
mock_time.side_effect = [921, 921 + operation_secs]
executor = training._TrainingExecutor(mock_est, mock_train_spec, eval_spec)
# Disable logging as it calls time.time also.
with test.mock.patch.object(logging, 'info'):
executor.run_evaluator()
mock_sleep.assert_called_with(throttle_secs - operation_secs)
self.assertTrue(mock_est.evaluate.called)
def test_that_export_is_called(self):
mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
self._set_up_mock_est_to_train_and_evaluate_once(mock_est, mock_train_spec)
def export(estimator, *args, **kwargs):
del args, kwargs
estimator.export_was_called = True
exporter = test.mock.PropertyMock(spec=exporter_lib.Exporter)
exporter.name = 'see_whether_export_is_called'
exporter.export = export
eval_spec = training.EvalSpec(
input_fn=lambda: 1,
steps=2,
start_delay_secs=0,
throttle_secs=0,
exporters=exporter)
executor = training._TrainingExecutor(mock_est, mock_train_spec, eval_spec)
executor.run_evaluator()
# Verify that export was called on the right estimator.
self.assertTrue(mock_est.export_was_called)
def test_errors_out_if_evaluate_returns_empty_dict(self):
mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
train_spec = training.TrainSpec(input_fn=lambda: 1)
eval_spec = training.EvalSpec(input_fn=(lambda: 1),
start_delay_secs=0, throttle_secs=0)
mock_est.evaluate.return_value = {}
executor = training._TrainingExecutor(mock_est, train_spec, eval_spec)
with self.assertRaisesRegexp(RuntimeError, _INVALID_EMPTY_EVAL_RESULT_ERR):
executor.run_evaluator()
def test_errors_out_if_evaluate_returns_non_dict(self):
mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
train_spec = training.TrainSpec(input_fn=lambda: 1)
eval_spec = training.EvalSpec(input_fn=(lambda: 1),
start_delay_secs=0, throttle_secs=0)
mock_est.evaluate.return_value = 123
executor = training._TrainingExecutor(mock_est, train_spec, eval_spec)
with self.assertRaisesRegexp(TypeError, _INVALID_EVAL_RESULT_TYPE_ERR):
executor.run_evaluator()
def test_errors_out_if_evaluate_returns_dict_without_global_step(self):
mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
train_spec = training.TrainSpec(input_fn=lambda: 1)
eval_spec = training.EvalSpec(input_fn=(lambda: 1),
start_delay_secs=0, throttle_secs=0)
mock_est.evaluate.return_value = {'loss': 123}
executor = training._TrainingExecutor(mock_est, train_spec, eval_spec)
with self.assertRaisesRegexp(RuntimeError,
_MISSING_GLOBAL_STEP_IN_EVAL_RESULT_ERR):
executor.run_evaluator()
class TrainingExecutorRunPsTest(test.TestCase):
"""Tests run_ps of _TrainingExecutor."""
@test.mock.patch.object(server_lib, 'Server')
def test_std_server(self, mock_server):
mock_server_instance = test.mock.Mock()
mock_server.return_value = mock_server_instance
mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
mock_est.config = _create_run_config_with_cluster_spec(_TF_CONFIG_FOR_PS)
mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
mock_eval_spec = test.mock.Mock(spec=training.EvalSpec)
executor = training._TrainingExecutor(mock_est, mock_train_spec,
mock_eval_spec)
executor.run_ps()
mock_server.assert_called_with(
mock_est.config.cluster_spec,
job_name=mock_est.config.task_type,
task_index=mock_est.config.task_id,
config=test.mock.ANY,
start=False)
self.assertTrue(mock_server_instance.start.called)
self.assertTrue(mock_server_instance.join.called)
def test_fail_with_empty_cluster_spec(self):
mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
mock_eval_spec = test.mock.Mock(spec=training.EvalSpec)
mock_est.config = test.mock.PropertyMock(spec=run_config_lib.RunConfig)
mock_est.config.cluster_spec = None
mock_est.config.master = 'grpc://...'
mock_est.config.task_type = 'gs'
mock_est.config.task_id = 2
with self.assertRaisesRegexp(RuntimeError,
_INVALID_CONFIG_FOR_STD_SERVER_MSG):
training._TrainingExecutor(mock_est, mock_train_spec,
mock_eval_spec).run_ps()
def test_fail_with_empty_master(self):
mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
mock_eval_spec = test.mock.Mock(spec=training.EvalSpec)
mock_est.config = test.mock.PropertyMock(spec=run_config_lib.RunConfig)
mock_est.config.cluster_spec = {'gs': 'dummy'}
mock_est.config.master = ''
mock_est.config.task_type = 'gs'
mock_est.config.task_id = 2
with self.assertRaisesRegexp(RuntimeError,
_INVALID_CONFIG_FOR_STD_SERVER_MSG):
training._TrainingExecutor(mock_est, mock_train_spec,
mock_eval_spec).run_ps()
def test_fail_with_empty_task_type(self):
mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
mock_eval_spec = test.mock.Mock(spec=training.EvalSpec)
mock_est.config = test.mock.PropertyMock(spec=run_config_lib.RunConfig)
mock_est.config.cluster_spec = {'gs': 'dummy'}
mock_est.config.master = 'grpc://...'
mock_est.config.task_type = ''
mock_est.config.task_id = 2
with self.assertRaisesRegexp(RuntimeError,
_INVALID_CONFIG_FOR_STD_SERVER_MSG):
training._TrainingExecutor(mock_est, mock_train_spec,
mock_eval_spec).run_ps()
def test_fail_with_none_task_id(self):
mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
mock_eval_spec = test.mock.Mock(spec=training.EvalSpec)
mock_est.config = test.mock.PropertyMock(spec=run_config_lib.RunConfig)
mock_est.config.cluster_spec = {'gs': 'dummy'}
mock_est.config.master = 'grpc://...'
mock_est.config.task_type = 'gs'
mock_est.config.task_id = None
with self.assertRaisesRegexp(RuntimeError,
_INVALID_CONFIG_FOR_STD_SERVER_MSG):
training._TrainingExecutor(mock_est, mock_train_spec,
mock_eval_spec).run_ps()
class StopAtSecsHookTest(test.TestCase):
"""Tests StopAtSecsHook."""
@test.mock.patch.object(time, 'time')
def test_stops_after_time(self, mock_time):
mock_time.return_value = 1484695987.209386
hook = training._StopAtSecsHook(1000)
with ops.Graph().as_default():
no_op = control_flow_ops.no_op()
# some time passed before training starts
mock_time.return_value += 250
with monitored_session.MonitoredSession(hooks=[hook]) as sess:
self.assertFalse(sess.should_stop())
sess.run(no_op)
self.assertFalse(sess.should_stop())
mock_time.return_value += 500
sess.run(no_op)
self.assertFalse(sess.should_stop())
mock_time.return_value += 400
sess.run(no_op)
self.assertFalse(sess.should_stop())
mock_time.return_value += 200
sess.run(no_op)
self.assertTrue(sess.should_stop())
class TrainingExecutorRunLocalTest(test.TestCase):
"""Tests run_local of _TrainingExecutor."""
def unique_checkpoint_every_time_fn(self):
return 'checkpoint_path_%s/' % random.random()
def test_send_stop_at_secs_to_train(self):
mock_est = test.mock.Mock(spec=estimator_lib.Estimator, model_dir='path/')
mock_est.latest_checkpoint = self.unique_checkpoint_every_time_fn
train_spec = training.TrainSpec(
input_fn=lambda: 1, max_steps=2, hooks=[_FakeHook()])
eval_spec = training.EvalSpec(
input_fn=lambda: 1, hooks=[_FakeHook()], throttle_secs=100)
mock_est.evaluate.return_value = {_GLOBAL_STEP_KEY: train_spec.max_steps}
executor = training._TrainingExecutor(mock_est, train_spec, eval_spec)
executor.run_local()
stop_hook = mock_est.train.call_args[1]['hooks'][-1]
self.assertIsInstance(stop_hook, training._StopAtSecsHook)
self.assertEqual(eval_spec.throttle_secs, stop_hook._stop_after_secs)
def test_runs_in_a_loop_until_max_steps(self):
mock_est = test.mock.Mock(spec=estimator_lib.Estimator, model_dir='path/')
mock_est.latest_checkpoint = self.unique_checkpoint_every_time_fn
mock_est.times_export_was_called = 0
mock_est.times_final_export_was_called = 0
def export(estimator, export_path, checkpoint_path, eval_result,
is_the_final_export):
del export_path, checkpoint_path, eval_result
estimator.times_export_was_called += 1
# final_export is happend at the end.
self.assertEqual(0, estimator.times_final_export_was_called)
if is_the_final_export:
estimator.times_final_export_was_called += 1
exporter = test.mock.PropertyMock(spec=exporter_lib.Exporter)
exporter.name = 'see_how_many_times_export_is_called'
exporter.export = export
train_spec = training.TrainSpec(
input_fn=lambda: 1, max_steps=300, hooks=[_FakeHook()])
eval_spec = training.EvalSpec(
input_fn=lambda: 1,
hooks=[_FakeHook()],
throttle_secs=100,
exporters=exporter)
# should be called 3 times.
mock_est.evaluate.side_effect = [{
_GLOBAL_STEP_KEY: train_spec.max_steps - 100
}, {
_GLOBAL_STEP_KEY: train_spec.max_steps - 50
}, {
_GLOBAL_STEP_KEY: train_spec.max_steps
}]
executor = training._TrainingExecutor(mock_est, train_spec, eval_spec)
executor.run_local()
self.assertEqual(3, mock_est.train.call_count)
self.assertEqual(3, mock_est.evaluate.call_count)
self.assertEqual(3, mock_est.times_export_was_called)
self.assertEqual(1, mock_est.times_final_export_was_called)
def test_handles_no_new_checkpoint_found(self):
mock_est = test.mock.Mock(spec=estimator_lib.Estimator, model_dir='path/')
mock_est.latest_checkpoint.return_value = (
'no_new_checkpoints_after_the_first_train_step')
train_spec = training.TrainSpec(
input_fn=lambda: 1, max_steps=300, hooks=[_FakeHook()])
eval_spec = training.EvalSpec(
input_fn=lambda: 1, hooks=[_FakeHook()], throttle_secs=100)
# It was going to be called 3 times.
mock_est.evaluate.side_effect = [{
_GLOBAL_STEP_KEY: train_spec.max_steps - 100
}, {
_GLOBAL_STEP_KEY: train_spec.max_steps - 50
}, {
_GLOBAL_STEP_KEY: train_spec.max_steps
}]
executor = training._TrainingExecutor(mock_est, train_spec, eval_spec)
with self.assertRaisesRegexp(RuntimeError, _STALE_CHECKPOINT_MSG):
executor.run_local()
def test_final_export_is_true_in_the_end(self):
mock_est = test.mock.Mock(spec=estimator_lib.Estimator, model_dir='path/')
mock_est.latest_checkpoint = self.unique_checkpoint_every_time_fn
mock_est.times_export_fn_was_called = 0
mock_est.times_the_final_export_was_true = 0
def export(estimator, export_path, checkpoint_path, eval_result,
is_the_final_export):
del export_path, checkpoint_path, eval_result
estimator.times_export_fn_was_called += 1
if is_the_final_export:
estimator.times_the_final_export_was_true += 1
exporter = test.mock.PropertyMock(spec=exporter_lib.Exporter)
exporter.name = 'see_how_many_times_export_is_called'
exporter.export = export
train_spec = training.TrainSpec(
input_fn=lambda: 1, max_steps=300, hooks=[_FakeHook()])
eval_spec = training.EvalSpec(
input_fn=lambda: 1,
hooks=[_FakeHook()],
throttle_secs=100,
exporters=exporter)
# should be called 3 times.
mock_est.evaluate.side_effect = [{
_GLOBAL_STEP_KEY: train_spec.max_steps - 100
}, {
_GLOBAL_STEP_KEY: train_spec.max_steps - 50
}, {
_GLOBAL_STEP_KEY: train_spec.max_steps
}]
executor = training._TrainingExecutor(mock_est, train_spec, eval_spec)
executor.run_local()
self.assertEqual(3, mock_est.train.call_count)
self.assertEqual(3, mock_est.evaluate.call_count)
self.assertEqual(3, mock_est.times_export_fn_was_called)
self.assertEqual(1, mock_est.times_the_final_export_was_true)
def test_train_and_evaluate_args(self):
mock_est = test.mock.Mock(spec=estimator_lib.Estimator, model_dir='path/')
mock_est.latest_checkpoint.return_value = 'checkpoint_path/'
train_spec = training.TrainSpec(
input_fn=lambda: 1, max_steps=300, hooks=[_FakeHook()])
eval_spec = training.EvalSpec(
input_fn=lambda: 1, steps=2, hooks=[_FakeHook()], name='local_eval')
mock_est.evaluate.return_value = {_GLOBAL_STEP_KEY: train_spec.max_steps}
executor = training._TrainingExecutor(mock_est, train_spec, eval_spec)
executor.run_local()
mock_est.evaluate.assert_called_with(
name=eval_spec.name,
input_fn=eval_spec.input_fn,
steps=eval_spec.steps,
checkpoint_path='checkpoint_path/',
hooks=eval_spec.hooks)
train_args = mock_est.train.call_args[1]
self.assertEqual(list(train_spec.hooks), list(train_args['hooks'][:-1]))
self.assertEqual(train_spec.input_fn, train_args['input_fn'])
self.assertEqual(train_spec.max_steps, train_args['max_steps'])
def test_errors_out_if_throttle_secs_is_zero(self):
mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
train_spec = training.TrainSpec(input_fn=lambda: 1)
eval_spec = training.EvalSpec(input_fn=lambda: 1, throttle_secs=0)
executor = training._TrainingExecutor(mock_est, train_spec, eval_spec)
with self.assertRaisesRegexp(ValueError, 'throttle_secs'):
executor.run_local()
def test_that_export_is_called_with_run_local(self):
mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
mock_train_spec.max_steps = 200
mock_est.evaluate.return_value = {
_GLOBAL_STEP_KEY: mock_train_spec.max_steps
}
# _validate_hooks would have made sure that train_spec.hooks is [], when
# None were passed.
mock_train_spec.hooks = []
def export(estimator, *args, **kwargs):
del args, kwargs
estimator.export_was_called = True
exporter = test.mock.PropertyMock(spec=exporter_lib.Exporter)
exporter.name = 'see_whether_export_is_called'
exporter.export = export
eval_spec = training.EvalSpec(
input_fn=lambda: 1,
steps=2,
start_delay_secs=0,
throttle_secs=213,
exporters=exporter)
executor = training._TrainingExecutor(mock_est, mock_train_spec, eval_spec)
executor.run_local()
self.assertTrue(mock_est.export_was_called)
def test_errors_out_if_evaluate_returns_empty_dict(self):
mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
train_spec = training.TrainSpec(input_fn=lambda: 1)
eval_spec = training.EvalSpec(input_fn=(lambda: 1), throttle_secs=123)
mock_est.evaluate.return_value = {}
executor = training._TrainingExecutor(mock_est, train_spec, eval_spec)
with self.assertRaisesRegexp(RuntimeError, _INVALID_EMPTY_EVAL_RESULT_ERR):
executor.run_local()
def test_errors_out_if_evaluate_returns_non_dict(self):
mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
train_spec = training.TrainSpec(input_fn=lambda: 1)
eval_spec = training.EvalSpec(input_fn=(lambda: 1), throttle_secs=123)
mock_est.evaluate.return_value = 123
executor = training._TrainingExecutor(mock_est, train_spec, eval_spec)
with self.assertRaisesRegexp(TypeError, _INVALID_EVAL_RESULT_TYPE_ERR):
executor.run_local()
def test_errors_out_if_evaluate_returns_dict_without_global_step(self):
mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
train_spec = training.TrainSpec(input_fn=lambda: 1)
eval_spec = training.EvalSpec(input_fn=(lambda: 1), throttle_secs=123)
mock_est.evaluate.return_value = {'loss': 123}
executor = training._TrainingExecutor(mock_est, train_spec, eval_spec)
with self.assertRaisesRegexp(RuntimeError,
_MISSING_GLOBAL_STEP_IN_EVAL_RESULT_ERR):
executor.run_local()
class TrainAndEvaluateIntegrationTest(test.TestCase):
def setUp(self):
self._model_dir = tempfile.mkdtemp()
def tearDown(self):
if self._model_dir:
shutil.rmtree(self._model_dir)
def _as_label(self, data_in_float):
return np.rint(data_in_float).astype(np.int64)
def _get_exporter(self, name, fc):
feature_spec = feature_column.make_parse_example_spec(fc)
serving_input_receiver_fn = (
export_lib.build_parsing_serving_input_receiver_fn(feature_spec))
return exporter_lib.LatestExporter(
name, serving_input_fn=serving_input_receiver_fn)
def _extract_loss_and_global_step(self, event_folder):
"""Returns the loss and global step in last event."""
event_paths = glob.glob(os.path.join(event_folder, 'events*'))
loss = None
global_step_count = None
for e in summary_iterator.summary_iterator(event_paths[-1]):
current_loss = None
for v in e.summary.value:
if v.tag == 'loss':
current_loss = v.simple_value
# If loss is not found, global step is meaningless.
if current_loss is None:
continue
current_global_step = e.step
if global_step_count is None or current_global_step > global_step_count:
global_step_count = current_global_step
loss = current_loss
return (loss, global_step_count)
def test_complete_flow_with_non_distributed_configuration(self):
n_classes = 3
input_dimension = 2
batch_size = 10
eval_name = 'foo'
exporter_name = 'saved_model_exporter'
# max_steps should be larger than save_summary_steps
max_steps = 10
save_summary_steps = 2
data = np.linspace(
0., n_classes - 1., batch_size * input_dimension, dtype=np.float32)
x_data = data.reshape(batch_size, input_dimension)
y_data = np.reshape(self._as_label(data[:batch_size]), (batch_size, 1))
# learn y = x
train_input_fn = numpy_io.numpy_input_fn(
x={'x': x_data},
y=y_data,
batch_size=batch_size,
num_epochs=None,
shuffle=True)
eval_input_fn = numpy_io.numpy_input_fn(
x={'x': x_data},
y=y_data,
batch_size=batch_size,
num_epochs=1,
shuffle=False)
predict_input_fn = numpy_io.numpy_input_fn(
x={'x': x_data},
batch_size=batch_size,
shuffle=False)
feature_columns = [
feature_column.numeric_column('x', shape=(input_dimension,))]
est = dnn.DNNClassifier(
hidden_units=(2, 2),
feature_columns=feature_columns,
n_classes=n_classes,
config=run_config_lib.RunConfig(save_summary_steps=save_summary_steps),
model_dir=self._model_dir)
train_spec = training.TrainSpec(input_fn=train_input_fn,
max_steps=max_steps)
eval_spec = training.EvalSpec(
name=eval_name, input_fn=eval_input_fn, steps=None,
exporters=self._get_exporter(exporter_name, feature_columns),
throttle_secs=2)
training.train_and_evaluate(est, train_spec, eval_spec)
# Make sure nothing is stuck in limbo.
writer_cache.FileWriterCache.clear()
# Examine the training events. Use a range to check global step to avoid
# flakyness due to global step race condition.
training_loss, training_global_step = self._extract_loss_and_global_step(
est.model_dir)
self.assertIsNotNone(training_loss)
self.assertTrue(
max_steps - save_summary_steps < training_global_step <= max_steps)
# Examine the eval events. The global step should be accurate.
eval_loss, eval_global_step = self._extract_loss_and_global_step(
event_folder=os.path.join(est.model_dir, 'eval_' + eval_name))
self.assertIsNotNone(eval_loss)
self.assertEqual(max_steps, eval_global_step)
# Examine the export folder.
export_dir = os.path.join(os.path.join(est.model_dir, 'export'),
exporter_name)
self.assertTrue(gfile.Exists(export_dir))
# Examine the ckpt for predict.
predicted_proba = np.array([
x[prediction_keys.PredictionKeys.PROBABILITIES]
for x in est.predict(predict_input_fn)
])
self.assertAllEqual((batch_size, n_classes), predicted_proba.shape)
if __name__ == '__main__':
test.main()
|
{
"content_hash": "7cde9fb77b932980ff560889526b94f8",
"timestamp": "",
"source": "github",
"line_count": 1671,
"max_line_length": 80,
"avg_line_length": 39.3081986834231,
"alnum_prop": 0.6706503866999574,
"repo_name": "mdrumond/tensorflow",
"id": "d88ca2c925c7544dd1e73b4310d486c3a2f847fe",
"size": "66374",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tensorflow/python/estimator/training_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "87912"
},
{
"name": "C++",
"bytes": "12683412"
},
{
"name": "CMake",
"bytes": "72419"
},
{
"name": "CSS",
"bytes": "774"
},
{
"name": "Go",
"bytes": "26396"
},
{
"name": "HTML",
"bytes": "486716"
},
{
"name": "Java",
"bytes": "50995"
},
{
"name": "JavaScript",
"bytes": "12972"
},
{
"name": "Jupyter Notebook",
"bytes": "1882397"
},
{
"name": "Makefile",
"bytes": "23413"
},
{
"name": "Objective-C",
"bytes": "7056"
},
{
"name": "Objective-C++",
"bytes": "64592"
},
{
"name": "Protocol Buffer",
"bytes": "135251"
},
{
"name": "Python",
"bytes": "11583592"
},
{
"name": "Shell",
"bytes": "268408"
},
{
"name": "TypeScript",
"bytes": "668194"
}
],
"symlink_target": ""
}
|
from __future__ import print_function, division
import subprocess
import time
from math import floor
from lastfm_auth import LastFMInstance, TokenRequestException, \
AuthenticationException, NotAuthenticatedException, \
ScrobbleException, NowPlayingException
#
# Change the scrobble percentage here
SCROBBLE_THRESHOLD = 50 # percent
#
LOOP_DURATION = 1
class MusicInfo:
def __init__(self):
self.artist = ""
self.status = ""
self.album = ""
self.title = ""
self.duration = -1
self.position = -1
self.elapsed = 0
self.started = str(time.time())
self.scrobbledTrack = False
def __eq__(self, other):
return self.artist == other.artist \
and self.album == other.album \
and self.title == other.title \
and self.duration == other.duration
def __cmp__(self, other): # Python 2 exclusive
return self.__cmp__(other)
def __ne__(self, other):
return not self == other
@property
def percentagePlayed(self):
return floor((self.elapsed * 100.0) / self.duration)
class CMUSStatus:
def __init__(self):
self.scrobbleThreshold = SCROBBLE_THRESHOLD
if self.scrobbleThreshold < 50: self.scrobbleThreshold = 50
if self.scrobbleThreshold > 99: self.scrobbleThreshold = 99
self.nowPlayingInfo = MusicInfo()
try:
self.lastFMInstance = LastFMInstance()
except AuthenticationException:
print("Error retrieving last.fm session")
exit(1)
except NotAuthenticatedException as exception:
print("Please allow cmus-scrobble to access your account")
print(exception)
exit(1)
except TokenRequestException:
print("Error retrieving access token. Please check your internet connection.\nExiting program..")
exit(1)
except ValueError:
print("Error parsing JSON from last.fm server")
def scrobble(self):
self.nowPlayingInfo.scrobbledTrack = True
try:
self.lastFMInstance.scrobble(
artist=self.nowPlayingInfo.artist,
album=self.nowPlayingInfo.album,
title=self.nowPlayingInfo.title,
started=self.nowPlayingInfo.started
)
except ScrobbleException:
print("Could not scrobble track to last.fm")
except ValueError:
print("Error parsing JSON from last.fm server")
def updateNowPlaying(self):
try:
self.lastFMInstance.updateNowPlaying(
artist=self.nowPlayingInfo.artist,
album=self.nowPlayingInfo.album,
title=self.nowPlayingInfo.title
)
except NowPlayingException:
print("Could not send now playing info to last.fm")
except ValueError:
print("Error parsing JSON from last.fm server")
def reset(self, newMusicInfo=None):
if newMusicInfo is None:
self.nowPlayingInfo = MusicInfo()
else:
self.nowPlayingInfo = newMusicInfo
self.updateNowPlaying()
def apply(self, remoteOutput): # called every LOOP_DURATION seconds
newMusicInfo = MusicInfo()
for line in remoteOutput.splitlines():
if line.startswith("status "):
newMusicInfo.status = line[7:]
if newMusicInfo.status != "playing":
return
if line.startswith("duration "):
newMusicInfo.duration = int(line[9:])
if line.startswith("position "):
newMusicInfo.position = int(line[9:])
if line.startswith("tag artist "):
newMusicInfo.artist = line[11:]
if line.startswith("tag album "):
newMusicInfo.album = line[10:]
if line.startswith("tag title "):
newMusicInfo.title = line[10:]
if newMusicInfo != self.nowPlayingInfo:
self.reset(newMusicInfo)
return
self.nowPlayingInfo.elapsed += LOOP_DURATION
if not self.nowPlayingInfo.scrobbledTrack and self.nowPlayingInfo.duration > 30: # Scrobble minimum length is 30s according to last.fm api rules
if self.nowPlayingInfo.percentagePlayed >= self.scrobbleThreshold \
or self.nowPlayingInfo.elapsed >= 4 * 60: # Scrobble if elapsed duration reaches 4m according to last.fm api rules
self.scrobble()
def __str__(self):
return "{0} - {1} ({2}) {3} : {4}%" \
.format(self.nowPlayingInfo.artist,
self.nowPlayingInfo.title,
self.nowPlayingInfo.album,
self.nowPlayingInfo.position,
self.nowPlayingInfo.percentagePlayed)
def scrobblerLoop():
status = CMUSStatus()
while True:
try:
res = subprocess.check_output(["cmus-remote", "-Q"], stderr=subprocess.STDOUT)
status.apply(res.decode(encoding="utf-8"))
except subprocess.CalledProcessError:
print("cmus is not running")
"""
# This doesn't really work if it's run as a background process
# so it'd be better to not do it at all
#
try:
if sys.version_info[0] == 2:
inp = raw_input("Enter q to quit or any other key to retry.")
else:
inp = input("Enter q to quit or any other key to retry.")
if inp == 'q':
exit(0)
except SyntaxError:
pass
"""
time.sleep(LOOP_DURATION)
if __name__ == "__main__":
scrobblerLoop()
|
{
"content_hash": "49a39ade775fa887ea9910f10ad5dcbb",
"timestamp": "",
"source": "github",
"line_count": 177,
"max_line_length": 153,
"avg_line_length": 32.932203389830505,
"alnum_prop": 0.5779722079258878,
"repo_name": "gouthamank/cmus-scrobble",
"id": "c73b937de28858703261948e887a7584c5a2d61a",
"size": "5852",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cmus-scrobble.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "12176"
}
],
"symlink_target": ""
}
|
"""
Helper methods for working with S3 buckets.
"""
import sys
from boto.s3.connection import S3Connection
from boto.exception import S3ResponseError
from widely.util import sizeof_fmt
class NoSuchBucket(Exception):
"""
Raised when there is no bucket for the specified sitename.
"""
pass
class NoWidelyDotfile(Exception):
"""
Raised when there is no .widely in the current directory.
"""
pass
def get_buckets():
"""
Returns a list of all accessible buckets.
"""
from widely.commands.auth import get_credentials
get_credentials()
conn = S3Connection()
buckets = conn.get_all_buckets()
return buckets
def get_specified_bucket(sitename):
"""
Returns the bucket for the specified sitename.
"""
from widely.commands.auth import get_credentials
get_credentials()
conn = S3Connection()
try:
bucket = conn.get_bucket(sitename)
bucket.get_website_configuration()
except S3ResponseError:
raise NoSuchBucket
return bucket
def get_current_bucket():
"""
Returns the bucket associated with the current directory, if it is
a widely directory.
"""
try:
sitename = open('.widely', 'r').read().split()[0]
except IOError:
raise NoWidelyDotfile
return get_specified_bucket(sitename)
def get_current_or_specified_bucket(arguments):
"""
Returns a bucket. If one was specified and it exists, it is that
one, otherwise, it is the current directory's bucket if it exists.
"""
sitename = get_current_or_specified_sitename(arguments)
try:
return get_specified_bucket(sitename)
except NoSuchBucket:
print(' !\tSite not found')
sys.exit(1)
def websites_from_buckets(buckets):
"""
Filter a list of buckets into only those which are configured to
be a website.
"""
for bucket in buckets:
try:
bucket.get_website_configuration()
yield bucket
except S3ResponseError:
continue
def bucket_size(bucket):
"""
Returns the total number of bytes in a bucket.
"""
return sum(key.size for key in bucket.get_all_keys())
def readable_bucket_size(bucket):
"""
Returns the number of bytes in a bucket in a human readable form.
"""
return sizeof_fmt(bucket_size(bucket))
def get_current_or_specified_sitename(arguments):
"""
Returns the bucket name/sitename from the arguments if there was
one specified, otherwise from the .widely if it exists.
"""
sitename = arguments['<SITENAME>']
if sitename:
return sitename
else:
try:
with open('.widely', 'r') as f:
return f.read().split()[0]
except IOError:
print(' !\tNo site specified.')
print(" !\tRun this command from a site folder or specify which "
"site to use with --site SITENAME.")
sys.exit(1)
|
{
"content_hash": "8cc0610f567f1fe028499cc14b167bee",
"timestamp": "",
"source": "github",
"line_count": 121,
"max_line_length": 77,
"avg_line_length": 24.636363636363637,
"alnum_prop": 0.6370345521637034,
"repo_name": "zeckalpha/widely",
"id": "d4d39863da278a807270c1db7142cfd438d937c7",
"size": "2981",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "widely/bucket.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "28812"
}
],
"symlink_target": ""
}
|
"""A simple utility for constructing filesystem-like trees from beets
libraries.
"""
from __future__ import division, absolute_import, print_function
from collections import namedtuple
from beets import util
Node = namedtuple('Node', ['files', 'dirs'])
def _insert(node, path, itemid):
"""Insert an item into a virtual filesystem node."""
if len(path) == 1:
# Last component. Insert file.
node.files[path[0]] = itemid
else:
# In a directory.
dirname = path[0]
rest = path[1:]
if dirname not in node.dirs:
node.dirs[dirname] = Node({}, {})
_insert(node.dirs[dirname], rest, itemid)
def libtree(lib):
"""Generates a filesystem-like directory tree for the files
contained in `lib`. Filesystem nodes are (files, dirs) named
tuples in which both components are dictionaries. The first
maps filenames to Item ids. The second maps directory names to
child node tuples.
"""
root = Node({}, {})
for item in lib.items():
dest = item.destination(fragment=True)
parts = util.components(dest)
_insert(root, parts, item.id)
return root
|
{
"content_hash": "15c8c819601035e9fb5e0c7f0dd2a076",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 69,
"avg_line_length": 30.736842105263158,
"alnum_prop": 0.6421232876712328,
"repo_name": "diego-plan9/beets",
"id": "7f9a049eeb0a7ea2ff6acf0815b81ef51c1bdcc5",
"size": "1839",
"binary": false,
"copies": "26",
"ref": "refs/heads/master",
"path": "beets/vfs.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2951"
},
{
"name": "HTML",
"bytes": "3307"
},
{
"name": "JavaScript",
"bytes": "85950"
},
{
"name": "Python",
"bytes": "1820166"
},
{
"name": "Shell",
"bytes": "7413"
}
],
"symlink_target": ""
}
|
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('payments', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='payment',
name='token',
field=models.CharField(max_length=32, default='viki'),
preserve_default=False,
),
]
|
{
"content_hash": "0523d020038952ed5d16358f8b177825",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 66,
"avg_line_length": 22.176470588235293,
"alnum_prop": 0.5649867374005305,
"repo_name": "vladimiroff/humble-media",
"id": "73e590b7850119b3bc3e89583b4439d3fdad8869",
"size": "394",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "humblemedia/payments/migrations/0002_payment_token.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2504"
},
{
"name": "JavaScript",
"bytes": "28011"
},
{
"name": "Python",
"bytes": "69551"
}
],
"symlink_target": ""
}
|
import os
from setuptools import setup
README = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='git_branch_panel',
version='0.1',
packages=['git_branch_panel'],
include_package_data=True,
license='BSD License',
description='A Django Debug Toolbar panel to display the current git branch',
long_description=README,
url='http://github.com/rantecki/git-branch-debug-panel',
author='Richard Antecki',
author_email='richard@antecki.id.au',
requires=['GitPython'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
],
)
|
{
"content_hash": "4f0c97c17e92050f7b0dec482c364be8",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 81,
"avg_line_length": 32.733333333333334,
"alnum_prop": 0.6476578411405295,
"repo_name": "rantecki/git-branch-debug-panel",
"id": "44cfef673f6c8cae7f4367a58eae23d39cd235b8",
"size": "982",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "2058"
}
],
"symlink_target": ""
}
|
"""Support for Linksys Access Points."""
import base64
import logging
import requests
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.components.device_tracker import (
DOMAIN,
PLATFORM_SCHEMA,
DeviceScanner,
)
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, CONF_VERIFY_SSL
INTERFACES = 2
DEFAULT_TIMEOUT = 10
_LOGGER = logging.getLogger(__name__)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_USERNAME): cv.string,
vol.Optional(CONF_VERIFY_SSL, default=True): cv.boolean,
}
)
def get_scanner(hass, config):
"""Validate the configuration and return a Linksys AP scanner."""
try:
return LinksysAPDeviceScanner(config[DOMAIN])
except ConnectionError:
return None
class LinksysAPDeviceScanner(DeviceScanner):
"""This class queries a Linksys Access Point."""
def __init__(self, config):
"""Initialize the scanner."""
self.host = config[CONF_HOST]
self.username = config[CONF_USERNAME]
self.password = config[CONF_PASSWORD]
self.verify_ssl = config[CONF_VERIFY_SSL]
self.last_results = []
# Check if the access point is accessible
response = self._make_request()
if not response.status_code == 200:
raise ConnectionError("Cannot connect to Linksys Access Point")
def scan_devices(self):
"""Scan for new devices and return a list with found device IDs."""
self._update_info()
return self.last_results
def get_device_name(self, device):
"""
Return the name (if known) of the device.
Linksys does not provide an API to get a name for a device,
so we just return None
"""
return None
def _update_info(self):
"""Check for connected devices."""
from bs4 import BeautifulSoup as BS
_LOGGER.info("Checking Linksys AP")
self.last_results = []
for interface in range(INTERFACES):
request = self._make_request(interface)
self.last_results.extend(
[
x.find_all("td")[1].text
for x in BS(request.content, "html.parser").find_all(
class_="section-row"
)
]
)
return True
def _make_request(self, unit=0):
"""Create a request to get the data."""
# No, the '&&' is not a typo - this is expected by the web interface.
login = base64.b64encode(bytes(self.username, "utf8")).decode("ascii")
pwd = base64.b64encode(bytes(self.password, "utf8")).decode("ascii")
url = "https://{}/StatusClients.htm&&unit={}&vap=0".format(self.host, unit)
return requests.get(
url,
timeout=DEFAULT_TIMEOUT,
verify=self.verify_ssl,
cookies={"LoginName": login, "LoginPWD": pwd},
)
|
{
"content_hash": "accf6cf1131fd02ba62e32e7a113716e",
"timestamp": "",
"source": "github",
"line_count": 101,
"max_line_length": 88,
"avg_line_length": 30.544554455445546,
"alnum_prop": 0.6077795786061588,
"repo_name": "fbradyirl/home-assistant",
"id": "df24a409b9872a308599afe36696185bd8786143",
"size": "3085",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "homeassistant/components/linksys_ap/device_tracker.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1175"
},
{
"name": "Dockerfile",
"bytes": "1829"
},
{
"name": "Python",
"bytes": "16494727"
},
{
"name": "Ruby",
"bytes": "745"
},
{
"name": "Shell",
"bytes": "17784"
}
],
"symlink_target": ""
}
|
"""This module is deprecated. Please use `airflow.providers.amazon.aws.hooks.lambda_function`."""
import warnings
# pylint: disable=unused-import
from airflow.providers.amazon.aws.hooks.lambda_function import AwsLambdaHook # noqa
warnings.warn(
"This module is deprecated. Please use `airflow.providers.amazon.aws.hooks.lambda_function`.",
DeprecationWarning,
stacklevel=2,
)
|
{
"content_hash": "8052000ab3e7304e9dbaffb5db7ab3a1",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 98,
"avg_line_length": 32.666666666666664,
"alnum_prop": 0.7678571428571429,
"repo_name": "spektom/incubator-airflow",
"id": "727cdf26f4e52bf964d6aba8e19373bcb5a8dab0",
"size": "1180",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "airflow/contrib/hooks/aws_lambda_hook.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "13715"
},
{
"name": "Dockerfile",
"bytes": "17179"
},
{
"name": "HTML",
"bytes": "148492"
},
{
"name": "JavaScript",
"bytes": "25233"
},
{
"name": "Jupyter Notebook",
"bytes": "2933"
},
{
"name": "Mako",
"bytes": "1339"
},
{
"name": "Python",
"bytes": "9768581"
},
{
"name": "Shell",
"bytes": "221415"
},
{
"name": "TSQL",
"bytes": "879"
}
],
"symlink_target": ""
}
|
from sympy import Symbol
from sympy.matrices import Matrix, MatrixSymbol
from copy import copy
from sympy import sympify
"""
Module "arraypy" describes tensor and it's bases - Multidimentional arrays.
Module consists of Arraypy class, TensorArray class and converting functions:
list2arraypy, matrix2arraypy, list2tensor, matrix2tensor.
"""
class Arraypy(object):
"""
N-dimentional array.
Parameters
==========
self._dims - tuple, array dimension.
self._rank - Length of self._dims, rank of array.
self._sparse - boolean variable. True means that array is sparse.
self._name - custom _name of the element or default value of the element
self._start_index - first, starting index.
self._end_index - last, maximum index.
self._loop_size - Counts number of elements in array.
self._output - dictionary. Dictionary key is an element index.
Dictionary value - is array value.
self._current_index - current index (used in iterator)
self._iterator_index_count - count of indices (in iterator)
index - list, represent current index in calculating process.
[0,0,0]; [0,0,1]; [0,0,2] etc (for 3-dim array).
"""
def __init__(self, *arg):
"""
Class constructor
Creates n-dimensional array.
Input:
*arg - custom list of arguments. It could be:
-Array dimension
-Name of the Symbol element
-Default element
-Sparse
-Custom range of dimensions
"""
# main variables declaration
self._name = '0'
self._sparse = True
self._dims = [1]
self._start_index = [0]
self._end_index = [1]
j = 0
# --recognition of constructor arguments--
for i in arg:
# for arguments of type: a = Arraypy( (3,3) )
if isinstance(i, (tuple)):
self._dims = i
self._start_index = [0 for j in range(len(self._dims))]
self._end_index = [j for j in self._dims]
# for arguments of type: a = Arraypy( 3 )
if isinstance(i, int):
self._dims[0] = i
self._start_index = [0]
self._end_index = [i]
# for string arguments
if isinstance(i, str):
i = i.strip()
# a = Arraypy ('sparse')
if i == 'sparse':
self._sparse = True
# a = Arraypy ('0..3, 1..4')
elif len(i.split('..')) != 1:
self._dims = i
# splitting the string by commas ','. Length of resulted
# list will be the rank of array.
# '0..3, 1..4' -> ['0..3' , '1..4']
temp = self._dims.split(',')
self._rank = len(temp)
self._dims = []
k = 0
for temp_str in temp:
# splitting every k-th string by '..'. Resulted digits
# will be a start index and end index.
# Difference between end index and start index
# will be dimension
# ['0..3'] -> [['0'], ['3']]
temp[k] = temp_str.split('..')
if len(temp[k]) != 2:
raise SyntaxError('Wrong argument syntax')
# cleaning from spaces. If resulted string is digit,
# then converting it to integer.
# [['0'], ['3']] -> [[0], [3]]
for j in range(2):
temp[k][j] = temp[k][j].strip()
if temp[k][j].isdigit() is False:
raise TypeError('Wrong type')
temp[k][j] = int(temp[k][j])
self._dims.append(temp[k][1] - temp[k][0] + 1)
k += 1
self._dims = tuple(self._dims)
self._start_index = [temp[k][0] for k in range(self._rank)]
self._end_index = [
temp[k][1] + 1
for k in range(self._rank)]
# a = Arraypy('Py')
else:
self._name = i
# for list arguments
if isinstance(i, (list)):
# a = Arraypy( [2, 4, 1] )
# first list element - rank
# second list element - length of every dimension
# third list element - start index
if isinstance(i[0], int):
if len(i) != 3:
raise TypeError('This argument must be lenght of 3')
for j in i:
if not isinstance(j, int):
raise TypeError(
'All list elements must be the same type (tuple or int)')
if i[0] < 1 or i[1] < 1:
raise ValueError(
'_rank and length of each dimensions must be greater than 0')
self._rank = i[0]
self._dims = tuple([i[1] for j in range(i[0])])
self._start_index = tuple([i[2] for j in range(i[0])])
self._end_index = tuple([i[2] + i[1] for j in range(i[0])])
# a = Arraypy( [(0, 3), (1, 4)] )
elif isinstance(i[0], tuple):
self._dims = []
self._start_index = []
self._end_index = []
for j in i:
if not isinstance(j, tuple):
raise TypeError(
'All list elements must be the same type (tuple or int)')
if len(j) != 2:
raise TypeError('Every tuple must be size of 2')
if j[0] > j[1]:
raise ValueError(
'Right border must be greater than left border')
self._start_index.append(j[0])
self._end_index.append(j[1] + 1)
self._dims.append(j[1] - j[0] + 1)
self._start_index = tuple(self._start_index)
self._end_index = tuple(self._end_index)
# rank - length of tuple with dimensions
self._rank = len(self._dims)
self._output = {}
# check if self._name is not digit (except '0')
if self._name[0].isdigit():
if self._name.isdigit() and self._name == '0':
self._name = int(self._name)
else:
raise ValueError('Element name cant start from digits')
# index - is an index of current array element
index = [self._start_index[i] for i in range(self._rank)]
# counting number of elements in array(equals to number of loops),
# which is product of every self._dims element
self._loop_size = self._dims[0]
for i in range(1, self._rank):
self._loop_size *= self._dims[i]
# --setting elements value to dictionary self._output--
if not (self._sparse and self._name == 0):
for i in range(self._loop_size):
if isinstance(self._name, str):
self._output[tuple(index)] = Symbol(
self._name + str(list(index)))
else:
self._output[tuple(index)] = self._name
index = self.next_index(index)
self._dims = tuple(self._dims)
self._start_index = tuple(self._start_index)
self._end_index = tuple(self._end_index)
def __add__(self, other):
"""Overload operator '+'. Returns new Arraypy instance, per elemental
sum of two Arraypy instances. Both arrays must have the same shape and
start index.
Examples
========
>>> from sympy.tensor.arraypy import Arraypy, list2arraypy
>>> a = list2arraypy([1 for i in range (4)], (2,2))
>>> b = list2arraypy([4 for i in range (4)], (2,2))
>>> c = a + b
>>> print (c)
5 5
5 5
"""
if other == 0:
return self
if not isinstance(other, Arraypy):
raise TypeError('Both operands must be Arraypy type')
if self._dims != other._dims:
raise ValueError('Both operands must be same shape')
if self._start_index != other._start_index or self._end_index != other._end_index:
raise ValueError(
'Both operands must have the same start index and end index')
# forming list of tuples for Arraypy constructor of type
# a = Arraypy( [(a, b), (c, d), ... , (y, z)] )
arg = [(self.start_index[i], self.end_index[i])
for i in range(self._rank)]
res = Arraypy(arg)
index = tuple(copy(self._start_index))
# per elemental sum
for i in range(self._loop_size):
res[index] = self.__getitem__(index) + other[index]
index = self.next_index(index)
return res
def __sub__(self, other):
"""
Overloads operator '-'. Returns new Arraypy instance, per elemental
difference of two Arraypy instances. Both arrays must have the same
shape and start index.
Examples
========
>>> from sympy.tensor.arraypy import Arraypy, list2arraypy
>>> a = list2arraypy([1 for i in range (4)], (2,2))
>>> b = list2arraypy([4 for i in range (4)], (2,2))
>>> c = a - b
>>> print (c)
-3 -3
-3 -3
"""
if other == 0:
return self
if not isinstance(other, Arraypy):
raise TypeError('Both operands must be Arraypy type')
if self._dims != other._dims:
raise ValueError('Both operands must be same shape')
if self._start_index != other._start_index or self._end_index != other._end_index:
raise ValueError(
'Both operands must have the same start index and end index')
# forming list of tuples for Arraypy constructor of type
# a = Arraypy( [(a, b), (c, d), ... , (y, z)] )
arg = [(self.start_index[i], self.end_index[i])
for i in range(self._rank)]
res = Arraypy(arg)
index = tuple(copy(self._start_index))
# per elemental difference
for i in range(self._loop_size):
res[index] = self.__getitem__(index) - other[index]
index = self.next_index(index)
return res
def __mul__(self, other):
"""
Overloads *.
n-dimensional arrays can be multiplyed on atom types (int, float, Symbol)
Examples
========
"""
# forming list of tuples for Arraypy constructor of type
# a = Arraypy( [(a, b), (c, d), ... , (y, z)] )
arg = [(self.start_index[i], self.end_index[i])
for i in range(self._rank)]
res = Arraypy(arg)
idx = self.start_index
for i in range(len(self)):
res[idx] = self[idx] * other
idx = self.next_index(idx)
return res
def __truediv__(self, other):
"""
Overloads /.
n-dimensional arrays can be divided on atom types (int, float, Symbol)
Examples
========
"""
# forming list of tuples for Arraypy constructor of type
# a = Arraypy( [(a, b), (c, d), ... , (y, z)] )
arg = [(self.start_index[i], self.end_index[i])
for i in range(self._rank)]
res = Arraypy(arg)
idx = self.start_index
for i in range(len(self)):
res[idx] = self[idx] / other
idx = self.next_index(idx)
return res
# imethods. += -= *= /=
def __iadd__(self, other):
return self + other
def __isub__(self, other):
return self - other
def __imul__(self, other):
return self * other
def __itruediv__(self, other):
return self / other
__rmul__ = __mul__
__div__ = __truediv__
def __eq__(self, other):
"""
Overloads '=='.
Arraypy instances can be compared to each other.
Instances equal if they have same shape, indexes and data.
Examples
========
>>> from sympy import Arraypy
>>> a = Arraypy((2, 3))
>>> b = Arraypy((2, 3))
>>> a == b
True
>>> c = a.reshape((3,2))
>>> c == b
False
>>> a[0,0] = 1
>>> b[0,0] = 2
>>> a == b
False
"""
if not isinstance(other, Arraypy):
return False
#raise TypeError('Compared instances must be Arraypy type')
if (self.shape != other.shape or self.start_index !=
other.start_index or self.end_index != other.end_index):
return False
idx = self.start_index
for i in range(len(self)):
if (self[idx] != other[idx]):
return False
idx = self.next_index(idx)
return True
def __getitem__(self, index):
"""Allows to get items from arraypy.
Examples
========
>>> from sympy.tensor.arraypy import Arraypy, list2arraypy
>>> a = list2arraypy(list(range(4)), (2,2))
>>> print (a)
0 1
2 3
>>> print(a[0,0])
0
>>> print(a.__getitem__((1,1)))
3
"""
if isinstance(index, int):
index = (index,)
if len(index) != self._rank:
raise ValueError('Wrong number of array axes')
# check if input index can exist in current indexing
for i in range(self._rank):
if index[i] >= self._end_index[
i] or index[i] < self._start_index[i]:
raise ValueError('Value ' + str(i) + ' out of border')
# returning element. If array is sparse and index not in dictionary
# then return '0'
try:
if self._sparse:
if index in self._output:
return self._output[index]
else:
return 0
else:
return self._output[index]
except NameError:
print('Something BAD happend!')
def __setitem__(self, index, value):
"""Allows to set items to Arraypy.
Examples
========
from sympy.tensor.arraypy import Arraypy
a = Arraypy((2,2))
a[0,0] = 1
a.__setitem__((1,1),1)
print (a)
1 0
0 1
"""
if isinstance(index, int):
index = (index,)
if len(index) != self._rank:
raise ValueError('Wrong number of array axes')
# check if input index can exist in current indexing
for i in range(self._rank):
if index[i] >= self._end_index[
i] or index[i] < self._start_index[i]:
raise ValueError('Value ' + str(i) + ' out of border')
# temporary fix. Arraypy sympify is not correct
if not isinstance(value, Arraypy):
value = sympify(value)
# setting element. If array is sparse, index in dictionary and value is
# 0 then poping it from dictionary
# If array is sparse, index NOT in dictionary and value is 0 then do
# nothing
try:
# temporary fix. To allow Arraypy to be an element of Arraypy
if not isinstance(value, Arraypy):
if self._sparse and value == 0 and index in self._output:
self._output.pop(index)
elif value == 0 and index not in self._output:
exit
else:
self._output[index] = value
else:
self._output[index] = value
except NameError:
print('Something BAD happend!')
def __len__(self):
"""Overload common function len(). Returns number of elements in array.
Examples
========
>>> from sympy.tensor.arraypy import Arraypy
>>> a = Arraypy( (3,3) )
>>> len(a)
9
>>> a.__len__()
9
"""
return self._loop_size
def __str__(self):
"""Returns string, allows to use standart functions print() and str().
Examples
========
>>> from sympy.tensor.arraypy import Arraypy
>>> a = Arraypy ( (2, 2), 'Py' )
>>> print (a)
Py[0, 0] Py[0, 1]
Py[1, 0] Py[1, 1]
"""
out_str = ''
index = list(copy(self._start_index))
# forming output string
for i in range(self._loop_size):
if self._sparse and not (tuple(index) in self._output):
out_str += '0' + ' '
else:
out_str += str(self._output[tuple(index)]) + ' '
# code below are equal to method .next_index with few additions.
j = self._rank - 1
index[j] += 1
if (index[j] == self._end_index[j]) and (j != 0):
# if dimension is changes, then adding '\n'
out_str += '\n'
while (index[j] == self._end_index[j]) and j > 0:
index[j] = self._start_index[j]
j -= 1
index[j] += 1
return out_str
def __copy__(self):
"""Overload commom python function "copy". Makes right copy of Arraypy
instance.
Examples
========
>>> from sympy.tensor.arraypy import Arraypy, copy
>>> a = Arraypy((2,2))
>>> b = copy(a)
>>> c = a
"""
# creating new instance of Arraypy. All parameters are coping from
# current array
res = Arraypy(self._dims)
res._name = self._name
res._sparse = self._sparse
res._start_index = copy(self._start_index)
res._end_index = copy(self._end_index)
res._output = copy(self._output)
return res
def __iter__(self):
"""Arraypy iterator."""
self._next_iter_index = self._start_index
self._current_index = self._start_index
self._iterator_index_number = 0
return self
def __next__(self):
"""Next elemenet in Arraypy in iteration process.
Allows to use Arraypy instance in for loop.
"""
if (self._iterator_index_number == self._loop_size):
raise StopIteration
else:
self._iterator_index_number += 1
self._current_index = self._next_iter_index
self._next_iter_index = self.next_index(self._current_index)
return self[self._current_index]
def next_index(self, index):
"""Returns tuple that represents next index of Arraypy instance.
Input argument - current index.
This method allows user to organize loop over whole array.
Examples
========
>>> from sympy.tensor.arraypy import Arraypy
>>> a = Arraypy((2,2,2))
>>> idx = (0, 0, 0)
>>> for i in range(0, len(a)):
... print (idx)
... a[idx] = i*10
... idx = a.next_index(idx)
...
(0, 0, 0)
(0, 0, 1)
(0, 1, 0)
(0, 1, 1)
(1, 0, 0)
(1, 0, 1)
(1, 1, 0)
(1, 1, 1)
========
If input index will be last possible index, then result will be the
first index(equal to ._start_index)
>>> idx = (1,1,1)
>>> idx = a.next_index(idx)
>>> print (idx)
(0, 0, 0)
"""
# check if input index can exist in current indexing
index = list(index)
for i in range(0, self._rank):
if index[i] >= self._end_index[
i] or index[i] < self._start_index[i]:
raise IndexError('Wrong index')
j = self._rank - 1
# increasing index by 1. (0, 0, 0) -> (0, 0, 1)
index[j] += 1
# in index exceeds top border, then index changes this way
# ( self._start_index = (0, 0, 0) и self._end_index = (2, 2, 2) ):
# (0, 0, 1) -> (0, 0, 2) -> (0, 1, 0)
# (0, 1, 0) -> (0, 1, 1)
# (0, 1, 1) -> (0, 1, 2) - > (0, 2, 0) -> (1, 0, 0)
# and so on...
if (index[j] == self._end_index[j]) and (j != 0):
while (index[j] == self._end_index[j]) and j > 0:
index[j] = self._start_index[j]
j -= 1
index[j] += 1
# if index == (2, 0, 0), then index sets to self._start_index -> (0, 0,
# 0)
if index[0] >= self._end_index[0]:
index = copy(self._start_index)
index = tuple(index)
return index
def reshape(self, new_shape):
"""Returns Arraypy instance with new shape. Elements number must be
suitable to new shape. The only argument of method sets new shape.
Examples
========
>>> from sympy.tensor.arraypy import Arraypy
>>> a = Arraypy( '1..2, 1..3', 'Py' )
>>> a.shape
(2, 3)
>>> a.start_index
(1, 1)
>>> print (a)
Py[1, 1] Py[1, 2] Py[1, 3]
Py[2, 1] Py[2, 2] Py[2, 3]
>>> b = a.reshape((3,2))
>>> b.shape
(3, 2)
>>> b.start_index
(0, 0)
>>> print (b)
Py[1, 1] Py[1, 2]
Py[1, 3] Py[2, 1]
Py[2, 2] Py[2, 3]
"""
if (isinstance(new_shape, int)):
new_shape = (new_shape,)
prod = 1
for i in new_shape:
prod *= i
# if product of shape elements equals to number of elements in array
# then
if (prod == self.__len__()):
new_base = Arraypy(new_shape)
idx1 = self._start_index
idx2 = new_base._start_index
for i in range(self.__len__()):
new_base[idx2] = self[idx1]
idx2 = new_base.next_index(idx2)
idx1 = self.next_index(idx1)
else:
raise ValueError(
'Number of elements of New shaped array must be equal to number of elements in Old shape')
return new_base
@property
def shape(self):
"""Returs array shape (dimension).
Examples
========
>>> from sympy.tensor.arraypy import Arraypy
>>> a = Arraypy((3,3))
>>> a.shape
(3, 3)
"""
return self._dims
@property
def start_index(self):
"""Returns the first index.
Examples
========
>>> from sympy.tensor.arraypy import Arraypy
>>> a = Arraypy ( [(0, 2), (1, 3)] )
>>> a.start_index
(0, 1)
"""
return self._start_index
@property
def end_index(self):
"""Returns the last possible index.
Examples
========
>>> from sympy.tensor.arraypy import Arraypy
>>> a = Arraypy ( [(0, 2), (1, 3)] )
>>> a.end_index
(2, 3)
"""
res = tuple([self._end_index[i] - 1 for i in range(self._rank)])
return res
@property
def iter_index(self):
"""Return current index in iteration process.
Use it only in loops over Arraypy/TensorArray.
"""
return self._current_index
@property
def rank(self):
"""Returns rank of arrray.
Examples
========
from sympy.tensor.arraypy import Arraypy
a = Arraypy ( (3,4,5,6,3) )
a.rank
5
"""
return self._rank
@property
def index_list(self):
"""Returns list of all possible indicies.
The indices are sorted in ascending: from very first to very last.
Another way to organize loops over Arraypy or TensorArray.
Examples
========
>>> from sympy.tensor.arraypy import Arraypy
>>> a = Arraypy((2,2))
>>> print(a.index_list)
[(0, 0), (0, 1), (1, 0), (1, 1)]
>>> for i in a.index_list:
... a[i] = 5
>>> print(a)
5 5
5 5
"""
result_list = []
idx = self.start_index
for i in range(len(self)):
result_list.append(idx)
idx = self.next_index(idx)
return result_list
def to_matrix(self):
"""
Converts Arraypy to Matrix. Can convert only 2-dim array, else will raise error.
Examples
========
from sympy.tensor.arraypy import Arraypy, list2arraypy
a = list2arraypy( [1 for i in range(9)], (3,3))
b = a.to_matrix()
print(b)
[1, 1, 1]
[1, 1, 1]
[1, 1, 1]
type(b)
<class 'sympy.matrices.matrices.MutableMatrix'>
"""
if self._rank != 2:
raise ValueError('Dimensions must be of size of 2')
# creating matrix of needed shape: self._dims[0] on self._dims[1]
x = MatrixSymbol(0, self._dims[0], self._dims[1])
res_matrix = Matrix(x)
# res_matrix = Matrix(self.shape[0], self.shape[1], [0 for i in range
# self.shape[0]*self.shape[1]])
# filling matrix with Arraypy elements
idx = self._start_index
idx = tuple(idx)
for i in range(len(res_matrix)):
res_matrix[i] = self.__getitem__(idx)
idx = self.next_index(idx)
return res_matrix
def to_tensor(self, ind_char):
"""Convert Arraypy to TensorArray. TensorArray uses Arraypy as base. The only
parametrer is used to set valency of TensorArray. Valency tuple length must
be equal to shape tuple legth.
Examples
========
>>> from sympy.tensor.arraypy import Arraypy, list2arraypy
>>> a = list2arraypy(list(range(9)), (3,3))
>>> b = a.to_tensor((-1,1))
>>> type(b)
<class 'sympy.tensor.arraypy.TensorArray'>
"""
return TensorArray(self, ind_char)
def to_list(self):
"""
Conveting Arraypy to one-dim list
Examples
========
>>> from sympy.tensor.arraypy import Arraypy
>>> a = Arraypy ( (2,2) )
>>> a = Arraypy ( (2,2), 'Py' )
>>> print (a)
Py[0, 0] Py[0, 1]
Py[1, 0] Py[1, 1]
>>> b = a.to_list()
>>> print (b)
[Py[0, 0], Py[0, 1], Py[1, 0], Py[1, 1]]
"""
res = []
idx = self._start_index
for i in range(self.__len__()):
res.append(self.__getitem__(idx))
idx = self.next_index(idx)
return res
class TensorArray(Arraypy):
"""TensorArray based on Arraypy.
Parameters
==========
self.base - Arraypy base.
self._ind_char - index character
+all Arraypy variables
self._dims - tuple, array dimension. Refers to self.base._dims
self._rank - Length of self._dims.
self._sparse - boolean variable. True means that array is sparse.
self._name - custom _name of the element or default value of the element
self._start_index - first, starting index. Refers to self.base._start_index
self._end_index - last, maximum index. Refers to self.base._end_index
self._loop_size - Counts number of elements in array.
self._output - dictionary. Dictionary key is an element index. Dictionary
value - is array value. Refers to self.base._output
"""
def __init__(self, array, ind_char):
"""
Class TensorArray constructor.
Input:
-array - Arraypy array
-_ind_char - tuple type, index character (valency). For example (-1,1,1)
"""
if isinstance(ind_char, int):
ind_char = (ind_char,)
if isinstance(ind_char, list):
ind_char = tuple(ind_char)
if not isinstance(ind_char, (list, tuple)):
raise TypeError('Wrong type. ind_char must be list or tuple.')
for i in ind_char:
if i != 1 and i != -1:
raise ValueError('Valency (ind_char) must be 1 or -1')
if len(ind_char) != array._rank:
raise ValueError(
'Length of Valency (ind_char) must be equal to length of Dimension of array')
if isinstance(array, TensorArray):
raise TypeError('Wrong type. Fisrt argument must be array')
elif isinstance(array, Arraypy):
# overwriting parameters
self.base = copy(array)
self._output = self.base._output
self._name = self.base._name
self._sparse = self.base._sparse
self._dims = self.base._dims
self._rank = self.base._rank
self._start_index = self.base._start_index
self._end_index = self.base._end_index
self._loop_size = self.base._loop_size
self._ind_char = ind_char
def __add__(self, other):
"""Overloads operator "+". But unlike Arraypy, it works only with
tensors with the same index character.
Examples
========
>>> from sympy.tensor.arraypy import Arraypy, TensorArray, list2tensor
>>> a = list2tensor ([3 for i in range(9)], (3,3), (1,-1))
>>> b = list2tensor ([2 for i in range(9)], (3,3), (1,-1))
>>> c = a + b
>>> print (c)
5 5 5
5 5 5
5 5 5
"""
if self._ind_char != other._ind_char:
raise ValueError('Both tensors must be the same ind_char')
res_base = self.base + other.base
res_tensor = TensorArray(res_base, self._ind_char)
return res_tensor
def __sub__(self, other):
"""
Overloads operator "-". But unlike Arraypy, it works only with tensors
with the same index character.
Examples
========
>>> from sympy.tensor.arraypy import Arraypy, TensorArray, list2tensor
>>> a = list2tensor ([3 for i in range(9)], (3,3), (1,-1))
>>> b = list2tensor ([2 for i in range(9)], (3,3), (1,-1))
>>> c = a - b
>>> print (c)
1 1 1
1 1 1
1 1 1
"""
if self._ind_char != other._ind_char:
raise ValueError('Both tensors must be the same ind_char')
res_base = self.base - other.base
res_tensor = TensorArray(res_base, self._ind_char)
return res_tensor
def __mul__(self, other):
"""
Overloads *.
n-dimensional arrays can be multiplyed on atom types (int, float, Symbol)
"""
return TensorArray(self.base * other, self.ind_char)
def __truediv__(self, other):
"""
Overloads *.
n-dimensional arrays can be multiplyed on atom types (int, float, Symbol)
"""
return TensorArray(self.base / other, self.ind_char)
__rmul__ = __mul__
__div__ = __truediv__
def __eq__(self, other):
"""
Overloads '=='.
TensorArray instances can be compared to each other.
Instances equal if they have same shape, indexes and data.
Examples
========
>>> from sympy import Arraypy, TensorArray, list2tensor
>>> a = list2tensor ([i for i in range(9)], (3, 3), (1, -1))
>>> b = list2tensor ([i for i in range(9)], (3, 3), (1, 1))
>>> c = list2tensor ([0 for i in range(9)], (3, 3), (1, -1))
>>> d = list2tensor ([i for i in range(9)], 9, -1)
>>> e = list2tensor ([i for i in range(9)], (3, 3), (1, -1))
>>> a == b
False
>>> a == c
False
>>> a == d
False
>>> a == e
True
"""
if not isinstance(other, TensorArray):
raise TypeError('Compared instances must be TensorArray type')
if (not(self._ind_char == other._ind_char)
or not(self.base == other.base)):
return False
return True
def __copy__(self):
"""Overload commom python function "copy". Makes right copy of Arraypy
object.
Examples
========
>>> from sympy.tensor.arraypy import Arraypy, TensorArray, copy
>>> a = TensorArray(Arraypy((2,2)), (1,1))
>>> b = copy(a)
>>> c = a
"""
return TensorArray(copy(self.base), copy(self._ind_char))
@property
def type_pq(self):
"""Returns tuple, that represents valency of the TensorArray in (P,Q)
format, where P is upper (contrvarian) index and Q is lower
(covariant).
Examples
========
>>> from sympy.tensor.arraypy import Arraypy, TensorArray
>>> a = Arraypy ((3,3,3,3,3)).to_tensor((1, 1, -1, 1, -1))
>>> a.type_pq
(3, 2)
"""
p = 0
q = 0
for i in self._ind_char:
if i == 1:
p += 1
else:
q += 1
return (p, q)
@property
def ind_char(self):
"""Returns tuple, index caracter.
Examples
========
>>> from sympy.tensor.arraypy import Arraypy, TensorArray, list2tensor
>>> a = list2tensor ([3 for i in range(9)], (3,3), (1,-1))
>>> a.ind_char
(1, -1)
"""
return self._ind_char
def contract(self, idx1, idx2):
"""Method returns new TensorArray instance, contract of current tensor.
Result tensor rank will be current rank – 2 and valency will be
(p - 1, q - 1).
Takes 2 parameters: first and second index number.
Index numbers counts from “1”.
Examples
========
>>> from sympy.tensor.arraypy import Arraypy, TensorArray, list2tensor, list2arraypy
>>> a = list2tensor(list(range(27)), (3,3,3), (1, -1, 1))
>>> b = a.contract(1,2)
>>> print (b)
36 39 42
>>> b.ind_char
(1,)
>>> b.rank
1
>>> b.shape
(3,)
>>> d = list2arraypy(list(range(9)), (3,3))
>>> d = d.to_tensor((1,-1))
>>> print (d)
0 1 2
3 4 5
6 7 8
>>> e = d.contract(1,2)
>>> print (e)
12
"""
if idx1 > self._rank or idx2 > self._rank or idx1 == idx2:
raise ValueError('Wrong index')
if idx1 < 1 or idx2 < 0:
raise ValueError('Index starts from 1')
idx1 -= 1
idx2 -= 1
if self._ind_char[idx1] == self._ind_char[idx2]:
raise ValueError('Indexes must have diferent valency (ind_char)')
for i in self._dims:
if self._dims[0] != i:
raise TypeError('Can''t do that in dimension like this')
# making idx1 greater then idx2
if idx1 < idx2:
temp = idx1
idx1 = idx2
idx2 = temp
# creating result tensor.
# result tensor valency will be (p-1, q-1)
arg = [(self.start_index[i], self.end_index[i])
for i in range(self.rank) if i != idx1 and i != idx2]
if arg == []:
arg = 1
new_ind_char = [self.ind_char[i]
for i in range(self.rank) if i != idx1 and i != idx2]
if new_ind_char == []:
new_ind_char = 1
result_tensor = TensorArray(Arraypy(arg), new_ind_char)
for i in result_tensor.index_list:
temp_index = list(i)
if (len(result_tensor) == 1):
temp_index = [self.start_index[idx2], self.start_index[idx1]]
else:
temp_index.insert(idx2, self.start_index[idx2])
temp_index.insert(idx1, self.start_index[idx1])
for j in range(0, self.shape[idx1]):
result_tensor[i] += self[tuple(temp_index)]
temp_index[idx1] += 1
temp_index[idx2] += 1
return result_tensor
def reshape(self, new_shape, ind_char):
"""reshape method are overloaded and now requires 2 arguments.
-Shape of new tensor base
-Index character of new tensor.
Examples
========
>>> from sympy.tensor.arraypy import Arraypy, TensorArray, list2tensor
>>> a = list2tensor(list(range(6)), (3,2), (1, -1))
>>> print (a)
0 1
2 3
4 5
>>> b = a.reshape(6, 1)
>>> print (b)
0 1 2 3 4 5
>>> b.shape
(6,)
>>> b.ind_char
(1,)
>>> c = a.reshape((2,3), (-1,-1))
>>> print (c)
0 1 2
3 4 5
>>> c.shape
(2, 3)
>>> c.ind_char
(-1, -1)
"""
if isinstance(ind_char, tuple):
if len(ind_char) != len(new_shape):
raise ValueError(
'ind_char tuple length must be equal to new shape length')
for i in ind_char:
if i != 1 and i != -1:
raise ValueError('!!!ind_char elements must be 1 or -1')
# reshaping Arraypy and creating tensor with new base
new_base = self.base.reshape(new_shape)
new_tensor = TensorArray(new_base, ind_char)
return new_tensor
def to_arraypy(self):
"""
Returns Arraypy - base of the current TensorArray object.
Examples
========
>>> from sympy.tensor.arraypy import Arraypy, TensorArray, list2tensor
>>> a = list2tensor (list(range(9)), (3, 3), (1, -1))
>>> b = a.to_arraypy()
>>> type(b)
<class 'sympy.tensor.arraypy.Arraypy'>
>>> print (b)
0 1 2
3 4 5
6 7 8
"""
return copy(self.base)
def to_tensor(self, ind_char):
"""Converting TensorArray to TensorArray is not required, so this method is not
implemented."""
raise NotImplementedError()
def matrix2arraypy(matrix):
"""matrix2arraypy converts Matrix instance to Arraypy. Matrix class alredy
has wide list of usfull methods and functions, which is used in tensor
package.
Examples
========
>>> from sympy.tensor.arraypy import Arraypy, TensorArray, matrix2arraypy
>>> from sympy import Matrix
>>> a = Matrix(((1,2),(3,4)))
>>> print (a)
Matrix([[1, 2], [3, 4]])
>>> b = matrix2arraypy(a)
>>> type(b)
<class 'sympy.tensor.arraypy.Arraypy'>
>>> print (b)
1 2
3 4
"""
if not isinstance(matrix, Matrix):
raise TypeError('Input attr must be Matrix type')
else:
n = matrix.shape
massiv = Arraypy(n)
idx = massiv._start_index
for i in range(len(matrix)):
massiv[idx] = matrix[i]
idx = massiv.next_index(idx)
return massiv
def matrix2tensor(matrix, ind_char=(-1, -1)):
"""
Convert Matrix to TensorArray.
Function take 2 arguments. First is a Matrix. The second is a tuple that
represents index character. By default it is (-1,-1).
Examples
========
>>> from sympy.tensor.arraypy import Arraypy, TensorArray, matrix2tensor
>>> from sympy import Matrix
>>> a = Matrix(((1,2),(3,4)))
>>> print (a)
Matrix([[1, 2], [3, 4]])
>>> b = matrix2tensor(a, (1,-1))
>>> type(b)
<class 'sympy.tensor.arraypy.TensorArray'>
>>> print (b)
1 2
3 4
"""
if not isinstance(matrix, Matrix):
raise TypeError('Input attr must be Matrix type')
else:
n = matrix.shape
massiv = TensorArray(Arraypy(n), ind_char)
idx = massiv._start_index
for i in range(len(matrix)):
massiv[idx] = matrix[i]
idx = massiv.next_index(idx)
return massiv
def list2arraypy(list_arr, shape=0):
"""Convert list to Arraypy.
Examples
========
>>> from sympy.tensor.arraypy import Arraypy, list2arraypy
>>> a = list2arraypy(list(range(3*3)), (3,3))
>>> print (a)
0 1 2
3 4 5
6 7 8
"""
if not isinstance(list_arr, list):
raise TypeError('First attr must be list type')
# checking shape type
if shape == 0:
shape = len(list_arr)
elif isinstance(shape, (tuple, list)):
mult = 1
for i in shape:
mult *= i
if mult != len(list_arr):
raise ValueError(
'Length of input list must be equal to product of shape elements')
elif isinstance(shape, int):
if shape != len(list_arr):
raise ValueError(
'Length of input list must be equal to product of shape elements')
else:
raise TypeError('Second attr must be tuple, list or int')
# creating new Arraypy and filling it with list elements
result = Arraypy(shape)
idx = result._start_index
for i in range(len(list_arr)):
result[idx] = list_arr[i]
idx = result.next_index(idx)
return result
def list2tensor(list_arr, shape=0, ind_char=0):
"""Convert list to TensorArray. It takes 3 arguments.
-a list, which elements will be elements of the tensor base.
-a tuple, shape of the new tensor (by default it is 0, which will mean that
result tensor will be vector)
-a tuple with index character (by default it feels with -1)
Examples
========
>>> from sympy.tensor.arraypy import Arraypy, TensorArray, list2tensor
>>> a = list2tensor([i*2 for i in range(9)], (3,3), (-1,1))
>>> type(a)
<class 'sympy.tensor.arraypy.TensorArray'>
>>> print (a)
0 2 4
6 8 10
12 14 16
"""
if not isinstance(list_arr, list):
raise TypeError('Fisrt attr must be list type')
# checking shape type
if shape == 0:
shape = len(list_arr)
elif isinstance(shape, (tuple, list)):
mult = 1
for i in shape:
mult *= i
if mult != len(list_arr):
raise ValueError(
'Length of input list must be equal to product of shape elements')
elif isinstance(shape, int):
if shape != len(list_arr):
raise ValueError(
'Length of input list must be equal to product of shape elements')
else:
raise TypeError('Second attr must be tuple, list or int')
if ind_char == 0:
if isinstance(shape, tuple):
ind_char = tuple([-1 for i in range(len(shape))])
elif isinstance(shape, int):
ind_char = -1
# creating new tensor and filling it with list elements
result = TensorArray(Arraypy(shape), ind_char)
idx = result._start_index
for i in range(len(list_arr)):
result[idx] = list_arr[i]
idx = result.next_index(idx)
return result
|
{
"content_hash": "d1e03517683f5f2cb25769cc0293bee3",
"timestamp": "",
"source": "github",
"line_count": 1412,
"max_line_length": 106,
"avg_line_length": 30.43201133144476,
"alnum_prop": 0.5000698161508029,
"repo_name": "AunShiLord/sympy",
"id": "45690902a98da9874c045a197842bfe886dbd52e",
"size": "43002",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sympy/tensor/arraypy.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "13716936"
},
{
"name": "Ruby",
"bytes": "304"
},
{
"name": "Scheme",
"bytes": "125"
},
{
"name": "Shell",
"bytes": "4008"
},
{
"name": "Tcl",
"bytes": "1048"
},
{
"name": "XSLT",
"bytes": "366202"
}
],
"symlink_target": ""
}
|
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.7.4
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1JobStatus(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, active=None, completion_time=None, conditions=None, failed=None, start_time=None, succeeded=None):
"""
V1JobStatus - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'active': 'int',
'completion_time': 'datetime',
'conditions': 'list[V1JobCondition]',
'failed': 'int',
'start_time': 'datetime',
'succeeded': 'int'
}
self.attribute_map = {
'active': 'active',
'completion_time': 'completionTime',
'conditions': 'conditions',
'failed': 'failed',
'start_time': 'startTime',
'succeeded': 'succeeded'
}
self._active = active
self._completion_time = completion_time
self._conditions = conditions
self._failed = failed
self._start_time = start_time
self._succeeded = succeeded
@property
def active(self):
"""
Gets the active of this V1JobStatus.
The number of actively running pods.
:return: The active of this V1JobStatus.
:rtype: int
"""
return self._active
@active.setter
def active(self, active):
"""
Sets the active of this V1JobStatus.
The number of actively running pods.
:param active: The active of this V1JobStatus.
:type: int
"""
self._active = active
@property
def completion_time(self):
"""
Gets the completion_time of this V1JobStatus.
Represents time when the job was completed. It is not guaranteed to be set in happens-before order across separate operations. It is represented in RFC3339 form and is in UTC.
:return: The completion_time of this V1JobStatus.
:rtype: datetime
"""
return self._completion_time
@completion_time.setter
def completion_time(self, completion_time):
"""
Sets the completion_time of this V1JobStatus.
Represents time when the job was completed. It is not guaranteed to be set in happens-before order across separate operations. It is represented in RFC3339 form and is in UTC.
:param completion_time: The completion_time of this V1JobStatus.
:type: datetime
"""
self._completion_time = completion_time
@property
def conditions(self):
"""
Gets the conditions of this V1JobStatus.
The latest available observations of an object's current state. More info: https://kubernetes.io/docs/concepts/workloads/controllers/jobs-run-to-completion/
:return: The conditions of this V1JobStatus.
:rtype: list[V1JobCondition]
"""
return self._conditions
@conditions.setter
def conditions(self, conditions):
"""
Sets the conditions of this V1JobStatus.
The latest available observations of an object's current state. More info: https://kubernetes.io/docs/concepts/workloads/controllers/jobs-run-to-completion/
:param conditions: The conditions of this V1JobStatus.
:type: list[V1JobCondition]
"""
self._conditions = conditions
@property
def failed(self):
"""
Gets the failed of this V1JobStatus.
The number of pods which reached phase Failed.
:return: The failed of this V1JobStatus.
:rtype: int
"""
return self._failed
@failed.setter
def failed(self, failed):
"""
Sets the failed of this V1JobStatus.
The number of pods which reached phase Failed.
:param failed: The failed of this V1JobStatus.
:type: int
"""
self._failed = failed
@property
def start_time(self):
"""
Gets the start_time of this V1JobStatus.
Represents time when the job was acknowledged by the job controller. It is not guaranteed to be set in happens-before order across separate operations. It is represented in RFC3339 form and is in UTC.
:return: The start_time of this V1JobStatus.
:rtype: datetime
"""
return self._start_time
@start_time.setter
def start_time(self, start_time):
"""
Sets the start_time of this V1JobStatus.
Represents time when the job was acknowledged by the job controller. It is not guaranteed to be set in happens-before order across separate operations. It is represented in RFC3339 form and is in UTC.
:param start_time: The start_time of this V1JobStatus.
:type: datetime
"""
self._start_time = start_time
@property
def succeeded(self):
"""
Gets the succeeded of this V1JobStatus.
The number of pods which reached phase Succeeded.
:return: The succeeded of this V1JobStatus.
:rtype: int
"""
return self._succeeded
@succeeded.setter
def succeeded(self, succeeded):
"""
Sets the succeeded of this V1JobStatus.
The number of pods which reached phase Succeeded.
:param succeeded: The succeeded of this V1JobStatus.
:type: int
"""
self._succeeded = succeeded
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, V1JobStatus):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
|
{
"content_hash": "fd627484cc681ba79e32dd6d5ea497c6",
"timestamp": "",
"source": "github",
"line_count": 245,
"max_line_length": 208,
"avg_line_length": 30.30204081632653,
"alnum_prop": 0.5843211206896551,
"repo_name": "sebgoa/client-python",
"id": "afd0b4b61fcac6cdc5ced5d1a99b8a149bbfca50",
"size": "7441",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "kubernetes/client/models/v1_job_status.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "5855378"
},
{
"name": "Shell",
"bytes": "16387"
}
],
"symlink_target": ""
}
|
from json import JSONEncoder,JSONDecoder
import json
import logging
logger = logging.getLogger(__name__)
class Chirp():
def __init__(self,_method,_name = "",_uri = "",_port = 0 ,_protocol = "",_config = {},_sender=''):
self.method = _method
self.name = _name
self.uri = _uri
self.port = _port
self.protocol= _protocol
self.config = _config
if _sender == '':
self.sender = _name
else:
self.sender = _sender
class ChirpEncoder(JSONEncoder):
def default(self, o):
return o.__dict__
class ChirpDecoder(JSONDecoder):
def decode(self, s):
dict = json.loads(s)
return Chirp(dict['method'],dict['name'],dict['uri'],dict['port'],dict['protocol'],dict['config'],dict['sender'])
|
{
"content_hash": "07f7b867d87a446f76029b637b6b71b5",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 121,
"avg_line_length": 23.61764705882353,
"alnum_prop": 0.5703611457036114,
"repo_name": "rahulmadhavan/chirp-python",
"id": "c6d9ad3cc91e26e23be0dfa41ad2b3b68a78a204",
"size": "803",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "chirp/chirp.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "11082"
}
],
"symlink_target": ""
}
|
from flask import Flask
from flask_restful import reqparse, abort, Api, Resource
app = Flask(__name__)
api = Api(app)
TODOS = {
'todo1': {'task': 'build an API'},
'todo2': {'task': '?????'},
'todo3': {'task': 'profit!'},
}
def abort_if_todo_doesnt_exist(todo_id):
if todo_id not in TODOS:
abort(404, message="Todo {} doesn't exist".format(todo_id))
parser = reqparse.RequestParser()
parser.add_argument('task', type=str)
# Todo
# show a single todo item and lets you delete them
class Todo(Resource):
def get(self, todo_id):
abort_if_todo_doesnt_exist(todo_id)
return TODOS[todo_id]
def delete(self, todo_id):
abort_if_todo_doesnt_exist(todo_id)
del TODOS[todo_id]
return '', 204
def put(self, todo_id):
args = parser.parse_args()
task = {'task': args['task']}
TODOS[todo_id] = task
return task, 201
# TodoList
# shows a list of all todos, and lets you POST to add new tasks
class TodoList(Resource):
def get(self):
return TODOS
def post(self):
args = parser.parse_args()
todo_id = 'todo%d' % (len(TODOS) + 1)
TODOS[todo_id] = {'task': args['task']}
return TODOS[todo_id], 201
##
## Actually setup the Api resource routing here
##
api.add_resource(TodoList, '/todos')
api.add_resource(Todo, '/todos/<string:todo_id>')
if __name__ == '__main__':
app.run(debug=True)
|
{
"content_hash": "1dc333b46bbf67c986ab477cd446c0f3",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 67,
"avg_line_length": 24.440677966101696,
"alnum_prop": 0.6005547850208044,
"repo_name": "mitchfriedman/flask-restful",
"id": "860cd62dba1702ae1af87ec571ac9550da4b7f6d",
"size": "1442",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "examples/todo.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "4723"
},
{
"name": "Python",
"bytes": "187669"
}
],
"symlink_target": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.