repo_name stringlengths 6 100 | path stringlengths 4 294 | copies stringlengths 1 5 | size stringlengths 4 6 | content stringlengths 606 896k | license stringclasses 15
values |
|---|---|---|---|---|---|
drewandersonnz/openshift-tools | openshift/installer/vendored/openshift-ansible-3.10.0-0.29.0/roles/openshift_health_checker/test/package_availability_test.py | 26 | 1983 | import pytest
from openshift_checks.package_availability import PackageAvailability
@pytest.mark.parametrize('pkg_mgr,openshift_is_containerized,is_active', [
    ('yum', False, True),
    ('yum', True, False),
    ('dnf', True, False),
    ('dnf', False, False),
])
def test_is_active(pkg_mgr, openshift_is_containerized, is_active):
    """The check runs only on yum-based, non-containerized hosts."""
    task_vars = {
        'ansible_pkg_mgr': pkg_mgr,
        'openshift_is_containerized': openshift_is_containerized,
    }
    check = PackageAvailability(None, task_vars)
    assert check.is_active() == is_active
@pytest.mark.parametrize('task_vars,must_have_packages,must_not_have_packages', [
    (
        dict(openshift_service_type='origin'),
        set(),
        {'openshift-master', 'openshift-node'},
    ),
    (
        dict(
            openshift_service_type='origin',
            group_names=['oo_masters_to_config'],
        ),
        {'origin-master'},
        {'origin-node'},
    ),
    (
        dict(
            openshift_service_type='atomic-openshift',
            group_names=['oo_nodes_to_config'],
        ),
        {'atomic-openshift-node'},
        {'atomic-openshift-master'},
    ),
    (
        dict(
            openshift_service_type='atomic-openshift',
            group_names=['oo_masters_to_config', 'oo_nodes_to_config'],
        ),
        {'atomic-openshift-master', 'atomic-openshift-node'},
        set(),
    ),
])
def test_package_availability(task_vars, must_have_packages, must_not_have_packages):
    """The check must query check_yum_update with the expected package set."""
    return_value = {}

    def execute_module(module_name=None, module_args=None, *_):
        # Capture and validate the module invocation made by the check.
        assert module_name == 'check_yum_update'
        assert 'packages' in module_args
        requested = set(module_args['packages'])
        assert requested.issuperset(must_have_packages)
        assert not requested & must_not_have_packages
        return {'foo': return_value}

    result = PackageAvailability(execute_module, task_vars).run()
    # The check must pass the module result through unchanged.
    assert result['foo'] is return_value
| apache-2.0 |
nitin-cherian/LifeLongLearning | Python/PythonProgrammingLanguage/Encapsulation/encap_env/lib/python3.5/site-packages/dateutil/tz/win.py | 32 | 11397 | # This code was originally contributed by Jeffrey Harris.
import datetime
import struct
from six.moves import winreg
from six import text_type
try:
import ctypes
from ctypes import wintypes
except ValueError:
# ValueError is raised on non-Windows systems for some horrible reason.
raise ImportError("Running tzwin on non-Windows system")
from ._common import tzrangebase
__all__ = ["tzwin", "tzwinlocal", "tzres"]

# One-week step used when searching for the Nth weekday of a month.
ONEWEEK = datetime.timedelta(7)

# Registry locations of the timezone database (NT family vs. Win9x) and of
# the local machine's current timezone configuration.
TZKEYNAMENT = r"SOFTWARE\Microsoft\Windows NT\CurrentVersion\Time Zones"
TZKEYNAME9X = r"SOFTWARE\Microsoft\Windows\CurrentVersion\Time Zones"
TZLOCALKEYNAME = r"SYSTEM\CurrentControlSet\Control\TimeZoneInformation"
def _settzkeyname():
    """Return the registry key that holds timezone data on this system.

    NT-family Windows stores the zone database under ``TZKEYNAMENT``;
    Win9x used a different key.  Probe for the NT key and fall back to
    the 9x location when it is absent.
    """
    handle = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE)
    try:
        winreg.OpenKey(handle, TZKEYNAMENT).Close()
        keyname = TZKEYNAMENT
    except WindowsError:
        keyname = TZKEYNAME9X
    handle.Close()
    return keyname


TZKEYNAME = _settzkeyname()
class tzres(object):
    """
    Class for accessing ``tzres.dll``, which contains timezone name related
    resources.

    .. versionadded:: 2.5.0
    """
    p_wchar = ctypes.POINTER(wintypes.WCHAR)  # Pointer to a wide char

    def __init__(self, tzres_loc='tzres.dll'):
        # Load the user32 DLL so we can load strings from tzres
        user32 = ctypes.WinDLL('user32')

        # Specify the LoadStringW function prototype so ctypes marshals
        # the arguments correctly.
        user32.LoadStringW.argtypes = (wintypes.HINSTANCE,
                                       wintypes.UINT,
                                       wintypes.LPWSTR,
                                       ctypes.c_int)
        self.LoadStringW = user32.LoadStringW
        self._tzres = ctypes.WinDLL(tzres_loc)
        self.tzres_loc = tzres_loc

    def load_name(self, offset):
        """
        Load a timezone name from a DLL offset (integer).

        >>> from dateutil.tzwin import tzres
        >>> tzr = tzres()
        >>> print(tzr.load_name(112))
        'Eastern Standard Time'

        :param offset:
            A positive integer value referring to a string from the tzres dll.

        ..note:
            Offsets found in the registry are generally of the form
            `@tzres.dll,-114`. The offset in this case is 114, not -114.
        """
        # Passing a buffer size of 0 to LoadStringW makes it return a
        # read-only pointer to the resource itself plus its length.
        resource = self.p_wchar()
        lpBuffer = ctypes.cast(ctypes.byref(resource), wintypes.LPWSTR)
        nchar = self.LoadStringW(self._tzres._handle, offset, lpBuffer, 0)
        return resource[:nchar]

    def name_from_string(self, tzname_str):
        """
        Parse strings as returned from the Windows registry into the time zone
        name as defined in the registry.

        >>> from dateutil.tzwin import tzres
        >>> tzr = tzres()
        >>> print(tzr.name_from_string('@tzres.dll,-251'))
        'Dateline Daylight Time'
        >>> print(tzr.name_from_string('Eastern Standard Time'))
        'Eastern Standard Time'

        :param tzname_str:
            A timezone name string as returned from a Windows registry key.

        :return:
            Returns the localized timezone string from tzres.dll if the string
            is of the form `@tzres.dll,-offset`, else returns the input string.

        :raises ValueError:
            If the string starts with '@' but is not of the expected
            `@tzres.dll,-offset` form.
        """
        if not tzname_str.startswith('@'):
            return tzname_str

        name_splt = tzname_str.split(',-')
        try:
            offset = int(name_splt[1])
        except (IndexError, ValueError):
            # Only translate the expected failure modes (missing ",-"
            # separator or a non-numeric offset).  The previous bare
            # "except:" here also swallowed KeyboardInterrupt/SystemExit.
            raise ValueError("Malformed timezone string.")

        return self.load_name(offset)
class tzwinbase(tzrangebase):
    """tzinfo class based on win32's timezones available in the registry.

    Subclasses populate the ``_std*``/``_dst*`` attributes from the
    Windows registry; this base class provides comparison, listing and
    transition computation on top of them.
    """
    def __init__(self):
        # Abstract: instantiate tzwin or tzwinlocal instead.
        raise NotImplementedError('tzwinbase is an abstract base class')

    def __eq__(self, other):
        # Compare on all relevant dimensions, including name.
        if not isinstance(other, tzwinbase):
            return NotImplemented

        return (self._std_offset == other._std_offset and
                self._dst_offset == other._dst_offset and
                self._stddayofweek == other._stddayofweek and
                self._dstdayofweek == other._dstdayofweek and
                self._stdweeknumber == other._stdweeknumber and
                self._dstweeknumber == other._dstweeknumber and
                self._stdhour == other._stdhour and
                self._dsthour == other._dsthour and
                self._stdminute == other._stdminute and
                self._dstminute == other._dstminute and
                self._std_abbr == other._std_abbr and
                self._dst_abbr == other._dst_abbr)

    @staticmethod
    def list():
        """Return a list of all time zones known to the system."""
        with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle:
            with winreg.OpenKey(handle, TZKEYNAME) as tzkey:
                # QueryInfoKey()[0] is the number of subkeys, one per zone.
                result = [winreg.EnumKey(tzkey, i)
                          for i in range(winreg.QueryInfoKey(tzkey)[0])]
        return result

    def display(self):
        # Display string as read from the registry by the subclass
        # (may be None for tzwinlocal when the zone key was not found).
        return self._display

    def transitions(self, year):
        """
        For a given year, get the DST on and off transition times, expressed
        always on the standard time side. For zones with no transitions, this
        function returns ``None``.

        :param year:
            The year whose transitions you would like to query.

        :return:
            Returns a :class:`tuple` of :class:`datetime.datetime` objects,
            ``(dston, dstoff)`` for zones with an annual DST transition, or
            ``None`` for fixed offset zones.
        """
        if not self.hasdst:
            return None

        dston = picknthweekday(year, self._dstmonth, self._dstdayofweek,
                               self._dsthour, self._dstminute,
                               self._dstweeknumber)

        dstoff = picknthweekday(year, self._stdmonth, self._stddayofweek,
                                self._stdhour, self._stdminute,
                                self._stdweeknumber)

        # Ambiguous dates default to the STD side
        dstoff -= self._dst_base_offset

        return dston, dstoff

    def _get_hasdst(self):
        # A DST start month of 0 marks a fixed-offset zone (no transitions).
        return self._dstmonth != 0

    @property
    def _dst_base_offset(self):
        # Difference between DST and standard offsets (a timedelta).
        return self._dst_base_offset_
class tzwin(tzwinbase):
    """Time zone object built from a named entry in the Windows registry
    timezone database (e.g. ``tzwin("Eastern Standard Time")``)."""

    def __init__(self, name):
        self._name = name

        # multiple contexts only possible in 2.7 and 3.1, we still support 2.6
        with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle:
            tzkeyname = text_type("{kn}\\{name}").format(kn=TZKEYNAME, name=name)
            with winreg.OpenKey(handle, tzkeyname) as tzkey:
                keydict = valuestodict(tzkey)

        self._std_abbr = keydict["Std"]
        self._dst_abbr = keydict["Dlt"]

        self._display = keydict["Display"]

        # See http://ww_winreg.jsiinc.com/SUBA/tip0300/rh0398.htm
        # "TZI" is a packed blob: 3 longs (Bias, StandardBias, DaylightBias)
        # followed by two 8-short SYSTEMTIME structures (std/dst start).
        tup = struct.unpack("=3l16h", keydict["TZI"])
        stdoffset = -tup[0]-tup[1]         # Bias + StandardBias * -1
        dstoffset = stdoffset-tup[2]       # + DaylightBias * -1
        self._std_offset = datetime.timedelta(minutes=stdoffset)
        self._dst_offset = datetime.timedelta(minutes=dstoffset)

        # for the meaning see the win32 TIME_ZONE_INFORMATION structure docs
        # http://msdn.microsoft.com/en-us/library/windows/desktop/ms725481(v=vs.85).aspx
        # tup[3] (the SYSTEMTIME wYear field) is skipped; it is 0 for
        # recurring transition rules.
        (self._stdmonth,
         self._stddayofweek,   # Sunday = 0
         self._stdweeknumber,  # Last = 5
         self._stdhour,
         self._stdminute) = tup[4:9]

        (self._dstmonth,
         self._dstdayofweek,   # Sunday = 0
         self._dstweeknumber,  # Last = 5
         self._dsthour,
         self._dstminute) = tup[12:17]

        self._dst_base_offset_ = self._dst_offset - self._std_offset
        self.hasdst = self._get_hasdst()

    def __repr__(self):
        return "tzwin(%s)" % repr(self._name)

    def __reduce__(self):
        # Pickle support: reconstruct from the registry key name.
        return (self.__class__, (self._name,))
class tzwinlocal(tzwinbase):
    """Time zone object for the machine's currently configured local zone,
    read from the TimeZoneInformation registry key."""

    def __init__(self):
        with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle:
            with winreg.OpenKey(handle, TZLOCALKEYNAME) as tzlocalkey:
                keydict = valuestodict(tzlocalkey)

            self._std_abbr = keydict["StandardName"]
            self._dst_abbr = keydict["DaylightName"]

            try:
                # Look up the display name in the zone database keyed by
                # the standard name; may fail for localized names.
                tzkeyname = text_type('{kn}\\{sn}').format(kn=TZKEYNAME,
                                                           sn=self._std_abbr)
                with winreg.OpenKey(handle, tzkeyname) as tzkey:
                    _keydict = valuestodict(tzkey)
                    self._display = _keydict["Display"]
            except OSError:
                self._display = None

        stdoffset = -keydict["Bias"]-keydict["StandardBias"]
        dstoffset = stdoffset-keydict["DaylightBias"]

        self._std_offset = datetime.timedelta(minutes=stdoffset)
        self._dst_offset = datetime.timedelta(minutes=dstoffset)

        # For reasons unclear, in this particular key, the day of week has been
        # moved to the END of the SYSTEMTIME structure.
        tup = struct.unpack("=8h", keydict["StandardStart"])

        (self._stdmonth,
         self._stdweeknumber,  # Last = 5
         self._stdhour,
         self._stdminute) = tup[1:5]

        self._stddayofweek = tup[7]

        tup = struct.unpack("=8h", keydict["DaylightStart"])

        (self._dstmonth,
         self._dstweeknumber,  # Last = 5
         self._dsthour,
         self._dstminute) = tup[1:5]

        self._dstdayofweek = tup[7]

        self._dst_base_offset_ = self._dst_offset - self._std_offset
        self.hasdst = self._get_hasdst()

    def __repr__(self):
        return "tzwinlocal()"

    def __str__(self):
        # str will return the standard name, not the daylight name.
        return "tzwinlocal(%s)" % repr(self._std_abbr)

    def __reduce__(self):
        # Pickle support: the local zone is re-read from the registry on load.
        return (self.__class__, ())
def picknthweekday(year, month, dayofweek, hour, minute, whichweek):
    """ dayofweek == 0 means Sunday, whichweek 5 means last instance """
    one_week = datetime.timedelta(7)
    first = datetime.datetime(year, month, 1, hour, minute)

    # Works whether dayofweek is ISO (1-7) or Microsoft-style (0-6),
    # because 7 % 7 == 0.
    first_match = first.replace(day=((dayofweek - first.isoweekday()) % 7) + 1)
    candidate = first_match + (whichweek - 1) * one_week

    # A "fifth" weekday request can overflow into the next month; step
    # back one week so it means the last instance in the requested month.
    if candidate.month != month:
        candidate -= one_week

    return candidate
def valuestodict(key):
    """Convert a registry key's values to a dictionary."""
    out = {}
    num_values = winreg.QueryInfoKey(key)[1]
    tz_res = None

    for index in range(num_values):
        name, value, dtype = winreg.EnumValue(key, index)
        if dtype in (winreg.REG_DWORD, winreg.REG_DWORD_LITTLE_ENDIAN):
            # DWORDs come back unsigned; reinterpret as signed 32-bit.
            if value & (1 << 31):
                value -= 1 << 32
        elif dtype == winreg.REG_SZ:
            # Strings of the form "@tzres.dll,-N" are references into
            # tzres.dll; resolve them to the actual localized string.
            # (The tzres instance is created lazily, at most once.)
            if value.startswith('@tzres'):
                tz_res = tz_res or tzres()
                value = tz_res.name_from_string(value)

            value = value.rstrip('\x00')  # Remove trailing nulls

        out[name] = value

    return out
| mit |
ceejatec/git-repo | subcmds/prune.py | 90 | 1792 | #
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
from color import Coloring
from command import PagedCommand
class Prune(PagedCommand):
  common = True
  helpSummary = "Prune (delete) already merged topics"
  helpUsage = """
%prog [<project>...]
"""

  def Execute(self, opt, args):
    """Prune merged branches, then report any branches still pending."""
    pending = []
    for project in self.GetProjects(args):
      pending.extend(project.PruneHeads())

    if not pending:
      return

    class Report(Coloring):
      def __init__(self, config):
        Coloring.__init__(self, config, 'status')
        self.project = self.printer('header', attr='bold')

    out = Report(pending[0].project.config)
    out.project('Pending Branches')
    out.nl()

    current_project = None
    for branch in pending:
      # Emit a project header each time the owning project changes.
      if current_project != branch.project:
        current_project = branch.project
        out.nl()
        out.project('project %s/' % current_project.relpath)
        out.nl()

      commits = branch.commits
      date = branch.date
      star = '*' if branch.name == current_project.CurrentBranch else ' '
      plural = 's' if len(commits) != 1 else ' '
      print('%s %-33s (%2d commit%s, %s)' % (
            star, branch.name, len(commits), plural, date))
| apache-2.0 |
StephenKing/summerschool-2015-ryu | ryu/tests/unit/packet/test_dhcp.py | 11 | 10523 | # Copyright (C) 2013 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import inspect
import logging
import struct
import unittest
from nose.tools import eq_
from ryu.lib import addrconv
from ryu.lib.packet import dhcp
LOG = logging.getLogger(__name__)
class Test_dhcp_offer(unittest.TestCase):
    """Round-trip tests for ryu's dhcp packet class, using a hand-built
    DHCP OFFER.  `buf` is the expected wire encoding of `dh` (Python 2
    byte-string fixtures)."""

    # Header fields mirrored in the raw `buf` below.
    op = dhcp.DHCP_BOOT_REPLY
    chaddr = 'aa:aa:aa:aa:aa:aa'
    htype = 1
    hlen = 6
    hops = 0
    xid = 1
    secs = 0
    flags = 1
    ciaddr = '192.168.10.10'
    yiaddr = '192.168.20.20'
    siaddr = '192.168.30.30'
    giaddr = '192.168.40.40'
    sname = 'abc'
    boot_file = ''

    # DHCP options, each as (tag, raw value bytes, length).
    option_list = [
        dhcp.option(dhcp.DHCP_MESSAGE_TYPE_OPT, '\x02', 1),
        dhcp.option(dhcp.DHCP_SUBNET_MASK_OPT, '\xff\xff\xff\x00', 4),
        dhcp.option(dhcp.DHCP_GATEWAY_ADDR_OPT, '\xc0\xa8\x0a\x09', 4),
        dhcp.option(dhcp.DHCP_DNS_SERVER_ADDR_OPT, '\xc0\xa8\x0a\x09', 4),
        dhcp.option(dhcp.DHCP_IP_ADDR_LEASE_TIME_OPT, '\x00\x03\xf4\x80', 4),
        dhcp.option(dhcp.DHCP_RENEWAL_TIME_OPT, '\x00\x01\xfa\x40', 4),
        dhcp.option(dhcp.DHCP_REBINDING_TIME_OPT, '\x00\x03\x75\xf0', 4),
        dhcp.option(dhcp.DHCP_SERVER_IDENTIFIER_OPT, '\xc0\xa8\x0a\x09', 4)]
    # 99.130.83.99 is the RFC 2131 magic cookie (0x63825363) in dotted form.
    magic_cookie = '99.130.83.99'
    options = dhcp.options(option_list=option_list, options_len=50,
                           magic_cookie=magic_cookie)
    dh = dhcp.dhcp(op, chaddr, options, htype=htype, hlen=hlen,
                   hops=hops, xid=xid, secs=secs, flags=flags,
                   ciaddr=ciaddr, yiaddr=yiaddr, siaddr=siaddr,
                   giaddr=giaddr, sname=sname, boot_file=boot_file)

    # Expected serialization of `dh`: fixed header, zero-padded sname
    # (64 bytes) and boot_file (128 bytes), magic cookie, then options.
    buf = "\x02\x01\x06\x00\x00\x00\x00\x01\x00\x00\x00\x01\xc0\xa8\x0a\x0a"\
        + "\xc0\xa8\x14\x14\xc0\xa8\x1e\x1e\xc0\xa8\x28\x28\xaa\xaa\xaa\xaa"\
        + "\xaa\xaa\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x61\x62\x63\x00"\
        + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"\
        + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"\
        + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"\
        + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"\
        + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"\
        + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"\
        + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"\
        + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"\
        + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"\
        + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"\
        + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"\
        + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x63\x82\x53\x63"\
        + "\x35\x01\x02\x01\x04\xff\xff\xff\x00\x03\x04\xc0\xa8\x0a\x09\x06"\
        + "\x04\xc0\xa8\x0a\x09\x33\x04\x00\x03\xf4\x80\x3a\x04\x00\x01\xfa"\
        + "\x40\x3b\x04\x00\x03\x75\xf0\x36\x04\xc0\xa8\x0a\x09\xff"

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_init(self):
        # The constructor must store every field unchanged.
        eq_(self.op, self.dh.op)
        eq_(self.htype, self.dh.htype)
        eq_(self.hlen, self.dh.hlen)
        eq_(self.hops, self.dh.hops)
        eq_(self.xid, self.dh.xid)
        eq_(self.secs, self.dh.secs)
        eq_(self.flags, self.dh.flags)
        eq_(self.ciaddr, self.dh.ciaddr)
        eq_(self.yiaddr, self.dh.yiaddr)
        eq_(self.siaddr, self.dh.siaddr)
        eq_(self.giaddr, self.dh.giaddr)
        eq_(self.chaddr, self.dh.chaddr)
        eq_(self.sname, self.dh.sname)
        eq_(self.boot_file, self.dh.boot_file)
        eq_(str(self.options), str(self.dh.options))

    def test_parser(self):
        _res = self.dh.parser(str(self.buf))
        # parser() may return (packet, protocol, rest); unwrap if so.
        if type(_res) is tuple:
            res = _res[0]
        else:
            res = _res
        eq_(self.op, res.op)
        eq_(self.htype, res.htype)
        eq_(self.hlen, res.hlen)
        eq_(self.hops, res.hops)
        eq_(self.xid, res.xid)
        eq_(self.secs, res.secs)
        eq_(self.flags, res.flags)
        eq_(self.ciaddr, res.ciaddr)
        eq_(self.yiaddr, res.yiaddr)
        eq_(self.siaddr, res.siaddr)
        eq_(self.giaddr, res.giaddr)
        eq_(self.chaddr, res.chaddr)
        # sname is 64 byte length. rest of data is filled by '\x00'.
        eq_(self.sname.ljust(64, '\x00'), res.sname)
        # boof_file is 128 byte length. rest of data is filled by '\x00'.
        eq_(self.boot_file.ljust(128, '\x00'), res.boot_file)
        eq_(str(self.options), str(res.options))

    def test_parser_corrupted(self):
        # Truncate the packet; the parser is expected to cope without
        # raising (no assertion on the result, only that it returns).
        buf = self.buf[:128 - (14 + 20 + 8)]
        _res = self.dh.parser(buf)

    def test_serialize(self):
        data = bytearray()
        prev = None
        buf = self.dh.serialize(data, prev)
        # Re-decode the fixed header and compare field by field.
        res = struct.unpack_from(dhcp.dhcp._DHCP_PACK_STR, str(buf))
        eq_(self.op, res[0])
        eq_(self.htype, res[1])
        eq_(self.hlen, res[2])
        eq_(self.hops, res[3])
        eq_(self.xid, res[4])
        eq_(self.secs, res[5])
        eq_(self.flags, res[6])
        eq_(self.ciaddr, addrconv.ipv4.bin_to_text(res[7]))
        eq_(self.yiaddr, addrconv.ipv4.bin_to_text(res[8]))
        eq_(self.siaddr, addrconv.ipv4.bin_to_text(res[9]))
        eq_(self.giaddr, addrconv.ipv4.bin_to_text(res[10]))
        eq_(self.chaddr, addrconv.mac.bin_to_text(res[11][:6]))
        # sname is 64 byte length. rest of data is filled by '\x00'.
        eq_(self.sname.ljust(64, '\x00'), res[12])
        # boof_file is 128 byte length. rest of data is filled by '\x00'.
        eq_(self.boot_file.ljust(128, '\x00'), res[13])
        options = dhcp.options.parser(
            buf[struct.calcsize(dhcp.dhcp._DHCP_PACK_STR):])
        eq_(str(self.options), str(options))

    def test_to_string(self):
        # Reconstruct the expected repr()/str() text from the fixture
        # values via introspection, then compare with the real output.
        option_values = ['tag', 'length', 'value']
        opt_str_list = []
        for option in self.option_list:
            _opt_str = ','.join(['%s=%s' % (k, repr(getattr(option, k)))
                                 for k, v in inspect.getmembers(option)
                                 if k in option_values])
            opt_str = '%s(%s)' % (dhcp.option.__name__, _opt_str)
            opt_str_list.append(opt_str)
        option_str = '[%s]' % ', '.join(opt_str_list)

        opts_vals = {'magic_cookie': repr(self.magic_cookie),
                     'option_list': option_str,
                     'options_len': repr(self.options.options_len)}
        _options_str = ','.join(['%s=%s' % (k, opts_vals[k])
                                 for k, v in inspect.getmembers(self.options)
                                 if k in opts_vals])
        options_str = '%s(%s)' % (dhcp.options.__name__, _options_str)

        dhcp_values = {'op': repr(self.op),
                       'htype': repr(self.htype),
                       'hlen': repr(self.hlen),
                       'hops': repr(self.hops),
                       'xid': repr(self.xid),
                       'secs': repr(self.secs),
                       'flags': repr(self.flags),
                       'ciaddr': repr(self.ciaddr),
                       'yiaddr': repr(self.yiaddr),
                       'siaddr': repr(self.siaddr),
                       'giaddr': repr(self.giaddr),
                       'chaddr': repr(self.chaddr),
                       'sname': repr(self.sname),
                       'boot_file': repr(self.boot_file),
                       'options': options_str}
        _dh_str = ','.join(['%s=%s' % (k, dhcp_values[k])
                            for k, v in inspect.getmembers(self.dh)
                            if k in dhcp_values])
        dh_str = '%s(%s)' % (dhcp.dhcp.__name__, _dh_str)
        eq_(str(self.dh), dh_str)
        eq_(repr(self.dh), dh_str)

    def test_json(self):
        # to_jsondict/from_jsondict must round-trip to an equal packet.
        jsondict = self.dh.to_jsondict()
        dh = dhcp.dhcp.from_jsondict(jsondict['dhcp'])
        eq_(str(self.dh), str(dh))
class Test_dhcp_offer_with_hlen_zero(unittest.TestCase):
    """Same fixture as Test_dhcp_offer, but the dhcp constructor is given
    hlen=0.  test_init still expects hlen == 6 — presumably dhcp derives
    hlen from chaddr when 0 is passed; confirm against ryu.lib.packet.dhcp."""

    op = dhcp.DHCP_BOOT_REPLY
    chaddr = 'aa:aa:aa:aa:aa:aa'
    htype = 1
    hlen = 6  # expected value, not the one passed to the constructor below
    hops = 0
    xid = 1
    secs = 0
    flags = 1
    ciaddr = '192.168.10.10'
    yiaddr = '192.168.20.20'
    siaddr = '192.168.30.30'
    giaddr = '192.168.40.40'
    sname = 'abc'
    boot_file = ''

    option_list = [
        dhcp.option(dhcp.DHCP_MESSAGE_TYPE_OPT, '\x02', 1),
        dhcp.option(dhcp.DHCP_SUBNET_MASK_OPT, '\xff\xff\xff\x00', 4),
        dhcp.option(dhcp.DHCP_GATEWAY_ADDR_OPT, '\xc0\xa8\x0a\x09', 4),
        dhcp.option(dhcp.DHCP_DNS_SERVER_ADDR_OPT, '\xc0\xa8\x0a\x09', 4),
        dhcp.option(dhcp.DHCP_IP_ADDR_LEASE_TIME_OPT, '\x00\x03\xf4\x80', 4),
        dhcp.option(dhcp.DHCP_RENEWAL_TIME_OPT, '\x00\x01\xfa\x40', 4),
        dhcp.option(dhcp.DHCP_REBINDING_TIME_OPT, '\x00\x03\x75\xf0', 4),
        dhcp.option(dhcp.DHCP_SERVER_IDENTIFIER_OPT, '\xc0\xa8\x0a\x09', 4)]
    magic_cookie = '99.130.83.99'
    options = dhcp.options(option_list=option_list, options_len=50,
                           magic_cookie=magic_cookie)
    # Note: hlen=0 here, unlike the class attribute above.
    dh = dhcp.dhcp(op, chaddr, options, htype=htype, hlen=0,
                   hops=hops, xid=xid, secs=secs, flags=flags,
                   ciaddr=ciaddr, yiaddr=yiaddr, siaddr=siaddr,
                   giaddr=giaddr, sname=sname, boot_file=boot_file)

    def test_init(self):
        eq_(self.op, self.dh.op)
        eq_(self.htype, self.dh.htype)
        eq_(self.hlen, self.dh.hlen)
        eq_(self.hops, self.dh.hops)
        eq_(self.xid, self.dh.xid)
        eq_(self.secs, self.dh.secs)
        eq_(self.flags, self.dh.flags)
        eq_(self.ciaddr, self.dh.ciaddr)
        eq_(self.yiaddr, self.dh.yiaddr)
        eq_(self.siaddr, self.dh.siaddr)
        eq_(self.giaddr, self.dh.giaddr)
        eq_(self.chaddr, self.dh.chaddr)
        eq_(self.sname, self.dh.sname)
        eq_(self.boot_file, self.dh.boot_file)
        eq_(str(self.options), str(self.dh.options))
| apache-2.0 |
jeffzheng1/tensorflow | tensorflow/python/ops/data_flow_ops.py | 4 | 58078 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#==============================================================================
"""Data Flow Operations."""
# pylint: disable=g-bad-name
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import hashlib
import re
import six
from tensorflow.python.framework import common_shapes
from tensorflow.python.framework import dtypes as _dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import gen_data_flow_ops
from tensorflow.python.ops import math_ops
# go/tf-wildcard-import
# pylint: disable=wildcard-import
from tensorflow.python.ops.gen_data_flow_ops import *
# pylint: enable=wildcard-import
def _as_type_list(dtypes):
"""Convert dtypes to a list of types."""
assert dtypes is not None
if not (isinstance(dtypes, list) or isinstance(dtypes, tuple)):
# We have a single type.
return [dtypes]
else:
# We have a list or tuple of types.
return list(dtypes)
def _as_shape_list(shapes, dtypes, unknown_dim_allowed=False,
unknown_rank_allowed=False):
"""Convert shapes to a list of tuples of int (or None)."""
if unknown_dim_allowed:
if (not isinstance(shapes, collections.Sequence)
or not shapes
or any(shape is None or isinstance(shape, int) for shape in shapes)):
raise ValueError(
"When providing partial shapes, a list of shapes must be provided.")
if shapes is None: return None
if isinstance(shapes, tensor_shape.TensorShape):
shapes = [shapes]
if not isinstance(shapes, (tuple, list)):
raise TypeError(
"shapes must be a TensorShape or a list or tuple of TensorShapes.")
if all(shape is None or isinstance(shape, int) for shape in shapes):
# We have a single shape.
shapes = [shapes]
shapes = [tensor_shape.as_shape(shape) for shape in shapes]
if not unknown_dim_allowed:
if any([not shape.is_fully_defined() for shape in shapes]):
raise ValueError("All shapes must be fully defined: %s" % shapes)
if not unknown_rank_allowed:
if any([shape.dims is None for shape in shapes]):
raise ValueError("All shapes must have a defined rank: %s" % shapes)
return shapes
def _as_name_list(names, dtypes):
if names is None:
return None
if not isinstance(names, (list, tuple)):
names = [names]
if len(names) != len(dtypes):
raise ValueError("List of names must have the same length as the list "
"of dtypes")
return list(names)
def _shape_common(s1, s2):
  """The greatest lower bound (ordered by specificity) TensorShape."""
  shape_a = tensor_shape.TensorShape(s1)
  shape_b = tensor_shape.TensorShape(s2)
  # Shapes of unknown or differing rank have no common refinement.
  if (shape_a.ndims is None or shape_b.ndims is None
      or shape_a.ndims != shape_b.ndims):
    return tensor_shape.unknown_shape()
  # Keep each dimension only where both shapes agree on a known size.
  common_dims = []
  for dim_a, dim_b in zip(shape_a.as_list(), shape_b.as_list()):
    common_dims.append(dim_a if dim_a is not None and dim_a == dim_b else None)
  return tensor_shape.TensorShape(common_dims)
# pylint: disable=protected-access
class QueueBase(object):
"""Base class for queue implementations.
A queue is a TensorFlow data structure that stores tensors across
multiple steps, and exposes operations that enqueue and dequeue
tensors.
Each queue element is a tuple of one or more tensors, where each
tuple component has a static dtype, and may have a static shape. The
queue implementations support versions of enqueue and dequeue that
handle single elements, versions that support enqueuing and
dequeuing a batch of elements at once.
See [`tf.FIFOQueue`](#FIFOQueue) and
[`tf.RandomShuffleQueue`](#RandomShuffleQueue) for concrete
implementations of this class, and instructions on how to create
them.
@@enqueue
@@enqueue_many
@@dequeue
@@dequeue_many
@@size
@@close
"""
  def __init__(self, dtypes, shapes, names, queue_ref):
    """Constructs a queue object from a queue reference.

    The two optional lists, `shapes` and `names`, must be of the same length
    as `dtypes` if provided.  The values at a given index `i` indicate the
    shape and name to use for the corresponding queue component in `dtypes`.

    Args:
      dtypes: A list of types.  The length of dtypes must equal the number
        of tensors in each element.
      shapes: Constraints on the shapes of tensors in an element:
        A list of shape tuples or None. This list is the same length
        as dtypes.  If the shape of any tensors in the element are constrained,
        all must be; shapes can be None if the shapes should not be constrained.
      names: Optional list of names.  If provided, the `enqueue()` and
        `dequeue()` methods will use dictionaries with these names as keys.
        Must be None or a list or tuple of the same length as `dtypes`.
      queue_ref: The queue reference, i.e. the output of the queue op.

    Raises:
      ValueError: If one of the arguments is invalid.
    """
    self._dtypes = dtypes
    if shapes is not None:
      if len(shapes) != len(dtypes):
        raise ValueError("Queue shapes must have the same length as dtypes")
      self._shapes = [tensor_shape.TensorShape(s) for s in shapes]
    else:
      # No shape constraints given: every component is fully unknown.
      self._shapes = [tensor_shape.unknown_shape() for _ in self._dtypes]
    if names is not None:
      if len(names) != len(dtypes):
        raise ValueError("Queue names must have the same length as dtypes")
      self._names = names
    else:
      self._names = None
    self._queue_ref = queue_ref
    # Short name (last path component of the queue op's name); used as a
    # prefix when constructing op names such as "<name>_enqueue".
    self._name = self._queue_ref.op.name.split("/")[-1]
  @staticmethod
  def from_list(index, queues):
    """Create a queue using the queue reference from `queues[index]`.

    Args:
      index: An integer scalar tensor that determines the input that gets
        selected.
      queues: A list of `QueueBase` objects.

    Returns:
      A `QueueBase` object.

    Raises:
      TypeError: When `queues` is not a list of `QueueBase` objects,
        or when the data types of `queues` are not all the same.
    """
    if ((not queues) or
        (not isinstance(queues, list)) or
        (not all(isinstance(x, QueueBase) for x in queues))):
      raise TypeError("A list of queues expected")

    dtypes = queues[0].dtypes
    if not all([dtypes == q.dtypes for q in queues[1:]]):
      raise TypeError("Queues do not have matching component dtypes.")

    names = queues[0].names
    if not all([names == q.names for q in queues[1:]]):
      raise TypeError("Queues do not have matching component names.")

    # For each component, keep only the dimensions on which all queues
    # agree; disagreeing dimensions become unknown.
    queue_shapes = [q.shapes for q in queues]
    reduced_shapes = [
        six.moves.reduce(_shape_common, s) for s in zip(*queue_shapes)]

    queue_refs = [x.queue_ref for x in queues]
    # ref_select dynamically routes to the queue chosen by `index` at runtime.
    selected_queue = control_flow_ops.ref_select(index, queue_refs)
    return QueueBase(dtypes=dtypes, shapes=reduced_shapes, names=names,
                     queue_ref=selected_queue)
  @property
  def queue_ref(self):
    """The underlying queue reference (mutable ref `Tensor` of the queue op)."""
    return self._queue_ref

  @property
  def name(self):
    """The name of the underlying queue."""
    return self._queue_ref.op.name

  @property
  def dtypes(self):
    """The list of dtypes for each component of a queue element."""
    return self._dtypes

  @property
  def shapes(self):
    """The list of shapes for each component of a queue element."""
    return self._shapes

  @property
  def names(self):
    """The list of names for each component of a queue element (or None)."""
    return self._names
  def _check_enqueue_dtypes(self, vals):
    """Validate and convert `vals` to a list of `Tensor`s.

    The `vals` argument can be a Tensor, a list or tuple of tensors, or a
    dictionary with tensor values.

    If it is a dictionary, the queue must have been constructed with a
    `names` attribute and the dictionary keys must match the queue names.
    If the queue was constructed with a `names` attribute, `vals` must
    be a dictionary.

    Args:
      vals: A tensor, a list or tuple of tensors, or a dictionary.

    Returns:
      A list of `Tensor` objects.

    Raises:
      ValueError: If `vals` is invalid.
    """
    if isinstance(vals, dict):
      if not self._names:
        raise ValueError("Queue must have names to enqueue a dictionary")
      if sorted(self._names) != sorted(vals.keys()):
        raise ValueError("Keys in dictionary to enqueue do not match "
                         "names of Queue.  Dictionary: (%s), Queue: (%s)" %
                         (sorted(vals.keys()), sorted(self._names)))
      # The order of values in `self._names` indicates the order in which the
      # tensors in the dictionary `vals` must be listed.
      vals = [vals[k] for k in self._names]
    else:
      if self._names:
        raise ValueError("You must enqueue a dictionary in a Queue with names")
      if not isinstance(vals, (list, tuple)):
        vals = [vals]

    # Convert each value to a Tensor of the queue's declared dtype; a
    # mismatch surfaces here as a conversion error.
    tensors = []
    for i, (val, dtype) in enumerate(zip(vals, self._dtypes)):
      tensors.append(ops.convert_to_tensor(val, dtype=dtype,
                                           name="component_%d" % i))

    return tensors
def _scope_vals(self, vals):
"""Return a list of values to pass to `name_scope()`.
Args:
vals: A tensor, a list or tuple of tensors, or a dictionary.
Returns:
The values in vals as a list.
"""
if isinstance(vals, (list, tuple)):
return vals
elif isinstance(vals, dict):
return vals.values()
else:
return [vals]
  def enqueue(self, vals, name=None):
    """Enqueues one element to this queue.

    If the queue is full when this operation executes, it will block
    until the element has been enqueued.

    At runtime, this operation may raise an error if the queue is
    [closed](#QueueBase.close) before or during its execution. If the
    queue is closed before this operation runs,
    `tf.errors.CancelledError` will be raised. If this operation is
    blocked, and either (i) the queue is closed by a close operation
    with `cancel_pending_enqueues=True`, or (ii) the session is
    [closed](../../api_docs/python/client.md#Session.close),
    `tf.errors.CancelledError` will be raised.

    Args:
      vals: A tensor, a list or tuple of tensors, or a dictionary containing
        the values to enqueue.
      name: A name for the operation (optional).

    Returns:
      The operation that enqueues a new tuple of tensors to the queue.
    """
    with ops.name_scope(name, "%s_enqueue" % self._name,
                        self._scope_vals(vals)) as scope:
      vals = self._check_enqueue_dtypes(vals)

      # NOTE(mrry): Not using a shape function because we need access to
      # the `QueueBase` object.
      # Fail at graph-construction time if a value's static shape is
      # incompatible with the queue's declared component shape.
      for val, shape in zip(vals, self._shapes):
        val.get_shape().assert_is_compatible_with(shape)

      return gen_data_flow_ops._queue_enqueue(self._queue_ref, vals, name=scope)
def enqueue_many(self, vals, name=None):
  """Enqueues zero or more elements to this queue.

  This operation slices each component tensor along the 0th dimension to
  make multiple queue elements. All of the tensors in `vals` must have the
  same size in the 0th dimension.

  If the queue is full when this operation executes, it will block
  until all of the elements have been enqueued.

  At runtime, this operation may raise an error if the queue is
  [closed](#QueueBase.close) before or during its execution. If the
  queue is closed before this operation runs,
  `tf.errors.CancelledError` will be raised. If this operation is
  blocked, and either (i) the queue is closed by a close operation
  with `cancel_pending_enqueues=True`, or (ii) the session is
  [closed](../../api_docs/python/client.md#Session.close),
  `tf.errors.CancelledError` will be raised.

  Args:
    vals: A tensor, a list or tuple of tensors, or a dictionary
      from which the queue elements are taken.
    name: A name for the operation (optional).

  Returns:
    The operation that enqueues a batch of tuples of tensors to the queue.
  """
  with ops.name_scope(name, "%s_EnqueueMany" % self._name,
                      self._scope_vals(vals)) as scope:
    vals = self._check_enqueue_dtypes(vals)
    # NOTE(mrry): Not using a shape function because we need access to
    # the `QueueBase` object.
    # All components must agree on their leading (batch) dimension; merge it
    # across every component while also checking the per-element shapes.
    batch_size = vals[0].get_shape().with_rank_at_least(1)[0]
    for component, component_shape in zip(vals, self._shapes):
      batch_size = batch_size.merge_with(
          component.get_shape().with_rank_at_least(1)[0])
      component.get_shape()[1:].assert_is_compatible_with(component_shape)
    return gen_data_flow_ops._queue_enqueue_many(
        self._queue_ref, vals, name=scope)
def _dequeue_return_value(self, tensors):
"""Return the value to return from a dequeue op.
If the queue has names, return a dictionary with the
names as keys. Otherwise return either a single tensor
or a list of tensors depending on the length of `tensors`.
Args:
tensors: List of tensors from the dequeue op.
Returns:
A single tensor, a list of tensors, or a dictionary
of tensors.
"""
if self._names:
# The returned values in `tensors` are in the same order as
# the names in `self._names`.
return {n: tensors[i] for i, n in enumerate(self._names)}
elif len(tensors) == 1:
return tensors[0]
else:
return tensors
def dequeue(self, name=None):
  """Dequeues one element from this queue.

  If the queue is empty when this operation executes, it will block
  until there is an element to dequeue.

  At runtime, this operation may raise an error if the queue is
  [closed](#QueueBase.close) before or during its execution. If the
  queue is closed, the queue is empty, and there are no pending
  enqueue operations that can fulfill this request,
  `tf.errors.OutOfRangeError` will be raised. If the session is
  [closed](../../api_docs/python/client.md#Session.close),
  `tf.errors.CancelledError` will be raised.

  Args:
    name: A name for the operation (optional).

  Returns:
    The tuple of tensors that was dequeued.
  """
  if name is None:
    name = "%s_Dequeue" % self._name
  dequeued = gen_data_flow_ops._queue_dequeue(
      self._queue_ref, self._dtypes, name=name)

  # NOTE(mrry): Not using a shape function because we need access to
  # the `QueueBase` object.
  dequeue_op = dequeued[0].op
  for component, component_shape in zip(dequeue_op.values(), self._shapes):
    component.set_shape(component_shape)

  return self._dequeue_return_value(dequeued)
def dequeue_many(self, n, name=None):
  """Dequeues and concatenates `n` elements from this queue.

  This operation concatenates queue-element component tensors along
  the 0th dimension to make a single component tensor. All of the
  components in the dequeued tuple will have size `n` in the 0th dimension.

  If the queue is closed and there are less than `n` elements left, then an
  `OutOfRange` exception is raised.

  At runtime, this operation may raise an error if the queue is
  [closed](#QueueBase.close) before or during its execution. If the
  queue is closed, the queue contains fewer than `n` elements, and
  there are no pending enqueue operations that can fulfill this
  request, `tf.errors.OutOfRangeError` will be raised. If the
  session is [closed](../../api_docs/python/client.md#Session.close),
  `tf.errors.CancelledError` will be raised.

  Args:
    n: A scalar `Tensor` containing the number of elements to dequeue.
    name: A name for the operation (optional).

  Returns:
    The tuple of concatenated tensors that was dequeued.
  """
  if name is None:
    name = "%s_DequeueMany" % self._name
  dequeued = gen_data_flow_ops._queue_dequeue_many(
      self._queue_ref, n=n, component_types=self._dtypes, name=name)

  # NOTE(mrry): Not using a shape function because we need access to
  # the Queue object.
  dequeue_op = dequeued[0].op
  # Recover the static batch size from the op's `n` input when it is constant.
  batch_dim = tensor_shape.Dimension(
      tensor_util.constant_value(dequeue_op.inputs[1]))
  leading = tensor_shape.TensorShape([batch_dim])
  for component, component_shape in zip(dequeue_op.values(), self._shapes):
    component.set_shape(leading.concatenate(component_shape))

  return self._dequeue_return_value(dequeued)
def dequeue_up_to(self, n, name=None):
  """Dequeues and concatenates `n` elements from this queue.

  **Note** This operation is not supported by all queues. If a queue does not
  support DequeueUpTo, then a `tf.errors.UnimplementedError` is raised.

  This operation concatenates queue-element component tensors along
  the 0th dimension to make a single component tensor. If the queue
  has not been closed, all of the components in the dequeued tuple
  will have size `n` in the 0th dimension.

  If the queue is closed and there are more than `0` but fewer than
  `n` elements remaining, then instead of raising a
  `tf.errors.OutOfRangeError` like [`dequeue_many`](#QueueBase.dequeue_many),
  less than `n` elements are returned immediately. If the queue is
  closed and there are `0` elements left in the queue, then a
  `tf.errors.OutOfRangeError` is raised just like in `dequeue_many`.
  Otherwise the behavior is identical to `dequeue_many`.

  Args:
    n: A scalar `Tensor` containing the number of elements to dequeue.
    name: A name for the operation (optional).

  Returns:
    The tuple of concatenated tensors that was dequeued.
  """
  if name is None:
    name = "%s_DequeueUpTo" % self._name
  dequeued = gen_data_flow_ops._queue_dequeue_up_to(
      self._queue_ref, n=n, component_types=self._dtypes, name=name)

  # NOTE(mrry): Not using a shape function because we need access to
  # the Queue object.
  # The batch dimension is unknown statically: fewer than `n` elements may
  # be returned when the queue is closed.
  dequeue_op = dequeued[0].op
  leading = tensor_shape.TensorShape([None])
  for component, component_shape in zip(dequeue_op.values(), self._shapes):
    component.set_shape(leading.concatenate(component_shape))

  return self._dequeue_return_value(dequeued)
def close(self, cancel_pending_enqueues=False, name=None):
  """Closes this queue.

  This operation signals that no more elements will be enqueued in
  the given queue. Subsequent `enqueue` and `enqueue_many`
  operations will fail. Subsequent `dequeue` and `dequeue_many`
  operations will continue to succeed if sufficient elements remain
  in the queue. Subsequent `dequeue` and `dequeue_many` operations
  that would block will fail immediately.

  If `cancel_pending_enqueues` is `True`, all pending requests will also
  be cancelled.

  Args:
    cancel_pending_enqueues: (Optional.) A boolean, defaulting to
      `False` (described above).
    name: A name for the operation (optional).

  Returns:
    The operation that closes the queue.
  """
  if name is None:
    name = "%s_Close" % self._name
  return gen_data_flow_ops._queue_close(
      self._queue_ref,
      cancel_pending_enqueues=cancel_pending_enqueues,
      name=name)
def size(self, name=None):
  """Compute the number of elements in this queue.

  Args:
    name: A name for the operation (optional).

  Returns:
    A scalar tensor containing the number of elements in this queue.
  """
  op_name = "%s_Size" % self._name if name is None else name
  return gen_data_flow_ops._queue_size(self._queue_ref, name=op_name)
class RandomShuffleQueue(QueueBase):
  """A queue implementation that dequeues elements in a random order.

  See [`tf.QueueBase`](#QueueBase) for a description of the methods on
  this class.

  @@__init__
  """

  def __init__(self, capacity, min_after_dequeue, dtypes, shapes=None,
               names=None, seed=None, shared_name=None,
               name="random_shuffle_queue"):
    """Create a queue that dequeues elements in a random order.

    A `RandomShuffleQueue` has bounded capacity; supports multiple
    concurrent producers and consumers; and provides exactly-once
    delivery.

    A `RandomShuffleQueue` holds a list of up to `capacity`
    elements. Each element is a fixed-length tuple of tensors whose
    dtypes are described by `dtypes`, and whose shapes are optionally
    described by the `shapes` argument.

    If the `shapes` argument is specified, each component of a queue
    element must have the respective fixed shape. If it is
    unspecified, different queue elements may have different shapes,
    but the use of `dequeue_many` is disallowed.

    The `min_after_dequeue` argument allows the caller to specify a
    minimum number of elements that will remain in the queue after a
    `dequeue` or `dequeue_many` operation completes, to ensure a
    minimum level of mixing of elements. This invariant is maintained
    by blocking those operations until sufficient elements have been
    enqueued. The `min_after_dequeue` argument is ignored after the
    queue has been closed.

    Args:
      capacity: An integer. The upper bound on the number of elements
        that may be stored in this queue.
      min_after_dequeue: An integer (described above).
      dtypes: A list of `DType` objects. The length of `dtypes` must equal
        the number of tensors in each queue element.
      shapes: (Optional.) A list of fully-defined `TensorShape` objects
        with the same length as `dtypes`, or `None`.
      names: (Optional.) A list of string naming the components in the queue
        with the same length as `dtypes`, or `None`. If specified the dequeue
        methods return a dictionary with the names as keys.
      seed: A Python integer. Used to create a random seed. See
        [`set_random_seed`](../../api_docs/python/constant_op.md#set_random_seed)
        for behavior.
      shared_name: (Optional.) If non-empty, this queue will be shared under
        the given name across multiple sessions.
      name: Optional name for the queue operation.
    """
    component_dtypes = _as_type_list(dtypes)
    component_shapes = _as_shape_list(shapes, component_dtypes)
    component_names = _as_name_list(names, component_dtypes)
    seed1, seed2 = random_seed.get_seed(seed)
    if seed1 is None and seed2 is None:
      # No graph-level or op-level seed: let the kernel pick its own seed.
      seed1, seed2 = 0, 0
    elif seed is None and shared_name is not None:
      # This means that graph seed is provided but op seed is not provided.
      # If shared_name is also provided, make seed2 depend only on the graph
      # seed and shared_name. (seed2 from get_seed() is generally dependent on
      # the id of the last op created.)
      hash_input = (str(seed1) + shared_name).encode("utf-8")
      seed2 = int(hashlib.md5(hash_input).hexdigest()[:8], 16) & 0x7FFFFFFF
    queue_ref = gen_data_flow_ops._random_shuffle_queue(
        component_types=component_dtypes, shapes=component_shapes,
        capacity=capacity, min_after_dequeue=min_after_dequeue,
        seed=seed1, seed2=seed2, shared_name=shared_name, name=name)
    super(RandomShuffleQueue, self).__init__(
        component_dtypes, component_shapes, component_names, queue_ref)
class FIFOQueue(QueueBase):
  """A queue implementation that dequeues elements in first-in first-out order.

  See [`tf.QueueBase`](#QueueBase) for a description of the methods on
  this class.

  @@__init__
  """

  def __init__(self, capacity, dtypes, shapes=None, names=None,
               shared_name=None, name="fifo_queue"):
    """Creates a queue that dequeues elements in a first-in first-out order.

    A `FIFOQueue` has bounded capacity; supports multiple concurrent
    producers and consumers; and provides exactly-once delivery.

    A `FIFOQueue` holds a list of up to `capacity` elements. Each
    element is a fixed-length tuple of tensors whose dtypes are
    described by `dtypes`, and whose shapes are optionally described
    by the `shapes` argument.

    If the `shapes` argument is specified, each component of a queue
    element must have the respective fixed shape. If it is
    unspecified, different queue elements may have different shapes,
    but the use of `dequeue_many` is disallowed.

    Args:
      capacity: An integer. The upper bound on the number of elements
        that may be stored in this queue.
      dtypes: A list of `DType` objects. The length of `dtypes` must equal
        the number of tensors in each queue element.
      shapes: (Optional.) A list of fully-defined `TensorShape` objects
        with the same length as `dtypes`, or `None`.
      names: (Optional.) A list of string naming the components in the queue
        with the same length as `dtypes`, or `None`. If specified the dequeue
        methods return a dictionary with the names as keys.
      shared_name: (Optional.) If non-empty, this queue will be shared under
        the given name across multiple sessions.
      name: Optional name for the queue operation.
    """
    component_dtypes = _as_type_list(dtypes)
    component_shapes = _as_shape_list(shapes, component_dtypes)
    component_names = _as_name_list(names, component_dtypes)
    queue_ref = gen_data_flow_ops._fifo_queue(
        component_types=component_dtypes, shapes=component_shapes,
        capacity=capacity, shared_name=shared_name, name=name)
    super(FIFOQueue, self).__init__(
        component_dtypes, component_shapes, component_names, queue_ref)
class PaddingFIFOQueue(QueueBase):
  """A FIFOQueue that supports batching variable-sized tensors by padding.

  A `PaddingFIFOQueue` may contain components with dynamic shape, while also
  supporting `dequeue_many`. See the constructor for more details.

  See [`tf.QueueBase`](#QueueBase) for a description of the methods on
  this class.

  @@__init__
  """

  def __init__(self, capacity, dtypes, shapes, names=None, shared_name=None,
               name="padding_fifo_queue"):
    """Creates a queue that dequeues elements in a first-in first-out order.

    A `PaddingFIFOQueue` has bounded capacity; supports multiple concurrent
    producers and consumers; and provides exactly-once delivery.

    A `PaddingFIFOQueue` holds a list of up to `capacity` elements. Each
    element is a fixed-length tuple of tensors whose dtypes are
    described by `dtypes`, and whose shapes are described by the `shapes`
    argument.

    The `shapes` argument must be specified; each component of a queue
    element must have the respective shape. Shapes of fixed
    rank but variable size are allowed by setting any shape dimension to None.
    In this case, the inputs' shape may vary along the given dimension, and
    `dequeue_many` will pad the given dimension with zeros up to the maximum
    shape of all elements in the given batch.

    Args:
      capacity: An integer. The upper bound on the number of elements
        that may be stored in this queue.
      dtypes: A list of `DType` objects. The length of `dtypes` must equal
        the number of tensors in each queue element.
      shapes: A list of `TensorShape` objects, with the same length as
        `dtypes`. Any dimension in the `TensorShape` containing value
        `None` is dynamic and allows values to be enqueued with
        variable size in that dimension.
      names: (Optional.) A list of string naming the components in the queue
        with the same length as `dtypes`, or `None`. If specified the dequeue
        methods return a dictionary with the names as keys.
      shared_name: (Optional.) If non-empty, this queue will be shared under
        the given name across multiple sessions.
      name: Optional name for the queue operation.

    Raises:
      ValueError: If shapes is not a list of shapes, or the lengths of dtypes
        and shapes do not match, or if names is specified and the lengths of
        dtypes and names do not match.
    """
    component_dtypes = _as_type_list(dtypes)
    # Unknown dimensions are allowed here: that is what makes padding work.
    component_shapes = _as_shape_list(
        shapes, component_dtypes, unknown_dim_allowed=True)
    component_names = _as_name_list(names, component_dtypes)
    if len(component_dtypes) != len(component_shapes):
      raise ValueError("Shapes must be provided for all components, "
                       "but received %d dtypes and %d shapes."
                       % (len(component_dtypes), len(component_shapes)))
    queue_ref = gen_data_flow_ops._padding_fifo_queue(
        component_types=component_dtypes, shapes=component_shapes,
        capacity=capacity, shared_name=shared_name, name=name)
    super(PaddingFIFOQueue, self).__init__(
        component_dtypes, component_shapes, component_names, queue_ref)
class PriorityQueue(QueueBase):
  """A queue implementation that dequeues elements in prioritized order.

  See [`tf.QueueBase`](#QueueBase) for a description of the methods on
  this class.

  @@__init__
  """

  def __init__(self, capacity, types, shapes=None, names=None, shared_name=None,
               name="priority_queue"):
    """Creates a queue that dequeues elements in a first-in first-out order.

    A `PriorityQueue` has bounded capacity; supports multiple concurrent
    producers and consumers; and provides exactly-once delivery.

    A `PriorityQueue` holds a list of up to `capacity` elements. Each
    element is a fixed-length tuple of tensors whose dtypes are
    described by `types`, and whose shapes are optionally described
    by the `shapes` argument.

    If the `shapes` argument is specified, each component of a queue
    element must have the respective fixed shape. If it is
    unspecified, different queue elements may have different shapes,
    but the use of `dequeue_many` is disallowed.

    Enqueues and Dequeues to the `PriorityQueue` must include an additional
    tuple entry at the beginning: the `priority`. The priority must be
    an int64 scalar (for `enqueue`) or an int64 vector (for `enqueue_many`).

    Args:
      capacity: An integer. The upper bound on the number of elements
        that may be stored in this queue.
      types: A list of `DType` objects. The length of `types` must equal
        the number of tensors in each queue element, except the first priority
        element. The first tensor in each element is the priority,
        which must be type int64.
      shapes: (Optional.) A list of fully-defined `TensorShape` objects,
        with the same length as `types`, or `None`.
      names: (Optional.) A list of strings naming the components in the queue
        with the same length as `dtypes`, or `None`. If specified, the dequeue
        methods return a dictionary with the names as keys.
      shared_name: (Optional.) If non-empty, this queue will be shared under
        the given name across multiple sessions.
      name: Optional name for the queue operation.
    """
    component_types = _as_type_list(types)
    component_shapes = _as_shape_list(shapes, component_types)
    queue_ref = gen_data_flow_ops._priority_queue(
        component_types=component_types, shapes=component_shapes,
        capacity=capacity, shared_name=shared_name, name=name)
    # The kernel prepends an int64 priority component to every element, so
    # the base class must see dtypes/shapes that include it.
    priority_dtypes = [_dtypes.int64] + component_types
    priority_shapes = ([()] + component_shapes) if component_shapes \
        else component_shapes
    super(PriorityQueue, self).__init__(
        priority_dtypes, priority_shapes, names, queue_ref)
# TODO(josh11b): class BatchQueue(QueueBase):
class Barrier(object):
  """Represents a key-value map that persists across graph executions."""

  def __init__(self, types, shapes=None, shared_name=None, name="barrier"):
    """Creates a barrier that persists across different graph executions.

    A barrier represents a key-value map, where each key is a string, and
    each value is a tuple of tensors.

    At runtime, the barrier contains 'complete' and 'incomplete'
    elements. A complete element has defined tensors for all
    components of its value tuple, and may be accessed using
    take_many. An incomplete element has some undefined components in
    its value tuple, and may be updated using insert_many.

    The barrier call `take_many` outputs values in a particular order.
    First, it only outputs completed values. Second, the order in which
    completed values are returned matches the order in which their very
    first component was inserted into the barrier. So, for example, for this
    sequence of insertions and removals:

      barrier = Barrier((tf.string, tf.int32), shapes=((), ()))
      barrier.insert_many(0, keys=["k1", "k2"], values=["a", "b"]).run()
      barrier.insert_many(1, keys=["k1"], values=[1]).run()
      barrier.insert_many(0, keys=["k3"], values=["c"]).run()
      barrier.insert_many(1, keys=["k3"], values=[3]).run()
      barrier.insert_many(1, keys=["k2"], values=[2]).run()

      (indices, keys, values) = barrier.take_many(2)
      (indices_val, keys_val, values0_val, values1_val) =
         session.run([indices, keys, values[0], values[1]])

    The output will be (up to permutation of "k1" and "k2"):

      indices_val == (-2**63, -2**63)
      keys_val == ("k1", "k2")
      values0_val == ("a", "b")
      values1_val == (1, 2)

    Note the key "k2" was inserted into the barrier before "k3". Even though
    "k3" was completed first, both are complete by the time
    take_many is called. As a result, "k2" is prioritized and "k1" and "k2"
    are returned first. "k3" remains in the barrier until the next execution
    of `take_many`. Since "k1" and "k2" had their first insertions into
    the barrier together, their indices are the same (-2**63). The index
    of "k3" will be -2**63 + 1, because it was the next new inserted key.

    Args:
      types: A single dtype or a tuple of dtypes, corresponding to the
        dtypes of the tensor elements that comprise a value in this barrier.
      shapes: Optional. Constraints on the shapes of tensors in the values:
        a single tensor shape tuple; a tuple of tensor shape tuples
        for each barrier-element tuple component; or None if the shape should
        not be constrained.
      shared_name: Optional. If non-empty, this barrier will be shared under
        the given name across multiple sessions.
      name: Optional name for the barrier op.

    Raises:
      ValueError: If one of the `shapes` indicate no elements.
    """
    self._types = _as_type_list(types)
    if shapes is not None:
      shapes = _as_shape_list(shapes, self._types)
      self._shapes = [tensor_shape.TensorShape(s) for s in shapes]
      # Zero-element components cannot be stored or taken from a barrier,
      # so reject them eagerly at construction time.
      for i, shape in enumerate(self._shapes):
        if shape.num_elements() == 0:
          raise ValueError("Empty tensors are not supported, but received "
                           "shape '%s' at index %d" % (shape, i))
    else:
      # No constraints given: each component gets a fully-unknown shape.
      self._shapes = [tensor_shape.unknown_shape() for _ in self._types]
    self._barrier_ref = gen_data_flow_ops._barrier(
        component_types=self._types, shapes=self._shapes,
        shared_name=shared_name, name=name)
    # Use the last path component of the op name as a prefix for derived ops.
    self._name = self._barrier_ref.op.name.split("/")[-1]

  @property
  def barrier_ref(self):
    """Get the underlying barrier reference."""
    return self._barrier_ref

  @property
  def name(self):
    """The name of the underlying barrier."""
    return self._barrier_ref.op.name

  def insert_many(self, component_index, keys, values, name=None):
    """For each key, assigns the respective value to the specified component.

    This operation updates each element at component_index.

    Args:
      component_index: The component of the value that is being assigned.
      keys: A vector of keys, with length n.
      values: An any-dimensional tensor of values, which are associated with the
        respective keys. The first dimension must have length n.
      name: Optional name for the op.

    Returns:
      The operation that performs the insertion.

    Raises:
      InvalidArgumentsError: If inserting keys and values without elements.
    """
    if name is None:
      name = "%s_BarrierInsertMany" % self._name
    return gen_data_flow_ops._barrier_insert_many(
        self._barrier_ref, keys, values, component_index, name=name)

  def take_many(self,
                num_elements,
                allow_small_batch=False,
                timeout=None,
                name=None):
    """Takes the given number of completed elements from this barrier.

    This operation concatenates completed-element component tensors along
    the 0th dimension to make a single component tensor.

    If barrier has no completed elements, this operation will block
    until there are 'num_elements' elements to take.

    Args:
      num_elements: The number of elements to take.
      allow_small_batch: If the barrier is closed, don't block if there are less
        completed elements than requested, but instead return all available
        completed elements.
        TODO(b/25743580): the semantics of `allow_small_batch` are experimental
        and may be extended to other cases in the future.
        TODO(ebrevdo): If a take_many(allow_small_batch=True) is blocking
        already when the barrier is closed, it will block for ever. Fix this
        by using asynchronous operations.
      timeout: This specifies the number of milliseconds to block
        before returning with DEADLINE_EXCEEDED. (This option is not
        supported yet.)
      name: A name for the operation (optional).

    Returns:
      A tuple of (index, key, value_list).
      "index" is a int64 tensor of length num_elements containing the
        index of the insert_many call for which the very first component of
        the given element was inserted into the Barrier, starting with
        the value -2**63.  Note, this value is different from the
        index of the insert_many call for which the element was completed.
      "key" is a string tensor of length num_elements containing the keys.
      "value_list" is a tuple of tensors, each one with size num_elements
        in the 0th dimension for each component in the barrier's values.
    """
    if name is None:
      name = "%s_BarrierTakeMany" % self._name
    ret = gen_data_flow_ops._barrier_take_many(self._barrier_ref,
                                               num_elements,
                                               self._types,
                                               allow_small_batch,
                                               timeout,
                                               name=name)

    # NOTE(mrry): Not using a shape function because we need access to
    # the Barrier object.
    op = ret[0].op
    if allow_small_batch:
      # A closed barrier may return fewer than `num_elements` elements, so
      # the batch dimension cannot be known statically.
      batch_dim = None
    else:
      # Recover the static batch size from the op's num_elements input when
      # it is a constant.
      batch_dim = tensor_shape.Dimension(
          tensor_util.constant_value(op.inputs[1]))
    op.outputs[0].set_shape(tensor_shape.vector(batch_dim))  # indices
    op.outputs[1].set_shape(tensor_shape.vector(batch_dim))  # keys
    for output, shape in zip(op.outputs[2:], self._shapes):  # value_list
      output.set_shape(tensor_shape.TensorShape([batch_dim]).concatenate(shape))

    return ret

  def close(self, cancel_pending_enqueues=False, name=None):
    """Closes this barrier.

    This operation signals that no more new key values will be inserted in the
    given barrier. Subsequent InsertMany operations with new keys will fail.
    InsertMany operations that just complement already existing keys with other
    components, will continue to succeed. Subsequent TakeMany operations will
    continue to succeed if sufficient elements remain in the barrier. Subsequent
    TakeMany operations that would block will fail immediately.

    If `cancel_pending_enqueues` is `True`, all pending requests to the
    underlying queue will also be cancelled, and completing of already
    started values is also not acceptable anymore.

    Args:
      cancel_pending_enqueues: (Optional.) A boolean, defaulting to
        `False` (described above).
      name: Optional name for the op.

    Returns:
      The operation that closes the barrier.
    """
    if name is None:
      name = "%s_BarrierClose" % self._name
    return gen_data_flow_ops._barrier_close(
        self._barrier_ref,
        cancel_pending_enqueues=cancel_pending_enqueues,
        name=name)

  def ready_size(self, name=None):
    """Compute the number of complete elements in the given barrier.

    Args:
      name: A name for the operation (optional).

    Returns:
      A single-element tensor containing the number of complete elements in the
      given barrier.
    """
    if name is None:
      name = "%s_BarrierReadySize" % self._name
    return gen_data_flow_ops._barrier_ready_size(self._barrier_ref, name=name)

  def incomplete_size(self, name=None):
    """Compute the number of incomplete elements in the given barrier.

    Args:
      name: A name for the operation (optional).

    Returns:
      A single-element tensor containing the number of incomplete elements in
      the given barrier.
    """
    if name is None:
      name = "%s_BarrierIncompleteSize" % self._name
    return gen_data_flow_ops._barrier_incomplete_size(
        self._barrier_ref, name=name)
def initialize_all_tables(name="init_all_tables"):
  """Returns an Op that initializes all tables of the default graph.

  Args:
    name: Optional name for the initialization op.

  Returns:
    An Op that initializes all tables.  Note that if the graph contains
    no tables the returned Op is a NoOp.
  """
  table_initializers = ops.get_collection(ops.GraphKeys.TABLE_INITIALIZERS)
  if not table_initializers:
    # Nothing to initialize: return a NoOp so callers always get a valid op.
    return control_flow_ops.no_op(name=name)
  return control_flow_ops.group(*table_initializers, name=name)
# Lookup-table and hash-table ops have no meaningful gradients.
ops.NotDifferentiable("LookupTableFind")
ops.NotDifferentiable("LookupTableInsert")
ops.NotDifferentiable("LookupTableSize")
ops.NotDifferentiable("HashTable")
ops.NotDifferentiable("InitializeTable")
ops.NotDifferentiable("InitializeTableFromTextFile")
ops.NotDifferentiable("MutableDenseHashTable")
ops.NotDifferentiable("MutableHashTable")
ops.NotDifferentiable("MutableHashTableOfTensors")

# Queue construction/size ops delegate shape inference to the C++ shape fns.
ops.RegisterShape("QueueSize")(common_shapes.call_cpp_shape_fn)
ops.RegisterShape("Queue")(common_shapes.call_cpp_shape_fn)
ops.RegisterShape("FIFOQueue")(common_shapes.call_cpp_shape_fn)
ops.RegisterShape("PaddingFIFOQueue")(common_shapes.call_cpp_shape_fn)
ops.RegisterShape("RandomShuffleQueue")(common_shapes.call_cpp_shape_fn)
ops.RegisterShape("PriorityQueue")(common_shapes.call_cpp_shape_fn)

# NOTE(mrry): The following ops use higher-level information in the
# Queue class to provide shape information.
ops.RegisterShape("QueueDequeue")(common_shapes.call_cpp_shape_fn)
ops.RegisterShape("QueueDequeueMany")(common_shapes.call_cpp_shape_fn)
ops.RegisterShape("QueueDequeueUpTo")(common_shapes.call_cpp_shape_fn)
ops.RegisterShape("QueueEnqueue")(common_shapes.call_cpp_shape_fn)
ops.RegisterShape("QueueEnqueueMany")(common_shapes.call_cpp_shape_fn)
ops.RegisterShape("QueueClose")(common_shapes.call_cpp_shape_fn)

# Stack ops.
ops.RegisterShape("Stack")(common_shapes.call_cpp_shape_fn)
ops.RegisterShape("StackPush")(common_shapes.call_cpp_shape_fn)
ops.RegisterShape("StackPop")(common_shapes.call_cpp_shape_fn)
ops.RegisterShape("StackClose")(common_shapes.call_cpp_shape_fn)

# NOTE(mrry): Uses higher-level information in the Barrier class to
# provide shape information.
ops.RegisterShape("BarrierReadySize")(common_shapes.call_cpp_shape_fn)
ops.RegisterShape("BarrierIncompleteSize")(common_shapes.call_cpp_shape_fn)
ops.RegisterShape("Barrier")(common_shapes.call_cpp_shape_fn)
ops.RegisterShape("BarrierTakeMany")(common_shapes.call_cpp_shape_fn)
ops.RegisterShape("BarrierClose")(common_shapes.call_cpp_shape_fn)
ops.RegisterShape("BarrierInsertMany")(common_shapes.call_cpp_shape_fn)

# Session-handle (per-session tensor stash) ops.
ops.RegisterShape("GetSessionHandle")(common_shapes.call_cpp_shape_fn)
ops.RegisterShape("GetSessionTensor")(common_shapes.call_cpp_shape_fn)
ops.RegisterShape("DeleteSessionTensor")(common_shapes.call_cpp_shape_fn)
@ops.RegisterShape("DynamicPartition")
def _DynamicPartitionShape(op):
  """Shape function for data_flow_ops.dynamic_partition."""
  data_shape = op.inputs[0].get_shape()
  partitions_shape = op.inputs[1].get_shape()
  # If we don't know the rank of partitions, we don't know anything.
  prefix_rank = partitions_shape.ndims
  if prefix_rank is None:
    out_shape = tensor_shape.unknown_shape()
  else:
    # `data_shape` must start with `partitions_shape`; the output is dynamic
    # in the 0th dimension and matches `data_shape` in the remaining ones.
    partitions_shape.assert_is_compatible_with(data_shape[:prefix_rank])
    out_shape = tensor_shape.TensorShape([None]).concatenate(
        data_shape[prefix_rank:])
  # Every partition output shares the same (partially-known) shape.
  return [out_shape] * op.get_attr("num_partitions")
@ops.RegisterShape("DynamicStitch")
def _DynamicStitchShape(op):
  """Shape function for data_flow_ops.dynamic_stitch."""
  num_partitions = op.get_attr("N")
  indices_shapes = [t.get_shape() for t in op.inputs[0:num_partitions]]
  data_shapes = [t.get_shape() for t in op.inputs[num_partitions:]]
  output_shape = tensor_shape.unknown_shape()
  # Everything after the indices dims in each data input belongs to the
  # output; merge it across all pairs to refine what we know.
  extra_shape = tensor_shape.TensorShape(None)
  for indices_shape, data_shape in zip(indices_shapes, data_shapes):
    indices_ndims = indices_shape.ndims
    if indices_ndims is not None:
      # Assert that data_shape starts with indices_shape.
      indices_shape.merge_with(data_shape[:indices_ndims])
      extra_shape = extra_shape.merge_with(data_shape[indices_ndims:])
  # The stitched 0th dimension is data-dependent and thus unknown.
  return [tensor_shape.TensorShape([None]).concatenate(extra_shape)]
# Lookup-table ops delegate shape inference to the C++ shape functions.
ops.RegisterShape("LookupTableFind")(common_shapes.call_cpp_shape_fn)
ops.RegisterShape("LookupTableInsert")(common_shapes.call_cpp_shape_fn)
ops.RegisterShape("LookupTableImport")(common_shapes.call_cpp_shape_fn)
ops.RegisterShape("LookupTableSize")(common_shapes.call_cpp_shape_fn)
ops.RegisterShape("LookupTableExport")(common_shapes.call_cpp_shape_fn)
ops.RegisterShape("HashTable")(common_shapes.call_cpp_shape_fn)
ops.RegisterShape("MutableDenseHashTable")(common_shapes.call_cpp_shape_fn)
ops.RegisterShape("MutableHashTable")(common_shapes.call_cpp_shape_fn)
ops.RegisterShape("MutableHashTableOfTensors")(common_shapes.call_cpp_shape_fn)
ops.RegisterShape("InitializeTable")(common_shapes.call_cpp_shape_fn)
ops.RegisterShape("InitializeTableFromTextFile")(
    common_shapes.call_cpp_shape_fn)
class ConditionalAccumulatorBase(object):
  """A conditional accumulator for aggregating gradients.

  Up-to-date gradients (i.e., time step at which gradient was computed is
  equal to the accumulator's time step) are added to the accumulator.

  Extraction of the average gradient is blocked until the required number of
  gradients has been accumulated.
  """

  def __init__(self, dtype, shape, accumulator_ref):
    """Creates a new ConditionalAccumulator.

    Args:
      dtype: Datatype of the accumulated gradients.
      shape: Shape of the accumulated gradients.
      accumulator_ref: A handle to the conditional accumulator, created by
        sub-classes.
    """
    self._dtype = dtype
    # A `None` shape means the gradient shape is unconstrained.
    self._shape = (tensor_shape.TensorShape(shape) if shape is not None
                   else tensor_shape.unknown_shape())
    self._accumulator_ref = accumulator_ref
    # Use the last path component of the op name as a prefix for derived ops.
    self._name = self._accumulator_ref.op.name.split("/")[-1]

  @property
  def accumulator_ref(self):
    """The underlying accumulator reference."""
    return self._accumulator_ref

  @property
  def name(self):
    """The name of the underlying accumulator."""
    return self._name

  @property
  def dtype(self):
    """The datatype of the gradients accumulated by this accumulator."""
    return self._dtype

  def num_accumulated(self, name=None):
    """Number of gradients that have currently been aggregated in accumulator.

    Args:
      name: Optional name for the operation.

    Returns:
      Number of accumulated gradients currently in accumulator.
    """
    if name is None:
      name = "%s_NumAccumulated" % self._name
    return gen_data_flow_ops.accumulator_num_accumulated(
        self._accumulator_ref, name=name)

  def set_global_step(self, new_global_step, name=None):
    """Sets the global time step of the accumulator.

    The operation logs a warning if we attempt to set to a time step that is
    lower than the accumulator's own time step.

    Args:
      new_global_step: Value of new time step. Can be a variable or a constant
      name: Optional name for the operation.

    Returns:
      Operation that sets the accumulator's time step.
    """
    new_step_tensor = math_ops.to_int64(
        ops.convert_to_tensor(new_global_step))
    return gen_data_flow_ops.accumulator_set_global_step(
        self._accumulator_ref, new_step_tensor, name=name)
class ConditionalAccumulator(ConditionalAccumulatorBase):
  """A conditional accumulator for aggregating (dense) gradients.

  Only gradients whose local time step equals the accumulator's time step
  are aggregated; stale gradients are silently dropped. Extraction of the
  average gradient blocks until the required number of gradients has been
  accumulated.
  """

  def __init__(self,
               dtype,
               shape=None,
               shared_name=None,
               name="conditional_accumulator"):
    """Creates a new ConditionalAccumulator.

    Args:
      dtype: Datatype of the accumulated gradients.
      shape: Shape of the accumulated gradients.
      shared_name: Optional. If non-empty, this accumulator will be shared
        under the given name across multiple sessions.
      name: Optional name for the accumulator.
    """
    accumulator_ref = gen_data_flow_ops.conditional_accumulator(
        dtype=dtype, shape=shape, shared_name=shared_name, name=name)
    super(ConditionalAccumulator, self).__init__(dtype, shape, accumulator_ref)

  def apply_grad(self, grad, local_step=0, name=None):
    """Attempts to apply a gradient to the accumulator.

    The attempt is silently dropped if the gradient is stale, i.e., local_step
    is less than the accumulator's global time step.

    Args:
      grad: The gradient tensor to be applied.
      local_step: Time step at which the gradient was computed.
      name: Optional name for the operation.

    Returns:
      The operation that (conditionally) applies a gradient to the accumulator.

    Raises:
      ValueError: If grad is of the wrong shape
    """
    # Coerce to the accumulator's dtype and validate the shape up front so
    # mismatches surface at graph-construction time.
    grad_tensor = ops.convert_to_tensor(grad, self._dtype)
    grad_tensor.get_shape().assert_is_compatible_with(self._shape)
    step_tensor = math_ops.to_int64(ops.convert_to_tensor(local_step))
    return gen_data_flow_ops.accumulator_apply_gradient(
        self._accumulator_ref,
        local_step=step_tensor,
        gradient=grad_tensor,
        name=name)

  def take_grad(self, num_required, name=None):
    """Attempts to extract the average gradient from the accumulator.

    Blocks until a sufficient number of gradients have been successfully
    applied. On success it also: resets the counter of accumulated gradients
    to 0, resets the aggregated gradient to a 0 tensor, and increments the
    accumulator's internal time step by 1.

    Args:
      num_required: Number of gradients that needs to have been aggregated
      name: Optional name for the operation

    Returns:
      A tensor holding the value of the average gradient.

    Raises:
      InvalidArgumentError: If num_required < 1
    """
    return gen_data_flow_ops.accumulator_take_gradient(
        self._accumulator_ref, num_required, dtype=self._dtype, name=name)
class SparseConditionalAccumulator(ConditionalAccumulatorBase):
  """A conditional accumulator for aggregating sparse gradients.

  Sparse gradients are represented by IndexedSlices.

  Up-to-date gradients (i.e., time step at which gradient was computed is
  equal to the accumulator's time step) are added to the accumulator.

  Extraction of the average gradient is blocked until the required number of
  gradients has been accumulated.

  Args:
    dtype: Datatype of the accumulated gradients.
    shape: Shape of the accumulated gradients.
    shared_name: Optional. If non-empty, this accumulator will be shared under
      the given name across multiple sessions.
    name: Optional name for the accumulator.
  """

  def __init__(self,
               dtype,
               shape=None,
               shared_name=None,
               name="sparse_conditional_accumulator"):
    accumulator_ref = gen_data_flow_ops.sparse_conditional_accumulator(
        dtype=dtype, shape=shape, shared_name=shared_name, name=name)
    super(SparseConditionalAccumulator,
          self).__init__(dtype, shape, accumulator_ref)

  def apply_indexed_slices_grad(self, grad, local_step=0, name=None):
    """Attempts to apply a gradient to the accumulator.

    The attempt is silently dropped if the gradient is stale, i.e., local_step
    is less than the accumulator's global time step.

    Args:
      grad: The gradient IndexedSlices to be applied.
      local_step: Time step at which the gradient was computed.
      name: Optional name for the operation.

    Returns:
      The operation that (conditionally) applies a gradient to the accumulator.

    Raises:
      InvalidArgumentError: If grad is of the wrong shape
    """
    # Unpack the IndexedSlices and delegate to the component-wise variant.
    return self.apply_grad(
        grad_indices=grad.indices,
        grad_values=grad.values,
        grad_shape=grad.dense_shape,
        local_step=local_step,
        name=name)

  def apply_grad(self,
                 grad_indices,
                 grad_values,
                 grad_shape=None,
                 local_step=0,
                 name=None):
    """Attempts to apply a sparse gradient to the accumulator.

    The attempt is silently dropped if the gradient is stale, i.e., local_step
    is less than the accumulator's global time step.

    A sparse gradient is represented by its indices, values and possibly empty
    or None shape. Indices must be a vector representing the locations of
    non-zero entries in the tensor. Values are the non-zero slices of the
    gradient, and must have the same first dimension as indices, i.e., the nnz
    represented by indices and values must be consistent. Shape, if not empty
    or None, must be consistent with the accumulator's shape (if also
    provided).

    Example:
      A tensor [[0, 0], [0, 1], [2, 3]] can be represented
        indices: [1,2]
        values: [[0,1],[2,3]]
        shape: [3, 2]

    Args:
      grad_indices: Indices of the sparse gradient to be applied.
      grad_values: Values of the sparse gradient to be applied.
      grad_shape: Shape of the sparse gradient to be applied.
      local_step: Time step at which the gradient was computed.
      name: Optional name for the operation.

    Returns:
      The operation that (conditionally) applies a gradient to the accumulator.

    Raises:
      InvalidArgumentError: If grad is of the wrong shape
    """
    local_step = math_ops.to_int64(ops.convert_to_tensor(local_step))
    return gen_data_flow_ops.sparse_accumulator_apply_gradient(
        self._accumulator_ref,
        local_step=local_step,
        gradient_indices=math_ops.to_int64(grad_indices),
        gradient_values=grad_values,
        # An absent shape is encoded as the empty vector; has_known_shape
        # tells the op whether the shape argument is meaningful.
        gradient_shape=math_ops.to_int64([] if grad_shape is None else
                                         grad_shape),
        has_known_shape=(grad_shape is not None),
        name=name)

  def take_grad(self, num_required, name=None):
    """Attempts to extract the average gradient from the accumulator.

    The operation blocks until sufficient number of gradients have been
    successfully applied to the accumulator.

    Once successful, the following actions are also triggered:
    - Counter of accumulated gradients is reset to 0.
    - Aggregated gradient is reset to 0 tensor.
    - Accumulator's internal time step is incremented by 1.

    Args:
      num_required: Number of gradients that needs to have been aggregated
      name: Optional name for the operation

    Returns:
      A tuple of indices, values, and shape representing the average gradient.

    Raises:
      InvalidArgumentError: If num_required < 1
    """
    return gen_data_flow_ops.sparse_accumulator_take_gradient(
        self._accumulator_ref, num_required, dtype=self._dtype, name=name)

  def take_indexed_slices_grad(self, num_required, name=None):
    """Attempts to extract the average gradient from the accumulator.

    The operation blocks until sufficient number of gradients have been
    successfully applied to the accumulator.

    Once successful, the following actions are also triggered:
    - Counter of accumulated gradients is reset to 0.
    - Aggregated gradient is reset to 0 tensor.
    - Accumulator's internal time step is incremented by 1.

    Args:
      num_required: Number of gradients that needs to have been aggregated
      name: Optional name for the operation

    Returns:
      An IndexedSlices holding the value of the average gradient.

    Raises:
      InvalidArgumentError: If num_required < 1
    """
    return_val = gen_data_flow_ops.sparse_accumulator_take_gradient(
        self._accumulator_ref, num_required, dtype=self._dtype, name=name)
    # Repack the raw (indices, values, shape) triple as an IndexedSlices.
    return ops.IndexedSlices(
        indices=return_val.indices,
        values=return_val.values,
        dense_shape=return_val.shape)
# Delegate shape inference for the accumulator ops to the C++ shape functions.
ops.RegisterShape("AccumulatorNumAccumulated")(common_shapes.call_cpp_shape_fn)
ops.RegisterShape("AccumulatorSetGlobalStep")(common_shapes.call_cpp_shape_fn)
ops.RegisterShape("ConditionalAccumulator")(common_shapes.call_cpp_shape_fn)
ops.RegisterShape("AccumulatorApplyGradient")(common_shapes.call_cpp_shape_fn)
ops.RegisterShape("AccumulatorTakeGradient")(common_shapes.call_cpp_shape_fn)
ops.RegisterShape("SparseConditionalAccumulator")(
    common_shapes.call_cpp_shape_fn)
ops.RegisterShape("SparseAccumulatorApplyGradient")(
    common_shapes.call_cpp_shape_fn)
ops.RegisterShape("SparseAccumulatorTakeGradient")(
    common_shapes.call_cpp_shape_fn)
| apache-2.0 |
rcrowder/nupic | src/nupic/frameworks/opf/safe_interpreter.py | 50 | 1453 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""Safe Python interpreter for user-submitted code."""
import asteval
class SafeInterpreter(asteval.Interpreter):
  """asteval-based interpreter with looping constructs disabled."""

  # AST node names that must never be evaluated.
  blacklisted_nodes = {'while', 'for'}

  def __init__(self, *args, **kwargs):
    """Initialize interpreter with blacklisted nodes removed from supported
    nodes.
    """
    allowed = set(self.supported_nodes) - self.blacklisted_nodes
    self.supported_nodes = tuple(allowed)
    asteval.Interpreter.__init__(self, *args, **kwargs)
| agpl-3.0 |
vim-IDE/python-mode | pymode/libs/astroid/node_classes.py | 8 | 29797 | # copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of astroid.
#
# astroid is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 2.1 of the License, or (at your
# option) any later version.
#
# astroid is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
# for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with astroid. If not, see <http://www.gnu.org/licenses/>.
"""Module for some node classes. More nodes in scoped_nodes.py
"""
import sys
import six
from logilab.common.decorators import cachedproperty
from astroid.exceptions import NoDefault
from astroid.bases import (NodeNG, Statement, Instance, InferenceContext,
_infer_stmts, YES, BUILTINS)
from astroid.mixins import (BlockRangeMixIn, AssignTypeMixin,
ParentAssignTypeMixin, FromImportMixIn)
# True when running under Python 3; selects version-specific AST fields below.
PY3K = sys.version_info >= (3, 0)
def unpack_infer(stmt, context=None):
    """recursively generate nodes inferred by the given statement.

    List and Tuple literals are unpacked element by element; any other node
    is inferred, and each inferred value is itself unpacked unless it is a
    terminal node (one that infers to itself) or the YES wildcard, which is
    passed through as is.
    """
    if isinstance(stmt, (List, Tuple)):
        # container literal: recurse on each element
        for element in stmt.elts:
            for inferred in unpack_infer(element, context):
                yield inferred
        return
    # a node whose first inferred value is itself is final: yield it and stop
    if next(stmt.infer(context)) is stmt:
        yield stmt
        return
    # otherwise recurse on every inferred value, except the YES wildcard
    # which is yielded unchanged
    for inferred in stmt.infer(context):
        if inferred is YES:
            yield inferred
        else:
            for sub_inferred in unpack_infer(inferred, context):
                yield sub_inferred
def are_exclusive(stmt1, stmt2, exceptions=None):
    """return true if the two given statements are mutually exclusive

    `exceptions` may be a list of exception names. If specified, discard If
    branches and check one of the statement is in an exception handler catching
    one of the given exceptions.

    algorithm :
     1) index stmt1's parents
     2) climb among stmt2's parents until we find a common parent
     3) if the common parent is a If or TryExcept statement, look if nodes are
        in exclusive branches
    """
    # index stmt1's parents
    stmt1_parents = {}
    children = {}
    node = stmt1.parent
    previous = stmt1
    while node:
        stmt1_parents[node] = 1
        # remember through which child we reached each ancestor
        children[node] = previous
        previous = node
        node = node.parent
    # climb among stmt2's parents until we find a common parent
    node = stmt2.parent
    previous = stmt2
    while node:
        if node in stmt1_parents:
            # if the common parent is a If or TryExcept statement, look if
            # nodes are in exclusive branches
            if isinstance(node, If) and exceptions is None:
                # exclusive iff the two statements sit in different branches
                # (body vs orelse) of the same If
                if (node.locate_child(previous)[1]
                        is not node.locate_child(children[node])[1]):
                    return True
            elif isinstance(node, TryExcept):
                c2attr, c2node = node.locate_child(previous)
                c1attr, c1node = node.locate_child(children[node])
                if c1node is not c2node:
                    # body vs handlers is only exclusive when the handler
                    # actually catches one of `exceptions`; handlers vs
                    # orelse are always exclusive
                    if ((c2attr == 'body' and c1attr == 'handlers' and children[node].catch(exceptions)) or
                            (c2attr == 'handlers' and c1attr == 'body' and previous.catch(exceptions)) or
                            (c2attr == 'handlers' and c1attr == 'orelse') or
                            (c2attr == 'orelse' and c1attr == 'handlers')):
                        return True
                elif c2attr == 'handlers' and c1attr == 'handlers':
                    # two distinct except clauses of the same try are exclusive
                    return previous is not children[node]
            # common parent reached without finding exclusive branches
            return False
        previous = node
        node = node.parent
    # no common parent at all (different trees): not exclusive
    return False
class LookupMixIn(object):
    """Mixin looking up a name in the right scope
    """

    def lookup(self, name):
        """lookup a variable name

        return the scope node and the list of assignments associated to the
        given name according to the scope where it has been found (locals,
        globals or builtin)

        The lookup is starting from self's scope. If self is not a frame itself
        and the name is found in the inner frame locals, statements will be
        filtered to remove ignorable statements according to self's location
        """
        return self.scope().scope_lookup(self, name)

    def ilookup(self, name):
        """infered lookup

        return an iterator on infered values of the statements returned by
        the lookup method
        """
        frame, stmts = self.lookup(name)
        context = InferenceContext()
        return _infer_stmts(stmts, context, frame)

    def _filter_stmts(self, stmts, frame, offset):
        """filter statements to remove ignorable statements.

        If self is not a frame itself and the name is found in the inner
        frame locals, statements will be filtered to remove ignorable
        statements according to self's location
        """
        # if offset == -1, my actual frame is not the inner frame but its parent
        #
        # class A(B): pass
        #
        # we need this to resolve B correctly
        if offset == -1:
            myframe = self.frame().parent.frame()
        else:
            myframe = self.frame()
            # If the frame of this node is the same as the statement
            # of this node, then the node is part of a class or
            # a function definition and the frame of this node should be the
            # the upper frame, not the frame of the definition.
            # For more information why this is important,
            # see Pylint issue #295.
            # For example, for 'b', the statement is the same
            # as the frame / scope:
            #
            # def test(b=1):
            #     ...
            if self.statement() is myframe and myframe.parent:
                myframe = myframe.parent.frame()
        # note: parses as (not (myframe is frame)) or (self is frame); no
        # filtering is needed when the lookup crosses a frame boundary or the
        # lookup node is the frame itself
        if not myframe is frame or self is frame:
            return stmts
        mystmt = self.statement()
        # line filtering if we are in the same frame
        #
        # take care node may be missing lineno information (this is the case for
        # nodes inserted for living objects)
        if myframe is frame and mystmt.fromlineno is not None:
            assert mystmt.fromlineno is not None, mystmt
            mylineno = mystmt.fromlineno + offset
        else:
            # disabling lineno filtering
            mylineno = 0
        _stmts = []
        _stmt_parents = []
        for node in stmts:
            stmt = node.statement()
            # line filtering is on and we have reached our location, break
            if mylineno > 0 and stmt.fromlineno > mylineno:
                break
            assert hasattr(node, 'ass_type'), (node, node.scope(),
                                               node.scope().locals)
            ass_type = node.ass_type()
            # a class using `self`'s name as a base cannot provide further
            # assignments for it
            if node.has_base(self):
                break
            # let the assignment type veto / rewrite the statement list
            _stmts, done = ass_type._get_filtered_stmts(self, node, _stmts, mystmt)
            if done:
                break
            optional_assign = ass_type.optional_assign
            if optional_assign and ass_type.parent_of(self):
                # we are inside a loop, loop var assigment is hidding previous
                # assigment
                _stmts = [node]
                _stmt_parents = [stmt.parent]
                continue
            # XXX comment various branches below!!!
            try:
                pindex = _stmt_parents.index(stmt.parent)
            except ValueError:
                pass
            else:
                # we got a parent index, this means the currently visited node
                # is at the same block level as a previously visited node
                if _stmts[pindex].ass_type().parent_of(ass_type):
                    # both statements are not at the same block level
                    continue
                # if currently visited node is following previously considered
                # assignement and both are not exclusive, we can drop the
                # previous one. For instance in the following code ::
                #
                #   if a:
                #     x = 1
                #   else:
                #     x = 2
                #   print x
                #
                # we can't remove neither x = 1 nor x = 2 when looking for 'x'
                # of 'print x'; while in the following ::
                #
                #   x = 1
                #   x = 2
                #   print x
                #
                # we can remove x = 1 when we see x = 2
                #
                # moreover, on loop assignment types, assignment won't
                # necessarily be done if the loop has no iteration, so we don't
                # want to clear previous assigments if any (hence the test on
                # optional_assign)
                if not (optional_assign or are_exclusive(_stmts[pindex], node)):
                    del _stmt_parents[pindex]
                    del _stmts[pindex]
            if isinstance(node, AssName):
                # an unconditional assignment at our own block level shadows
                # everything collected so far
                if not optional_assign and stmt.parent is mystmt.parent:
                    _stmts = []
                    _stmt_parents = []
            elif isinstance(node, DelName):
                # `del name` invalidates all previous assignments
                _stmts = []
                _stmt_parents = []
                continue
            if not are_exclusive(self, node):
                _stmts.append(node)
                _stmt_parents.append(stmt.parent)
        return _stmts
# Name classes

class AssName(LookupMixIn, ParentAssignTypeMixin, NodeNG):
    """class representing an AssName node (a name used as assignment target)"""


class DelName(LookupMixIn, ParentAssignTypeMixin, NodeNG):
    """class representing a DelName node (a name used as a `del` target)"""


class Name(LookupMixIn, NodeNG):
    """class representing a Name node (a name read in an expression)"""


#####################   node classes   ########################################
class Arguments(NodeNG, AssignTypeMixin):
    """class representing an Arguments node (the formal arguments of a
    function definition)"""
    if PY3K:
        # Python 3.4+ uses a different approach regarding annotations,
        # each argument is a new class, _ast.arg, which exposes an
        # 'annotation' attribute. In astroid though, arguments are exposed
        # as is in the Arguments node and the only way to expose annotations
        # is by using something similar with Python 3.3:
        #  - we expose 'varargannotation' and 'kwargannotation' of annotations
        #    of varargs and kwargs.
        #  - we expose 'annotation', a list with annotations for
        #    for each normal argument. If an argument doesn't have an
        #    annotation, its value will be None.
        _astroid_fields = ('args', 'defaults', 'kwonlyargs',
                           'kw_defaults', 'annotations',
                           'varargannotation', 'kwargannotation')
        annotations = None
        varargannotation = None
        kwargannotation = None
    else:
        _astroid_fields = ('args', 'defaults', 'kwonlyargs', 'kw_defaults')
    args = None
    defaults = None
    kwonlyargs = None
    kw_defaults = None

    def __init__(self, vararg=None, kwarg=None):
        # vararg / kwarg are plain strings (the names), not child nodes
        self.vararg = vararg
        self.kwarg = kwarg

    def _infer_name(self, frame, name):
        if self.parent is frame:
            return name
        return None

    @cachedproperty
    def fromlineno(self):
        # arguments may carry no line info of their own; fall back to the
        # enclosing function's line
        lineno = super(Arguments, self).fromlineno
        return max(lineno, self.parent.fromlineno or 0)

    def format_args(self):
        """return arguments formatted as string"""
        result = []
        if self.args:
            result.append(_format_args(self.args, self.defaults))
        if self.vararg:
            result.append('*%s' % self.vararg)
        if self.kwarg:
            result.append('**%s' % self.kwarg)
        if self.kwonlyargs:
            # keyword-only arguments need a bare '*' marker when there is
            # no *vararg before them
            if not self.vararg:
                result.append('*')
            result.append(_format_args(self.kwonlyargs, self.kw_defaults))
        return ', '.join(result)

    def default_value(self, argname):
        """return the default value for an argument

        :raise `NoDefault`: if there is no default value defined
        """
        i = _find_arg(argname, self.args)[0]
        if i is not None:
            # defaults align with the last len(defaults) positional arguments
            idx = i - (len(self.args) - len(self.defaults))
            if idx >= 0:
                return self.defaults[idx]
        i = _find_arg(argname, self.kwonlyargs)[0]
        if i is not None and self.kw_defaults[i] is not None:
            return self.kw_defaults[i]
        raise NoDefault()

    def is_argument(self, name):
        """return True if the name is defined in arguments"""
        if name == self.vararg:
            return True
        if name == self.kwarg:
            return True
        return self.find_argname(name, True)[1] is not None

    def find_argname(self, argname, rec=False):
        """return index and Name node with given name"""
        if self.args: # self.args may be None in some cases (builtin function)
            return _find_arg(argname, self.args, rec)
        return None, None

    def get_children(self):
        """override get_children to skip over None elements in kw_defaults"""
        for child in super(Arguments, self).get_children():
            if child is not None:
                yield child
def _find_arg(argname, args, rec=False):
    """Return the (index, node) pair of the argument named `argname` in
    `args`, searching tuple arguments recursively when `rec` is true.

    Returns (None, None) when no argument matches.
    """
    for index, arg in enumerate(args):
        if isinstance(arg, Tuple):
            if not rec:
                continue
            # search inside the tuple-unpacking argument (python 2 only)
            nested = _find_arg(argname, arg.elts)
            if nested[0] is not None:
                return nested
        elif arg.name == argname:
            return index, arg
    return None, None
def _format_args(args, defaults=None):
    """format a list of argument nodes (with optional default-value nodes)
    as they would appear in a function signature"""
    values = []
    if args is None:
        return ''
    if defaults is not None:
        # defaults align with the *last* len(defaults) arguments
        default_offset = len(args) - len(defaults)
    for i, arg in enumerate(args):
        if isinstance(arg, Tuple):
            # tuple-unpacking argument (python 2 only)
            values.append('(%s)' % _format_args(arg.elts))
        else:
            values.append(arg.name)
            if defaults is not None and i >= default_offset:
                if defaults[i-default_offset] is not None:
                    values[-1] += '=' + defaults[i-default_offset].as_string()
    return ', '.join(values)
class AssAttr(NodeNG, ParentAssignTypeMixin):
    """class representing an AssAttr node (attribute used as assignment
    target)"""
    _astroid_fields = ('expr',)
    expr = None


class Assert(Statement):
    """class representing an Assert node"""
    _astroid_fields = ('test', 'fail',)
    test = None
    fail = None


class Assign(Statement, AssignTypeMixin):
    """class representing an Assign node"""
    _astroid_fields = ('targets', 'value',)
    targets = None
    value = None


class AugAssign(Statement, AssignTypeMixin):
    """class representing an AugAssign node (augmented assignment, e.g. +=)"""
    _astroid_fields = ('target', 'value',)
    target = None
    value = None


class Backquote(NodeNG):
    """class representing a Backquote node (`expr`, python 2 only)"""
    _astroid_fields = ('value',)
    value = None


class BinOp(NodeNG):
    """class representing a BinOp node"""
    _astroid_fields = ('left', 'right',)
    left = None
    right = None


class BoolOp(NodeNG):
    """class representing a BoolOp node (and / or expression)"""
    _astroid_fields = ('values',)
    values = None


class Break(Statement):
    """class representing a Break node"""


class CallFunc(NodeNG):
    """class representing a CallFunc node (a call expression)"""
    _astroid_fields = ('func', 'args', 'starargs', 'kwargs')
    func = None
    args = None
    starargs = None
    kwargs = None

    def __init__(self):
        self.starargs = None
        self.kwargs = None


class Compare(NodeNG):
    """class representing a Compare node

    `ops` is a list of (operator string, comparator node) pairs.
    """
    _astroid_fields = ('left', 'ops',)
    left = None
    ops = None

    def get_children(self):
        """override get_children for tuple fields"""
        yield self.left
        for _, comparator in self.ops:
            yield comparator # we don't want the 'op'

    def last_child(self):
        """override last_child"""
        # XXX maybe if self.ops:
        return self.ops[-1][1]
        #return self.left


class Comprehension(NodeNG):
    """class representing a Comprehension node (one `for ... in ... [if ...]`
    clause of a comprehension or generator expression)"""
    _astroid_fields = ('target', 'iter', 'ifs')
    target = None
    iter = None
    ifs = None
    # the comprehension target may never be assigned (empty iterable)
    optional_assign = True

    def ass_type(self):
        return self

    def _get_filtered_stmts(self, lookup_node, node, stmts, mystmt):
        """method used in filter_stmts"""
        if self is mystmt:
            if isinstance(lookup_node, (Const, Name)):
                return [lookup_node], True
        elif self.statement() is mystmt:
            # original node's statement is the assignment, only keeps
            # current node (gen exp, list comp)
            return [node], True
        return stmts, False


class Const(NodeNG, Instance):
    """represent a constant node like num, str, bool, None, bytes"""

    def __init__(self, value=None):
        self.value = value

    def getitem(self, index, context=None):
        # among constants, only strings support subscripting
        if isinstance(self.value, six.string_types):
            return Const(self.value[index])
        raise TypeError('%r (value=%s)' % (self, self.value))

    def has_dynamic_getattr(self):
        return False

    def itered(self):
        # only strings are iterable constants
        if isinstance(self.value, six.string_types):
            return self.value
        raise TypeError()

    def pytype(self):
        return self._proxied.qname()


class Continue(Statement):
    """class representing a Continue node"""


class Decorators(NodeNG):
    """class representing a Decorators node"""
    _astroid_fields = ('nodes',)
    nodes = None

    def __init__(self, nodes=None):
        self.nodes = nodes

    def scope(self):
        # skip the function node to go directly to the upper level scope
        return self.parent.parent.scope()


class DelAttr(NodeNG, ParentAssignTypeMixin):
    """class representing a DelAttr node (attribute used as `del` target)"""
    _astroid_fields = ('expr',)
    expr = None


class Delete(Statement, AssignTypeMixin):
    """class representing a Delete node"""
    _astroid_fields = ('targets',)
    targets = None
class Dict(NodeNG, Instance):
    """class representing a Dict node

    `items` is a list of (key node, value node) pairs.
    """
    _astroid_fields = ('items',)

    def __init__(self, items=None):
        # when built from a living python dict, wrap keys and values into
        # constant nodes
        if items is None:
            self.items = []
        else:
            self.items = [(const_factory(k), const_factory(v))
                          for k, v in items.items()]

    def pytype(self):
        return '%s.dict' % BUILTINS

    def get_children(self):
        """get children of a Dict node"""
        # overrides get_children
        for key, value in self.items:
            yield key
            yield value

    def last_child(self):
        """override last_child"""
        if self.items:
            return self.items[-1][1]
        return None

    def itered(self):
        """return the values produced by iterating this node, i.e. the keys

        BUG FIX: `items` holds (key, value) pairs, so the previous
        `self.items[::2]` returned every other *pair* instead of the keys;
        iterating a dict yields its keys.
        """
        return [key for (key, _) in self.items]

    def getitem(self, lookup_key, context=None):
        """return the value node whose key infers to `lookup_key`"""
        for key, value in self.items:
            for inferedkey in key.infer(context):
                if inferedkey is YES:
                    continue
                if isinstance(inferedkey, Const) \
                        and inferedkey.value == lookup_key:
                    return value
        # This should raise KeyError, but all call sites only catch
        # IndexError. Let's leave it like that for now.
        raise IndexError(lookup_key)
class Discard(Statement):
    """class representing a Discard node (an expression used as a statement)"""
    _astroid_fields = ('value',)
    value = None


class Ellipsis(NodeNG):
    """class representing an Ellipsis node"""


class EmptyNode(NodeNG):
    """class representing an EmptyNode node"""


class ExceptHandler(Statement, AssignTypeMixin):
    """class representing an ExceptHandler node"""
    _astroid_fields = ('type', 'name', 'body',)
    type = None
    name = None
    body = None

    @cachedproperty
    def blockstart_tolineno(self):
        # end line of the handler "header": the alias name if present, else
        # the exception type expression, else the bare `except:` line itself
        if self.name:
            return self.name.tolineno
        elif self.type:
            return self.type.tolineno
        else:
            return self.lineno

    def catch(self, exceptions):
        # NOTE(review): returns True or falls through to an implicit None;
        # callers only use the result in a boolean context
        if self.type is None or exceptions is None:
            return True
        for node in self.type.nodes_of_class(Name):
            if node.name in exceptions:
                return True


class Exec(Statement):
    """class representing an Exec node (python 2 only)"""
    _astroid_fields = ('expr', 'globals', 'locals',)
    expr = None
    globals = None
    locals = None


class ExtSlice(NodeNG):
    """class representing an ExtSlice node"""
    _astroid_fields = ('dims',)
    dims = None
class For(BlockRangeMixIn, AssignTypeMixin, Statement):
    """class representing a For node"""
    _astroid_fields = ('target', 'iter', 'body', 'orelse',)
    target = None
    iter = None
    body = None
    orelse = None
    # the loop target may never be assigned (empty iterable)
    optional_assign = True

    @cachedproperty
    def blockstart_tolineno(self):
        return self.iter.tolineno


class From(FromImportMixIn, Statement):
    """class representing a From node (`from <modname> import <names>`)"""

    def __init__(self, fromname, names, level=0):
        self.modname = fromname
        self.names = names
        # number of leading dots for a relative import
        self.level = level


class Getattr(NodeNG):
    """class representing a Getattr node (attribute access)"""
    _astroid_fields = ('expr',)
    expr = None


class Global(Statement):
    """class representing a Global node"""

    def __init__(self, names):
        self.names = names

    def _infer_name(self, frame, name):
        return name


class If(BlockRangeMixIn, Statement):
    """class representing an If node"""
    _astroid_fields = ('test', 'body', 'orelse')
    test = None
    body = None
    orelse = None

    @cachedproperty
    def blockstart_tolineno(self):
        return self.test.tolineno

    def block_range(self, lineno):
        """handle block line numbers range for if statements"""
        if lineno == self.body[0].fromlineno:
            return lineno, lineno
        if lineno <= self.body[-1].tolineno:
            return lineno, self.body[-1].tolineno
        return self._elsed_block_range(lineno, self.orelse,
                                       self.body[0].fromlineno - 1)


class IfExp(NodeNG):
    """class representing an IfExp node (conditional expression)"""
    _astroid_fields = ('test', 'body', 'orelse')
    test = None
    body = None
    orelse = None


class Import(FromImportMixIn, Statement):
    """class representing an Import node"""


class Index(NodeNG):
    """class representing an Index node (simple subscript value)"""
    _astroid_fields = ('value',)
    value = None


class Keyword(NodeNG):
    """class representing a Keyword node (keyword argument in a call)"""
    _astroid_fields = ('value',)
    value = None


class List(NodeNG, Instance, ParentAssignTypeMixin):
    """class representing a List node"""
    _astroid_fields = ('elts',)

    def __init__(self, elts=None):
        if elts is None:
            self.elts = []
        else:
            # wrap raw python values into constant nodes
            self.elts = [const_factory(e) for e in elts]

    def pytype(self):
        return '%s.list' % BUILTINS

    def getitem(self, index, context=None):
        return self.elts[index]

    def itered(self):
        return self.elts


class Nonlocal(Statement):
    """class representing a Nonlocal node"""

    def __init__(self, names):
        self.names = names

    def _infer_name(self, frame, name):
        return name


class Pass(Statement):
    """class representing a Pass node"""


class Print(Statement):
    """class representing a Print node (python 2 only)"""
    _astroid_fields = ('dest', 'values',)
    dest = None
    values = None
class Raise(Statement):
    """class representing a Raise node"""
    exc = None
    # the raise statement differs between python 2 (raise exc, inst, tback)
    # and python 3 (raise exc from cause)
    if sys.version_info < (3, 0):
        _astroid_fields = ('exc', 'inst', 'tback')
        inst = None
        tback = None
    else:
        _astroid_fields = ('exc', 'cause')
        # NOTE(review): this `exc = None` is redundant, it is already set at
        # class level above
        exc = None
        cause = None

    def raises_not_implemented(self):
        # True when the raised expression mentions NotImplementedError
        # (implicitly returns None otherwise; callers use it as a boolean)
        if not self.exc:
            return
        for name in self.exc.nodes_of_class(Name):
            if name.name == 'NotImplementedError':
                return True


class Return(Statement):
    """class representing a Return node"""
    _astroid_fields = ('value',)
    value = None


class Set(NodeNG, Instance, ParentAssignTypeMixin):
    """class representing a Set node"""
    _astroid_fields = ('elts',)

    def __init__(self, elts=None):
        if elts is None:
            self.elts = []
        else:
            # wrap raw python values into constant nodes
            self.elts = [const_factory(e) for e in elts]

    def pytype(self):
        return '%s.set' % BUILTINS

    def itered(self):
        return self.elts


class Slice(NodeNG):
    """class representing a Slice node"""
    _astroid_fields = ('lower', 'upper', 'step')
    lower = None
    upper = None
    step = None


class Starred(NodeNG, ParentAssignTypeMixin):
    """class representing a Starred node (e.g. *rest in an assignment)"""
    _astroid_fields = ('value',)
    value = None


class Subscript(NodeNG):
    """class representing a Subscript node"""
    _astroid_fields = ('value', 'slice')
    value = None
    slice = None


class TryExcept(BlockRangeMixIn, Statement):
    """class representing a TryExcept node"""
    _astroid_fields = ('body', 'handlers', 'orelse',)
    body = None
    handlers = None
    orelse = None

    def _infer_name(self, frame, name):
        return name

    def block_range(self, lineno):
        """handle block line numbers range for try/except statements"""
        last = None
        for exhandler in self.handlers:
            if exhandler.type and lineno == exhandler.type.fromlineno:
                return lineno, lineno
            if exhandler.body[0].fromlineno <= lineno <= exhandler.body[-1].tolineno:
                return lineno, exhandler.body[-1].tolineno
            if last is None:
                last = exhandler.body[0].fromlineno - 1
        return self._elsed_block_range(lineno, self.orelse, last)
class TryFinally(BlockRangeMixIn, Statement):
    """class representing a TryFinally node"""
    _astroid_fields = ('body', 'finalbody',)
    body = None       # statements of the ``try`` suite
    finalbody = None  # statements of the ``finally`` suite
    def block_range(self, lineno):
        """handle block line numbers range for try/finally statements"""
        child = self.body[0]
        # py2.5 try: except: finally:
        # A combined try/except/finally is parsed as a TryExcept nested
        # directly inside this TryFinally; delegate lines inside it.
        if (isinstance(child, TryExcept) and child.fromlineno == self.fromlineno
            and lineno > self.fromlineno and lineno <= child.tolineno):
            return child.block_range(lineno)
        return self._elsed_block_range(lineno, self.finalbody)
class Tuple(NodeNG, Instance, ParentAssignTypeMixin):
    """AST node for a tuple literal, e.g. ``(1, 2)``."""
    _astroid_fields = ('elts',)
    def __init__(self, elts=None):
        # Convert each raw value into a constant node up front.
        self.elts = [] if elts is None else [const_factory(item) for item in elts]
    def pytype(self):
        """Return the dotted name of the builtin ``tuple`` type."""
        return BUILTINS + '.tuple'
    def getitem(self, index, context=None):
        """Return the element node stored at *index*."""
        return self.elts[index]
    def itered(self):
        """Return the child nodes produced when iterating this tuple."""
        return self.elts
class UnaryOp(NodeNG):
    """class representing an UnaryOp node"""
    # Only the operand is a child node; the operator itself is not listed
    # in _astroid_fields.
    _astroid_fields = ('operand',)
    operand = None
class While(BlockRangeMixIn, Statement):
    """AST node for a ``while`` loop (with optional ``else`` clause)."""
    _astroid_fields = ('test', 'body', 'orelse',)
    test = None    # loop condition expression node
    body = None    # statements of the loop body
    orelse = None  # statements of the optional ``else`` suite
    @cachedproperty
    def blockstart_tolineno(self):
        """Last line of the loop header, i.e. of the condition expression."""
        return self.test.tolineno
    def block_range(self, lineno):
        """Return the (start, end) line range of the block holding *lineno*."""
        return self._elsed_block_range(lineno, self.orelse)
class With(BlockRangeMixIn, AssignTypeMixin, Statement):
    """AST node for a ``with`` statement."""
    _astroid_fields = ('items', 'body')
    items = None  # list of (context-expression, optional-target) pairs
    body = None   # statements inside the ``with`` block
    @cachedproperty
    def blockstart_tolineno(self):
        """Last line of the ``with`` header: end of the final context item."""
        return self.items[-1][0].tolineno
    def get_children(self):
        """Yield each context expression and its target, then the body nodes."""
        for context_expr, target in self.items:
            yield context_expr
            if target:
                yield target
        for statement in self.body:
            yield statement
class Yield(NodeNG):
    """class representing a Yield node"""
    _astroid_fields = ('value',)
    value = None  # yielded expression node, or None for a bare ``yield``
class YieldFrom(Yield):
    """ Class representing a YieldFrom node. """
    # Behaves exactly like Yield; only the node type distinguishes
    # ``yield from`` expressions.
# constants ##############################################################
# Maps python value types to the astroid node class representing literals
# of that type; extended with the scalar types by _update_const_classes().
CONST_CLS = {
    list: List,
    tuple: Tuple,
    dict: Dict,
    set: Set,
    type(None): Const,
    }
def _update_const_classes():
    """update constant classes, so the keys of CONST_CLS can be reused"""
    klasses = (bool, int, float, complex, str)
    if sys.version_info < (3, 0):
        klasses += (unicode, long)
    if sys.version_info >= (2, 6):
        # This check must live at function level, not inside the Python 2
        # branch above: on Python 3 ``bytes`` is a distinct type from
        # ``str`` and must be registered here, otherwise const_factory()
        # would wrap byte literals in an EmptyNode instead of a Const.
        # (On Python 2.6+ ``bytes`` is an alias of ``str``, so registering
        # it there is a harmless duplicate.)
        klasses += (bytes,)
    for kls in klasses:
        CONST_CLS[kls] = Const
_update_const_classes()
def const_factory(value):
    """Build and return an astroid node wrapping the python *value*.

    Values whose type is registered in CONST_CLS become the matching node
    class; anything else is wrapped in an EmptyNode that carries the raw
    object.  (XXX: ideally, unknown values would be rebuilt through the
    builder, or const_factory would refuse them outright, rather than being
    stuffed into an EmptyNode.)
    """
    assert not isinstance(value, NodeNG)
    try:
        return CONST_CLS[value.__class__](value)
    except (KeyError, AttributeError):
        # Unregistered type: fall back to an opaque placeholder node.
        wrapper = EmptyNode()
        wrapper.object = value
        return wrapper
| lgpl-3.0 |
andela-ooladayo/django | tests/check_framework/test_security.py | 242 | 17428 | from django.conf import settings
from django.core.checks.security import base, csrf, sessions
from django.test import SimpleTestCase
from django.test.utils import override_settings
class CheckSessionCookieSecureTest(SimpleTestCase):
    """Tests for the check_session_cookie_secure system check."""
    @property
    def func(self):
        """The check under test, imported lazily per access."""
        from django.core.checks.security.sessions import check_session_cookie_secure
        return check_session_cookie_secure
    @override_settings(
        SESSION_COOKIE_SECURE=False,
        INSTALLED_APPS=["django.contrib.sessions"],
        MIDDLEWARE_CLASSES=[])
    def test_session_cookie_secure_with_installed_app(self):
        """
        Warn if SESSION_COOKIE_SECURE is off and "django.contrib.sessions" is
        in INSTALLED_APPS.
        """
        self.assertEqual(self.func(None), [sessions.W010])
    @override_settings(
        SESSION_COOKIE_SECURE=False,
        INSTALLED_APPS=[],
        MIDDLEWARE_CLASSES=["django.contrib.sessions.middleware.SessionMiddleware"])
    def test_session_cookie_secure_with_middleware(self):
        """
        Warn if SESSION_COOKIE_SECURE is off and
        "django.contrib.sessions.middleware.SessionMiddleware" is in
        MIDDLEWARE_CLASSES.
        """
        self.assertEqual(self.func(None), [sessions.W011])
    @override_settings(
        SESSION_COOKIE_SECURE=False,
        INSTALLED_APPS=["django.contrib.sessions"],
        MIDDLEWARE_CLASSES=["django.contrib.sessions.middleware.SessionMiddleware"])
    def test_session_cookie_secure_both(self):
        """
        If SESSION_COOKIE_SECURE is off and we find both the session app and
        the middleware, provide one common warning.
        """
        self.assertEqual(self.func(None), [sessions.W012])
    @override_settings(
        SESSION_COOKIE_SECURE=True,
        INSTALLED_APPS=["django.contrib.sessions"],
        MIDDLEWARE_CLASSES=["django.contrib.sessions.middleware.SessionMiddleware"])
    def test_session_cookie_secure_true(self):
        """
        If SESSION_COOKIE_SECURE is on, there's no warning about it.
        """
        self.assertEqual(self.func(None), [])
class CheckSessionCookieHttpOnlyTest(SimpleTestCase):
    """Tests for the check_session_cookie_httponly system check."""
    @property
    def func(self):
        """The check under test, imported lazily per access."""
        from django.core.checks.security.sessions import check_session_cookie_httponly
        return check_session_cookie_httponly
    @override_settings(
        SESSION_COOKIE_HTTPONLY=False,
        INSTALLED_APPS=["django.contrib.sessions"],
        MIDDLEWARE_CLASSES=[])
    def test_session_cookie_httponly_with_installed_app(self):
        """
        Warn if SESSION_COOKIE_HTTPONLY is off and "django.contrib.sessions"
        is in INSTALLED_APPS.
        """
        self.assertEqual(self.func(None), [sessions.W013])
    @override_settings(
        SESSION_COOKIE_HTTPONLY=False,
        INSTALLED_APPS=[],
        MIDDLEWARE_CLASSES=["django.contrib.sessions.middleware.SessionMiddleware"])
    def test_session_cookie_httponly_with_middleware(self):
        """
        Warn if SESSION_COOKIE_HTTPONLY is off and
        "django.contrib.sessions.middleware.SessionMiddleware" is in
        MIDDLEWARE_CLASSES.
        """
        self.assertEqual(self.func(None), [sessions.W014])
    @override_settings(
        SESSION_COOKIE_HTTPONLY=False,
        INSTALLED_APPS=["django.contrib.sessions"],
        MIDDLEWARE_CLASSES=["django.contrib.sessions.middleware.SessionMiddleware"])
    def test_session_cookie_httponly_both(self):
        """
        If SESSION_COOKIE_HTTPONLY is off and we find both the session app and
        the middleware, provide one common warning.
        """
        self.assertEqual(self.func(None), [sessions.W015])
    @override_settings(
        SESSION_COOKIE_HTTPONLY=True,
        INSTALLED_APPS=["django.contrib.sessions"],
        MIDDLEWARE_CLASSES=["django.contrib.sessions.middleware.SessionMiddleware"])
    def test_session_cookie_httponly_true(self):
        """
        If SESSION_COOKIE_HTTPONLY is on, there's no warning about it.
        """
        self.assertEqual(self.func(None), [])
class CheckCSRFMiddlewareTest(SimpleTestCase):
    """Tests for the check_csrf_middleware system check."""
    @property
    def func(self):
        """The check under test, imported lazily per access."""
        from django.core.checks.security.csrf import check_csrf_middleware
        return check_csrf_middleware
    @override_settings(MIDDLEWARE_CLASSES=[])
    def test_no_csrf_middleware(self):
        """
        Warn if CsrfViewMiddleware isn't in MIDDLEWARE_CLASSES.
        """
        self.assertEqual(self.func(None), [csrf.W003])
    @override_settings(
        MIDDLEWARE_CLASSES=["django.middleware.csrf.CsrfViewMiddleware"])
    def test_with_csrf_middleware(self):
        """No warning when CsrfViewMiddleware is installed."""
        self.assertEqual(self.func(None), [])
class CheckCSRFCookieSecureTest(SimpleTestCase):
    """Tests for the check_csrf_cookie_secure system check."""
    @property
    def func(self):
        """The check under test, imported lazily per access."""
        from django.core.checks.security.csrf import check_csrf_cookie_secure
        return check_csrf_cookie_secure
    @override_settings(
        MIDDLEWARE_CLASSES=["django.middleware.csrf.CsrfViewMiddleware"],
        CSRF_COOKIE_SECURE=False)
    def test_with_csrf_cookie_secure_false(self):
        """
        Warn if CsrfViewMiddleware is in MIDDLEWARE_CLASSES but
        CSRF_COOKIE_SECURE isn't True.
        """
        self.assertEqual(self.func(None), [csrf.W016])
    @override_settings(MIDDLEWARE_CLASSES=[], CSRF_COOKIE_SECURE=False)
    def test_with_csrf_cookie_secure_false_no_middleware(self):
        """
        No warning if CsrfViewMiddleware isn't in MIDDLEWARE_CLASSES, even if
        CSRF_COOKIE_SECURE is False.
        """
        self.assertEqual(self.func(None), [])
    @override_settings(
        MIDDLEWARE_CLASSES=["django.middleware.csrf.CsrfViewMiddleware"],
        CSRF_COOKIE_SECURE=True)
    def test_with_csrf_cookie_secure_true(self):
        """No warning when the middleware is installed and the cookie is secure."""
        self.assertEqual(self.func(None), [])
class CheckCSRFCookieHttpOnlyTest(SimpleTestCase):
    """Tests for the check_csrf_cookie_httponly system check."""
    @property
    def func(self):
        """The check under test, imported lazily per access."""
        from django.core.checks.security.csrf import check_csrf_cookie_httponly
        return check_csrf_cookie_httponly
    @override_settings(
        MIDDLEWARE_CLASSES=["django.middleware.csrf.CsrfViewMiddleware"],
        CSRF_COOKIE_HTTPONLY=False)
    def test_with_csrf_cookie_httponly_false(self):
        """
        Warn if CsrfViewMiddleware is in MIDDLEWARE_CLASSES but
        CSRF_COOKIE_HTTPONLY isn't True.
        """
        self.assertEqual(self.func(None), [csrf.W017])
    @override_settings(MIDDLEWARE_CLASSES=[], CSRF_COOKIE_HTTPONLY=False)
    def test_with_csrf_cookie_httponly_false_no_middleware(self):
        """
        No warning if CsrfViewMiddleware isn't in MIDDLEWARE_CLASSES, even if
        CSRF_COOKIE_HTTPONLY is False.
        """
        self.assertEqual(self.func(None), [])
    @override_settings(
        MIDDLEWARE_CLASSES=["django.middleware.csrf.CsrfViewMiddleware"],
        CSRF_COOKIE_HTTPONLY=True)
    def test_with_csrf_cookie_httponly_true(self):
        """No warning when the middleware is installed and the cookie is HttpOnly."""
        self.assertEqual(self.func(None), [])
class CheckSecurityMiddlewareTest(SimpleTestCase):
    """Tests for the check_security_middleware system check."""
    @property
    def func(self):
        """The check under test, imported lazily per access."""
        from django.core.checks.security.base import check_security_middleware
        return check_security_middleware
    @override_settings(MIDDLEWARE_CLASSES=[])
    def test_no_security_middleware(self):
        """
        Warn if SecurityMiddleware isn't in MIDDLEWARE_CLASSES.
        """
        self.assertEqual(self.func(None), [base.W001])
    @override_settings(
        MIDDLEWARE_CLASSES=["django.middleware.security.SecurityMiddleware"])
    def test_with_security_middleware(self):
        """No warning when SecurityMiddleware is installed."""
        self.assertEqual(self.func(None), [])
class CheckStrictTransportSecurityTest(SimpleTestCase):
    """Tests for the check_sts (HTTP Strict Transport Security) system check."""
    @property
    def func(self):
        """The check under test, imported lazily per access."""
        from django.core.checks.security.base import check_sts
        return check_sts
    @override_settings(
        MIDDLEWARE_CLASSES=["django.middleware.security.SecurityMiddleware"],
        SECURE_HSTS_SECONDS=0)
    def test_no_sts(self):
        """
        Warn if SECURE_HSTS_SECONDS isn't > 0.
        """
        self.assertEqual(self.func(None), [base.W004])
    @override_settings(
        MIDDLEWARE_CLASSES=[],
        SECURE_HSTS_SECONDS=0)
    def test_no_sts_no_middlware(self):
        """
        Don't warn if SECURE_HSTS_SECONDS isn't > 0 and SecurityMiddleware isn't
        installed.
        """
        self.assertEqual(self.func(None), [])
    @override_settings(
        MIDDLEWARE_CLASSES=["django.middleware.security.SecurityMiddleware"],
        SECURE_HSTS_SECONDS=3600)
    def test_with_sts(self):
        """No warning when HSTS is configured with a positive max-age."""
        self.assertEqual(self.func(None), [])
class CheckStrictTransportSecuritySubdomainsTest(SimpleTestCase):
    """Tests for the check_sts_include_subdomains system check."""
    @property
    def func(self):
        """The check under test, imported lazily per access."""
        from django.core.checks.security.base import check_sts_include_subdomains
        return check_sts_include_subdomains
    @override_settings(
        MIDDLEWARE_CLASSES=["django.middleware.security.SecurityMiddleware"],
        SECURE_HSTS_INCLUDE_SUBDOMAINS=False,
        SECURE_HSTS_SECONDS=3600)
    def test_no_sts_subdomains(self):
        """
        Warn if SECURE_HSTS_INCLUDE_SUBDOMAINS isn't True.
        """
        self.assertEqual(self.func(None), [base.W005])
    @override_settings(
        MIDDLEWARE_CLASSES=[],
        SECURE_HSTS_INCLUDE_SUBDOMAINS=False,
        SECURE_HSTS_SECONDS=3600)
    def test_no_sts_subdomains_no_middlware(self):
        """
        Don't warn if SecurityMiddleware isn't installed.
        """
        self.assertEqual(self.func(None), [])
    @override_settings(
        MIDDLEWARE_CLASSES=["django.middleware.security.SecurityMiddleware"],
        SECURE_SSL_REDIRECT=False,
        SECURE_HSTS_SECONDS=None)
    def test_no_sts_subdomains_no_seconds(self):
        """
        Don't warn if SECURE_HSTS_SECONDS isn't set.
        """
        self.assertEqual(self.func(None), [])
    @override_settings(
        MIDDLEWARE_CLASSES=["django.middleware.security.SecurityMiddleware"],
        SECURE_HSTS_INCLUDE_SUBDOMAINS=True,
        SECURE_HSTS_SECONDS=3600)
    def test_with_sts_subdomains(self):
        """No warning when includeSubDomains is enabled alongside HSTS."""
        self.assertEqual(self.func(None), [])
class CheckXFrameOptionsMiddlewareTest(SimpleTestCase):
    """Tests for the check_xframe_options_middleware system check."""
    @property
    def func(self):
        """The check under test, imported lazily per access."""
        from django.core.checks.security.base import check_xframe_options_middleware
        return check_xframe_options_middleware
    @override_settings(MIDDLEWARE_CLASSES=[])
    def test_middleware_not_installed(self):
        """
        Warn if XFrameOptionsMiddleware isn't in MIDDLEWARE_CLASSES.
        """
        self.assertEqual(self.func(None), [base.W002])
    @override_settings(MIDDLEWARE_CLASSES=["django.middleware.clickjacking.XFrameOptionsMiddleware"])
    def test_middleware_installed(self):
        """No warning when XFrameOptionsMiddleware is installed."""
        self.assertEqual(self.func(None), [])
class CheckXFrameOptionsDenyTest(SimpleTestCase):
    """Tests for the check_xframe_deny system check."""
    @property
    def func(self):
        """The check under test, imported lazily per access."""
        from django.core.checks.security.base import check_xframe_deny
        return check_xframe_deny
    @override_settings(
        MIDDLEWARE_CLASSES=["django.middleware.clickjacking.XFrameOptionsMiddleware"],
        X_FRAME_OPTIONS='SAMEORIGIN',
    )
    def test_x_frame_options_not_deny(self):
        """
        Warn if XFrameOptionsMiddleware is in MIDDLEWARE_CLASSES but
        X_FRAME_OPTIONS isn't 'DENY'.
        """
        self.assertEqual(self.func(None), [base.W019])
    @override_settings(MIDDLEWARE_CLASSES=[], X_FRAME_OPTIONS='SAMEORIGIN')
    def test_middleware_not_installed(self):
        """
        No error if XFrameOptionsMiddleware isn't in MIDDLEWARE_CLASSES even if
        X_FRAME_OPTIONS isn't 'DENY'.
        """
        self.assertEqual(self.func(None), [])
    @override_settings(
        MIDDLEWARE_CLASSES=["django.middleware.clickjacking.XFrameOptionsMiddleware"],
        X_FRAME_OPTIONS='DENY',
    )
    def test_xframe_deny(self):
        """No warning when X_FRAME_OPTIONS is 'DENY'."""
        self.assertEqual(self.func(None), [])
class CheckContentTypeNosniffTest(SimpleTestCase):
    """Tests for the check_content_type_nosniff system check."""
    @property
    def func(self):
        """The check under test, imported lazily per access."""
        from django.core.checks.security.base import check_content_type_nosniff
        return check_content_type_nosniff
    @override_settings(
        MIDDLEWARE_CLASSES=["django.middleware.security.SecurityMiddleware"],
        SECURE_CONTENT_TYPE_NOSNIFF=False)
    def test_no_content_type_nosniff(self):
        """
        Warn if SECURE_CONTENT_TYPE_NOSNIFF isn't True.
        """
        self.assertEqual(self.func(None), [base.W006])
    @override_settings(
        MIDDLEWARE_CLASSES=[],
        SECURE_CONTENT_TYPE_NOSNIFF=False)
    def test_no_content_type_nosniff_no_middleware(self):
        """
        Don't warn if SECURE_CONTENT_TYPE_NOSNIFF isn't True and
        SecurityMiddleware isn't in MIDDLEWARE_CLASSES.
        """
        self.assertEqual(self.func(None), [])
    @override_settings(
        MIDDLEWARE_CLASSES=["django.middleware.security.SecurityMiddleware"],
        SECURE_CONTENT_TYPE_NOSNIFF=True)
    def test_with_content_type_nosniff(self):
        """No warning when nosniff is enabled with the middleware."""
        self.assertEqual(self.func(None), [])
class CheckXssFilterTest(SimpleTestCase):
    """Tests for the check_xss_filter system check."""
    @property
    def func(self):
        """The check under test, imported lazily per access."""
        from django.core.checks.security.base import check_xss_filter
        return check_xss_filter
    @override_settings(
        MIDDLEWARE_CLASSES=["django.middleware.security.SecurityMiddleware"],
        SECURE_BROWSER_XSS_FILTER=False)
    def test_no_xss_filter(self):
        """
        Warn if SECURE_BROWSER_XSS_FILTER isn't True.
        """
        self.assertEqual(self.func(None), [base.W007])
    @override_settings(
        MIDDLEWARE_CLASSES=[],
        SECURE_BROWSER_XSS_FILTER=False)
    def test_no_xss_filter_no_middleware(self):
        """
        Don't warn if SECURE_BROWSER_XSS_FILTER isn't True and
        SecurityMiddleware isn't in MIDDLEWARE_CLASSES.
        """
        self.assertEqual(self.func(None), [])
    @override_settings(
        MIDDLEWARE_CLASSES=["django.middleware.security.SecurityMiddleware"],
        SECURE_BROWSER_XSS_FILTER=True)
    def test_with_xss_filter(self):
        """No warning when the XSS filter header is enabled."""
        self.assertEqual(self.func(None), [])
class CheckSSLRedirectTest(SimpleTestCase):
    """Tests for the check_ssl_redirect system check."""
    @property
    def func(self):
        """The check under test, imported lazily per access."""
        from django.core.checks.security.base import check_ssl_redirect
        return check_ssl_redirect
    @override_settings(
        MIDDLEWARE_CLASSES=["django.middleware.security.SecurityMiddleware"],
        SECURE_SSL_REDIRECT=False)
    def test_no_ssl_redirect(self):
        """
        Warn if SECURE_SSL_REDIRECT isn't True.
        """
        self.assertEqual(self.func(None), [base.W008])
    @override_settings(
        MIDDLEWARE_CLASSES=[],
        SECURE_SSL_REDIRECT=False)
    def test_no_ssl_redirect_no_middlware(self):
        """
        Don't warn if SECURE_SSL_REDIRECT is False and SecurityMiddleware isn't
        installed.
        """
        self.assertEqual(self.func(None), [])
    @override_settings(
        MIDDLEWARE_CLASSES=["django.middleware.security.SecurityMiddleware"],
        SECURE_SSL_REDIRECT=True)
    def test_with_ssl_redirect(self):
        """No warning when the SSL redirect is enabled."""
        self.assertEqual(self.func(None), [])
class CheckSecretKeyTest(SimpleTestCase):
    """Tests for the check_secret_key system check (length and entropy)."""
    @property
    def func(self):
        """The check under test, imported lazily per access."""
        from django.core.checks.security.base import check_secret_key
        return check_secret_key
    @override_settings(SECRET_KEY=('abcdefghijklmnopqrstuvwx' * 2) + 'ab')
    def test_okay_secret_key(self):
        """No warning for a key that meets both length and uniqueness minimums."""
        self.assertEqual(len(settings.SECRET_KEY), base.SECRET_KEY_MIN_LENGTH)
        self.assertGreater(len(set(settings.SECRET_KEY)), base.SECRET_KEY_MIN_UNIQUE_CHARACTERS)
        self.assertEqual(self.func(None), [])
    @override_settings(SECRET_KEY='')
    def test_empty_secret_key(self):
        """Warn on an empty SECRET_KEY."""
        self.assertEqual(self.func(None), [base.W009])
    @override_settings(SECRET_KEY=None)
    def test_missing_secret_key(self):
        """Warn when SECRET_KEY is not defined at all."""
        del settings.SECRET_KEY
        self.assertEqual(self.func(None), [base.W009])
    @override_settings(SECRET_KEY=None)
    def test_none_secret_key(self):
        """Warn when SECRET_KEY is None."""
        self.assertEqual(self.func(None), [base.W009])
    @override_settings(SECRET_KEY=('abcdefghijklmnopqrstuvwx' * 2) + 'a')
    def test_low_length_secret_key(self):
        """Warn on a key one character shorter than the minimum length."""
        self.assertEqual(len(settings.SECRET_KEY), base.SECRET_KEY_MIN_LENGTH - 1)
        self.assertEqual(self.func(None), [base.W009])
    @override_settings(SECRET_KEY='abcd' * 20)
    def test_low_entropy_secret_key(self):
        """Warn on a long key built from too few distinct characters."""
        self.assertGreater(len(settings.SECRET_KEY), base.SECRET_KEY_MIN_LENGTH)
        self.assertLess(len(set(settings.SECRET_KEY)), base.SECRET_KEY_MIN_UNIQUE_CHARACTERS)
        self.assertEqual(self.func(None), [base.W009])
class CheckDebugTest(SimpleTestCase):
    """Tests for the check_debug system check."""
    @property
    def func(self):
        """The check under test, imported lazily per access."""
        from django.core.checks.security.base import check_debug
        return check_debug
    @override_settings(DEBUG=True)
    def test_debug_true(self):
        """
        Warn if DEBUG is True.
        """
        self.assertEqual(self.func(None), [base.W018])
    @override_settings(DEBUG=False)
    def test_debug_false(self):
        """No warning when DEBUG is False."""
        self.assertEqual(self.func(None), [])
class CheckAllowedHostsTest(SimpleTestCase):
    """Tests for the check_allowed_hosts system check."""
    @property
    def func(self):
        """The check under test, imported lazily per access."""
        from django.core.checks.security.base import check_allowed_hosts
        return check_allowed_hosts
    @override_settings(ALLOWED_HOSTS=[])
    def test_allowed_hosts_empty(self):
        """Warn when ALLOWED_HOSTS is empty."""
        self.assertEqual(self.func(None), [base.W020])
    @override_settings(ALLOWED_HOSTS=['.example.com', ])
    def test_allowed_hosts_set(self):
        """No warning when ALLOWED_HOSTS is populated."""
        self.assertEqual(self.func(None), [])
| bsd-3-clause |
qateam123/eq | app/validation/date_range_check.py | 1 | 1649 | import logging
from datetime import datetime
from app.validation.abstract_validator import AbstractValidator
from app.validation.validation_result import ValidationResult
logger = logging.getLogger(__name__)
class DateRangeCheck(AbstractValidator):
    """Validator checking that a pair of dd/mm/YYYY answers forms a valid range."""
    def validate(self, user_answers):
        """
        Validate that the users answer is a valid date range
        :param user_answers: The answer the user provided for the response
        :return: ValidationResult(): An object containing the result of the validation
        """
        result = ValidationResult(False)
        logger.debug('Type Checking question date range with data %s', user_answers)
        try:
            if len(user_answers) == 2:
                from_date = datetime.strptime(user_answers[0], "%d/%m/%Y")
                to_date = datetime.strptime(user_answers[1], "%d/%m/%Y")
                date_diff = to_date - from_date
                if date_diff.total_seconds() > 0:
                    return ValidationResult(True)
                elif date_diff.total_seconds() == 0:
                    result.errors.append(AbstractValidator.INVALID_DATE_RANGE_TO_FROM_SAME)
                else:
                    result.errors.append(AbstractValidator.INVALID_DATE_RANGE_TO_BEFORE_FROM)
                return result
        # One tuple handler replaces the three duplicated except clauses that
        # each appended the same INVALID_DATE error: ValueError for a bad
        # format, TypeError/AttributeError for non-string answers.
        except (ValueError, TypeError, AttributeError):
            result.errors.append(AbstractValidator.INVALID_DATE)
        # NOTE(review): when the answer list isn't exactly two items this
        # returns a failed result with no error message -- preserved as-is.
        return result
| mit |
CyanogenMod/android_external_chromium_org | build/android/gyp/create_standalone_apk.py | 126 | 1886 | #!/usr/bin/env python
#
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Combines stripped libraries and incomplete APK into single standalone APK.
"""
import optparse
import os
import shutil
import sys
import tempfile
from util import build_utils
from util import md5_check
def CreateStandaloneApk(options):
  """Merge the stripped native libraries into the incomplete APK.

  Zips the lib/ tree under options.libraries_top_dir into a copy of
  options.input_apk_path and writes the result to options.output_apk_path.
  The work is skipped (via md5_check) when neither input changed since the
  recorded stamp file.
  """
  def DoZip():
    # Zip into a temp copy first so a failed zip never corrupts the output.
    with tempfile.NamedTemporaryFile(suffix='.zip') as intermediate_file:
      intermediate_path = intermediate_file.name
      shutil.copy(options.input_apk_path, intermediate_path)
      apk_path_abs = os.path.abspath(intermediate_path)
      build_utils.CheckOutput(
          ['zip', '-r', '-1', apk_path_abs, 'lib'],
          cwd=options.libraries_top_dir)
      shutil.copy(intermediate_path, options.output_apk_path)
  input_paths = [options.input_apk_path, options.libraries_top_dir]
  record_path = '%s.standalone.stamp' % options.input_apk_path
  md5_check.CallAndRecordIfStale(
      DoZip,
      record_path=record_path,
      input_paths=input_paths)
def main():
  """Parse command-line flags, build the standalone APK, then touch the stamp."""
  parser = optparse.OptionParser()
  parser.add_option('--libraries-top-dir',
                    help='Top directory that contains libraries '
                    '(i.e. library paths are like '
                    'libraries_top_dir/lib/android_app_abi/foo.so).')
  parser.add_option('--input-apk-path', help='Path to incomplete APK.')
  parser.add_option('--output-apk-path', help='Path for standalone APK.')
  parser.add_option('--stamp', help='Path to touch on success.')
  options, _ = parser.parse_args()
  required_options = ['libraries_top_dir', 'input_apk_path', 'output_apk_path']
  build_utils.CheckOptions(options, parser, required=required_options)
  CreateStandaloneApk(options)
  # --stamp is optional; only touch it when the caller asked for one.
  if options.stamp:
    build_utils.Touch(options.stamp)
| bsd-3-clause |
def get_response(raw_headers, filter_value, filter_name):
    """Summarise the request headers.

    With a non-empty *filter_value*, list (comma-terminated) the names of
    headers whose value equals it; otherwise report ``name: value`` lines
    for headers whose name matches the lowercase *filter_name*.  A
    malformed header line aborts with a syntax-error message.
    """
    pieces = []
    for raw_line in raw_headers.headers:
        if not raw_line.endswith('\r\n'):
            return "Syntax error: missing CRLF: " + raw_line
        stripped = raw_line[:-2]
        if ':' not in stripped:
            return "Syntax error: no colon found: " + stripped
        name, value = stripped.split(':', 1)
        # Drop a single leading space, but only when something follows it.
        if len(value) > 1 and value.startswith(' '):
            value = value[1:]
        if filter_value:
            if value == filter_value:
                pieces.append(name + ",")
        elif name.lower() == filter_name:
            pieces.append(name + ": " + value + "\n")
    return "".join(pieces)
def main(request, response):
    """wptserve handler: report request-header details per query parameters.

    When ``cors`` appears in the query string, permissive CORS response
    headers are added.  ``filter_value``/``filter_name`` choose what
    get_response() extracts from the raw request headers.
    """
    headers = []
    if "cors" in request.GET:
        headers.append(("Access-Control-Allow-Origin", "*"))
        headers.append(("Access-Control-Allow-Credentials", "true"))
        headers.append(("Access-Control-Allow-Methods", "GET, POST, PUT, FOO"))
        headers.append(("Access-Control-Allow-Headers", "x-test, x-foo"))
        headers.append(("Access-Control-Expose-Headers", "x-request-method, x-request-content-type, x-request-query, x-request-content-length"))
    headers.append(("content-type", "text/plain"))
    filter_value = request.GET.first("filter_value", "")
    filter_name = request.GET.first("filter_name", "").lower()
    result = get_response(request.raw_headers, filter_value, filter_name)
    return headers, result
| mpl-2.0 |
rob356/SickRage | lib/unidecode/x093.py | 252 | 4666 | data = (
'Lun ', # 0x00
'Kua ', # 0x01
'Ling ', # 0x02
'Bei ', # 0x03
'Lu ', # 0x04
'Li ', # 0x05
'Qiang ', # 0x06
'Pou ', # 0x07
'Juan ', # 0x08
'Min ', # 0x09
'Zui ', # 0x0a
'Peng ', # 0x0b
'An ', # 0x0c
'Pi ', # 0x0d
'Xian ', # 0x0e
'Ya ', # 0x0f
'Zhui ', # 0x10
'Lei ', # 0x11
'A ', # 0x12
'Kong ', # 0x13
'Ta ', # 0x14
'Kun ', # 0x15
'Du ', # 0x16
'Wei ', # 0x17
'Chui ', # 0x18
'Zi ', # 0x19
'Zheng ', # 0x1a
'Ben ', # 0x1b
'Nie ', # 0x1c
'Cong ', # 0x1d
'Qun ', # 0x1e
'Tan ', # 0x1f
'Ding ', # 0x20
'Qi ', # 0x21
'Qian ', # 0x22
'Zhuo ', # 0x23
'Qi ', # 0x24
'Yu ', # 0x25
'Jin ', # 0x26
'Guan ', # 0x27
'Mao ', # 0x28
'Chang ', # 0x29
'Tian ', # 0x2a
'Xi ', # 0x2b
'Lian ', # 0x2c
'Tao ', # 0x2d
'Gu ', # 0x2e
'Cuo ', # 0x2f
'Shu ', # 0x30
'Zhen ', # 0x31
'Lu ', # 0x32
'Meng ', # 0x33
'Lu ', # 0x34
'Hua ', # 0x35
'Biao ', # 0x36
'Ga ', # 0x37
'Lai ', # 0x38
'Ken ', # 0x39
'Kazari ', # 0x3a
'Bu ', # 0x3b
'Nai ', # 0x3c
'Wan ', # 0x3d
'Zan ', # 0x3e
'[?] ', # 0x3f
'De ', # 0x40
'Xian ', # 0x41
'[?] ', # 0x42
'Huo ', # 0x43
'Liang ', # 0x44
'[?] ', # 0x45
'Men ', # 0x46
'Kai ', # 0x47
'Ying ', # 0x48
'Di ', # 0x49
'Lian ', # 0x4a
'Guo ', # 0x4b
'Xian ', # 0x4c
'Du ', # 0x4d
'Tu ', # 0x4e
'Wei ', # 0x4f
'Cong ', # 0x50
'Fu ', # 0x51
'Rou ', # 0x52
'Ji ', # 0x53
'E ', # 0x54
'Rou ', # 0x55
'Chen ', # 0x56
'Ti ', # 0x57
'Zha ', # 0x58
'Hong ', # 0x59
'Yang ', # 0x5a
'Duan ', # 0x5b
'Xia ', # 0x5c
'Yu ', # 0x5d
'Keng ', # 0x5e
'Xing ', # 0x5f
'Huang ', # 0x60
'Wei ', # 0x61
'Fu ', # 0x62
'Zhao ', # 0x63
'Cha ', # 0x64
'Qie ', # 0x65
'She ', # 0x66
'Hong ', # 0x67
'Kui ', # 0x68
'Tian ', # 0x69
'Mou ', # 0x6a
'Qiao ', # 0x6b
'Qiao ', # 0x6c
'Hou ', # 0x6d
'Tou ', # 0x6e
'Cong ', # 0x6f
'Huan ', # 0x70
'Ye ', # 0x71
'Min ', # 0x72
'Jian ', # 0x73
'Duan ', # 0x74
'Jian ', # 0x75
'Song ', # 0x76
'Kui ', # 0x77
'Hu ', # 0x78
'Xuan ', # 0x79
'Duo ', # 0x7a
'Jie ', # 0x7b
'Zhen ', # 0x7c
'Bian ', # 0x7d
'Zhong ', # 0x7e
'Zi ', # 0x7f
'Xiu ', # 0x80
'Ye ', # 0x81
'Mei ', # 0x82
'Pai ', # 0x83
'Ai ', # 0x84
'Jie ', # 0x85
'[?] ', # 0x86
'Mei ', # 0x87
'Chuo ', # 0x88
'Ta ', # 0x89
'Bang ', # 0x8a
'Xia ', # 0x8b
'Lian ', # 0x8c
'Suo ', # 0x8d
'Xi ', # 0x8e
'Liu ', # 0x8f
'Zu ', # 0x90
'Ye ', # 0x91
'Nou ', # 0x92
'Weng ', # 0x93
'Rong ', # 0x94
'Tang ', # 0x95
'Suo ', # 0x96
'Qiang ', # 0x97
'Ge ', # 0x98
'Shuo ', # 0x99
'Chui ', # 0x9a
'Bo ', # 0x9b
'Pan ', # 0x9c
'Sa ', # 0x9d
'Bi ', # 0x9e
'Sang ', # 0x9f
'Gang ', # 0xa0
'Zi ', # 0xa1
'Wu ', # 0xa2
'Ying ', # 0xa3
'Huang ', # 0xa4
'Tiao ', # 0xa5
'Liu ', # 0xa6
'Kai ', # 0xa7
'Sun ', # 0xa8
'Sha ', # 0xa9
'Sou ', # 0xaa
'Wan ', # 0xab
'Hao ', # 0xac
'Zhen ', # 0xad
'Zhen ', # 0xae
'Luo ', # 0xaf
'Yi ', # 0xb0
'Yuan ', # 0xb1
'Tang ', # 0xb2
'Nie ', # 0xb3
'Xi ', # 0xb4
'Jia ', # 0xb5
'Ge ', # 0xb6
'Ma ', # 0xb7
'Juan ', # 0xb8
'Kasugai ', # 0xb9
'Habaki ', # 0xba
'Suo ', # 0xbb
'[?] ', # 0xbc
'[?] ', # 0xbd
'[?] ', # 0xbe
'Na ', # 0xbf
'Lu ', # 0xc0
'Suo ', # 0xc1
'Ou ', # 0xc2
'Zu ', # 0xc3
'Tuan ', # 0xc4
'Xiu ', # 0xc5
'Guan ', # 0xc6
'Xuan ', # 0xc7
'Lian ', # 0xc8
'Shou ', # 0xc9
'Ao ', # 0xca
'Man ', # 0xcb
'Mo ', # 0xcc
'Luo ', # 0xcd
'Bi ', # 0xce
'Wei ', # 0xcf
'Liu ', # 0xd0
'Di ', # 0xd1
'Qiao ', # 0xd2
'Cong ', # 0xd3
'Yi ', # 0xd4
'Lu ', # 0xd5
'Ao ', # 0xd6
'Keng ', # 0xd7
'Qiang ', # 0xd8
'Cui ', # 0xd9
'Qi ', # 0xda
'Chang ', # 0xdb
'Tang ', # 0xdc
'Man ', # 0xdd
'Yong ', # 0xde
'Chan ', # 0xdf
'Feng ', # 0xe0
'Jing ', # 0xe1
'Biao ', # 0xe2
'Shu ', # 0xe3
'Lou ', # 0xe4
'Xiu ', # 0xe5
'Cong ', # 0xe6
'Long ', # 0xe7
'Zan ', # 0xe8
'Jian ', # 0xe9
'Cao ', # 0xea
'Li ', # 0xeb
'Xia ', # 0xec
'Xi ', # 0xed
'Kang ', # 0xee
'[?] ', # 0xef
'Beng ', # 0xf0
'[?] ', # 0xf1
'[?] ', # 0xf2
'Zheng ', # 0xf3
'Lu ', # 0xf4
'Hua ', # 0xf5
'Ji ', # 0xf6
'Pu ', # 0xf7
'Hui ', # 0xf8
'Qiang ', # 0xf9
'Po ', # 0xfa
'Lin ', # 0xfb
'Suo ', # 0xfc
'Xiu ', # 0xfd
'San ', # 0xfe
'Cheng ', # 0xff
)
| gpl-3.0 |
1950195/mojito | docs/dev_guide/intro/conf.py | 6 | 7244 |
#
# Cocktails documentation build configuration file, created by
# sphinx-quickstart on Wed Oct 12 18:07:15 2011.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.doctest', 'sphinx.ext.ifconfig']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['/home/y/share/htdocs/cocktails/sphinx_rst_ydn/ydn_template/']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
# project = u'Cocktails'
copyright = u'2011, Yahoo! Inc., 2011'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
#version = '0.1.0.178'
# The full version, including alpha/beta/rc tags.
#release = '0.1.0.178'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['.build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
highlight_language = 'javascript'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'ydntheme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['/home/y/share/htdocs/cocktails/sphinx_rst_ydn/ydn_template/']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
html_title = 'Mojito Intro'
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = 'images/molotov-cocktail_logo.png'
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = 'images/Mojito.ico'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['.static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {
# '**':["other_links.html"]
# }
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
html_show_sourcelink = False
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'MojitoIntro'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'Cocktails.tex', u'Cocktails Documentation',
u'Joe Catera', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'Mojito', u'Mojito Intro',
[u'Joe Catera'], 1)
]
| bsd-3-clause |
stonebig/numba | numba/core/controlflow.py | 1 | 30419 | import collections
import functools
import sys
from numba.core import utils
from numba.core.ir import Loc
from numba.core.errors import UnsupportedError
# List of bytecodes creating a new block in the control flow graph
# (in addition to explicit jump labels).
NEW_BLOCKERS = frozenset(['SETUP_LOOP', 'FOR_ITER', 'SETUP_WITH'])
class CFBlock(object):
    """A basic block in the bytecode control flow graph.

    Records the block's starting bytecode offset, the offsets of the
    instructions it contains (``body``), and its jump links to
    neighboring blocks.
    """

    def __init__(self, offset):
        self.offset = offset
        self.body = []
        # Successor links: {offset of outgoing block -> number of stack pops}
        self.outgoing_jumps = {}
        # Predecessor links: {offset of incoming block -> number of stack pops}
        self.incoming_jumps = {}
        self.terminating = False

    def __repr__(self):
        return "block(offset:%d, outgoing: %s, incoming: %s)" % (
            self.offset,
            sorted(self.outgoing_jumps),
            sorted(self.incoming_jumps),
        )

    def __iter__(self):
        # Iterating a block yields the offsets of its instructions.
        return iter(self.body)
class Loop(collections.namedtuple("Loop",
                                  ("entries", "exits", "header", "body"))):
    """
    A control flow loop, as detected by a CFGraph object.

    Fields (see CFGraph._find_loops):
    - entries: nodes outside the loop with an edge into its body
    - exits: nodes outside the loop with an edge from its body
    - header: the loop header node (destination of the back edge(s))
    - body: the set of loop nodes, including the header
    """
    __slots__ = ()

    # The loop header is enough to detect that two loops are really
    # the same, assuming they belong to the same graph.
    # (note: in practice, only one loop instance is created per graph
    # loop, so identity would be fine)
    def __eq__(self, other):
        return isinstance(other, Loop) and other.header == self.header

    def __hash__(self):
        # Consistent with __eq__: equality and hashing both key on header.
        return hash(self.header)
class _DictOfContainers(collections.defaultdict):
"""A defaultdict with customized equality checks that ignore empty values.
Non-empty value is checked by: `bool(value_item) == True`.
"""
def __eq__(self, other):
if isinstance(other, _DictOfContainers):
mine = self._non_empty_items()
theirs = other._non_empty_items()
return mine == theirs
return NotImplemented
def __ne__(self, other):
ret = self.__eq__(other)
if ret is NotImplemented:
return ret
else:
return not ret
def _non_empty_items(self):
return [(k, vs) for k, vs in sorted(self.items()) if vs]
class CFGraph(object):
"""
Generic (almost) implementation of a Control Flow Graph.
"""
def __init__(self):
self._nodes = set()
self._preds = _DictOfContainers(set)
self._succs = _DictOfContainers(set)
self._edge_data = {}
self._entry_point = None
def add_node(self, node):
"""
Add *node* to the graph. This is necessary before adding any
edges from/to the node. *node* can be any hashable object.
"""
self._nodes.add(node)
def add_edge(self, src, dest, data=None):
"""
Add an edge from node *src* to node *dest*, with optional
per-edge *data*.
If such an edge already exists, it is replaced (duplicate edges
are not possible).
"""
if src not in self._nodes:
raise ValueError("Cannot add edge as src node %s not in nodes %s" %
(src, self._nodes))
if dest not in self._nodes:
raise ValueError("Cannot add edge as dest node %s not in nodes %s" %
(dest, self._nodes))
self._add_edge(src, dest, data)
def successors(self, src):
"""
Yield (node, data) pairs representing the successors of node *src*.
(*data* will be None if no data was specified when adding the edge)
"""
for dest in self._succs[src]:
yield dest, self._edge_data[src, dest]
def predecessors(self, dest):
"""
Yield (node, data) pairs representing the predecessors of node *dest*.
(*data* will be None if no data was specified when adding the edge)
"""
for src in self._preds[dest]:
yield src, self._edge_data[src, dest]
def set_entry_point(self, node):
"""
Set the entry point of the graph to *node*.
"""
assert node in self._nodes
self._entry_point = node
def process(self):
"""
Compute essential properties of the control flow graph. The graph
must have been fully populated, and its entry point specified. Other
graph properties are computed on-demand.
"""
if self._entry_point is None:
raise RuntimeError("no entry point defined!")
self._eliminate_dead_blocks()
def dominators(self):
"""
Return a dictionary of {node -> set(nodes)} mapping each node to
the nodes dominating it.
A node D dominates a node N when any path leading to N must go through D
"""
return self._doms
def post_dominators(self):
"""
Return a dictionary of {node -> set(nodes)} mapping each node to
the nodes post-dominating it.
A node P post-dominates a node N when any path starting from N must go
through P.
"""
return self._post_doms
def immediate_dominators(self):
"""
Return a dictionary of {node -> node} mapping each node to its
immediate dominator (idom).
The idom(B) is the closest strict dominator of V
"""
return self._idom
def dominance_frontier(self):
"""
Return a dictionary of {node -> set(nodes)} mapping each node to
the nodes in its dominance frontier.
The dominance frontier _df(N) is the set of all nodes that are
immediate successors to blocks dominanted by N but which aren't
stricly dominanted by N
"""
return self._df
    def dominator_tree(self):
        """
        Return a dictionary of {node -> set(nodes)} mapping each node to
        the set of nodes whose immediate dominator it is, i.e. its
        children in the dominator tree (derived from the idom mapping;
        see _find_dominator_tree).
        """
        return self._domtree
@utils.cached_property
def _exit_points(self):
return self._find_exit_points()
@utils.cached_property
def _doms(self):
return self._find_dominators()
@utils.cached_property
def _back_edges(self):
return self._find_back_edges()
@utils.cached_property
def _topo_order(self):
return self._find_topo_order()
@utils.cached_property
def _descs(self):
return self._find_descendents()
@utils.cached_property
def _loops(self):
return self._find_loops()
@utils.cached_property
def _in_loops(self):
return self._find_in_loops()
@utils.cached_property
def _post_doms(self):
return self._find_post_dominators()
@utils.cached_property
def _idom(self):
return self._find_immediate_dominators()
@utils.cached_property
def _df(self):
return self._find_dominance_frontier()
@utils.cached_property
def _domtree(self):
return self._find_dominator_tree()
def descendents(self, node):
"""
Return the set of descendents of the given *node*, in topological
order (ignoring back edges).
"""
return self._descs[node]
def entry_point(self):
"""
Return the entry point node.
"""
assert self._entry_point is not None
return self._entry_point
def exit_points(self):
"""
Return the computed set of exit nodes (may be empty).
"""
return self._exit_points
def backbone(self):
"""
Return the set of nodes constituting the graph's backbone.
(i.e. the nodes that every path starting from the entry point
must go through). By construction, it is non-empty: it contains
at least the entry point.
"""
return self._post_doms[self._entry_point]
def loops(self):
"""
Return a dictionary of {node -> loop} mapping each loop header
to the loop (a Loop instance) starting with it.
"""
return self._loops
def in_loops(self, node):
"""
Return the list of Loop objects the *node* belongs to,
from innermost to outermost.
"""
return [self._loops[x] for x in self._in_loops.get(node, ())]
def dead_nodes(self):
"""
Return the set of dead nodes (eliminated from the graph).
"""
return self._dead_nodes
def nodes(self):
"""
Return the set of live nodes.
"""
return self._nodes
def topo_order(self):
"""
Return the sequence of nodes in topological order (ignoring back
edges).
"""
return self._topo_order
def topo_sort(self, nodes, reverse=False):
"""
Iterate over the *nodes* in topological order (ignoring back edges).
The sort isn't guaranteed to be stable.
"""
nodes = set(nodes)
it = self._topo_order
if reverse:
it = reversed(it)
for n in it:
if n in nodes:
yield n
def dump(self, file=None):
"""
Dump extensive debug information.
"""
import pprint
file = file or sys.stdout
if 1:
print("CFG adjacency lists:", file=file)
self._dump_adj_lists(file)
print("CFG dominators:", file=file)
pprint.pprint(self._doms, stream=file)
print("CFG post-dominators:", file=file)
pprint.pprint(self._post_doms, stream=file)
print("CFG back edges:", sorted(self._back_edges), file=file)
print("CFG loops:", file=file)
pprint.pprint(self._loops, stream=file)
print("CFG node-to-loops:", file=file)
pprint.pprint(self._in_loops, stream=file)
print("CFG backbone:", file=file)
pprint.pprint(self.backbone(), stream=file)
def render_dot(self, filename="numba_cfg.dot"):
"""Render the controlflow graph with GraphViz DOT via the
``graphviz`` python binding.
Returns
-------
g : graphviz.Digraph
Use `g.view()` to open the graph in the default PDF application.
"""
try:
import graphviz as gv
except ImportError:
raise ImportError(
"The feature requires `graphviz` but it is not available. "
"Please install with `pip install graphviz`"
)
g = gv.Digraph(filename=filename)
# Populate the nodes
for n in self._nodes:
g.node(str(n))
# Populate the edges
for n in self._nodes:
for edge in self._succs[n]:
g.edge(str(n), str(edge))
return g
# Internal APIs
def _add_edge(self, from_, to, data=None):
# This internal version allows adding edges to/from unregistered
# (ghost) nodes.
self._preds[to].add(from_)
self._succs[from_].add(to)
self._edge_data[from_, to] = data
def _remove_node_edges(self, node):
for succ in self._succs.pop(node, ()):
self._preds[succ].remove(node)
del self._edge_data[node, succ]
for pred in self._preds.pop(node, ()):
self._succs[pred].remove(node)
del self._edge_data[pred, node]
def _dfs(self, entries=None):
if entries is None:
entries = (self._entry_point,)
seen = set()
stack = list(entries)
while stack:
node = stack.pop()
if node not in seen:
yield node
seen.add(node)
for succ in self._succs[node]:
stack.append(succ)
def _eliminate_dead_blocks(self):
"""
Eliminate all blocks not reachable from the entry point, and
stash them into self._dead_nodes.
"""
live = set()
for node in self._dfs():
live.add(node)
self._dead_nodes = self._nodes - live
self._nodes = live
# Remove all edges leading from dead nodes
for dead in self._dead_nodes:
self._remove_node_edges(dead)
def _find_exit_points(self):
"""
Compute the graph's exit points.
"""
exit_points = set()
for n in self._nodes:
if not self._succs.get(n):
exit_points.add(n)
return exit_points
    def _find_postorder(self):
        # Compute a post-order traversal of the nodes reachable from the
        # entry point, skipping back edges so the traversal terminates.
        # NOTE(review): this duplicates the DFS in _find_topo_order()
        # (which additionally reverses the result) -- keep them in sync.
        # NOTE(review): recursion depth is bounded by the CFG depth;
        # presumably fine for bytecode-sized graphs, but a very deep CFG
        # could hit the interpreter recursion limit -- confirm.
        succs = self._succs
        back_edges = self._back_edges
        post_order = []
        seen = set()
        def _dfs_rec(node):
            if node not in seen:
                seen.add(node)
                for dest in succs[node]:
                    # Ignore back edges to avoid infinite descent.
                    if (node, dest) not in back_edges:
                        _dfs_rec(dest)
                # Append after all forward successors: post-order.
                post_order.append(node)
        _dfs_rec(self._entry_point)
        return post_order
def _find_immediate_dominators(self):
# The algorithm implemented computes the immediate dominator
# for each node in the CFG which is equivalent to build a dominator tree
# Based on the implementation from NetworkX
# library - nx.immediate_dominators
# https://github.com/networkx/networkx/blob/858e7cb183541a78969fed0cbcd02346f5866c02/networkx/algorithms/dominance.py # noqa: E501
# References:
# Keith D. Cooper, Timothy J. Harvey, and Ken Kennedy
# A Simple, Fast Dominance Algorithm
# https://www.cs.rice.edu/~keith/EMBED/dom.pdf
def intersect(u, v):
while u != v:
while idx[u] < idx[v]:
u = idom[u]
while idx[u] > idx[v]:
v = idom[v]
return u
entry = self._entry_point
preds_table = self._preds
order = self._find_postorder()
idx = {e: i for i, e in enumerate(order)} # index of each node
idom = {entry : entry}
order.pop()
order.reverse()
changed = True
while changed:
changed = False
for u in order:
new_idom = functools.reduce(intersect,
(v for v in preds_table[u]
if v in idom))
if u not in idom or idom[u] != new_idom:
idom[u] = new_idom
changed = True
return idom
def _find_dominator_tree(self):
idom = self._idom
domtree = _DictOfContainers(set)
for u, v in idom.items():
# v dominates u
if u not in domtree:
domtree[u] = set()
if u != v:
domtree[v].add(u)
return domtree
def _find_dominance_frontier(self):
idom = self._idom
preds_table = self._preds
df = {u: set() for u in idom}
for u in idom:
if len(preds_table[u]) < 2:
continue
for v in preds_table[u]:
while v != idom[u]:
df[v].add(u)
v = idom[v]
return df
def _find_dominators_internal(self, post=False):
# See theoretical description in
# http://en.wikipedia.org/wiki/Dominator_%28graph_theory%29
# The algorithm implemented here uses a todo-list as described
# in http://pages.cs.wisc.edu/~fischer/cs701.f08/finding.loops.html
if post:
entries = set(self._exit_points)
preds_table = self._succs
succs_table = self._preds
else:
entries = set([self._entry_point])
preds_table = self._preds
succs_table = self._succs
if not entries:
raise RuntimeError("no entry points: dominator algorithm "
"cannot be seeded")
doms = {}
for e in entries:
doms[e] = set([e])
todo = []
for n in self._nodes:
if n not in entries:
doms[n] = set(self._nodes)
todo.append(n)
while todo:
n = todo.pop()
if n in entries:
continue
new_doms = set([n])
preds = preds_table[n]
if preds:
new_doms |= functools.reduce(set.intersection,
[doms[p] for p in preds])
if new_doms != doms[n]:
assert len(new_doms) < len(doms[n])
doms[n] = new_doms
todo.extend(succs_table[n])
return doms
def _find_dominators(self):
return self._find_dominators_internal(post=False)
def _find_post_dominators(self):
# To handle infinite loops correctly, we need to add a dummy
# exit point, and link members of infinite loops to it.
dummy_exit = object()
self._exit_points.add(dummy_exit)
for loop in self._loops.values():
if not loop.exits:
for b in loop.body:
self._add_edge(b, dummy_exit)
pdoms = self._find_dominators_internal(post=True)
# Fix the _post_doms table to make no reference to the dummy exit
del pdoms[dummy_exit]
for doms in pdoms.values():
doms.discard(dummy_exit)
self._remove_node_edges(dummy_exit)
self._exit_points.remove(dummy_exit)
return pdoms
# Finding loops and back edges: see
# http://pages.cs.wisc.edu/~fischer/cs701.f08/finding.loops.html
def _find_back_edges(self, stats=None):
"""
Find back edges. An edge (src, dest) is a back edge if and
only if *dest* dominates *src*.
"""
# Prepare stats to capture execution information
if stats is not None:
if not isinstance(stats, dict):
raise TypeError(f"*stats* must be a dict; got {type(stats)}")
stats.setdefault('iteration_count', 0)
# Uses a simple DFS to find back-edges.
# The new algorithm is faster than the the previous dominator based
# algorithm.
back_edges = set()
# stack: keeps track of the traversal path
stack = []
# succs_state: keep track of unvisited successors of a node
succs_state = {}
entry_point = self.entry_point()
checked = set()
def push_state(node):
stack.append(node)
succs_state[node] = [dest for dest in self._succs[node]]
push_state(entry_point)
# Keep track for iteration count for debugging
iter_ct = 0
while stack:
iter_ct += 1
tos = stack[-1]
tos_succs = succs_state[tos]
# Are there successors not checked?
if tos_succs:
# Check the next successor
cur_node = tos_succs.pop()
# Is it in our traversal path?
if cur_node in stack:
# Yes, it's a backedge
back_edges.add((tos, cur_node))
elif cur_node not in checked:
# Push
push_state(cur_node)
else:
# Checked all successors. Pop
stack.pop()
checked.add(tos)
if stats is not None:
stats['iteration_count'] += iter_ct
return back_edges
def _find_topo_order(self):
succs = self._succs
back_edges = self._back_edges
post_order = []
seen = set()
def _dfs_rec(node):
if node not in seen:
seen.add(node)
for dest in succs[node]:
if (node, dest) not in back_edges:
_dfs_rec(dest)
post_order.append(node)
_dfs_rec(self._entry_point)
post_order.reverse()
return post_order
def _find_descendents(self):
descs = {}
for node in reversed(self._topo_order):
descs[node] = node_descs = set()
for succ in self._succs[node]:
if (node, succ) not in self._back_edges:
node_descs.add(succ)
node_descs.update(descs[succ])
return descs
def _find_loops(self):
"""
Find the loops defined by the graph's back edges.
"""
bodies = {}
for src, dest in self._back_edges:
# The destination of the back edge is the loop header
header = dest
# Build up the loop body from the back edge's source node,
# up to the source header.
body = set([header])
queue = [src]
while queue:
n = queue.pop()
if n not in body:
body.add(n)
queue.extend(self._preds[n])
# There can be several back edges to a given loop header;
# if so, merge the resulting body fragments.
if header in bodies:
bodies[header].update(body)
else:
bodies[header] = body
# Create a Loop object for each header.
loops = {}
for header, body in bodies.items():
entries = set()
exits = set()
for n in body:
entries.update(self._preds[n] - body)
exits.update(self._succs[n] - body)
loop = Loop(header=header, body=body, entries=entries, exits=exits)
loops[header] = loop
return loops
def _find_in_loops(self):
loops = self._loops
# Compute the loops to which each node belongs.
in_loops = dict((n, []) for n in self._nodes)
# Sort loops from longest to shortest
# This ensures that outer loops will come before inner loops
for loop in sorted(loops.values(), key=lambda loop: len(loop.body)):
for n in loop.body:
in_loops[n].append(loop.header)
return in_loops
def _dump_adj_lists(self, file):
adj_lists = dict((src, sorted(list(dests)))
for src, dests in self._succs.items())
import pprint
pprint.pprint(adj_lists, stream=file)
def __eq__(self, other):
if not isinstance(other, CFGraph):
raise NotImplementedError
for x in ['_nodes', '_edge_data', '_entry_point', '_preds', '_succs']:
this = getattr(self, x, None)
that = getattr(other, x, None)
if this != that:
return False
return True
def __ne__(self, other):
return not self.__eq__(other)
class ControlFlowAnalysis(object):
"""
Attributes
----------
- bytecode
- blocks
- blockseq
- doms: dict of set
Dominators
- backbone: set of block offsets
The set of block that is common to all possible code path.
"""
def __init__(self, bytecode):
self.bytecode = bytecode
self.blocks = {}
self.liveblocks = {}
self.blockseq = []
self.doms = None
self.backbone = None
# Internal temp states
self._force_new_block = True
self._curblock = None
self._blockstack = []
self._loops = []
self._withs = []
def iterblocks(self):
"""
Return all blocks in sequence of occurrence
"""
for i in self.blockseq:
yield self.blocks[i]
def iterliveblocks(self):
"""
Return all live blocks in sequence of occurrence
"""
for i in self.blockseq:
if i in self.liveblocks:
yield self.blocks[i]
def incoming_blocks(self, block):
"""
Yield (incoming block, number of stack pops) pairs for *block*.
"""
for i, pops in block.incoming_jumps.items():
if i in self.liveblocks:
yield self.blocks[i], pops
def dump(self, file=None):
self.graph.dump(file=None)
def run(self):
for inst in self._iter_inst():
fname = "op_%s" % inst.opname
fn = getattr(self, fname, None)
if fn is not None:
fn(inst)
elif inst.is_jump:
# this catches e.g. try... except
l = Loc(self.bytecode.func_id.filename, inst.lineno)
if inst.opname in {"SETUP_EXCEPT", "SETUP_FINALLY"}:
msg = "'try' block not supported until python3.7 or later"
else:
msg = "Use of unsupported opcode (%s) found" % inst.opname
raise UnsupportedError(msg, loc=l)
else:
# Non-jump instructions are ignored
pass # intentionally
# Close all blocks
for cur, nxt in zip(self.blockseq, self.blockseq[1:]):
blk = self.blocks[cur]
if not blk.outgoing_jumps and not blk.terminating:
blk.outgoing_jumps[nxt] = 0
graph = CFGraph()
for b in self.blocks:
graph.add_node(b)
for b in self.blocks.values():
for out, pops in b.outgoing_jumps.items():
graph.add_edge(b.offset, out, pops)
graph.set_entry_point(min(self.blocks))
graph.process()
self.graph = graph
# Fill incoming
for b in self.blocks.values():
for out, pops in b.outgoing_jumps.items():
self.blocks[out].incoming_jumps[b.offset] = pops
# Find liveblocks
self.liveblocks = dict((i, self.blocks[i])
for i in self.graph.nodes())
for lastblk in reversed(self.blockseq):
if lastblk in self.liveblocks:
break
else:
raise AssertionError("No live block that exits!?")
# Find backbone
backbone = self.graph.backbone()
# Filter out in loop blocks (Assuming no other cyclic control blocks)
# This is to unavoid variable defined in loops to be considered as
# function scope.
inloopblocks = set()
for b in self.blocks.keys():
if self.graph.in_loops(b):
inloopblocks.add(b)
self.backbone = backbone - inloopblocks
def jump(self, target, pops=0):
"""
Register a jump (conditional or not) to *target* offset.
*pops* is the number of stack pops implied by the jump (default 0).
"""
self._curblock.outgoing_jumps[target] = pops
def _iter_inst(self):
for inst in self.bytecode:
if self._use_new_block(inst):
self._guard_with_as(inst)
self._start_new_block(inst)
self._curblock.body.append(inst.offset)
yield inst
def _use_new_block(self, inst):
if inst.offset in self.bytecode.labels:
res = True
elif inst.opname in NEW_BLOCKERS:
res = True
else:
res = self._force_new_block
self._force_new_block = False
return res
def _start_new_block(self, inst):
self._curblock = CFBlock(inst.offset)
self.blocks[inst.offset] = self._curblock
self.blockseq.append(inst.offset)
def _guard_with_as(self, current_inst):
"""Checks if the next instruction after a SETUP_WITH is something other
than a POP_TOP, if it is something else it'll be some sort of store
which is not supported (this corresponds to `with CTXMGR as VAR(S)`)."""
if current_inst.opname == "SETUP_WITH":
next_op = self.bytecode[current_inst.next].opname
if next_op != "POP_TOP":
msg = ("The 'with (context manager) as "
"(variable):' construct is not "
"supported.")
raise UnsupportedError(msg)
def op_SETUP_LOOP(self, inst):
end = inst.get_jump_target()
self._blockstack.append(end)
self._loops.append((inst.offset, end))
# TODO: Looplifting requires the loop entry be its own block.
# Forcing a new block here is the simplest solution for now.
# But, we should consider other less ad-hoc ways.
self.jump(inst.next)
self._force_new_block = True
def op_SETUP_WITH(self, inst):
end = inst.get_jump_target()
self._blockstack.append(end)
self._withs.append((inst.offset, end))
# TODO: WithLifting requires the loop entry be its own block.
# Forcing a new block here is the simplest solution for now.
# But, we should consider other less ad-hoc ways.
self.jump(inst.next)
self._force_new_block = True
def op_POP_BLOCK(self, inst):
self._blockstack.pop()
def op_FOR_ITER(self, inst):
self.jump(inst.get_jump_target())
self.jump(inst.next)
self._force_new_block = True
def _op_ABSOLUTE_JUMP_IF(self, inst):
self.jump(inst.get_jump_target())
self.jump(inst.next)
self._force_new_block = True
op_POP_JUMP_IF_FALSE = _op_ABSOLUTE_JUMP_IF
op_POP_JUMP_IF_TRUE = _op_ABSOLUTE_JUMP_IF
op_JUMP_IF_FALSE = _op_ABSOLUTE_JUMP_IF
op_JUMP_IF_TRUE = _op_ABSOLUTE_JUMP_IF
def _op_ABSOLUTE_JUMP_OR_POP(self, inst):
self.jump(inst.get_jump_target())
self.jump(inst.next, pops=1)
self._force_new_block = True
op_JUMP_IF_FALSE_OR_POP = _op_ABSOLUTE_JUMP_OR_POP
op_JUMP_IF_TRUE_OR_POP = _op_ABSOLUTE_JUMP_OR_POP
def op_JUMP_ABSOLUTE(self, inst):
self.jump(inst.get_jump_target())
self._force_new_block = True
def op_JUMP_FORWARD(self, inst):
self.jump(inst.get_jump_target())
self._force_new_block = True
def op_RETURN_VALUE(self, inst):
self._curblock.terminating = True
self._force_new_block = True
def op_RAISE_VARARGS(self, inst):
self._curblock.terminating = True
self._force_new_block = True
def op_BREAK_LOOP(self, inst):
self.jump(self._blockstack[-1])
self._force_new_block = True
| bsd-2-clause |
felixmatt/shyft | shyft/tests/api/test_time_axis.py | 1 | 3311 | from builtins import range
from shyft import api
import numpy as np
import unittest
class TimeAxis(unittest.TestCase):
"""Verify and illustrate TimeAxis
defined as n periods non-overlapping ascending
"""
    def setUp(self):
        # Shared fixture: a 24-step hourly TimeAxis whose start is
        # 1969-12-31T00:00:00 trimmed to whole hours.
        self.c = api.Calendar()
        self.d = api.deltahours(1)
        self.n = 24
        # self.t= self.c.trim(api.utctime_now(),self.d)
        self.t = self.c.trim(self.c.time(1969, 12, 31, 0, 0, 0), self.d)
        self.ta = api.TimeAxis(self.t, self.d, self.n)
def tearDown(self):
pass
    def test_index_of(self):
        # The axis start maps to index 0; a time one hour before the
        # start yields api.npos (presumably the not-found sentinel --
        # confirm against the shyft API docs). The open_range variant
        # agrees at these boundary points.
        self.assertEqual(self.ta.index_of(self.t),0)
        self.assertEqual(self.ta.index_of(self.t,0), 0)
        self.assertEqual(self.ta.index_of(self.t-3600), api.npos)
        self.assertEqual(self.ta.open_range_index_of(self.t),0)
        self.assertEqual(self.ta.open_range_index_of(self.t,0), 0)
        self.assertEqual(self.ta.open_range_index_of(self.t-3600), api.npos)
def test_create_timeaxis(self):
self.assertEqual(self.ta.size(), self.n)
self.assertEqual(len(self.ta), self.n)
self.assertEqual(self.ta(0).start, self.t)
self.assertEqual(self.ta(0).end, self.t + self.d)
self.assertEqual(self.ta(1).start, self.t + self.d)
self.assertEqual(self.ta.total_period().start, self.t)
va = np.array([86400, 3600, 3], dtype=np.int64)
xta = api.TimeAxisFixedDeltaT(int(va[0]), int(va[1]), int(va[2]))
self.assertEqual(xta.size(), 3)
def test_iterate_timeaxis(self):
tot_dt = 0
for p in self.ta:
tot_dt += p.timespan()
self.assertEqual(tot_dt, self.n * self.d)
def test_timeaxis_str(self):
s = str(self.ta)
self.assertTrue(len(s) > 10)
def test_point_timeaxis_(self):
"""
A point time axis takes n+1 points do describe n-periods, where
each period is defined as [ point_i .. point_i+1 >
"""
all_points = api.UtcTimeVector([t for t in range(self.t, self.t + (self.n + 1) * self.d, self.d)])
tap = api.TimeAxisByPoints(all_points)
self.assertEqual(tap.size(), self.ta.size())
for i in range(self.ta.size()):
self.assertEqual(tap(i), self.ta(i))
self.assertEqual(tap.t_end, all_points[-1], "t_end should equal the n+1'th point if supplied")
s = str(tap)
self.assertTrue(len(s) > 0)
def test_generic_timeaxis(self):
c = api.Calendar('Europe/Oslo')
dt = api.deltahours(1)
n = 240
t0 = c.time(2016, 4, 10)
tag1 = api.TimeAxis(t0, dt, n)
self.assertEqual(len(tag1), n)
self.assertEqual(tag1.time(0), t0)
tag2 = api.TimeAxis(c, t0, dt, n)
self.assertEqual(len(tag2), n)
self.assertEqual(tag2.time(0), t0)
self.assertIsNotNone(tag2.calendar_dt.calendar)
def test_timeaxis_time_points(self):
c = api.Calendar('Europe/Oslo')
dt = api.deltahours(1)
n = 240
t0 = c.time(2016, 4, 10)
ta = api.TimeAxis(c, t0, dt, n)
tp = ta.time_points
self.assertIsNotNone(tp)
self.assertEqual(len(tp), n + 1)
self.assertEqual(len(api.TimeAxis(c, t0, dt, 0).time_points), 0)
if __name__ == "__main__":
unittest.main()
| lgpl-3.0 |
code-google-com/rad2py | psp2py/controllers/projects.py | 8 | 1263 | # coding: utf8
# try something like
def index():
return dict(form=crud.select(db.psp_project, linkto='show'))
def search():
form, table=crud.search(db.psp_project, linkto='edit')
return dict(form=form, table=table)
def create():
return dict(form=crud.create(db.psp_project))
def show():
    """Display one PSP project: its crud read-only form, the per-phase
    time summary, and the project's defect log."""
    # The project id arrives as the second URL arg from the crud link.
    project_id = request.args[1]
    project = db(db.psp_project.id==project_id).select().first()
    times = db(db.psp_time_summary.project_id==project_id).select(
        db.psp_time_summary.phase,
        db.psp_time_summary.plan,
        db.psp_time_summary.actual,
        db.psp_time_summary.interruption,
        db.psp_time_summary.off_task)
    # Order phase rows by the canonical PSP phase sequence, not
    # alphabetically (PSP_PHASES is defined elsewhere in the app).
    times = times.sort(lambda x: PSP_PHASES.index(x.phase))
    defects = db(db.psp_defect.project_id==project_id).select(
        db.psp_defect.number,
        db.psp_defect.summary,
        db.psp_defect.type,
        db.psp_defect.inject_phase,
        db.psp_defect.remove_phase,
        db.psp_defect.fix_time,
        db.psp_defect.fix_defect,
        orderby=db.psp_defect.number,
        )
    form = crud.read(db.psp_project, project_id)
    return dict(project=project, form=form, times=times, defects=defects)
def edit():
return dict(form=crud.update(db.psp_project, request.args[1]))
| gpl-3.0 |
perrygeo/Fiona | fiona/collection.py | 1 | 17081 | # -*- coding: utf-8 -*-
# Collections provide file-like access to feature data
import os
import sys
from fiona.ogrext import Iterator, ItemsIterator, KeysIterator
from fiona.ogrext import Session, WritingSession
from fiona.ogrext import (
calc_gdal_version_num, get_gdal_version_num, get_gdal_release_name)
from fiona.ogrext import buffer_to_virtual_file, remove_virtual_file
from fiona.errors import DriverError, SchemaError, CRSError
from fiona._drivers import driver_count, GDALEnv
from fiona.drvsupport import supported_drivers
from six import string_types, binary_type
class Collection(object):
"""A file-like interface to features of a vector dataset
Python text file objects are iterators over lines of a file. Fiona
Collections are similar iterators (not lists!) over features
represented as GeoJSON-like mappings.
"""
def __init__(self, path, mode='r', driver=None, schema=None, crs=None,
encoding=None, layer=None, vsi=None, archive=None,
enabled_drivers=None, crs_wkt=None, **kwargs):
"""The required ``path`` is the absolute or relative path to
a file, such as '/data/test_uk.shp'. In ``mode`` 'r', data can
be read only. In ``mode`` 'a', data can be appended to a file.
In ``mode`` 'w', data overwrites the existing contents of
a file.
In ``mode`` 'w', an OGR ``driver`` name and a ``schema`` are
required. A Proj4 ``crs`` string is recommended. If both ``crs``
and ``crs_wkt`` keyword arguments are passed, the latter will
trump the former.
In 'w' mode, kwargs will be mapped to OGR layer creation
options.
"""
if not isinstance(path, string_types):
raise TypeError("invalid path: %r" % path)
if not isinstance(mode, string_types) or mode not in ('r', 'w', 'a'):
raise TypeError("invalid mode: %r" % mode)
if driver and not isinstance(driver, string_types):
raise TypeError("invalid driver: %r" % driver)
if schema and not hasattr(schema, 'get'):
raise TypeError("invalid schema: %r" % schema)
if crs and not isinstance(crs, (dict,) + string_types):
raise TypeError("invalid crs: %r" % crs)
if crs_wkt and not isinstance(crs_wkt, string_types):
raise TypeError("invalid crs_wkt: %r" % crs_wkt)
if encoding and not isinstance(encoding, string_types):
raise TypeError("invalid encoding: %r" % encoding)
if layer and not isinstance(layer, tuple(list(string_types) + [int])):
raise TypeError("invalid name: %r" % layer)
if vsi:
if not isinstance(vsi, string_types) or vsi not in ('zip', 'tar', 'gzip'):
raise TypeError("invalid vsi: %r" % vsi)
if archive and not isinstance(archive, string_types):
raise TypeError("invalid archive: %r" % archive)
# Check GDAL version against drivers
if (driver == "GPKG" and
get_gdal_version_num() < calc_gdal_version_num(1, 11, 0)):
raise DriverError(
"GPKG driver requires GDAL 1.11.0, fiona was compiled "
"against: {}".format(get_gdal_release_name()))
self.session = None
self.iterator = None
self._len = 0
self._bounds = None
self._driver = None
self._schema = None
self._crs = None
self._crs_wkt = None
self.env = None
self.enabled_drivers = enabled_drivers
self.path = vsi_path(path, vsi, archive)
if mode == 'w':
if layer and not isinstance(layer, string_types):
raise ValueError("in 'r' mode, layer names must be strings")
if driver == 'GeoJSON':
if layer is not None:
raise ValueError("the GeoJSON format does not have layers")
self.name = 'OgrGeoJSON'
# TODO: raise ValueError as above for other single-layer formats.
else:
self.name = layer or os.path.basename(os.path.splitext(path)[0])
else:
if layer in (0, None):
self.name = 0
else:
self.name = layer or os.path.basename(os.path.splitext(path)[0])
self.mode = mode
if self.mode == 'w':
if driver == 'Shapefile':
driver = 'ESRI Shapefile'
if not driver:
raise DriverError("no driver")
elif driver not in supported_drivers:
raise DriverError(
"unsupported driver: %r" % driver)
elif self.mode not in supported_drivers[driver]:
raise DriverError(
"unsupported mode: %r" % self.mode)
self._driver = driver
if not schema:
raise SchemaError("no schema")
elif 'properties' not in schema:
raise SchemaError("schema lacks: properties")
elif 'geometry' not in schema:
raise SchemaError("schema lacks: geometry")
self._schema = schema
if crs_wkt:
self._crs_wkt = crs_wkt
elif crs:
if 'init' in crs or 'proj' in crs or 'epsg' in crs.lower():
self._crs = crs
else:
raise CRSError("crs lacks init or proj parameter")
if driver_count == 0:
# create a local manager and enter
self.env = GDALEnv()
else:
self.env = GDALEnv()
self.env.__enter__()
if self.mode == "r":
self._driver = driver
self.encoding = encoding
self.session = Session()
self.session.start(self)
# If encoding param is None, we'll use what the session
# suggests.
self.encoding = encoding or self.session.get_fileencoding().lower()
elif self.mode in ("a", "w"):
self._driver = driver
self.encoding = encoding
self.session = WritingSession()
self.session.start(self, **kwargs)
self.encoding = encoding or self.session.get_fileencoding().lower()
if self.session:
self.guard_driver_mode()
def __repr__(self):
    """Debug representation: state, path:name, mode and object address."""
    state = "closed" if self.closed else "open"
    location = self.path + ":" + str(self.name)
    return "<%s Collection '%s', mode '%s' at %s>" % (
        state, location, self.mode, hex(id(self)))
def guard_driver_mode(self):
    """Raise DriverError unless the session's driver supports our mode."""
    active_driver = self.session.get_driver()
    try:
        allowed_modes = supported_drivers[active_driver]
    except KeyError:
        raise DriverError("unsupported driver: %r" % active_driver)
    if self.mode not in allowed_modes:
        raise DriverError("unsupported mode: %r" % self.mode)
@property
def driver(self):
    """Returns the name of the proper OGR driver."""
    # In read/append mode the driver is discovered lazily from the session.
    needs_lookup = not self._driver and self.mode in ("a", "r") and self.session
    if needs_lookup:
        self._driver = self.session.get_driver()
    return self._driver
@property
def schema(self):
    """Returns a mapping describing the data schema.

    The mapping has 'geometry' and 'properties' items. The former is a
    string such as 'Point' and the latter is an ordered mapping that
    follows the order of fields in the data file.
    """
    # Lazily read the schema from the session in read/append mode.
    if self.session and not self._schema and self.mode in ("a", "r"):
        self._schema = self.session.get_schema()
    return self._schema
@property
def crs(self):
    """Returns a Proj4 string."""
    # Cached on first access; the session supplies the value.
    if self.session and self._crs is None:
        self._crs = self.session.get_crs()
    return self._crs
@property
def crs_wkt(self):
    """Returns a WKT string."""
    # Cached on first access; the session supplies the value.
    if self.session and self._crs_wkt is None:
        self._crs_wkt = self.session.get_crs_wkt()
    return self._crs_wkt
@property
def meta(self):
    """Returns a mapping with the driver, schema, crs, and additional
    properties."""
    return {
        'driver': self.driver,
        'schema': self.schema,
        'crs': self.crs,
        'crs_wkt': self.crs_wkt,
    }

# Backwards-compatible alias for ``meta``.
profile = meta
def filter(self, *args, **kwds):
    """Returns an iterator over records, but filtered by a test for
    spatial intersection with the provided ``bbox``, a (minx, miny,
    maxx, maxy) tuple or a geometry ``mask``.

    Positional arguments ``stop`` or ``start, stop[, step]`` allows
    iteration to skip over items or stop at a specific item.
    """
    if self.closed:
        raise ValueError("I/O operation on closed collection")
    if self.mode != 'r':
        raise IOError("collection not open for reading")
    # Interpret positional args exactly like the built-in slice().
    window = slice(*args) if args else slice(None)
    bbox = kwds.get('bbox')
    mask = kwds.get('mask')
    if bbox and mask:
        raise ValueError("mask and bbox can not be set together")
    self.iterator = Iterator(
        self, window.start, window.stop, window.step, bbox, mask)
    return self.iterator
def items(self, *args, **kwds):
    """Returns an iterator over FID, record pairs, optionally
    filtered by a test for spatial intersection with the provided
    ``bbox``, a (minx, miny, maxx, maxy) tuple or a geometry
    ``mask``.

    Positional arguments ``stop`` or ``start, stop[, step]`` allows
    iteration to skip over items or stop at a specific item.
    """
    if self.closed:
        raise ValueError("I/O operation on closed collection")
    if self.mode != 'r':
        raise IOError("collection not open for reading")
    # Interpret positional args exactly like the built-in slice().
    window = slice(*args) if args else slice(None)
    bbox = kwds.get('bbox')
    mask = kwds.get('mask')
    if bbox and mask:
        raise ValueError("mask and bbox can not be set together")
    self.iterator = ItemsIterator(
        self, window.start, window.stop, window.step, bbox, mask)
    return self.iterator
def keys(self, *args, **kwds):
    """Returns an iterator over FIDs, optionally
    filtered by a test for spatial intersection with the provided
    ``bbox``, a (minx, miny, maxx, maxy) tuple or a geometry
    ``mask``.

    Positional arguments ``stop`` or ``start, stop[, step]`` allows
    iteration to skip over items or stop at a specific item.
    """
    if self.closed:
        raise ValueError("I/O operation on closed collection")
    if self.mode != 'r':
        raise IOError("collection not open for reading")
    # Interpret positional args exactly like the built-in slice().
    window = slice(*args) if args else slice(None)
    bbox = kwds.get('bbox')
    mask = kwds.get('mask')
    if bbox and mask:
        raise ValueError("mask and bbox can not be set together")
    self.iterator = KeysIterator(
        self, window.start, window.stop, window.step, bbox, mask)
    return self.iterator
def __contains__(self, fid):
    """True if the layer has a feature with id ``fid``."""
    return self.session.has_feature(fid)

# Dict-like alias: iterating "values" is the same as filtering records.
values = filter
def __iter__(self):
    """Returns an iterator over records."""
    return self.filter()

def __next__(self):
    """Returns next record from iterator."""
    if not self.iterator:
        # Lazily create the default record iterator on first use.
        iter(self)
    return next(self.iterator)

# Python 2 iterator protocol alias.
next = __next__
def __getitem__(self, item):
    # Index/slice access is delegated to the underlying session.
    return self.session.__getitem__(item)
def writerecords(self, records):
    """Stages multiple records for writing to disk."""
    if self.closed:
        raise ValueError("I/O operation on closed collection")
    if self.mode not in ('a', 'w'):
        raise IOError("collection not open for writing")
    self.session.writerecs(records, self)
    # Keep the cached length and extent in sync with the session.
    self._len = self.session.get_length()
    self._bounds = self.session.get_extent()
def write(self, record):
    """Stages a record for writing to disk."""
    # A single record is just a one-element batch.
    self.writerecords([record])
def validate_record(self, record):
    """Compares the record to the collection's schema.

    Returns ``True`` if the record matches, else ``False``.
    """
    # Currently we only compare keys of properties, not the types of
    # values.
    expected = set(self.schema['properties'].keys())
    actual = set(record['properties'].keys())
    return actual == expected and self.validate_record_geometry(record)
def validate_record_geometry(self, record):
    """Compares the record's geometry to the collection's schema.

    Returns ``True`` if the record matches, else ``False``.
    """
    def _strip_prefix(text, prefix):
        # Bug fix: str.lstrip() strips a *character set*, not a prefix,
        # so e.g. lstrip("Multi") would also eat any leading run of the
        # letters M/u/l/t/i. Remove the exact leading substring instead.
        if text.startswith(prefix):
            return text[len(prefix):]
        return text

    record_type = record['geometry']['type']
    schema_type = _strip_prefix(self.schema['geometry'], "3D ")
    # Shapefiles welcome mixes of line/multis and polygon/multis.
    # OGR reports these mixed files as type "Polygon" or "LineString"
    # but will return either these or their multi counterparts when
    # reading features, so compare with the "Multi" prefix removed.
    if (self.driver == "ESRI Shapefile" and
            "Point" not in record_type):
        return (_strip_prefix(record_type, "Multi") ==
                _strip_prefix(schema_type, "Multi"))
    return record_type == schema_type
def __len__(self):
    """Feature count, or TypeError when the layer cannot count."""
    if self._len <= 0 and self.session is not None:
        self._len = self.session.get_length()
    if self._len < 0:
        # Raise TypeError when we don't know the length so that Python
        # will treat Collection as a generator
        raise TypeError("Layer does not support counting")
    return self._len
@property
def bounds(self):
    """Returns (minx, miny, maxx, maxy)."""
    # Cached on first access; the session supplies the extent.
    if self.session is not None and self._bounds is None:
        self._bounds = self.session.get_extent()
    return self._bounds
def flush(self):
    """Flush the buffer."""
    session = self.session
    if session is not None and session.get_length() > 0:
        session.sync(self)
        new_len = session.get_length()
        # Historical and/or idiom preserved: only adopt new_len when it
        # is both larger than the cached value and non-zero.
        if new_len > self._len and new_len:
            self._len = new_len
        self._bounds = session.get_extent()
def close(self):
    """In append or write mode, flushes data to disk, then ends
    access."""
    if self.session is not None:
        if self.mode in ('a', 'w'):
            self.flush()
        self.session.stop()
        self.session = None
        self.iterator = None
    if self.env:
        # NOTE(review): GDALEnv.__exit__ is invoked without the usual
        # (type, value, traceback) arguments -- confirm its signature
        # accepts a zero-argument call.
        self.env.__exit__()
@property
def closed(self):
    """``False`` if data can be accessed, otherwise ``True``."""
    # A collection is closed once its session has been torn down.
    return self.session is None
def __enter__(self):
    """Context-manager entry: returns the collection itself."""
    return self

def __exit__(self, type, value, traceback):
    """Context-manager exit: always closes the collection."""
    self.close()

def __del__(self):
    # Note: you can't count on this being called. Call close() explicitly
    # or use the context manager protocol ("with").
    self.__exit__(None, None, None)
class BytesCollection(Collection):
    """BytesCollection takes a buffer of bytes and maps that to
    a virtual file that can then be opened by fiona.
    """

    def __init__(self, bytesbuf):
        """Takes buffer of bytes whose contents is something we'd like
        to open with Fiona and maps it to a virtual file.
        """
        if not isinstance(bytesbuf, binary_type):
            raise ValueError("input buffer must be bytes")
        # Hold a reference to the buffer, as bad things will happen if
        # it is garbage collected while in use.
        self.bytesbuf = bytesbuf
        # Map the buffer to a file.
        self.virtual_file = buffer_to_virtual_file(self.bytesbuf)
        # Instantiate the parent class.
        super(BytesCollection, self).__init__(self.virtual_file)

    def close(self):
        """Removes the virtual file associated with the class."""
        super(BytesCollection, self).close()
        if self.virtual_file:
            remove_virtual_file(self.virtual_file)
            # Drop references so the buffer may be garbage collected.
            self.virtual_file = None
            self.bytesbuf = None

    def __repr__(self):
        """Debug representation mirroring Collection.__repr__."""
        return "<%s BytesCollection '%s', mode '%s' at %s>" % (
            self.closed and "closed" or "open",
            self.path + ":" + str(self.name),
            self.mode,
            hex(id(self)))
def vsi_path(path, vsi=None, archive=None):
    """Build an OGR VSI path (see cpl_vsi.h) from a plain path.

    Without a ``vsi`` scheme the path is returned unchanged; otherwise
    the path is prefixed with ``/vsi<scheme>/`` and, when given, the
    archive file.
    """
    if not vsi:
        return path
    if archive:
        return "/vsi%s/%s%s" % (vsi, archive, path)
    return "/vsi%s/%s" % (vsi, path)
| bsd-3-clause |
noroutine/ansible | test/units/template/test_templar.py | 48 | 20034 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from jinja2.runtime import Context
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleUndefinedVariable
from ansible.module_utils.six import string_types
from ansible.template import Templar, AnsibleContext, AnsibleEnvironment
from ansible.utils.unsafe_proxy import AnsibleUnsafe, wrap_var
from units.mock.loader import DictDataLoader
class BaseTemplar(object):
    """Shared fixture: a Templar wired to canned variables and a fake loader."""

    def setUp(self):
        # Variables cover the main templating cases: plain values, nested
        # templates, recursion, and values wrapped as "unsafe".
        self.test_vars = dict(
            foo="bar",
            bam="{{foo}}",
            num=1,
            var_true=True,
            var_false=False,
            var_dict=dict(a="b"),
            bad_dict="{a='b'",
            var_list=[1],
            recursive="{{recursive}}",
            some_var="blip",
            some_static_var="static_blip",
            some_keyword="{{ foo }}",
            some_unsafe_var=wrap_var("unsafe_blip"),
            some_static_unsafe_var=wrap_var("static_unsafe_blip"),
            some_unsafe_keyword=wrap_var("{{ foo }}"),
        )
        self.fake_loader = DictDataLoader({
            "/path/to/my_file.txt": "foo\n",
        })
        self.templar = Templar(loader=self.fake_loader, variables=self.test_vars)

    def is_unsafe(self, obj):
        """Recursively detect whether ``obj`` carries the unsafe marker."""
        if obj is None:
            return False
        if hasattr(obj, '__UNSAFE__'):
            return True
        if isinstance(obj, AnsibleUnsafe):
            return True
        if isinstance(obj, dict):
            for key in obj.keys():
                if self.is_unsafe(key) or self.is_unsafe(obj[key]):
                    return True
        if isinstance(obj, list):
            for item in obj:
                if self.is_unsafe(item):
                    return True
        if isinstance(obj, string_types) and hasattr(obj, '__UNSAFE__'):
            return True
        return False
# class used for testing arbitrary objects passed to template
class SomeClass(object):
    """Plain object with one class attribute and one instance attribute."""

    foo = 'bar'

    def __init__(self):
        self.blip = 'blip'
class SomeUnsafeClass(AnsibleUnsafe):
    """Arbitrary object that is also marked unsafe via its base class."""

    def __init__(self):
        super(SomeUnsafeClass, self).__init__()
        self.blip = 'unsafe blip'
class TestTemplarTemplate(BaseTemplar, unittest.TestCase):
    """Tests for Templar.template() and Templar.templatable()."""

    def test_lookup_jinja_dict_key_in_static_vars(self):
        # NOTE(review): no assertion here -- the result is only printed.
        res = self.templar.template("{'some_static_var': '{{ some_var }}'}",
                                    static_vars=['some_static_var'])
        # self.assertEqual(res['{{ a_keyword }}'], "blip")
        print(res)

    def test_templatable(self):
        res = self.templar.templatable('foo')
        self.assertTrue(res)

    def test_templatable_none(self):
        res = self.templar.templatable(None)
        self.assertTrue(res)

    @patch('ansible.template.Templar.template', side_effect=AnsibleError)
    def test_templatable_exception(self, mock_template):
        # templatable() must swallow template errors and report False.
        res = self.templar.templatable('foo')
        self.assertFalse(res)

    def test_template_convert_bare_string(self):
        # Note: no bare_deprecated=False so we hit the deprecation path
        res = self.templar.template('foo', convert_bare=True)
        self.assertEqual(res, 'bar')

    def test_template_convert_bare_nested(self):
        res = self.templar.template('bam', convert_bare=True, bare_deprecated=False)
        self.assertEqual(res, 'bar')

    def test_template_convert_bare_unsafe(self):
        res = self.templar.template('some_unsafe_var', convert_bare=True, bare_deprecated=False)
        self.assertEqual(res, 'unsafe_blip')
        # self.assertIsInstance(res, AnsibleUnsafe)
        self.assertTrue(self.is_unsafe(res), 'returned value from template.template (%s) is not marked unsafe' % res)

    def test_template_convert_bare_filter(self):
        res = self.templar.template('bam|capitalize', convert_bare=True, bare_deprecated=False)
        self.assertEqual(res, 'Bar')

    def test_template_convert_bare_filter_unsafe(self):
        res = self.templar.template('some_unsafe_var|capitalize', convert_bare=True, bare_deprecated=False)
        self.assertEqual(res, 'Unsafe_blip')
        # self.assertIsInstance(res, AnsibleUnsafe)
        self.assertTrue(self.is_unsafe(res), 'returned value from template.template (%s) is not marked unsafe' % res)

    def test_template_convert_data(self):
        res = self.templar.template('{{foo}}', convert_data=True)
        self.assertTrue(res)
        self.assertEqual(res, 'bar')

    @patch('ansible.template.safe_eval', side_effect=AnsibleError)
    def test_template_convert_data_template_in_data(self, mock_safe_eval):
        res = self.templar.template('{{bam}}', convert_data=True)
        self.assertTrue(res)
        self.assertEqual(res, 'bar')

    def test_template_convert_data_bare(self):
        res = self.templar.template('bam', convert_data=True)
        self.assertTrue(res)
        self.assertEqual(res, 'bam')

    def test_template_convert_data_to_json(self):
        res = self.templar.template('{{bam|to_json}}', convert_data=True)
        self.assertTrue(res)
        self.assertEqual(res, '"bar"')

    def test_template_convert_data_convert_bare_data_bare(self):
        res = self.templar.template('bam', convert_data=True, convert_bare=True)
        self.assertTrue(res)
        self.assertEqual(res, 'bar')

    def test_template_unsafe_non_string(self):
        unsafe_obj = AnsibleUnsafe()
        res = self.templar.template(unsafe_obj)
        self.assertTrue(self.is_unsafe(res), 'returned value from template.template (%s) is not marked unsafe' % res)

    def test_template_unsafe_non_string_subclass(self):
        unsafe_obj = SomeUnsafeClass()
        res = self.templar.template(unsafe_obj)
        self.assertTrue(self.is_unsafe(res), 'returned value from template.template (%s) is not marked unsafe' % res)

    # TODO: not sure what template is supposed to do it, but it currently throws attributeError
    @patch('ansible.template.Templar._clean_data')
    def test_template_unsafe_non_string_clean_data_exception(self, mock_clean_data):
        msg = 'Error raised from _clean_data by test_template_unsafe_non_string_clean_data_exception'
        mock_clean_data.side_effect = AnsibleError(msg)
        unsafe_obj = AnsibleUnsafe()
        res = self.templar.template(unsafe_obj)
        self.assertTrue(self.is_unsafe(res), 'returned value from template.template (%s) is not marked unsafe' % res)

    # TODO: not sure what template is supposed to do it, but it currently throws attributeError
    @patch('ansible.template.Templar._clean_data', side_effect=AnsibleError)
    def test_template_unsafe_non_string_subclass_clean_data_exception(self, mock_clean_data):
        unsafe_obj = SomeUnsafeClass()
        self.assertTrue(self.is_unsafe(unsafe_obj))
        res = self.templar.template(unsafe_obj)
        self.assertTrue(self.is_unsafe(res), 'returned value from template.template (%s) is not marked unsafe' % res)

    def test_weird(self):
        # Deliberately malformed jinja2 must raise a template error.
        data = u'''1 2 #}huh{# %}ddfg{% }}dfdfg{{ {%what%} {{#foo#}} {%{bar}%} {#%blip%#} {{asdfsd%} 3 4 {{foo}} 5 6 7'''
        self.assertRaisesRegexp(AnsibleError,
                                'template error while templating string',
                                self.templar.template,
                                data)
class TestTemplarCleanData(BaseTemplar, unittest.TestCase):
    """Tests for Templar._clean_data(), which defuses jinja2 markers."""

    def test_clean_data(self):
        res = self.templar._clean_data(u'some string')
        self.assertEqual(res, u'some string')

    def test_clean_data_not_stringtype(self):
        res = self.templar._clean_data(None)
        # None vs NoneType
        self.assertEqual(res, None)

    def test_clean_data_jinja(self):
        res = self.templar._clean_data(u'1 2 {what} 3 4 {{foo}} 5 6 7')
        self.assertEqual(res, u'1 2 {what} 3 4 {#foo#} 5 6 7')

    def test_clean_data_block(self):
        res = self.templar._clean_data(u'1 2 {%what%} 3 4 {{foo}} 5 6 7')
        self.assertEqual(res, u'1 2 {#what#} 3 4 {#foo#} 5 6 7')

    # def test_clean_data_weird(self):
    #     res = self.templar._clean_data(u'1 2 #}huh{# %}ddfg{% }}dfdfg{{ {%what%} {{#foo#}} {%{bar}%} {#%blip%#} {{asdfsd%} 3 4 {{foo}} 5 6 7')
    #     print(res)
        # NOTE(review): stray assertion left over from the commented-out
        # test above; it still executes as part of test_clean_data_block
        # and merely repeats that test's final assertion.
        self.assertEqual(res, u'1 2 {#what#} 3 4 {#foo#} 5 6 7')

    def test_clean_data_object(self):
        obj = {u'foo': [1, 2, 3, u'bdasdf', u'{what}', u'{{foo}}', 5]}
        clean_obj = {u'foo': [1, 2, 3, u'bdasdf', u'{what}', u'{#foo#}', 5]}
        res = self.templar._clean_data(obj)
        self.assertNotEqual(res, obj)
        self.assertEqual(res, clean_obj)

    def test_clean_data_bad_dict(self):
        res = self.templar._clean_data(u'{{bad_dict}}')
        self.assertEqual(res, u'{#bad_dict#}')

    def test_clean_data_unsafe_obj(self):
        some_obj = SomeClass()
        unsafe_obj = wrap_var(some_obj)
        res = self.templar._clean_data(unsafe_obj)
        self.assertIsInstance(res, SomeClass)
class TestTemplarMisc(BaseTemplar, unittest.TestCase):
    """Basic templating, backslash escaping and jinja2 extension config."""

    def test_templar_simple(self):
        templar = self.templar
        # test some basic templating
        self.assertEqual(templar.template("{{foo}}"), "bar")
        self.assertEqual(templar.template("{{foo}}\n"), "bar\n")
        self.assertEqual(templar.template("{{foo}}\n", preserve_trailing_newlines=True), "bar\n")
        self.assertEqual(templar.template("{{foo}}\n", preserve_trailing_newlines=False), "bar")
        self.assertEqual(templar.template("{{bam}}"), "bar")
        self.assertEqual(templar.template("{{num}}"), 1)
        self.assertEqual(templar.template("{{var_true}}"), True)
        self.assertEqual(templar.template("{{var_false}}"), False)
        self.assertEqual(templar.template("{{var_dict}}"), dict(a="b"))
        self.assertEqual(templar.template("{{bad_dict}}"), "{a='b'")
        self.assertEqual(templar.template("{{var_list}}"), [1])
        self.assertEqual(templar.template(1, convert_bare=True), 1)
        # force errors
        self.assertRaises(AnsibleUndefinedVariable, templar.template, "{{bad_var}}")
        self.assertRaises(AnsibleUndefinedVariable, templar.template, "{{lookup('file', bad_var)}}")
        self.assertRaises(AnsibleError, templar.template, "{{lookup('bad_lookup')}}")
        self.assertRaises(AnsibleError, templar.template, "{{recursive}}")
        self.assertRaises(AnsibleUndefinedVariable, templar.template, "{{foo-bar}}")
        # test with fail_on_undefined=False
        self.assertEqual(templar.template("{{bad_var}}", fail_on_undefined=False), "{{bad_var}}")
        # test set_available_variables()
        templar.set_available_variables(variables=dict(foo="bam"))
        self.assertEqual(templar.template("{{foo}}"), "bam")
        # variables must be a dict() for set_available_variables()
        self.assertRaises(AssertionError, templar.set_available_variables, "foo=bam")

    def test_templar_escape_backslashes(self):
        # Rule of thumb: If escape backslashes is True you should end up with
        # the same number of backslashes as when you started.
        self.assertEqual(self.templar.template("\t{{foo}}", escape_backslashes=True), "\tbar")
        self.assertEqual(self.templar.template("\t{{foo}}", escape_backslashes=False), "\tbar")
        self.assertEqual(self.templar.template("\\{{foo}}", escape_backslashes=True), "\\bar")
        self.assertEqual(self.templar.template("\\{{foo}}", escape_backslashes=False), "\\bar")
        self.assertEqual(self.templar.template("\\{{foo + '\t' }}", escape_backslashes=True), "\\bar\t")
        self.assertEqual(self.templar.template("\\{{foo + '\t' }}", escape_backslashes=False), "\\bar\t")
        self.assertEqual(self.templar.template("\\{{foo + '\\t' }}", escape_backslashes=True), "\\bar\\t")
        self.assertEqual(self.templar.template("\\{{foo + '\\t' }}", escape_backslashes=False), "\\bar\t")
        self.assertEqual(self.templar.template("\\{{foo + '\\\\t' }}", escape_backslashes=True), "\\bar\\\\t")
        self.assertEqual(self.templar.template("\\{{foo + '\\\\t' }}", escape_backslashes=False), "\\bar\\t")

    def test_template_jinja2_extensions(self):
        fake_loader = DictDataLoader({})
        templar = Templar(loader=fake_loader)
        # Temporarily override the configured extension list; restore it
        # afterwards so other tests are unaffected.
        old_exts = C.DEFAULT_JINJA2_EXTENSIONS
        try:
            C.DEFAULT_JINJA2_EXTENSIONS = "foo,bar"
            self.assertEqual(templar._get_extensions(), ['foo', 'bar'])
        finally:
            C.DEFAULT_JINJA2_EXTENSIONS = old_exts
class TestTemplarLookup(BaseTemplar, unittest.TestCase):
    """Tests for Templar._lookup() plugin dispatch and unsafe propagation."""

    def test_lookup_missing_plugin(self):
        self.assertRaisesRegexp(AnsibleError,
                                r'lookup plugin \(not_a_real_lookup_plugin\) not found',
                                self.templar._lookup,
                                'not_a_real_lookup_plugin',
                                'an_arg', a_keyword_arg='a_keyword_arg_value')

    def test_lookup_list(self):
        res = self.templar._lookup('list', 'an_arg', 'another_arg')
        self.assertEqual(res, 'an_arg,another_arg')

    def test_lookup_jinja_undefined(self):
        self.assertRaisesRegexp(AnsibleUndefinedVariable,
                                "'an_undefined_jinja_var' is undefined",
                                self.templar._lookup,
                                'list', '{{ an_undefined_jinja_var }}')

    def test_lookup_jinja_defined(self):
        res = self.templar._lookup('list', '{{ some_var }}')
        self.assertTrue(self.is_unsafe(res))
        # self.assertIsInstance(res, AnsibleUnsafe)

    def test_lookup_jinja_dict_string_passed(self):
        self.assertRaisesRegexp(AnsibleError,
                                "with_dict expects a dict",
                                self.templar._lookup,
                                'dict',
                                '{{ some_var }}')

    def test_lookup_jinja_dict_list_passed(self):
        self.assertRaisesRegexp(AnsibleError,
                                "with_dict expects a dict",
                                self.templar._lookup,
                                'dict',
                                ['foo', 'bar'])

    def test_lookup_jinja_kwargs(self):
        res = self.templar._lookup('list', 'blip', random_keyword='12345')
        self.assertTrue(self.is_unsafe(res))
        # self.assertIsInstance(res, AnsibleUnsafe)

    def test_lookup_jinja_list_wantlist(self):
        res = self.templar._lookup('list', '{{ some_var }}', wantlist=True)
        self.assertEqual(res, ["blip"])

    def test_lookup_jinja_list_wantlist_undefined(self):
        self.assertRaisesRegexp(AnsibleUndefinedVariable,
                                "'some_undefined_var' is undefined",
                                self.templar._lookup,
                                'list',
                                '{{ some_undefined_var }}',
                                wantlist=True)

    def test_lookup_jinja_list_wantlist_unsafe(self):
        res = self.templar._lookup('list', '{{ some_unsafe_var }}', wantlist=True)
        for lookup_result in res:
            self.assertTrue(self.is_unsafe(lookup_result))
            # self.assertIsInstance(lookup_result, AnsibleUnsafe)
        # Should this be an AnsibleUnsafe
        # self.assertIsInstance(res, AnsibleUnsafe)

    def test_lookup_jinja_dict(self):
        res = self.templar._lookup('list', {'{{ a_keyword }}': '{{ some_var }}'})
        self.assertEqual(res['{{ a_keyword }}'], "blip")
        # TODO: Should this be an AnsibleUnsafe
        # self.assertIsInstance(res['{{ a_keyword }}'], AnsibleUnsafe)
        # self.assertIsInstance(res, AnsibleUnsafe)

    def test_lookup_jinja_dict_unsafe(self):
        res = self.templar._lookup('list', {'{{ some_unsafe_key }}': '{{ some_unsafe_var }}'})
        self.assertTrue(self.is_unsafe(res['{{ some_unsafe_key }}']))
        # self.assertIsInstance(res['{{ some_unsafe_key }}'], AnsibleUnsafe)
        # TODO: Should this be an AnsibleUnsafe
        # self.assertIsInstance(res, AnsibleUnsafe)

    def test_lookup_jinja_dict_unsafe_value(self):
        res = self.templar._lookup('list', {'{{ a_keyword }}': '{{ some_unsafe_var }}'})
        self.assertTrue(self.is_unsafe(res['{{ a_keyword }}']))
        # self.assertIsInstance(res['{{ a_keyword }}'], AnsibleUnsafe)
        # TODO: Should this be an AnsibleUnsafe
        # self.assertIsInstance(res, AnsibleUnsafe)

    def test_lookup_jinja_none(self):
        res = self.templar._lookup('list', None)
        self.assertIsNone(res)
class TestAnsibleContext(BaseTemplar, unittest.TestCase):
    """Tests that AnsibleContext.resolve() preserves the unsafe marker."""

    def _context(self, variables=None):
        # Build a minimal AnsibleContext populated with ``variables``.
        variables = variables or {}
        env = AnsibleEnvironment()
        context = AnsibleContext(env, parent={}, name='some_context',
                                 blocks={})
        for key, value in variables.items():
            context.vars[key] = value
        return context

    def test(self):
        context = self._context()
        self.assertIsInstance(context, AnsibleContext)
        self.assertIsInstance(context, Context)

    def test_resolve_unsafe(self):
        context = self._context(variables={'some_unsafe_key': wrap_var('some_unsafe_string')})
        res = context.resolve('some_unsafe_key')
        # self.assertIsInstance(res, AnsibleUnsafe)
        self.assertTrue(self.is_unsafe(res),
                        'return of AnsibleContext.resolve (%s) was expected to be marked unsafe but was not' % res)

    def test_resolve_unsafe_list(self):
        context = self._context(variables={'some_unsafe_key': [wrap_var('some unsafe string 1')]})
        res = context.resolve('some_unsafe_key')
        # self.assertIsInstance(res[0], AnsibleUnsafe)
        self.assertTrue(self.is_unsafe(res),
                        'return of AnsibleContext.resolve (%s) was expected to be marked unsafe but was not' % res)

    def test_resolve_unsafe_dict(self):
        context = self._context(variables={'some_unsafe_key':
                                           {'an_unsafe_dict': wrap_var('some unsafe string 1')}
                                           })
        res = context.resolve('some_unsafe_key')
        self.assertTrue(self.is_unsafe(res['an_unsafe_dict']),
                        'return of AnsibleContext.resolve (%s) was expected to be marked unsafe but was not' % res['an_unsafe_dict'])

    def test_resolve(self):
        context = self._context(variables={'some_key': 'some_string'})
        res = context.resolve('some_key')
        self.assertEqual(res, 'some_string')
        # self.assertNotIsInstance(res, AnsibleUnsafe)
        self.assertFalse(self.is_unsafe(res),
                         'return of AnsibleContext.resolve (%s) was not expected to be marked unsafe but was' % res)

    def test_resolve_none(self):
        context = self._context(variables={'some_key': None})
        res = context.resolve('some_key')
        self.assertEqual(res, None)
        # self.assertNotIsInstance(res, AnsibleUnsafe)
        self.assertFalse(self.is_unsafe(res),
                         'return of AnsibleContext.resolve (%s) was not expected to be marked unsafe but was' % res)
| gpl-3.0 |
CaiZhongda/psutil | psutil/_compat.py | 4 | 9627 | #!/usr/bin/env python
# Copyright (c) 2009, Jay Loden, Giampaolo Rodola'. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Module which provides compatibility with older Python versions."""
__all__ = ["PY3", "int", "long", "xrange", "exec_", "callable",
"namedtuple", "property", "defaultdict"]
import sys

# --- python 2/3 compatibility layer

PY3 = sys.version_info >= (3,)

try:
    import __builtin__
except ImportError:
    import builtins as __builtin__  # py3

if PY3:
    int = int
    long = int
    xrange = range
    exec_ = getattr(__builtin__, "exec")
    print_ = getattr(__builtin__, "print")
else:
    int = int
    long = long
    xrange = xrange

    def exec_(code, globs=None, locs=None):
        """Execute *code* in the given namespaces (Python 2 fallback)."""
        if globs is None:
            # Bug fix: this previously referenced ``_sys``, a name only
            # bound inside the namedtuple backport's ImportError branch,
            # so calling exec_() with default namespaces raised NameError
            # on any interpreter with collections.namedtuple available.
            # Use the module-level ``sys`` import instead.
            frame = sys._getframe(1)
            globs = frame.f_globals
            if locs is None:
                locs = frame.f_locals
            del frame
        elif locs is None:
            locs = globs
        exec("""exec code in globs, locs""")

    def print_(s):
        """Write *s* plus a newline to stdout and flush (Python 2 fallback)."""
        sys.stdout.write(s + '\n')
        sys.stdout.flush()
# removed in 3.0, reintroduced in 3.2
try:
    callable = callable
except Exception:
    def callable(obj):
        """Fallback for Python 3.0/3.1 where callable() was removed."""
        return any("__call__" in klass.__dict__
                   for klass in type(obj).__mro__)
# --- stdlib additions

try:
    from collections import namedtuple
except ImportError:
    # NOTE: this fallback only ever runs on Python < 2.6, where
    # ``basestring`` and the py2 exec semantics used below exist.
    from operator import itemgetter as _itemgetter
    from keyword import iskeyword as _iskeyword
    import sys as _sys

    def namedtuple(typename, field_names, verbose=False, rename=False):
        """A collections.namedtuple implementation written in Python
        to support Python versions < 2.6.

        Taken from: http://code.activestate.com/recipes/500261/
        """
        # Parse and validate the field names. Validation serves two
        # purposes, generating informative error messages and preventing
        # template injection attacks.
        if isinstance(field_names, basestring):
            # names separated by whitespace and/or commas
            field_names = field_names.replace(',', ' ').split()
        field_names = tuple(map(str, field_names))
        if rename:
            # Replace invalid/duplicate names with positional '_N' names.
            names = list(field_names)
            seen = set()
            for i, name in enumerate(names):
                if (not min(c.isalnum() or c=='_' for c in name) or _iskeyword(name)
                    or not name or name[0].isdigit() or name.startswith('_')
                        or name in seen):
                    names[i] = '_%d' % i
                seen.add(name)
            field_names = tuple(names)
        for name in (typename,) + field_names:
            if not min(c.isalnum() or c=='_' for c in name):
                raise ValueError('Type names and field names can only contain ' \
                                 'alphanumeric characters and underscores: %r'
                                 % name)
            if _iskeyword(name):
                raise ValueError('Type names and field names cannot be a keyword: %r' \
                                 % name)
            if name[0].isdigit():
                raise ValueError('Type names and field names cannot start with a ' \
                                 'number: %r' % name)
        seen_names = set()
        for name in field_names:
            if name.startswith('_') and not rename:
                raise ValueError('Field names cannot start with an underscore: %r'
                                 % name)
            if name in seen_names:
                raise ValueError('Encountered duplicate field name: %r' % name)
            seen_names.add(name)
        # Create and fill-in the class template
        numfields = len(field_names)
        # tuple repr without parens or quotes
        argtxt = repr(field_names).replace("'", "")[1:-1]
        reprtxt = ', '.join('%s=%%r' % name for name in field_names)
        # NOTE(review): the dump this file was recovered from collapsed
        # whitespace inside string literals; the template below is
        # restored from the ActiveState recipe cited in the docstring.
        template = '''class %(typename)s(tuple):
        '%(typename)s(%(argtxt)s)' \n
        __slots__ = () \n
        _fields = %(field_names)r \n
        def __new__(_cls, %(argtxt)s):
            return _tuple.__new__(_cls, (%(argtxt)s)) \n
        @classmethod
        def _make(cls, iterable, new=tuple.__new__, len=len):
            'Make a new %(typename)s object from a sequence or iterable'
            result = new(cls, iterable)
            if len(result) != %(numfields)d:
                raise TypeError('Expected %(numfields)d arguments, got %%d' %% len(result))
            return result \n
        def __repr__(self):
            return '%(typename)s(%(reprtxt)s)' %% self \n
        def _asdict(self):
            'Return a new dict which maps field names to their values'
            return dict(zip(self._fields, self)) \n
        def _replace(_self, **kwds):
            'Return a new %(typename)s object replacing specified fields with new values'
            result = _self._make(map(kwds.pop, %(field_names)r, _self))
            if kwds:
                raise ValueError('Got unexpected field names: %%r' %% kwds.keys())
            return result \n
        def __getnewargs__(self):
            return tuple(self) \n\n''' % locals()
        for i, name in enumerate(field_names):
            template += '        %s = _property(_itemgetter(%d))\n' % (name, i)
        if verbose:
            sys.stdout.write(template + '\n')
            sys.stdout.flush()
        # Execute the template string in a temporary namespace
        namespace = dict(_itemgetter=_itemgetter, __name__='namedtuple_%s' % typename,
                         _property=property, _tuple=tuple)
        try:
            exec_(template, namespace)
        except SyntaxError:
            e = sys.exc_info()[1]
            raise SyntaxError(e.message + ':\n' + template)
        result = namespace[typename]
        # For pickling to work, the __module__ variable needs to be set
        # to the frame where the named tuple is created. Bypass this
        # step in enviroments where sys._getframe is not defined (Jython
        # for example) or sys._getframe is not defined for arguments
        # greater than 0 (IronPython).
        try:
            result.__module__ = _sys._getframe(1).f_globals.get('__name__', '__main__')
        except (AttributeError, ValueError):
            pass
        return result
# hack to support property.setter/deleter on python < 2.6
# http://docs.python.org/library/functions.html?highlight=property#property
if hasattr(property, 'setter'):
    property = property
else:
    class property(__builtin__.property):
        """Backport adding getter/setter/deleter to property on py < 2.6."""
        __metaclass__ = type

        def __init__(self, fget, *args, **kwargs):
            super(property, self).__init__(fget, *args, **kwargs)
            self.__doc__ = fget.__doc__

        def getter(self, method):
            # Rebuild the property replacing only the fget accessor.
            return property(method, self.fset, self.fdel)

        def setter(self, method):
            # Rebuild the property replacing only the fset accessor.
            return property(self.fget, method, self.fdel)

        def deleter(self, method):
            # Rebuild the property replacing only the fdel accessor.
            return property(self.fget, self.fset, method)
# py 2.5 collections.defauldict
# Taken from:
# http://code.activestate.com/recipes/523034-emulate-collectionsdefaultdict/
# credits: Jason Kirtland
try:
    from collections import defaultdict
except ImportError:
    class defaultdict(dict):
        """Emulation of collections.defaultdict for Python 2.5."""

        def __init__(self, default_factory=None, *a, **kw):
            if (default_factory is not None and
                    not hasattr(default_factory, '__call__')):
                raise TypeError('first argument must be callable')
            dict.__init__(self, *a, **kw)
            self.default_factory = default_factory

        def __getitem__(self, key):
            try:
                return dict.__getitem__(self, key)
            except KeyError:
                return self.__missing__(key)

        def __missing__(self, key):
            if self.default_factory is None:
                raise KeyError(key)
            # Create, store and return the default value for ``key``.
            self[key] = value = self.default_factory()
            return value

        def __reduce__(self):
            # Pickle support.
            if self.default_factory is None:
                args = tuple()
            else:
                args = self.default_factory,
            return type(self), args, None, None, self.items()

        def copy(self):
            return self.__copy__()

        def __copy__(self):
            # Shallow copy keeps the same default factory.
            return type(self)(self.default_factory, self)

        def __deepcopy__(self, memo):
            import copy
            return type(self)(self.default_factory,
                              copy.deepcopy(self.items()))

        def __repr__(self):
            return 'defaultdict(%s, %s)' % (self.default_factory,
                                            dict.__repr__(self))
# py 2.5 functools.wraps
try:
    from functools import wraps
except ImportError:
    def wraps(original):
        """Fallback for ``functools.wraps`` on Python < 2.5.

        Returns a decorator that copies ``__module__``, ``__name__`` and
        ``__doc__`` from *original* onto the wrapper and merges its
        ``__dict__``, so the wrapper masquerades as the wrapped function.
        """
        def inner(fn):
            # mirror functools.WRAPPER_ASSIGNMENTS
            for name in ('__module__', '__name__', '__doc__'):
                setattr(fn, name, getattr(original, name))
            # mirror functools.WRAPPER_UPDATES
            if hasattr(fn, '__dict__'):
                fn.__dict__.update(original.__dict__)
            else:
                fn.__dict__ = original.__dict__.copy()
            return fn
        return inner
| bsd-3-clause |
DIRACGrid/DIRAC | src/DIRAC/Core/DISET/test/Test_AuthManager.py | 2 | 11586 | """ Basic unit tests for AuthManager
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import unittest
from diraccfg import CFG
from DIRAC import gConfig
from DIRAC.Core.DISET.AuthManager import AuthManager
__RCSID__ = "$Id$"
testSystemsCFG = """
Systems
{
Service
{
Authorization
{
Method = NormalUser
MethodAll = Any
MethodAuth = Authenticated
MethodGroup = NormalUser,group:group_test
MethodGroupOther = NormalUser,group:group_test_other
MethodAllGroup = Any,group:group_test
MethodAllGroupOther = Any,group:group_test_other
MethodAuthGroup = Authenticated,group:group_test
MethodAuthGroupOther = Authenticated,group:group_test_other
MethodVO = NormalUser,vo:testVO
MethodAllVO = Any,vo:testVO
MethodAuthVO = Authenticated,vo:testVO
MethodHost = group:hosts
MethodTrustedHost = TrustedHost,group:hosts
}
}
}
"""
testRegistryCFG = """
Registry
{
VO
{
testVO
{
VOAdmin = userA
}
testVOBad
{
VOAdmin = userB
}
testVOOther
{
VOAdmin = userA
}
}
Users
{
userA
{
DN = /User/test/DN/CN=userA
}
userB
{
DN = /User/test/DN/CN=userB
}
userS
{
DN = /User/test/DN/CN=userS
Suspended = testVO
}
}
Hosts
{
test.hostA.ch
{
DN = /User/test/DN/CN=test.hostA.ch
Properties = TrustedHost
}
test.hostB.ch
{
DN = /User/test/DN/CN=test.hostB.ch
Properties = NoTrustedHost
}
}
Groups
{
group_test
{
Users = userA, userS
VO = testVO
Properties = NormalUser
}
group_test_other
{
Users = userS
VO = testVOOther
Properties = NormalUser
}
group_bad
{
Users = userB
VO = testVOBad
Properties = NoProperties
}
}
}
"""
class AuthManagerTest(unittest.TestCase):
    """Exercise AuthManager.authQuery against a synthetic configuration.

    setUp loads the Systems and Registry CFG snippets above into gConfig
    and prepares credential dictionaries for every interesting case:
    anonymous, normal user, user in a "bad" group, suspended user, and
    suspended user acting through a group of a different (non-suspending) VO.
    """

    def setUp(self):
        self.authMgr = AuthManager('/Systems/Service/Authorization')
        # Load both configuration buffers into the global config, reusing a
        # single CFG object exactly as consecutive loadFromBuffer calls did.
        cfg = CFG()
        for rawBuffer in (testSystemsCFG, testRegistryCFG):
            cfg.loadFromBuffer(rawBuffer)
            gConfig.loadCFG(cfg)
        self.noAuthCredDict = {'group': 'group_test'}
        self.userCredDict = {'DN': '/User/test/DN/CN=userA',
                             'group': 'group_test'}
        self.suspendedOtherVOUserCredDict = {'DN': '/User/test/DN/CN=userS',
                                             'group': 'group_test_other'}
        self.badUserCredDict = {'DN': '/User/test/DN/CN=userB',
                                'group': 'group_bad'}
        self.suspendedUserCredDict = {'DN': '/User/test/DN/CN=userS',
                                      'group': 'group_test'}
        self.hostCredDict = {'DN': '/User/test/DN/CN=test.hostA.ch',
                             'group': 'hosts'}
        self.badHostCredDict = {'DN': '/User/test/DN/CN=test.hostB.ch',
                                'group': 'hosts'}

    def tearDown(self):
        pass

    def _assertAuth(self, method, expectations):
        """Run authQuery for each (credentials, expected) pair, in order."""
        for credDict, expected in expectations:
            result = self.authMgr.authQuery(method, credDict)
            if expected:
                self.assertTrue(result)
            else:
                self.assertFalse(result)

    def test_userProperties(self):
        # MethodAll accepts everybody
        self._assertAuth('MethodAll', [
            (self.userCredDict, True),
            (self.noAuthCredDict, True),
            (self.badUserCredDict, True),
            (self.suspendedUserCredDict, True),
            (self.suspendedOtherVOUserCredDict, True),
        ])
        # MethodAuth requires DN to be identified
        self._assertAuth('MethodAuth', [
            (self.userCredDict, True),
            (self.noAuthCredDict, False),
            (self.badUserCredDict, True),
            (self.suspendedUserCredDict, False),
            (self.suspendedOtherVOUserCredDict, True),
        ])
        # Method requires NormalUser property
        self._assertAuth('Method', [
            (self.userCredDict, True),
            (self.badUserCredDict, False),
            (self.noAuthCredDict, False),
            (self.suspendedUserCredDict, False),
            (self.suspendedOtherVOUserCredDict, True),
        ])

    def test_userGroup(self):
        # MethodAllGroup accepts everybody from the right group
        self._assertAuth('MethodAllGroup', [
            (self.userCredDict, True),
            (self.noAuthCredDict, False),
            (self.badUserCredDict, False),
            (self.suspendedUserCredDict, False),
            (self.suspendedOtherVOUserCredDict, False),
        ])
        # MethodAllGroupOther accepts everybody from the right group
        self._assertAuth('MethodAllGroupOther', [
            (self.userCredDict, False),
            (self.noAuthCredDict, False),
            (self.badUserCredDict, False),
            (self.suspendedUserCredDict, False),
            (self.suspendedOtherVOUserCredDict, True),
        ])
        # MethodAuthGroup requires DN to be identified from the right group
        self._assertAuth('MethodAuthGroup', [
            (self.userCredDict, True),
            (self.noAuthCredDict, False),
            (self.badUserCredDict, False),
            (self.suspendedUserCredDict, False),
            (self.suspendedOtherVOUserCredDict, False),
        ])
        # MethodAuthGroupOther requires DN to be identified from the right group
        self._assertAuth('MethodAuthGroupOther', [
            (self.userCredDict, False),
            (self.noAuthCredDict, False),
            (self.badUserCredDict, False),
            (self.suspendedUserCredDict, False),
            (self.suspendedOtherVOUserCredDict, True),
        ])
        # MethodGroup requires NormalUser property and the right group
        self._assertAuth('MethodGroup', [
            (self.userCredDict, True),
            (self.badUserCredDict, False),
            (self.noAuthCredDict, False),
            (self.suspendedUserCredDict, False),
            (self.suspendedOtherVOUserCredDict, False),
        ])
        # MethodGroupOther requires NormalUser property and the right group
        self._assertAuth('MethodGroupOther', [
            (self.userCredDict, False),
            (self.badUserCredDict, False),
            (self.noAuthCredDict, False),
            (self.suspendedUserCredDict, False),
            (self.suspendedOtherVOUserCredDict, True),
        ])

    def test_userVO(self):
        # MethodAllVO accepts everybody from the right VO
        self._assertAuth('MethodAllVO', [
            (self.userCredDict, True),
            (self.noAuthCredDict, False),
            (self.badUserCredDict, False),
            (self.suspendedUserCredDict, False),
            (self.suspendedOtherVOUserCredDict, False),
        ])
        # MethodAuthVO requires DN to be identified from the right VO
        self._assertAuth('MethodAuthVO', [
            (self.userCredDict, True),
            (self.noAuthCredDict, False),
            (self.badUserCredDict, False),
            (self.suspendedUserCredDict, False),
            (self.suspendedOtherVOUserCredDict, False),
        ])
        # MethodVO requires NormalUser property and the right VO
        self._assertAuth('MethodVO', [
            (self.userCredDict, True),
            (self.badUserCredDict, False),
            (self.noAuthCredDict, False),
            (self.suspendedUserCredDict, False),
            (self.suspendedOtherVOUserCredDict, False),
        ])

    def test_hostProperties(self):
        # MethodAll accepts everybody
        self._assertAuth('MethodAll', [
            (self.hostCredDict, True),
            (self.badHostCredDict, True),
        ])
        # MethodAuth requires DN to be identified
        self._assertAuth('MethodAuth', [
            (self.hostCredDict, True),
            (self.badHostCredDict, True),
        ])
        # Method requires NormalUser property, which hosts do not carry
        self._assertAuth('Method', [
            (self.hostCredDict, False),
        ])
        # MethodHost requires the hosts group
        self._assertAuth('MethodHost', [
            (self.hostCredDict, True),
            (self.badHostCredDict, True),
        ])
        # MethodTrustedHost requires the hosts group and TrustedHost property
        self._assertAuth('MethodTrustedHost', [
            (self.hostCredDict, True),
            (self.badHostCredDict, False),
        ])
if __name__ == '__main__':
    # Build and run the suite exactly as direct script execution did.
    test_suite = unittest.defaultTestLoader.loadTestsFromTestCase(AuthManagerTest)
    unittest.TextTestRunner(verbosity=2).run(test_suite)
| gpl-3.0 |
chaos33/httpie | httpie/context.py | 48 | 2652 | import sys
from httpie.compat import is_windows
from httpie.config import DEFAULT_CONFIG_DIR, Config
class Environment(object):
    """
    Information about the execution context
    (standard streams, config directory, etc).

    By default, it represents the actual environment.
    All of the attributes can be overwritten though, which
    is used by the test suite to simulate various scenarios.

    """
    # Class-level defaults captured at import time; any of them can be
    # overridden per instance through __init__ keyword arguments.
    is_windows = is_windows
    config_dir = DEFAULT_CONFIG_DIR
    stdin = sys.stdin
    stdin_isatty = stdin.isatty()
    stdin_encoding = None
    stdout = sys.stdout
    stdout_isatty = stdout.isatty()
    stdout_encoding = None
    stderr = sys.stderr
    stderr_isatty = stderr.isatty()
    colors = 256
    if not is_windows:
        # Ask the terminal how many colors it really supports via curses.
        import curses
        try:
            curses.setupterm()
            try:
                colors = curses.tigetnum('colors')
            except TypeError:
                # pypy3 (2.4.0)
                colors = curses.tigetnum(b'colors')
        except curses.error:
            pass
        del curses
    else:
        # On Windows, wrap stdout/stderr with colorama so ANSI escape
        # sequences are translated into console API calls.
        # noinspection PyUnresolvedReferences
        import colorama.initialise
        stdout = colorama.initialise.wrap_stream(
            stdout, convert=None, strip=None,
            autoreset=True, wrap=True
        )
        stderr = colorama.initialise.wrap_stream(
            stderr, convert=None, strip=None,
            autoreset=True, wrap=True
        )
        del colorama

    def __init__(self, **kwargs):
        """
        Use keyword arguments to overwrite
        any of the class attributes for this instance.

        """
        # Only existing class attributes may be overridden.
        assert all(hasattr(type(self), attr) for attr in kwargs.keys())
        self.__dict__.update(**kwargs)

        # Keyword arguments > stream.encoding > default utf8
        if self.stdin_encoding is None:
            self.stdin_encoding = getattr(
                self.stdin, 'encoding', None) or 'utf8'
        if self.stdout_encoding is None:
            actual_stdout = self.stdout
            if is_windows:
                # noinspection PyUnresolvedReferences
                from colorama import AnsiToWin32
                if isinstance(self.stdout, AnsiToWin32):
                    # Read the encoding from the wrapped stream, not the
                    # colorama wrapper.
                    actual_stdout = self.stdout.wrapped
            self.stdout_encoding = getattr(
                actual_stdout, 'encoding', None) or 'utf8'

    @property
    def config(self):
        # Lazily load the user's config file, creating and saving it on
        # first use.
        if not hasattr(self, '_config'):
            self._config = Config(directory=self.config_dir)
            if self._config.is_new():
                self._config.save()
            else:
                self._config.load()
        return self._config
| bsd-3-clause |
Celthi/youtube-dl-GUI | youtube_dl/extractor/worldstarhiphop.py | 31 | 1824 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
class WorldStarHipHopIE(InfoExtractor):
    """Extractor for worldstarhiphop.com / worldstarcandy.com video pages."""
    _VALID_URL = r'https?://(?:www|m)\.worldstar(?:candy|hiphop)\.com/videos/video\.php\?v=(?P<id>.*)'
    _TEST = {
        "url": "http://www.worldstarhiphop.com/videos/video.php?v=wshh6a7q1ny0G34ZwuIO",
        "md5": "9d04de741161603bf7071bbf4e883186",
        "info_dict": {
            "id": "wshh6a7q1ny0G34ZwuIO",
            "ext": "mp4",
            "title": "KO Of The Week: MMA Fighter Gets Knocked Out By Swift Head Kick!"
        }
    }

    def _real_extract(self, url):
        """Extract video info, delegating to Vevo/YouTube for embedded players."""
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)

        # An embedded Vevo player takes precedence over the direct file URL.
        m_vevo_id = re.search(r'videoId=(.*?)&?', webpage)
        if m_vevo_id is not None:
            return self.url_result('vevo:%s' % m_vevo_id.group(1), ie='Vevo')

        video_url = self._search_regex(
            r'so\.addVariable\("file","(.*?)"\)', webpage, 'video URL')

        # YouTube-hosted videos are handed off to the YouTube extractor.
        if 'youtube' in video_url:
            return self.url_result(video_url, ie='Youtube')

        video_title = self._html_search_regex(
            r'(?s)<div class="content-heading">\s*<h1>(.*?)</h1>',
            webpage, 'title')

        # Getting thumbnail and if not thumbnail sets correct title for WSHH candy video.
        thumbnail = self._html_search_regex(
            r'rel="image_src" href="(.*)" />', webpage, 'thumbnail',
            fatal=False)
        if not thumbnail:
            _title = r'candytitles.*>(.*)</span>'
            mobj = re.search(_title, webpage)
            if mobj is not None:
                video_title = mobj.group(1)

        return {
            'id': video_id,
            'url': video_url,
            'title': video_title,
            'thumbnail': thumbnail,
        }
| mit |
willingc/cookiecutter | tests/test_output_folder.py | 5 | 1969 | # -*- coding: utf-8 -*-
"""
test_output_folder
------------------
Test formerly known from a unittest residing in test_generate.py named
TestOutputFolder.test_output_folder
"""
from __future__ import unicode_literals
import os
import pytest
from cookiecutter import generate
from cookiecutter import utils
from cookiecutter import exceptions
@pytest.fixture(scope='function')
def remove_output_folder(request):
    """Register a finalizer that removes a stray ``output_folder`` dir."""
    def _cleanup():
        if os.path.exists('output_folder'):
            utils.rmtree('output_folder')
    request.addfinalizer(_cleanup)
@pytest.mark.usefixtures('clean_system', 'remove_output_folder')
def test_output_folder():
    """Generate the test template and verify the rendered output tree.

    Checks templated file contents plus that templated directory/file
    names were created. Files are read via ``with`` blocks so the handles
    are closed deterministically (the original leaked them).
    """
    context = generate.generate_context(
        context_file='tests/test-output-folder/cookiecutter.json'
    )
    generate.generate_files(
        context=context,
        repo_dir='tests/test-output-folder'
    )

    something = """Hi!
My name is Audrey Greenfeld.
It is 2014."""
    with open('output_folder/something.txt') as f:
        something2 = f.read()
    assert something == something2

    in_folder = "The color is green and the letter is D."
    with open('output_folder/folder/in_folder.txt') as f:
        in_folder2 = f.read()
    assert in_folder == in_folder2

    assert os.path.isdir('output_folder/im_a.dir')
    assert os.path.isfile('output_folder/im_a.dir/im_a.file.py')
@pytest.mark.usefixtures('clean_system', 'remove_output_folder')
def test_exception_when_output_folder_exists():
    """Generation must abort when the output directory already exists."""
    context = generate.generate_context(
        context_file='tests/test-output-folder/cookiecutter.json'
    )
    pre_existing = context['cookiecutter']['test_name']
    if not os.path.exists(pre_existing):
        os.makedirs(pre_existing)
    with pytest.raises(exceptions.OutputDirExistsException):
        generate.generate_files(
            context=context, repo_dir='tests/test-output-folder')
| bsd-3-clause |
amenonsen/ansible | lib/ansible/modules/remote_management/ucs/ucs_vlans.py | 64 | 6941 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: ucs_vlans
short_description: Configures VLANs on Cisco UCS Manager
description:
- Configures VLANs on Cisco UCS Manager.
- Examples can be used with the UCS Platform Emulator U(https://communities.cisco.com/ucspe).
extends_documentation_fragment: ucs
options:
state:
description:
- If C(present), will verify VLANs are present and will create if needed.
- If C(absent), will verify VLANs are absent and will delete if needed.
choices: [present, absent]
default: present
name:
description:
- The name assigned to the VLAN.
- The VLAN name is case sensitive.
- This name can be between 1 and 32 alphanumeric characters.
- "You cannot use spaces or any special characters other than - (hyphen), \"_\" (underscore), : (colon), and . (period)."
- You cannot change this name after the VLAN is created.
required: yes
multicast_policy:
description:
- The multicast policy associated with this VLAN.
- This option is only valid if the Sharing Type field is set to None or Primary.
default: ''
fabric:
description:
- "The fabric configuration of the VLAN. This can be one of the following:"
- "common - The VLAN applies to both fabrics and uses the same configuration parameters in both cases."
- "A — The VLAN only applies to fabric A."
- "B — The VLAN only applies to fabric B."
- For upstream disjoint L2 networks, Cisco recommends that you choose common to create VLANs that apply to both fabrics.
choices: [common, A, B]
default: common
id:
description:
- The unique string identifier assigned to the VLAN.
- A VLAN ID can be between '1' and '3967', or between '4048' and '4093'.
- You cannot create VLANs with IDs from 4030 to 4047. This range of VLAN IDs is reserved.
- The VLAN IDs you specify must also be supported on the switch that you are using.
- VLANs in the LAN cloud and FCoE VLANs in the SAN cloud must have different IDs.
- Optional if state is absent.
required: yes
sharing:
description:
- The Sharing Type field.
- "Whether this VLAN is subdivided into private or secondary VLANs. This can be one of the following:"
- "none - This VLAN does not have any secondary or private VLANs. This is a regular VLAN."
- "primary - This VLAN can have one or more secondary VLANs, as shown in the Secondary VLANs area. This VLAN is a primary VLAN in the private VLAN domain."
- "isolated - This is a private VLAN associated with a primary VLAN. This VLAN is an Isolated VLAN."
- "community - This VLAN can communicate with other ports on the same community VLAN as well as the promiscuous port. This VLAN is a Community VLAN."
choices: [none, primary, isolated, community]
default: none
native:
description:
- Designates the VLAN as a native VLAN.
choices: ['yes', 'no']
default: 'no'
requirements:
- ucsmsdk
author:
- David Soper (@dsoper2)
- CiscoUcs (@CiscoUcs)
version_added: '2.5'
'''
EXAMPLES = r'''
- name: Configure VLAN
ucs_vlans:
hostname: 172.16.143.150
username: admin
password: password
name: vlan2
id: '2'
native: 'yes'
- name: Remove VLAN
ucs_vlans:
hostname: 172.16.143.150
username: admin
password: password
name: vlan2
state: absent
'''
RETURN = r'''
#
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.remote_management.ucs import UCSModule, ucs_argument_spec
def main():
    """Ansible module entry point: ensure the requested VLAN state in UCSM.

    Builds the argument spec, logs in through UCSModule, then creates,
    updates or deletes the FabricVlan managed object as required. Results
    are reported via module.exit_json()/fail_json().
    """
    argument_spec = ucs_argument_spec
    argument_spec.update(
        name=dict(type='str', required=True),
        multicast_policy=dict(type='str', default=''),
        fabric=dict(type='str', default='common', choices=['common', 'A', 'B']),
        id=dict(type='str'),
        sharing=dict(type='str', default='none', choices=['none', 'primary', 'isolated', 'community']),
        native=dict(type='str', default='no', choices=['yes', 'no']),
        state=dict(type='str', default='present', choices=['present', 'absent']),
    )
    module = AnsibleModule(
        argument_spec,
        supports_check_mode=True,
        required_if=[
            ['state', 'present', ['id']],
        ],
    )
    ucs = UCSModule(module)

    err = False

    # UCSModule creation above verifies ucsmsdk is present and exits on failure, so additional imports are done below.
    from ucsmsdk.mometa.fabric.FabricVlan import FabricVlan

    changed = False
    try:
        mo_exists = False
        props_match = False
        # dn is fabric/lan/net-<name> for common vlans or fabric/lan/[A or B]/net-<name> for A or B
        dn_base = 'fabric/lan'
        if module.params['fabric'] != 'common':
            dn_base += '/' + module.params['fabric']
        dn = dn_base + '/net-' + module.params['name']

        mo = ucs.login_handle.query_dn(dn)
        if mo:
            mo_exists = True

        if module.params['state'] == 'absent':
            # mo must exist but all properties do not have to match
            if mo_exists:
                if not module.check_mode:
                    ucs.login_handle.remove_mo(mo)
                    ucs.login_handle.commit()
                changed = True
        else:
            if mo_exists:
                # check top-level mo props
                kwargs = dict(id=module.params['id'])
                kwargs['default_net'] = module.params['native']
                kwargs['sharing'] = module.params['sharing']
                kwargs['mcast_policy_name'] = module.params['multicast_policy']
                if (mo.check_prop_match(**kwargs)):
                    props_match = True

            if not props_match:
                # anything missing or different: (re)apply the full object
                if not module.check_mode:
                    # create if mo does not already exist
                    mo = FabricVlan(
                        parent_mo_or_dn=dn_base,
                        name=module.params['name'],
                        id=module.params['id'],
                        default_net=module.params['native'],
                        sharing=module.params['sharing'],
                        mcast_policy_name=module.params['multicast_policy'],
                    )

                    ucs.login_handle.add_mo(mo, True)
                    ucs.login_handle.commit()
                changed = True

    except Exception as e:
        err = True
        ucs.result['msg'] = "setup error: %s " % str(e)

    ucs.result['changed'] = changed
    if err:
        module.fail_json(**ucs.result)
    module.exit_json(**ucs.result)
# Allow direct execution; Ansible normally imports this file and calls main().
if __name__ == '__main__':
    main()
| gpl-3.0 |
Peekmo/three.js | utils/exporters/blender/addons/io_three/exporter/api/camera.py | 125 | 1823 | from bpy import data, types, context
from .. import logger
def _camera(func):
    """Decorator that resolves a camera name into a bpy camera data block.

    The wrapped function always receives a ``types.Camera`` instance, so
    callers may pass either the data block itself or its name.
    """
    def resolver(name, *args, **kwargs):
        camera = name if isinstance(name, types.Camera) else data.cameras[name]
        return func(camera, *args, **kwargs)
    return resolver
@_camera
def aspect(camera):
    """Render aspect ratio (width / height) of the active scene.

    Note: the value comes from the scene render settings; the ``camera``
    argument is only logged.

    :param camera: camera data block or its name
    :rtype: float
    """
    logger.debug("camera.aspect(%s)", camera)
    render = context.scene.render
    return render.resolution_x/render.resolution_y
@_camera
def bottom(camera):
    """Bottom extent for orthographic export: -(angle_y * ortho_scale).

    :param camera: camera data block or its name
    :rtype: float
    """
    logger.debug("camera.bottom(%s)", camera)
    return -(camera.angle_y * camera.ortho_scale)
@_camera
def far(camera):
    """Far clipping distance (Blender ``clip_end``).

    :param camera: camera data block or its name
    :rtype: float
    """
    logger.debug("camera.far(%s)", camera)
    return camera.clip_end
@_camera
def fov(camera):
    """Value exported as the field of view.

    NOTE(review): this returns ``camera.lens``, which Blender stores as the
    focal length for perspective cameras -- presumably the importer converts
    it; confirm on the consuming side.

    :param camera: camera data block or its name
    :rtype: float
    """
    logger.debug("camera.fov(%s)", camera)
    return camera.lens
@_camera
def left(camera):
    """Left extent for orthographic export: -(angle_x * ortho_scale).

    :param camera: camera data block or its name
    :rtype: float
    """
    logger.debug("camera.left(%s)", camera)
    return -(camera.angle_x * camera.ortho_scale)
@_camera
def near(camera):
    """Near clipping distance (Blender ``clip_start``).

    :param camera: camera data block or its name
    :rtype: float
    """
    logger.debug("camera.near(%s)", camera)
    return camera.clip_start
@_camera
def right(camera):
    """Right extent for orthographic export: angle_x * ortho_scale.

    :param camera: camera data block or its name
    :rtype: float
    """
    logger.debug("camera.right(%s)", camera)
    return camera.angle_x * camera.ortho_scale
@_camera
def top(camera):
    """Top extent for orthographic export: angle_y * ortho_scale.

    :param camera: camera data block or its name
    :rtype: float
    """
    logger.debug("camera.top(%s)", camera)
    return camera.angle_y * camera.ortho_scale
| mit |
lioncash/dolphin | Tools/IDA/LoadDolphinMap.py | 45 | 1743 | # Copyright 2018 Dolphin Emulator Project
# Licensed under GPLv2+
# Refer to the license.txt file included.
from collections import namedtuple
# One entry of a Dolphin .map file: all fields are kept as the raw strings
# read from the file (addresses/sizes are hex strings, not ints).
DolphinSymbol = namedtuple("DolphinSymbol", [
    "section", "addr", "size", "vaddr", "align", "name"
])


def load_dolphin_map(filepath):
    """Parse a Dolphin emulator symbol map into a list of DolphinSymbol.

    Section header lines look like ``.text section layout`` and set the
    current section; every subsequent 5-field line becomes a symbol in
    that section. Lines before the first header or with a different field
    count are skipped.
    """
    symbol_map = []
    section = ""
    with open(filepath, "r") as f:
        # Stream line by line instead of readlines(): no whole-file buffer.
        for line in f:
            fields = line.strip().split(" ", 4)
            if len(fields) == 3 and fields[1] == "section" and fields[2] == "layout":
                section = fields[0]
                continue
            if not section or len(fields) != 5:
                continue
            symbol_map.append(DolphinSymbol(section, *fields))
    return symbol_map
def ida_main():
    """Apply a Dolphin .map file to the current IDA database.

    Prompts for a map file, then for every symbol undefines the address
    range, creates a function (code sections) or a data item, and applies
    the symbol name. Only runs inside IDA (``idc`` is imported lazily).
    """
    import idc

    filepath = idc.AskFile(0, "*.map", "Load a Dolphin emulator symbol map")
    symbol_map = load_dolphin_map(filepath)

    for symbol in symbol_map:
        # vaddr/size are hex strings in the map file
        addr = int(symbol.vaddr, 16)
        size = int(symbol.size, 16)
        # undefine the range first so the redefinition below succeeds
        idc.MakeUnknown(addr, size, 0)
        if symbol.section in [".init", ".text"]:
            idc.MakeCode(addr)
            success = idc.MakeFunction(
                addr,
                idc.BADADDR if not size else (addr+size)
            )
        else:
            success = idc.MakeData(addr, idc.FF_BYTE, size, 0)
        if not success:
            idc.Message("Can't apply properties for symbol:"
                        " {0.vaddr} - {0.name}\n".format(symbol))
        flags = idc.SN_NOCHECK | idc.SN_PUBLIC
        if symbol.name.startswith("zz_"):
            # zz_-prefixed names are generated placeholders; mark them weak
            flags |= idc.SN_AUTO | idc.SN_WEAK
        else:
            flags |= idc.SN_NON_AUTO
        idc.MakeNameEx(addr, symbol.name, flags)
# Entry point when executed as an IDA script (File > Script file...).
if __name__ == "__main__":
    ida_main()
| gpl-2.0 |
d0ugal/readthedocs.org | readthedocs/core/management/commands/archive.py | 7 | 1045 | from glob import glob
import os
import logging
from optparse import make_option
from django.conf import settings
from django.core.management.base import BaseCommand
from django.template import Context, loader as template_loader
log = logging.getLogger(__name__)
class Command(BaseCommand):

    """Custom management command to rebuild documentation for all projects on
    the site. Invoked via ``./manage.py update_repos``.
    """

    def handle(self, *args, **options):
        # Map each project directory under DOCROOT to its built versions.
        doc_index = {}
        os.chdir(settings.DOCROOT)
        for directory in glob("*"):
            doc_index[directory] = []
            path = os.path.join(directory, 'rtd-builds')
            for version in glob(os.path.join(path, "*")):
                # strip the leading "<project>/rtd-builds/" to keep the slug
                v = version.replace(path + '/', '')
                doc_index[directory].append(v)
        context = Context({
            'doc_index': doc_index,
            'MEDIA_URL': settings.MEDIA_URL,
        })
        html = template_loader.get_template('archive/index.html').render(context)
        # NOTE: Python 2 print statement -- this module predates Python 3.
        print html
| mit |
Djlavoy/scrapy | tests/test_selector_csstranslator.py | 59 | 6026 | """
Selector tests for cssselect backend
"""
from twisted.trial import unittest
from scrapy.http import HtmlResponse
from scrapy.selector.csstranslator import ScrapyHTMLTranslator
from scrapy.selector import Selector
from cssselect.parser import SelectorSyntaxError
from cssselect.xpath import ExpressionError
HTMLBODY = '''
<html>
<body>
<div>
<a id="name-anchor" name="foo"></a>
<a id="tag-anchor" rel="tag" href="http://localhost/foo">link</a>
<a id="nofollow-anchor" rel="nofollow" href="https://example.org"> link</a>
<p id="paragraph">
lorem ipsum text
<b id="p-b">hi</b> <em id="p-em">there</em>
<b id="p-b2">guy</b>
<input type="checkbox" id="checkbox-unchecked" />
<input type="checkbox" id="checkbox-disabled" disabled="" />
<input type="text" id="text-checked" checked="checked" />
<input type="hidden" />
<input type="hidden" disabled="disabled" />
<input type="checkbox" id="checkbox-checked" checked="checked" />
<input type="checkbox" id="checkbox-disabled-checked"
disabled="disabled" checked="checked" />
<fieldset id="fieldset" disabled="disabled">
<input type="checkbox" id="checkbox-fieldset-disabled" />
<input type="hidden" />
</fieldset>
</p>
<map name="dummymap">
<area shape="circle" coords="200,250,25" href="foo.html" id="area-href" />
<area shape="default" id="area-nohref" />
</map>
</div>
<div class="cool-footer" id="foobar-div" foobar="ab bc cde">
<span id="foobar-span">foo ter</span>
</div>
</body></html>
'''
class TranslatorMixinTest(unittest.TestCase):
    """Unit tests for ScrapyHTMLTranslator's CSS-to-XPath conversion."""
    tr_cls = ScrapyHTMLTranslator

    def setUp(self):
        self.tr = self.tr_cls()
        self.c2x = self.tr.css_to_xpath

    def test_attr_function(self):
        """::attr(name) translates to an /@name XPath step."""
        cases = [
            ('::attr(name)', u'descendant-or-self::*/@name'),
            ('a::attr(href)', u'descendant-or-self::a/@href'),
            ('a ::attr(img)', u'descendant-or-self::a/descendant-or-self::*/@img'),
            ('a > ::attr(class)', u'descendant-or-self::a/*/@class'),
        ]
        for css, xpath in cases:
            self.assertEqual(self.c2x(css), xpath, css)

    def test_attr_function_exception(self):
        """Invalid ::attr() arguments raise cssselect errors."""
        cases = [
            ('::attr(12)', ExpressionError),
            ('::attr(34test)', ExpressionError),
            ('::attr(@href)', SelectorSyntaxError),
        ]
        for css, exc in cases:
            self.assertRaises(exc, self.c2x, css)

    def test_text_pseudo_element(self):
        """::text translates to a text() XPath step."""
        cases = [
            ('::text', u'descendant-or-self::text()'),
            ('p::text', u'descendant-or-self::p/text()'),
            ('p ::text', u'descendant-or-self::p/descendant-or-self::text()'),
            ('#id::text', u"descendant-or-self::*[@id = 'id']/text()"),
            ('p#id::text', u"descendant-or-self::p[@id = 'id']/text()"),
            ('p#id ::text', u"descendant-or-self::p[@id = 'id']/descendant-or-self::text()"),
            ('p#id > ::text', u"descendant-or-self::p[@id = 'id']/*/text()"),
            ('p#id ~ ::text', u"descendant-or-self::p[@id = 'id']/following-sibling::*/text()"),
            ('a[href]::text', u'descendant-or-self::a[@href]/text()'),
            ('a[href] ::text', u'descendant-or-self::a[@href]/descendant-or-self::text()'),
            ('p::text, a::text', u"descendant-or-self::p/text() | descendant-or-self::a/text()"),
        ]
        for css, xpath in cases:
            self.assertEqual(self.c2x(css), xpath, css)

    def test_pseudo_function_exception(self):
        """Unknown or malformed pseudo functions raise cssselect errors."""
        cases = [
            ('::attribute(12)', ExpressionError),
            ('::text()', ExpressionError),
            ('::attr(@href)', SelectorSyntaxError),
        ]
        for css, exc in cases:
            self.assertRaises(exc, self.c2x, css)

    def test_unknown_pseudo_element(self):
        cases = [
            ('::text-node', ExpressionError),
        ]
        for css, exc in cases:
            self.assertRaises(exc, self.c2x, css)

    def test_unknown_pseudo_class(self):
        cases = [
            (':text', ExpressionError),
            (':attribute(name)', ExpressionError),
        ]
        for css, exc in cases:
            self.assertRaises(exc, self.c2x, css)
class CSSSelectorTest(unittest.TestCase):
    """End-to-end tests: CSS selectors (with Scrapy pseudo-elements) run
    against a real HtmlResponse built from HTMLBODY."""
    sscls = Selector

    def setUp(self):
        self.htmlresponse = HtmlResponse('http://example.com', body=HTMLBODY)
        self.sel = self.sscls(self.htmlresponse)

    def x(self, *a, **kw):
        """Run a CSS query and return the non-empty, stripped results."""
        return [v.strip() for v in self.sel.css(*a, **kw).extract() if v.strip()]

    def test_selector_simple(self):
        for x in self.sel.css('input'):
            self.assertTrue(isinstance(x, self.sel.__class__), x)
        self.assertEqual(self.sel.css('input').extract(),
                         [x.extract() for x in self.sel.css('input')])

    def test_text_pseudo_element(self):
        self.assertEqual(self.x('#p-b2'), [u'<b id="p-b2">guy</b>'])
        self.assertEqual(self.x('#p-b2::text'), [u'guy'])
        self.assertEqual(self.x('#p-b2 ::text'), [u'guy'])
        self.assertEqual(self.x('#paragraph::text'), [u'lorem ipsum text'])
        self.assertEqual(self.x('#paragraph ::text'), [u'lorem ipsum text', u'hi', u'there', u'guy'])
        self.assertEqual(self.x('p::text'), [u'lorem ipsum text'])
        self.assertEqual(self.x('p ::text'), [u'lorem ipsum text', u'hi', u'there', u'guy'])

    def test_attribute_function(self):
        self.assertEqual(self.x('#p-b2::attr(id)'), [u'p-b2'])
        self.assertEqual(self.x('.cool-footer::attr(class)'), [u'cool-footer'])
        self.assertEqual(self.x('.cool-footer ::attr(id)'), [u'foobar-div', u'foobar-span'])
        self.assertEqual(self.x('map[name="dummymap"] ::attr(shape)'), [u'circle', u'default'])

    def test_nested_selector(self):
        self.assertEqual(self.sel.css('p').css('b::text').extract(),
                         [u'hi', u'guy'])
        self.assertEqual(self.sel.css('div').css('area:last-child').extract(),
                         [u'<area shape="default" id="area-nohref">'])
| bsd-3-clause |
bkochendorfer/reviewboard | reviewboard/webapi/errors.py | 5 | 4650 | from __future__ import unicode_literals
from djblets.webapi.errors import WebAPIError
#
# Standard error messages
#
# Each entry pairs a stable, Review Board-specific numeric API error code
# with the HTTP status used for the response. Codes must never be reused
# or renumbered, as clients depend on them.
UNSPECIFIED_DIFF_REVISION = WebAPIError(
    200,
    'Diff revision not specified.',
    http_status=400)  # 400 Bad Request
INVALID_DIFF_REVISION = WebAPIError(
    201,
    'Invalid diff revision.',
    http_status=404)  # 404 Not Found
INVALID_ACTION = WebAPIError(
    202,
    'Invalid action specified.',
    http_status=400)  # 400 Bad Request
INVALID_CHANGE_NUMBER = WebAPIError(
    203,
    'The commit ID specified could not be found.',
    http_status=404)  # 404 Not Found
CHANGE_NUMBER_IN_USE = WebAPIError(
    204,
    'The commit ID specified has already been used.',
    http_status=409)  # 409 Conflict
MISSING_REPOSITORY = WebAPIError(
    205,
    'There was no repository found at the specified path.',
    http_status=400)  # 400 Bad Request
INVALID_REPOSITORY = WebAPIError(
    206,
    'The repository path specified is not in the list of known repositories.',
    http_status=400)  # 400 Bad Request
REPO_FILE_NOT_FOUND = WebAPIError(
    207,
    'The file was not found in the repository.',
    http_status=400)  # 400 Bad Request
INVALID_USER = WebAPIError(
    208,
    'User does not exist.',
    http_status=400)  # 400 Bad Request
REPO_NOT_IMPLEMENTED = WebAPIError(
    209,
    'The specified repository is not able to perform this action.',
    http_status=501)  # 501 Not Implemented
REPO_INFO_ERROR = WebAPIError(
    210,
    'There was an error fetching extended information for this repository.',
    http_status=500)  # 500 Internal Server Error
NOTHING_TO_PUBLISH = WebAPIError(
    211,
    'You attempted to publish a review request without any modifications.',
    http_status=400)  # 400 Bad Request
EMPTY_CHANGESET = WebAPIError(
    212,
    'The commit ID specified represents an empty changeset.',
    http_status=400)  # 400 Bad Request
SERVER_CONFIG_ERROR = WebAPIError(
    213,
    'There was an error storing configuration on the server.',
    http_status=500)  # 500 Internal Server Error
BAD_HOST_KEY = WebAPIError(
    214,
    # Fixed typo: "does ot match" -> "does not match".
    'The SSH key on the host does not match the stored key.',
    http_status=403)  # 403 Forbidden
UNVERIFIED_HOST_KEY = WebAPIError(
    215,
    'The SSH key on the host is unverified.',
    http_status=403)  # 403 Forbidden
UNVERIFIED_HOST_CERT = WebAPIError(
    216,
    'The HTTPS certificate on the host is unverified.',
    http_status=403)  # 403 Forbidden
MISSING_USER_KEY = WebAPIError(
    217,
    'A public SSH key was requested, but no SSH key was available to send.',
    http_status=403)  # 403 Forbidden
REPO_AUTHENTICATION_ERROR = WebAPIError(
    218,
    'Unable to authenticate with the repository using the provided '
    'credentials.',
    http_status=403)  # 403 Forbidden
DIFF_EMPTY = WebAPIError(
    219,
    'The specified diff file is empty.',
    http_status=400)  # 400 Bad Request
DIFF_TOO_BIG = WebAPIError(
    220,
    'The specified diff file is too large.',
    http_status=400)  # 400 Bad Request
FILE_RETRIEVAL_ERROR = WebAPIError(
    221,
    'There was an error fetching a source file.',
    http_status=500)  # 500 Internal Server Error
HOSTINGSVC_AUTH_ERROR = WebAPIError(
    222,
    'There was an error authorizing with a service.',
    http_status=403)  # 403 Forbidden
GROUP_ALREADY_EXISTS = WebAPIError(
    223,
    'A group with this name already exists.',
    http_status=409)  # 409 Conflict
DIFF_PARSE_ERROR = WebAPIError(
    224,
    'The specified diff file could not be parsed.',
    http_status=400)  # 400 Bad Request
PUBLISH_ERROR = WebAPIError(
    225,
    'An error occurred during publishing.',
    http_status=500)  # 500 Internal Server Error
USER_QUERY_ERROR = WebAPIError(
    226,
    'An error occurred querying the user list.',
    http_status=500)  # 500 Internal Server Error
COMMIT_ID_ALREADY_EXISTS = WebAPIError(
    227,
    'Review request with this commit ID already exists in the repository.',
    http_status=409)  # 409 Conflict
TOKEN_GENERATION_FAILED = WebAPIError(
    228,
    'There was an error generating the API token. Please try again.',
    http_status=500)  # 500 Internal Server Error.
REPOSITORY_ALREADY_EXISTS = WebAPIError(
    229,
    'A repository with this name already exists.',
    http_status=409)  # 409 Conflict
CLOSE_ERROR = WebAPIError(
    230,
    'An error occurred while closing the review request.',
    http_status=500)  # 500 Internal Server Error
REOPEN_ERROR = WebAPIError(
    231,
    'An error occurred while reopening the review request.',
    http_status=500)  # 500 Internal Server Error
| mit |
lociii/googleads-python-lib | examples/adspygoogle/dfp/v201308/line_item_service/get_all_line_items.py | 2 | 1733 | #!/usr/bin/python
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example gets all line items. To create line items, run
create_line_items.py."""
__author__ = ('Jeff Sham',
'Vincent Tsao')
# Locate the client library. If module was installed via "setup.py" script, then
# the following two lines are not needed.
import os
import sys
sys.path.insert(0, os.path.join('..', '..', '..', '..', '..'))
# Import appropriate classes from the client library.
from adspygoogle import DfpClient
from adspygoogle.dfp import DfpUtils
# Initialize client object.
# The path points at the directory holding the library's pickle files
# (auth and config), five levels above this example.
client = DfpClient(path=os.path.join('..', '..', '..', '..', '..'))

# Initialize appropriate service.
line_item_service = client.GetService('LineItemService', version='v201308')

# Get line items by statement.
# The helper transparently pages through all results for us.
line_items = DfpUtils.GetAllEntitiesByStatementWithService(line_item_service)

# Display results.
for line_item in line_items:
  print ('Line item with id \'%s\', belonging to order id \'%s\', and named '
         '\'%s\' was found.' % (line_item['id'], line_item['orderId'],
                                line_item['name']))

print
print 'Number of results found: %s' % len(line_items)
| apache-2.0 |
sathnaga/virt-test | tests/nfs_corrupt.py | 6 | 8512 | import logging, os, re
from autotest.client.shared import error
from autotest.client import utils, os_dep
from virttest import utils_misc
from virttest import env_process
class NFSCorruptConfig(object):
    """
    This class sets up nfs_corrupt test environment.
    """

    def __init__(self, test, params):
        # Directory exported over NFS, and the local mount point for it.
        self.nfs_dir = os.path.join(test.tmpdir, "nfs_dir")
        self.mnt_dir = os.path.join(test.tmpdir, "mnt_dir")
        # Regex matched against service status output to decide whether
        # the NFS service is running.
        self.chk_re = params.get("nfs_stat_chk_re", "running")

        cmd_list = self._get_service_cmds()
        self.start_cmd = cmd_list[0]
        self.stop_cmd = cmd_list[1]
        self.restart_cmd = cmd_list[2]
        self.status_cmd = cmd_list[3]

    @error.context_aware
    def _get_service_cmds(self):
        """
        Figure out the commands used to control the NFS service.

        :return: [start, stop, restart, status] command strings, built with
                 systemctl when available, else the classic "service" tool.
        """
        error.context("Finding out appropriate commands to handle NFS service")
        service = os_dep.command("service")
        try:
            systemctl = os_dep.command("systemctl")
        except ValueError:
            systemctl = None

        if systemctl is not None:
            # Distros name the unit either "nfs" or "nfs-server"; pick
            # whichever init script / unit file actually exists.
            init_script = "/etc/init.d/nfs"
            service_file = "/lib/systemd/system/nfs-server.service"
            if os.path.isfile(init_script):
                service_name = "nfs"
            elif os.path.isfile(service_file):
                service_name = "nfs-server"
            else:
                raise error.TestError("Files %s and %s absent, don't know "
                                      "how to set up NFS for this host" %
                                      (init_script, service_file))
            start_cmd = "%s start %s.service" % (systemctl, service_name)
            stop_cmd = "%s stop %s.service" % (systemctl, service_name)
            restart_cmd = "%s restart %s.service" % (systemctl, service_name)
            status_cmd = "%s status %s.service" % (systemctl, service_name)
        else:
            start_cmd = "%s nfs start" % service
            stop_cmd = "%s nfs stop" % service
            restart_cmd = "%s nfs restart" % service
            status_cmd = "%s nfs status" % service

        return [start_cmd, stop_cmd, restart_cmd, status_cmd]

    @error.context_aware
    def setup(self, force_start=False):
        """
        Setup test NFS share.

        @param force_start: Whether to make NFS service start anyway.
        """
        error.context("Setting up test NFS share")
        for d in [self.nfs_dir, self.mnt_dir]:
            try:
                os.makedirs(d)
            except OSError:
                # Directory may already exist from a previous run.
                pass

        if force_start:
            self.start_service()
        else:
            if not self.is_service_active():
                self.start_service()

        utils.run("exportfs localhost:%s -o rw,no_root_squash" % self.nfs_dir)
        # Soft mount with short timeout/retry so the client reacts quickly
        # when the NFS server is cut off during the test.
        utils.run("mount localhost:%s %s -o rw,soft,timeo=1,retrans=1,vers=3" %
                  (self.nfs_dir, self.mnt_dir))

    @error.context_aware
    def cleanup(self, force_stop=False):
        # Undo setup(): unmount and unexport the share; optionally stop
        # the NFS service too.
        error.context("Cleaning up test NFS share")
        utils.run("umount %s" % self.mnt_dir)
        utils.run("exportfs -u localhost:%s" % self.nfs_dir)
        if force_stop:
            self.stop_service()

    def start_service(self):
        """
        Starts the NFS server.
        """
        utils.run(self.start_cmd)

    def stop_service(self):
        """
        Stops the NFS server.
        """
        utils.run(self.stop_cmd)

    def restart_service(self):
        """
        Restarts the NFS server.
        """
        utils.run(self.restart_cmd)

    def is_service_active(self):
        """
        Verifies whether the NFS server is running or not, by matching
        self.chk_re against the status command's output.
        """
        status = utils.system_output(self.status_cmd, ignore_status=True)
        if re.findall(self.chk_re, status):
            return True
        else:
            return False
@error.context_aware
def run_nfs_corrupt(test, params, env):
    """
    Test if VM paused when image NFS shutdown, the drive option 'werror' should
    be stop, the drive option 'cache' should be none.

    1) Setup NFS service on host
    2) Boot up a VM using another disk on NFS server and write the disk by dd
    3) Check if VM status is 'running'
    4) Reject NFS connection on host
    5) Check if VM status is 'paused'
    6) Accept NFS connection on host and continue VM by monitor command
    7) Check if VM status is 'running'

    @param test: kvm test object.
    @param params: Dictionary with the test parameters.
    @param env: Dictionary with test environment.
    """
    def get_nfs_devname(params, session):
        """
        Get the possbile name of nfs storage dev name in guest.

        @param params: Test params dictionary.
        @param session: An SSH session object.
        """
        image1_type = params.object_params("image1").get("drive_format")
        stg_type = params.object_params("stg").get("drive_format")
        cmd = ""
        # Seems we can get correct 'stg' devname even if the 'stg' image
        # has a different type from main image (we call it 'image1' in
        # config file) with these 'if' sentences.
        if image1_type == stg_type:
            cmd = "ls /dev/[hsv]d[a-z]"
        elif stg_type == "virtio":
            cmd = "ls /dev/vd[a-z]"
        else:
            cmd = "ls /dev/[sh]d[a-z]"
        # The 'stg' disk is attached last, so it is the highest letter.
        cmd += " | tail -n 1"
        return session.cmd_output(cmd)

    def check_vm_status(vm, status):
        """
        Check if VM has the given status or not.

        @param vm: VM object.
        @param status: String with desired status.
        @return: True if VM status matches our desired status.
        @return: False if VM status does not match our desired status.
        """
        try:
            vm.verify_status(status)
        except:
            # Any verification failure simply means "not in that status".
            return False
        else:
            return True

    config = NFSCorruptConfig(test, params)
    config.setup()
    # Back the 'stg' image with a file on the NFS mount, so cutting the
    # NFS connection makes the guest's disk I/O fail.
    image_name = os.path.join(config.mnt_dir, 'nfs_corrupt')
    params["image_name_stg"] = image_name
    params["force_create_image_stg"] = "yes"
    params["create_image_stg"] = "yes"
    stg_params = params.object_params("stg")
    env_process.preprocess_image(test, stg_params, image_name)

    vm = env.get_vm(params["main_vm"])
    vm.create(params=params)
    session = vm.wait_for_login(timeout=int(params.get("login_timeout", 360)))

    nfs_devname = get_nfs_devname(params, session)

    # Write disk on NFS server
    write_disk_cmd = "dd if=/dev/urandom of=%s" % nfs_devname
    logging.info("Write disk on NFS server, cmd: %s" % write_disk_cmd)
    session.sendline(write_disk_cmd)
    try:
        # Read some command output, it will timeout
        # (dd keeps running, so timing out here is the expected outcome).
        session.read_up_to_prompt(timeout=30)
    except:
        pass

    try:
        error.context("Make sure guest is running before test")
        vm.resume()
        vm.verify_status("running")

        try:
            # Block new connections to the NFS port (2049) from localhost.
            cmd = "iptables"
            cmd += " -t filter"
            cmd += " -A INPUT"
            cmd += " -s localhost"
            cmd += " -m state"
            cmd += " --state NEW"
            cmd += " -p tcp"
            cmd += " --dport 2049"
            cmd += " -j REJECT"

            error.context("Reject NFS connection on host")
            utils.system(cmd)

            error.context("Check if VM status is 'paused'")
            if not utils_misc.wait_for(
                        lambda: check_vm_status(vm, "paused"),
                        int(params.get('wait_paused_timeout', 120))):
                raise error.TestError("Guest is not paused after stop NFS")
        finally:
            # Always remove the REJECT rule again (-D mirrors -A above),
            # even if the pause check failed.
            error.context("Accept NFS connection on host")
            cmd = "iptables"
            cmd += " -t filter"
            cmd += " -D INPUT"
            cmd += " -s localhost"
            cmd += " -m state"
            cmd += " --state NEW"
            cmd += " -p tcp"
            cmd += " --dport 2049"
            cmd += " -j REJECT"

            utils.system(cmd)

        error.context("Continue guest")
        vm.resume()

        error.context("Check if VM status is 'running'")
        if not utils_misc.wait_for(lambda: check_vm_status(vm, "running"), 20):
            raise error.TestError("Guest does not restore to 'running' status")
    finally:
        session.close()
        vm.destroy(gracefully=True)
        config.cleanup()
| gpl-2.0 |
Jens-G/thrift | contrib/fb303/py/fb303/FacebookBase.py | 43 | 2038 | #!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import time
import FacebookService
import thrift.reflection.limited
from ttypes import fb_status
class FacebookBase(FacebookService.Iface):
    """Minimal in-memory implementation of the fb303 base service.

    Tracks liveness (aliveSince) and a simple map of named counters; most
    other fb303 operations are no-ops intended to be overridden by
    subclasses.
    """

    def __init__(self, name):
        self.name = name
        # Unix timestamp of service start, reported by aliveSince().
        self.alive = int(time.time())
        self.counters = {}

    def getName(self):
        return self.name

    def getVersion(self):
        return ''

    def getStatus(self):
        return fb_status.ALIVE

    def getCounters(self):
        return self.counters

    def resetCounter(self, key):
        self.counters[key] = 0

    def getCounter(self, key):
        # Missing counters read as 0 rather than raising KeyError.
        # (Replaced deprecated dict.has_key() with the "in" operator.)
        if key in self.counters:
            return self.counters[key]
        return 0

    def incrementCounter(self, key):
        self.counters[key] = self.getCounter(key) + 1

    def setOption(self, key, value):
        pass

    def getOption(self, key):
        return ""

    def getOptions(self):
        # NOTE: this method was accidentally defined twice; the duplicate
        # definition has been removed (behavior is unchanged).
        return {}

    def aliveSince(self):
        return self.alive

    def getCpuProfile(self, duration):
        return ""

    def getLimitedReflection(self):
        return thrift.reflection.limited.Service()

    def reinitialize(self):
        pass

    def shutdown(self):
        pass
| apache-2.0 |
wfxiang08/django178 | tests/admin_docs/models.py | 32 | 1228 | """
Models for testing various aspects of the django.contrib.admindocs app
"""
from django.db import models
class Company(models.Model):
    """Employer model; referenced by Person.company."""
    name = models.CharField(max_length=200)
class Group(models.Model):
    """Membership target for the Person.groups many-to-many relation."""
    name = models.CharField(max_length=200)
class Family(models.Model):
    """Linked from Person.family; the reverse relation is disabled ('+')."""
    last_name = models.CharField(max_length=200)
class Person(models.Model):
    """Person with a mix of relations and methods for admindocs to inspect."""
    first_name = models.CharField(max_length=200, help_text="The person's first name")
    last_name = models.CharField(max_length=200, help_text="The person's last name")
    company = models.ForeignKey(Company, help_text="place of work")
    family = models.ForeignKey(Family, related_name='+', null=True)
    groups = models.ManyToManyField(Group, help_text="has membership")

    def _get_full_name(self):
        # Internal helper; exposed publicly via get_full_name().
        return "%s %s" % (self.first_name, self.last_name)

    # NOTE(review): the stub methods below appear to exist only so the
    # admindocs tests have verb-prefixed methods (add_/delete_/save_/set_)
    # to introspect -- confirm against the test suite before removing.
    def add_image(self):
        pass

    def delete_image(self):
        pass

    def save_changes(self):
        pass

    def set_status(self):
        pass

    def get_full_name(self):
        """
        Get the full name of the person
        """
        return self._get_full_name()

    def get_status_count(self):
        return 0

    def get_groups_list(self):
        return []
| bsd-3-clause |
nitin-cherian/LifeLongLearning | Python/Experiments/JINJA/RealPython/jinja_env/lib/python3.5/site-packages/flask/logging.py | 122 | 2751 | # -*- coding: utf-8 -*-
"""
flask.logging
~~~~~~~~~~~~~
Implements the logging support for Flask.
:copyright: (c) 2015 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
import sys
from werkzeug.local import LocalProxy
from logging import getLogger, StreamHandler, Formatter, getLoggerClass, \
DEBUG, ERROR
from .globals import _request_ctx_stack
# Compact log line format used outside of debug mode.
PROD_LOG_FORMAT = '[%(asctime)s] %(levelname)s in %(module)s: %(message)s'
# Verbose, banner-delimited format used while the app is in debug mode.
DEBUG_LOG_FORMAT = (
    '-' * 80 + '\n' +
    '%(levelname)s in %(module)s [%(pathname)s:%(lineno)d]:\n' +
    '%(message)s\n' +
    '-' * 80
)
@LocalProxy
def _proxy_stream():
    """Resolve the most appropriate error stream for the application.

    While a WSGI request is in flight this resolves to the request's
    ``wsgi.errors`` stream; otherwise it falls back to ``sys.stderr``.
    """
    ctx = _request_ctx_stack.top
    if ctx is None:
        return sys.stderr
    return ctx.request.environ['wsgi.errors']
def _should_log_for(app, mode):
policy = app.config['LOGGER_HANDLER_POLICY']
if policy == mode or policy == 'always':
return True
return False
def create_logger(app):
    """Creates a logger for the given application.  This logger works
    similar to a regular Python logger but changes the effective logging
    level based on the application's debug flag.  Furthermore this
    function also removes all attached handlers in case there was a
    logger with the log name before.
    """
    Logger = getLoggerClass()

    class DebugLogger(Logger):
        # Report DEBUG as the effective level while app.debug is set and
        # no explicit level was configured (level == 0 means NOTSET).
        def getEffectiveLevel(self):
            if self.level == 0 and app.debug:
                return DEBUG
            return Logger.getEffectiveLevel(self)

    class DebugHandler(StreamHandler):
        # Emits only while the app is in debug mode (and policy allows it).
        def emit(self, record):
            if app.debug and _should_log_for(app, 'debug'):
                StreamHandler.emit(self, record)

    class ProductionHandler(StreamHandler):
        # Emits only outside debug mode (and policy allows it); writes to
        # the per-request error stream proxy.
        def emit(self, record):
            if not app.debug and _should_log_for(app, 'production'):
                StreamHandler.emit(self, record)

    debug_handler = DebugHandler()
    debug_handler.setLevel(DEBUG)
    debug_handler.setFormatter(Formatter(DEBUG_LOG_FORMAT))

    prod_handler = ProductionHandler(_proxy_stream)
    prod_handler.setLevel(ERROR)
    prod_handler.setFormatter(Formatter(PROD_LOG_FORMAT))

    logger = getLogger(app.logger_name)
    # just in case that was not a new logger, get rid of all the handlers
    # already attached to it.
    del logger.handlers[:]
    # Swap in the debug-aware logger class on the existing instance.
    logger.__class__ = DebugLogger
    logger.addHandler(debug_handler)
    logger.addHandler(prod_handler)

    # Disable propagation by default
    logger.propagate = False
    return logger
| mit |
BrianCoveney/teammates | BackupFiles/upload_data.py | 24 | 3131 | #This script should be placed in the GAE Python SDK directory.
#The path of the SDK will look like C:\Program Files (x86)\Google\google_appengine
#The script is to be used in conjunction with the generated_bulkloader.yaml file
#The script will upload all types of entities from the backup files to the GAE datastore.
#The only entity type that is not handled here is the StudentProfile entity type.
#As many backups would have been performed, the timestamp of which backup files to be used for uploading must be specified.
#The format of the timestamp is YYYY-MM-DD_HH-MM-SS.

import os
import datetime

#Obtain the timestamp from the user and form the upload file path
date_time = raw_input("Enter the date and time of the backup files to be uploaded. Format is YYYY-MM-DD_HH-MM-SS: ")
desktopPath = os.path.expanduser("~/Desktop/TM_Backup/")
backupFileDirectory = os.path.join(desktopPath, date_time)

# (entity kind, backup file) pairs; the same appcfg.py invocation is issued
# for each one, replacing the previous 11 copy-pasted command lines.
ENTITY_BACKUPS = [
    ("Account", "accounts.csv"),
    ("Comment", "comment.csv"),
    ("Course", "course.csv"),
    ("Evaluation", "evaluation.csv"),
    ("FeedbackQuestion", "feedbackQuestion.csv"),
    ("FeedbackResponse", "feedbackResponse.csv"),
    ("FeedbackResponseComment", "feedbackResponseComment.csv"),
    ("FeedbackSession", "feedbackSession.csv"),
    ("Instructor", "instructor.csv"),
    ("Student", "student.csv"),
    ("Submission", "submission.csv"),
]

#Runs the upload command for every entity type to the GAE datastore
for kind, backup_file in ENTITY_BACKUPS:
    os.system("appcfg.py upload_data --url https://teammatesv4.appspot.com/remote_api "
              "--config_file generated_bulkloader.yaml --kind %s --filename %s/%s"
              % (kind, backupFileDirectory, backup_file))
justintweaver/mtchi-cert-game | makahiki/apps/widgets/bonus_points/migrations/0004_auto__add_field_bonuspoint_claim_date__add_field_bonuspoint_create_dat.py | 7 | 4911 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South migration: add BonusPoint.claim_date and BonusPoint.create_date."""

    def forwards(self, orm):
        # Adding field 'BonusPoint.claim_date'
        db.add_column('bonus_points_bonuspoint', 'claim_date', self.gf('django.db.models.fields.DateField')(null=True, blank=True), keep_default=False)

        # Adding field 'BonusPoint.create_date'
        # Default is the (frozen) date this migration was generated.
        db.add_column('bonus_points_bonuspoint', 'create_date', self.gf('django.db.models.fields.DateField')(default=datetime.date(2012, 8, 9)), keep_default=False)

    def backwards(self, orm):
        # Deleting field 'BonusPoint.claim_date'
        db.delete_column('bonus_points_bonuspoint', 'claim_date')

        # Deleting field 'BonusPoint.create_date'
        db.delete_column('bonus_points_bonuspoint', 'create_date')

    # Frozen ORM state auto-generated by South; do not edit by hand.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 8, 9, 12, 8, 23, 650541)'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 8, 9, 12, 8, 23, 650386)'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'bonus_points.bonuspoint': {
            'Meta': {'object_name': 'BonusPoint'},
            'claim_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}),
            'create_date': ('django.db.models.fields.DateField', [], {'default': 'datetime.date(2012, 8, 9)'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'point_value': ('django.db.models.fields.IntegerField', [], {'default': '5'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        }
    }

    complete_apps = ['bonus_points']
| gpl-3.0 |
hschaa/openwrt | scripts/flashing/jungo-image.py | 758 | 7069 | #!/usr/bin/env python
#
# Copyright 2008, 2009 (C) Jose Vasconcellos <jvasco@verizon.net>
#
# A script that can communicate with jungo-based routers
# (such as MI424-WR, USR8200 and WRV54G) to backup the installed
# firmware and replace the boot loader.
#
# Tested with Python 2.5 on Linux and Windows
#
"""Usage: %s [options] <IP_address> [image.bin | url]
Valid options:
\t-h | --help: usage statement
\t-d | --dump: create a flash dump
\t-f | --file: use <filename> to store dump contents
\t-u | --user: provide username (default admin)
\t-p | --pass: provide password (default password1)
\t --port: set port for http (default 8080)
\t-q | --quiet: don't display unnecessary information
\t-r | --reboot: reboot target on successful transfer
\t-V | --version: display version information
If no image (or url) is given, a flash dump is created.
A built-in http server is used when an image file is provided.
"""
import os
import sys
import getopt
import getpass
import telnetlib
import string
import binascii
import socket
import thread
import SocketServer
import SimpleHTTPServer
# Defaults; most are overridden by command-line options parsed below.
reboot = 0              # reboot target after a successful transfer (-r)
HOST = "192.168.1.1"    # router address; first positional argument
PORT = 8080             # port for the built-in HTTP image server (--port)
user = "admin"
#password = getpass.getpass()
password = "password1"
proto = "http"
url = ""                # remote image URL, if given instead of a local file
imagefile = ""          # local image file served over HTTP, if given
dumpfile = ""           # output path for a flash dump (-f)
verbose = 1
do_dump = 0
dumplen = 0x10000       # bytes read per flash_dump command
flashsize = 4*1024*1024  # overwritten by get_flash_size() below
#device="br0"
device = "ixp0"          # interface whose MAC names the default dump file
####################
def start_server(server):
    """Serve the current directory over HTTP on (server, PORT) in a
    background thread, so the router can fetch the image from this host."""
    httpd = SocketServer.TCPServer((server, PORT), SimpleHTTPServer.SimpleHTTPRequestHandler)
    thread.start_new_thread(httpd.serve_forever, ())
####################
def get_flash_size():
    """Return the target's flash size in bytes, probed over the global
    telnet session ``tn``.

    Tries /proc/mtd first and falls back to the flash_layout command.
    Also refuses to proceed with an image write on IXP42x stepping A0
    hardware (no Linux support); exits on failure.
    """
    # make sure we don't have an A0 stepping
    tn.write("cat /proc/cpuinfo\n")
    buf = tn.read_until("Returned 0", 3)
    if not buf:
        print "Unable to obtain CPU information; make sure to not use A0 stepping!"
    elif buf.find('rev 0') > 0:
        print "Warning: IXP42x stepping A0 detected!"
        if imagefile or url:
            print "Error: No linux support for A0 stepping!"
            sys.exit(2)
    # now get flash size
    tn.write("cat /proc/mtd\n")
    buf = tn.read_until("Returned 0", 3)
    if buf:
        i = buf.find('mtd0:')
        if i > 0:
            # Size is the first hex field following "mtd0:".
            return int(buf[i+6:].split()[0],16)
        # use different command
        tn.write("flash_layout\n")
        buf = tn.read_until("Returned 0", 3)
        i = buf.rfind('Range ')
        if i > 0:
            return int(buf[i+17:].split()[0],16)
        print "Can't determine flash size!"
    else:
        print "Unable to obtain flash size!"
    sys.exit(2)
def image_dump(tn, dumpfile):
    """Dump the entire flash (flashsize bytes) over telnet into dumpfile.

    If dumpfile is empty, a name is derived from the platform string and
    the MAC address of the global ``device`` interface
    ("<platform>-<mac>.bin"). Reads the flash in dumplen-sized chunks via
    the target's flash_dump command and decodes its hex output.
    """
    if not dumpfile:
        # Derive the platform name from the "ver" command output.
        tn.write("ver\n");
        buf = tn.read_until("Returned 0",2)
        i = buf.find("Platform:")
        if i < 0:
            platform="jungo"
        else:
            line=buf[i+9:]
            i=line.find('\n')
            platform=line[:i].split()[-1]
        # Use the device MAC (colons stripped) to make the name unique.
        tn.write("rg_conf_print /dev/%s/mac\n" % device);
        buf = tn.read_until("Returned 0",3)
        i = buf.find("mac(")
        if i > 0:
            i += 4
        else:
            print "No MAC address found! (use -f option)"
            sys.exit(1)
        dumpfile = "%s-%s.bin" % (platform, buf[i:i+17].replace(':',''))
    else:
        tn.write("\n")
    print "Dumping flash contents (%dMB) to %s" % (flashsize/1048576, dumpfile)
    f = open(dumpfile, "wb")
    t=flashsize/dumplen
    for addr in range(t):
        if verbose:
            # Simple in-place percentage progress indicator.
            sys.stdout.write('\r%d%%'%(100*addr/t))
            sys.stdout.flush()
        tn.write("flash_dump -r 0x%x -l %d -4\n" % (addr*dumplen, dumplen))
        tn.read_until("\n")
        count = addr*dumplen
        while 1:
            buf = tn.read_until("\n")
            if buf.strip() == "Returned 0":
                break
            s = buf.split()
            # Each data line looks like "ADDR: <hex words>"; verify the
            # address is sequential before appending the decoded bytes.
            if s and s[0][-1] == ':':
                a=int(s[0][:-1],16)
                if a != count:
                    print "Format error: %x != %x"%(a,count)
                    sys.exit(2)
                count += 16
                f.write(binascii.a2b_hex(string.join(s[1:],'')))
        tn.read_until(">",1)
    f.close()
    if verbose:
        print ""
def telnet_option(sock, cmd, option):
    """Telnet option-negotiation callback.

    Agrees to every option the peer requests or offers (DO -> WILL,
    WILL -> DO) and acknowledges refusals (DONT -> WONT, WONT -> DONT).
    Previously DONT/WONT left the reply command ``c`` unbound and raised
    NameError; they are now answered, and anything else is ignored.
    """
    #print "Option: %d %d" % (ord(cmd), ord(option))
    if cmd == telnetlib.DO:
        c = telnetlib.WILL
    elif cmd == telnetlib.WILL:
        c = telnetlib.DO
    elif cmd == telnetlib.DONT:
        c = telnetlib.WONT
    elif cmd == telnetlib.WONT:
        c = telnetlib.DONT
    else:
        # Subnegotiation or unknown command: nothing to answer.
        return
    sock.sendall(telnetlib.IAC + c + option)
def telnet_timeout():
    """Abort the program when the telnet session stops responding."""
    print("Fatal error: telnet timeout!")
    sys.exit(1)
def usage():
    """Print the module usage text with the program name substituted in."""
    print(__doc__ % os.path.basename(sys.argv[0]))
####################
try:
opts, args = getopt.getopt(sys.argv[1:], "hdf:qp:P:rvV", \
["help", "dump", "file=", "user=", "pass=", "port=",
"quiet=", "reboot", "verbose", "version"])
except getopt.GetoptError:
# print help information and exit:
usage()
sys.exit(1)
for o, a in opts:
if o in ("-h", "--help"):
usage()
sys.exit(1)
elif o in ("-V", "--version"):
print "%s: 0.11" % sys.argv[0]
sys.exit(1)
elif o in ("-d", "--no-dump"):
do_dump = 1
elif o in ("-f", "--file"):
dumpfile = a
elif o in ("-u", "--user"):
user = a
elif o in ("-p", "--pass"):
password = a
elif o == "--port":
PORT = int(a)
elif o in ("-q", "--quiet"):
verbose = 0
elif o in ("-r", "--reboot"):
reboot = 1
elif o in ("-v", "--verbose"):
verbose = 1
# make sure we have enough arguments
if len(args) > 0:
HOST = args[0]
if len(args) == 2:
if args[1].split(':')[0] in ("tftp", "http", "ftp"):
url = args[1]
else:
imagefile = args[1]
else:
do_dump = 1;
####################
# create a telnet session to the router
try:
tn = telnetlib.Telnet(HOST)
except socket.error, msg:
print "Unable to establish telnet session to %s: %s" % (HOST, msg)
sys.exit(1)
tn.set_option_negotiation_callback(telnet_option)
buf = tn.read_until("Username: ", 3)
if not buf:
telnet_timeout()
tn.write(user+"\n")
if password:
buf = tn.read_until("Password: ", 3)
if not buf:
telnet_timeout()
tn.write(password+"\n")
# wait for prompt
buf = tn.read_until("> ", 3)
if not buf:
telnet_timeout()
flashsize = get_flash_size()
if do_dump:
image_dump(tn, dumpfile)
if imagefile or url:
splitpath = os.path.split(imagefile)
# create load command
if url:
cmd = "load -u %s -r 0\n" % (url)
else:
server = tn.get_socket().getsockname()[0]
cmd = "load -u http://%s:%d/%s -r 0\n" % (server, PORT, splitpath[1])
if not os.access(imagefile, os.R_OK):
print "File access error: %s" % (imagefile)
sys.exit(3)
# make sure we're in the directory where the image is located
if splitpath[0]:
os.chdir(splitpath[0])
start_server(server)
if verbose:
print "Unlocking flash..."
tn.write("unlock 0 0x%x\n" % flashsize)
buf = tn.read_until("Returned 0",5)
if verbose:
print "Writing new image..."
print cmd,
tn.write(cmd)
buf = tn.read_until("Returned 0",10)
# wait till the transfer completed
buf = tn.read_until("Download completed successfully",20)
if buf:
print "Flash update complete!"
if reboot:
tn.write("reboot\n")
print "Rebooting..."
tn.write("exit\n")
tn.close()
| gpl-2.0 |
394954369/horizon | openstack_dashboard/dashboards/project/data_processing/job_binaries/tables.py | 4 | 2665 | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from django.utils.translation import ugettext_lazy as _
from horizon import tables
from openstack_dashboard.api import sahara as saharaclient
from saharaclient.api import base as api_base
LOG = logging.getLogger(__name__)
class CreateJobBinary(tables.LinkAction):
    # Table-level action: opens the "create job binary" form in a modal.
    name = "create job binary"
    verbose_name = _("Create Job Binary")
    url = "horizon:project:data_processing.job_binaries:create-job-binary"
    classes = ("ajax-modal",)
    icon = "plus"
class DeleteJobBinary(tables.BatchAction):
    # Batch action that deletes the selected job binaries.
    name = "delete"
    action_present = _("Delete")
    action_past = _("Deleted")
    data_type_singular = _("Job binary")
    data_type_plural = _("Job binaries")
    classes = ('btn-danger', 'btn-terminate')

    def action(self, request, obj_id):
        """Delete one job binary, removing its internal-db payload first.

        Job binary URLs have the form "<backend>://<id>"; for the
        "internal-db" backend the referenced internal object is deleted
        before the binary record itself.
        """
        jb = saharaclient.job_binary_get(request, obj_id)
        (jb_type, jb_internal_id) = jb.url.split("://")
        if jb_type == "internal-db":
            try:
                saharaclient.job_binary_internal_delete(request,
                                                        jb_internal_id)
            except api_base.APIException:
                # nothing to do for job-binary-internal if
                # it does not exist.
                pass
        saharaclient.job_binary_delete(request, obj_id)
class DownloadJobBinary(tables.LinkAction):
    # Row-level action: links to the job binary download view.
    name = "download job binary"
    verbose_name = _("Download Job Binary")
    url = "horizon:project:data_processing.job_binaries:download"
    classes = ("btn-edit",)
class JobBinariesTable(tables.DataTable):
    # Listing of job binaries: name (linked to details), URL and description.
    name = tables.Column("name",
                         verbose_name=_("Name"),
                         link=("horizon:project:data_processing.job_binaries:details"))
    type = tables.Column("url",
                         verbose_name=_("Url"))
    description = tables.Column("description",
                                verbose_name=_("Description"))

    class Meta:
        name = "job_binaries"
        verbose_name = _("Job Binaries")
        table_actions = (CreateJobBinary,
                         DeleteJobBinary)
        row_actions = (DeleteJobBinary, DownloadJobBinary)
| apache-2.0 |
sommars/DynamicPrevariety | python_interface.py | 1 | 2548 | """
#Cyclic 4
R.<x1,x2,x3,x4> = QQ[]
polys = [x1+x2+x3+x4,x1*x2+x2*x3+x3*x4+x4*x1,x1*x2*x3+x2*x3*x4+x3*x4*x1+x4*x1*x2]
TropicalPrevariety(polys)
#Should be equivalent (up to homogenization) to:
R.ideal(polys).groebner_fan().tropical_intersection().rays()
#Reduced cyclic 8
R.<y_1,y_2,y_3,y_4,y_5,y_6,y_7> = QQ[]
polys = [1 + y_1 + y_2 + y_3 + y_4 + y_5 + y_6 + y_7,y_1 + y_1*y_2 + y_2*y_3
+ y_3*y_4 + y_4*y_5 + y_5*y_6 + y_6*y_7 + y_7,y_1*y_2 + y_1*y_2*y_3
+ y_2*y_3*y_4 + y_3*y_4*y_5 + y_4*y_5*y_6 + y_5*y_6*y_7
+ y_6*y_7 + y_7*y_1,y_1*y_2*y_3 + y_1*y_2*y_3*y_4 + y_2*y_3*y_4*y_5
+ y_3*y_4*y_5*y_6 + y_4*y_5*y_6*y_7 + y_5*y_6*y_7 + y_6*y_7*y_1
+ y_7*y_1*y_2,y_1*y_2*y_3*y_4 + y_1*y_2*y_3*y_4*y_5 + y_2*y_3*y_4*y_5*y_6
+ y_3*y_4*y_5*y_6*y_7 + y_4*y_5*y_6*y_7 + y_5*y_6*y_7*y_1 + y_6*y_7*y_1*y_2
+ y_7*y_1*y_2*y_3,y_1*y_2*y_3*y_4*y_5 + y_1*y_2*y_3*y_4*y_5*y_6
+ y_2*y_3*y_4*y_5*y_6*y_7 + y_3*y_4*y_5*y_6*y_7 + y_4*y_5*y_6*y_7*y_1
+ y_5*y_6*y_7*y_1*y_2 + y_6*y_7*y_1*y_2*y_3
+ y_7*y_1*y_2*y_3*y_4,y_1*y_2*y_3*y_4*y_5*y_6 + y_1*y_2*y_3*y_4*y_5*y_6*y_7
+ y_2*y_3*y_4*y_5*y_6*y_7+ y_3*y_4*y_5*y_6*y_7*y_1 + y_4*y_5*y_6*y_7*y_1*y_2
+ y_5*y_6*y_7*y_1*y_2*y_3+ y_6*y_7*y_1*y_2*y_3*y_4 + y_7*y_1*y_2*y_3*y_4*y_5]
TropicalPrevariety(polys)
"""
from subprocess import Popen, PIPE, call
#The below should work for generic machines
import os, inspect
pathToPrevariety = os.path.dirname(inspect.stack()[0][1])
def ParseOutput(FileName):
with open(FileName,"r") as OutputFile:
s = OutputFile.read()
IndexToRayMap = {}
ConesList = []
Rays = []
for l in s.splitlines():
if "vector" in l: # We are finished
break
if ":" in l: # it is a ray
(Index,Ray) = l.split(":")
Ray = eval(Ray.replace("{","[").replace("}","]"))
Rays.append(Ray)
IndexToRayMap[eval(Index.replace(":",""))] = Ray
continue
if "{" in l: # it is a cone
ConesList.append(Cone([IndexToRayMap[i] for i in eval(l.replace("{","[").replace("}","]"))]))
continue
Rays.sort()
return (ConesList, Rays)
def TropicalPrevariety(polys, ProcessCount = 1):
    """Run the prevariety binary and return the parsed (cones, rays).

    ``polys`` are Sage polynomials; their exponent vectors are serialized
    into ``support`` with a two-digit process count prefix.
    """
    # Serialize exponent vectors, e.g. "02[[[1,0][0,1]]...]" (spaces removed).
    support = str([[[Integer(j) for j in i] for i in poly.exponents()] for poly in polys]).replace("], ", "]").replace(" ","")
    if ProcessCount < 10:
        support = '0' + str(ProcessCount) + support
    else:
        support = str(ProcessCount) + support
    # NOTE(review): `support` is built but never passed to the binary; the
    # call below always runs the hard-coded cyclic8 example -- confirm intent.
    call([pathToPrevariety + "/prevariety", pathToPrevariety + "/examples/cyclic/cyclic8"])
    #if len(ans) > 0 and ans[0] != '[':
    #    raise Exception("Internal error in tropical_prevariety")
    return ParseOutput(pathToPrevariety + "/output.txt")
| gpl-3.0 |
zarnold/transitfeed | extensions/googletransit/pybcp47/testpybcp47.py | 6 | 4427 | #!/usr/bin/python2.5
# Copyright (C) 2011 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Unit tests for the bcp47languageparser module.
import codecs
import os
import unittest
from bcp47languageparser import Bcp47LanguageParser
class PyBcp47TestCase(unittest.TestCase):
    """Sanity checks for Bcp47LanguageParser and its bundled registry data."""

    # Shared parser instance; loads the registry file once for all tests.
    bcp47parser = Bcp47LanguageParser()

    def testRegistryFileRecordsBeingWellformed(self):
        # Test whether the parsed entries from the registry file in this package are
        # valid. The registry file in this package is originally downloaded from
        # http://www.iana.org/assignments/language-subtag-registry. Formatting
        # rules of this file can be found at http://tools.ietf.org/html/rfc5646
        for tag in self.bcp47parser.grandfathereds.keys():
            self.assertTrue(self.bcp47parser.IsWellformed(tag),
                            "Grandfathered tag '%s' in language-subtag-registry.txt "
                            "seems to be invalid!" % (tag))
        for tag in self.bcp47parser.redundants.keys():
            self.assertTrue(self.bcp47parser.IsWellformed(tag),
                            "Redundant tag '%s' in language-subtag-registry.txt "
                            "seems to be invalid!" % (tag))
        for tag in self.bcp47parser.languages.keys():
            self.assertTrue(self.bcp47parser.IsWellformedSubtag(tag, "lang"),
                            "Language subtag '%s' in language-subtag-registry.txt "
                            "seems to be invalid!" % (tag))
        for tag in self.bcp47parser.extlangs.keys():
            # extlangs contains each for each extlang just the tag and the tag
            # combined with its prefix. E.g. 'aao' and 'ar-aao'.
            extlang_parts = tag.split("-")
            extlang = extlang_parts[len(extlang_parts) - 1]
            self.assertTrue(self.bcp47parser.IsWellformedSubtag(extlang, "extlang"),
                            "Extlang subtag '%s' in language-subtag-registry.txt "
                            "seems to be invalid!" % (tag))
        for tag in self.bcp47parser.scripts.keys():
            self.assertTrue(self.bcp47parser.IsWellformedSubtag(tag, "script"),
                            "Script subtag '%s' in language-subtag-registry.txt "
                            "seems to be invalid!" % (tag))
        for tag in self.bcp47parser.regions.keys():
            self.assertTrue(self.bcp47parser.IsWellformedSubtag(tag, "region"),
                            "Region subtag '%s' in language-subtag-registry.txt "
                            "seems to be invalid!" % (tag))
        for tag in self.bcp47parser.variants.keys():
            self.assertTrue(self.bcp47parser.IsWellformedSubtag(tag, "variant"),
                            "Variant subtag '%s' in language-subtag-registry.txt "
                            "seems to be invalid!" % (tag))

    def testValidationWithSamples(self):
        # Test whether samples are all well-formed but not valid.
        self._CheckTagsInFile("well-formed-not-valid-tags.txt", True, False)
        # Test whether samples are all not well-formed.
        self._CheckTagsInFile("not-well-formed-tags.txt", False, False)
        # Test whether samples are all valid.
        self._CheckTagsInFile("valid-tags.txt", True, True)

    def _CheckTagsInFile(self, filename, should_be_wellformed, should_be_valid):
        # Each line of the sample file is "<tag>" optionally followed by a
        # '#'-comment; blank/comment-only lines are skipped.
        full_filename = os.path.join(os.path.dirname(__file__), "testdata",
                                     filename)
        fileObj = codecs.open(full_filename, "r", "utf-8" )
        for line in fileObj.xreadlines():
            line_parts = line.split("#")
            tag = line_parts[0].strip()
            if tag:
                lang_obj = self.bcp47parser.ParseLanguage(tag)
                self.assertEqual(
                    lang_obj.wellformed, should_be_wellformed,
                    "the language code '%s' (%s) should%s be well-formed" %
                    (tag, lang_obj, str((not should_be_wellformed and " not") or "")))
                self.assertEqual(
                    lang_obj.valid, should_be_valid,
                    "the language code '%s' (%s) should%s be valid" %
                    (tag, lang_obj, str((not should_be_valid and " not") or "")))
| apache-2.0 |
cartertech/odoo-hr-ng | hr_report_manpower/report/daily_manpower.py | 1 | 22435 | #-*- coding:utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 One Click Software (http://oneclick.solutions)
# and Copyright (C) 2013 Michael Telahun Makonnen <mmakonnen@gmail.com>.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from datetime import datetime, timedelta
from pytz import timezone, utc
from openerp.tools import DEFAULT_SERVER_DATE_FORMAT as OE_DATEFORMAT
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT as OE_DATETIMEFORMAT
from report import report_sxw
class Parser(report_sxw.rml_parse):
    """Report parser for the daily manpower report.

    For each department the template can query how many employees were
    present, absent, on rest day, on each leave category, or terminated on
    the report date; the ``get_tot_*`` callbacks return running grand totals
    accumulated while the template iterates over departments.
    """

    def __init__(self, cr, uid, name, context):
        super(Parser, self).__init__(cr, uid, name, context)
        # Callbacks exposed to the report template.
        self.localcontext.update({
            'get_no': self.get_no,
            'get_date': self.get_date,
            'get_present': self.get_present,
            'get_absent': self.get_absent,
            'get_restday': self.get_restday,
            'get_al': self.get_al,
            'get_sl': self.get_sl,
            'get_ml': self.get_ml,
            'get_ol': self.get_ol,
            'get_terminated': self.get_terminated,
            'get_tot_present': self.get_sum_present,
            'get_tot_absent': self.get_sum_absent,
            'get_tot_restday': self.get_sum_restday,
            'get_tot_al': self.get_sum_al,
            'get_tot_sl': self.get_sum_sl,
            'get_tot_ml': self.get_sum_ml,
            'get_tot_ol': self.get_sum_ol,
            'get_tot_terminated': self.get_sum_terminated,
        })
        # Leave type codes treated as "on leave" when classifying a day.
        self.LVCODES = ['LVBEREAVEMENT', 'LVWEDDING', 'LVMMEDICAL', 'LVPTO', 'LVCIVIC', 'LVSICK',
                        'LVSICK50', 'LVSICK00', 'LVMATERNITY', 'LVANNUAL', 'LVTRAIN', 'LVUTO']
        self.date = False   # report date, set in set_context()
        self.no = 0         # running row number
        # Running totals accumulated across all rendered departments.
        self._present = 0
        self._absent = 0
        self._restday = 0
        self._al = 0
        self._sl = 0
        self._ml = 0
        self._ol = 0
        self._terminated = 0
        self._hol_absent = 0

    def set_context(self, objects, data, ids, report_type=None):
        # Capture the report date chosen on the wizard form.
        if data.get('form', False) and data['form'].get('date', False):
            self.date = data['form']['date']
        return super(Parser, self).set_context(objects, data, ids, report_type=report_type)

    def get_date(self):
        """Return the report date formatted for the report header."""
        return datetime.strptime(self.date, OE_DATEFORMAT).strftime('%B %d, %Y')

    def get_no(self):
        """Return the next row number."""
        self.no += 1
        return self.no

    def _working_on_restday(self, employee_id, dt, utcdt, rest_days):
        """Return True if the employee has an attendance that counts as
        working on his/her rest day ``dt`` (``utcdt`` is the same instant
        converted to UTC)."""
        att_obj = self.pool.get('hr.attendance')
        sched_obj = self.pool.get('hr.schedule')
        detail_obj = self.pool.get('hr.schedule.detail')
        res = False
        sched_ids = sched_obj.search(self.cr, self.uid, [('date_start', '<=', self.date),
                                                         ('date_end', '>=', self.date),
                                                         ('employee_id', '=', employee_id),
                                                         ])
        sched = sched_obj.browse(self.cr, self.uid, sched_ids[0])
        # It could be part of yesterday's schedule (i.e. - schedule that crosses
        # midnight boundary. To make certain he/she is not working on rest day
        # check if the attendance records are *after* the employee is supposed to
        # punch out. Or if it is a rest-day and there is a schedule assume
        # employee is working on rest day.
        #
        found_att = False
        att_ids = att_obj.search(self.cr, self.uid,
                                 [('name', '>=', utcdt.strftime(OE_DATETIMEFORMAT)),
                                  ('name', '<', (utcdt + timedelta(hours= +24)).strftime(OE_DATETIMEFORMAT)),
                                  ('action', '=', 'sign_in'),
                                  ('employee_id', '=', employee_id),
                                  ])
        if len(att_ids) > 0:
            attendances = att_obj.browse(self.cr, self.uid, att_ids)
            # Details of *yesterday's* shift, ordered by start time.
            detail_ids = detail_obj.search(self.cr, self.uid, [('schedule_id', '=', sched.id),
                                                               ('day', '=', (dt + timedelta(days= -1)).strftime(OE_DATEFORMAT))],
                                           order='date_start')
            for detail in sched.detail_ids:
                if len(detail_ids) > 0 and detail.id == detail_ids[-1]:
                    # Sign-in after the end of yesterday's last shift segment.
                    for att in attendances:
                        if att.name > detail.date_end:
                            found_att = True
                if detail.day == dt.strftime(OE_DATEFORMAT):
                    # A scheduled segment on the rest day itself.
                    found_att = True
        res = found_att
        return res

    def get_present_employees(self, department_id):
        """Return ids of department employees with a sign-in on the report
        date, excluding terminated employees and employees on a rest day who
        are not actually working."""
        att_obj = self.pool.get('hr.attendance')
        sched_obj = self.pool.get('hr.schedule')
        user = self.pool.get('res.users').browse(self.cr, self.uid, self.uid)
        if user and user.tz:
            local_tz = timezone(user.tz)
        else:
            local_tz = timezone('Africa/Addis_Ababa')
        dt = datetime.strptime(self.date + ' 00:00:00', OE_DATETIMEFORMAT)
        utcdt = (local_tz.localize(dt, is_dst=False)).astimezone(utc)
        att_ids = att_obj.search(self.cr, self.uid,
                                 [('name', '>=', utcdt.strftime(OE_DATETIMEFORMAT)),
                                  ('name', '<', (utcdt + timedelta(hours= +24)).strftime(OE_DATETIMEFORMAT)),
                                  ('action', '=', 'sign_in'),
                                  '|', ('employee_id.department_id.id', '=', department_id),
                                  ('employee_id.saved_department_id.id', '=', department_id)
                                  ])
        unique_ids = []
        term_obj = self.pool.get('hr.employee.termination')
        data = att_obj.read(self.cr, self.uid, att_ids, ['employee_id'])
        for d in data:
            # if this employee's employment was terminated skip it
            term_ids = term_obj.search(self.cr, self.uid, [('name', '<=', self.date),
                                                           ('employee_id', '=', d['employee_id'][0]),
                                                           ('state', 'not in', ['cancel'])])
            if len(term_ids) > 0:
                continue
            # If the employee is on rest day (and not working), skip it
            rest_days = sched_obj.get_rest_days(self.cr, self.uid, d['employee_id'][0], dt)
            if rest_days != None and dt.weekday() in rest_days:
                if not self._working_on_restday(d['employee_id'][0], dt, utcdt, rest_days):
                    continue
            if d['employee_id'][0] not in unique_ids:
                unique_ids.append(d['employee_id'][0])
        return unique_ids

    def get_present(self, department_id):
        """Count of employees present in the department (not on leave)."""
        unique_ids = self.get_present_employees(department_id)
        onleave_ids = self.get_employees_on_leave(department_id, self.LVCODES)
        present_ids = [i for i in unique_ids if i not in onleave_ids]
        total = len(present_ids)
        self._present += total
        return total

    def get_employee_start_date(self, employee_id):
        """Return the earliest contract start date, or False if none."""
        first_day = False
        c_obj = self.pool.get('hr.contract')
        c_ids = c_obj.search(self.cr, self.uid, [('employee_id', '=', employee_id)])
        for contract in c_obj.browse(self.cr, self.uid, c_ids):
            if not first_day or contract.date_start < first_day:
                first_day = contract.date_start
        return first_day

    def get_absent(self, department_id):
        """Absent count for the department, '-' when zero."""
        absent, absent_holiday = self._get_absent(department_id, show_holiday=False)
        return (absent and absent or '-')

    def _get_absent(self, department_id, show_holiday=False):
        """Count employees with no sign-in who should have worked.

        Returns ``(res, res_holiday)``: plain absences, and (only when
        ``show_holiday`` is True) non-attendances on a public holiday.
        """
        res = 0
        res_holiday = 0
        ee_leave_ids = self.get_employees_on_leave(department_id, self.LVCODES)
        ee_withsched_ids = []
        att_obj = self.pool.get('hr.attendance')
        sched_obj = self.pool.get('hr.schedule')
        holiday_obj = self.pool.get('hr.holidays.public')
        user = self.pool.get('res.users').browse(self.cr, self.uid, self.uid)
        if user and user.tz:
            local_tz = timezone(user.tz)
        else:
            local_tz = timezone('Africa/Addis_Ababa')
        dt = datetime.strptime(self.date + ' 00:00:00', OE_DATETIMEFORMAT)
        utcdt = (local_tz.localize(dt, is_dst=False)).astimezone(utc)
        public_holiday = holiday_obj.is_public_holiday(self.cr, self.uid, dt)
        # Pass 1: employees with a schedule covering the report date.
        sched_ids = sched_obj.search(self.cr, self.uid, [('date_start', '<=', self.date),
                                                         ('date_end', '>=', self.date),
                                                         '|', ('employee_id.department_id.id', '=', department_id),
                                                         ('employee_id.saved_department_id.id', '=', department_id)
                                                         ])
        for sched in sched_obj.browse(self.cr, self.uid, sched_ids):
            if sched.employee_id.id not in ee_withsched_ids:
                ee_withsched_ids.append(sched.employee_id.id)
            # skip if the employee is on leave
            if sched.employee_id.id in ee_leave_ids:
                continue
            # Skip if the employee wasn't hired yet
            hire_date = self.get_employee_start_date(sched.employee_id.id)
            if not hire_date or (datetime.strptime(hire_date, OE_DATEFORMAT).date() > dt.date()):
                continue
            rest_days = sched_obj.get_rest_days(self.cr, self.uid, sched.employee_id.id, dt)
            # if this is the employee's rest day skip it
            if dt.weekday() in rest_days:
                continue
            # if this employee's employment was terminated skip it
            term_ids = self.pool.get('hr.employee.termination').search(self.cr, self.uid,
                                                                       [('name', '<=', self.date),
                                                                        ('employee_id.id', '=', sched.employee_id.id),
                                                                        ('state', 'not in', ['cancel'])])
            if len(term_ids) > 0:
                continue
            # If this is a public holiday don't mark absent
            if public_holiday and not show_holiday:
                continue
            # Did the employee punch in that day?
            att_ids = att_obj.search(self.cr, self.uid, [('name', '>=', utcdt.strftime(OE_DATETIMEFORMAT)),
                                                         ('name', '<', (utcdt + timedelta(hours= +24)).strftime(OE_DATETIMEFORMAT)),
                                                         ('action', '=', 'sign_in'),
                                                         ('employee_id.id', '=', sched.employee_id.id),
                                                         ])
            if len(att_ids) == 0:
                if public_holiday and show_holiday:
                    res_holiday += 1
                else:
                    res += 1
        # Get employees who don't have a schedule
        ee_nosched_ids = self.pool.get('hr.employee').search(self.cr, self.uid,
                                                             ['|', ('department_id.id', '=', department_id),
                                                              ('saved_department_id.id', '=', department_id),
                                                              ('id', 'not in', ee_withsched_ids)])
        for ee_id in ee_nosched_ids:
            # skip if the employee is on leave
            if ee_id in ee_leave_ids:
                continue
            # Skip if the employee wasn't hired yet
            hire_date = self.get_employee_start_date(ee_id)
            if not hire_date or (datetime.strptime(hire_date, OE_DATEFORMAT).date() > dt.date()):
                continue
            # if this employee's employment was terminated skip it
            term_ids = self.pool.get('hr.employee.termination').search(self.cr, self.uid,
                                                                       [('name', '<=', self.date),
                                                                        ('employee_id.id', '=', ee_id),
                                                                        ('state', 'not in', ['cancel'])])
            if len(term_ids) > 0:
                continue
            # If this is a public holiday don't mark absent
            if public_holiday and not show_holiday:
                continue
            att_ids = att_obj.search(self.cr, self.uid, [('name', '>=', utcdt.strftime(OE_DATETIMEFORMAT)),
                                                         ('name', '<', (utcdt + timedelta(hours= +24)).strftime(OE_DATETIMEFORMAT)),
                                                         ('action', '=', 'sign_in'),
                                                         ('employee_id.id', '=', ee_id),
                                                         ])
            if len(att_ids) == 0:
                if public_holiday and show_holiday:
                    res_holiday += 1
                else:
                    res += 1
        self._absent += res
        self._hol_absent += res_holiday
        return res, res_holiday

    def _on_leave(self, cr, uid, employee_id, d):
        """Return True if the employee has any validated leave on date ``d``."""
        leave_obj = self.pool.get('hr.holidays')
        user = self.pool.get('res.users').browse(cr, uid, uid)
        if user and user.tz:
            local_tz = timezone(user.tz)
        else:
            local_tz = timezone('Africa/Addis_Ababa')
        dtStart = datetime.strptime(d.strftime(OE_DATEFORMAT) + ' 00:00:00', OE_DATETIMEFORMAT)
        utcdtStart = (local_tz.localize(dtStart, is_dst=False)).astimezone(utc)
        utcdtNextStart = utcdtStart + timedelta(hours= +24)
        leave_ids = leave_obj.search(self.cr, self.uid, [('employee_id', '=', employee_id),
                                                         ('date_from', '<', utcdtNextStart.strftime(OE_DATETIMEFORMAT)),
                                                         ('date_to', '>=', utcdtStart.strftime(OE_DATETIMEFORMAT)),
                                                         ('type', '=', 'remove'),
                                                         ('state', 'in', ['validate', 'validate1']),
                                                         ])
        return (len(leave_ids) > 0)

    def get_restday(self, department_id):
        """Rest-day figure as a string: "rest(working)" or '-' when zero."""
        sched_obj = self.pool.get('hr.schedule')
        detail_obj = self.pool.get('hr.schedule.detail')
        att_obj = self.pool.get('hr.attendance')
        user = self.pool.get('res.users').browse(self.cr, self.uid, self.uid)
        if user and user.tz:
            local_tz = timezone(user.tz)
        else:
            local_tz = timezone('Africa/Addis_Ababa')
        dt = datetime.strptime(self.date + ' 00:00:00', OE_DATETIMEFORMAT)
        utcdt = (local_tz.localize(dt, is_dst=False)).astimezone(utc)
        sched_ids = sched_obj.search(self.cr, self.uid, [('date_start', '<=', self.date),
                                                         ('date_end', '>=', self.date),
                                                         '|', ('employee_id.department_id.id', '=', department_id),
                                                         ('employee_id.saved_department_id.id', '=', department_id)
                                                         ])
        res = 0
        otr = 0 # restday OT
        for sched in sched_obj.browse(self.cr, self.uid, sched_ids):
            # If the employee is on leave, skip it
            if self._on_leave(self.cr, self.uid, sched.employee_id.id,
                              datetime.strptime(self.date, OE_DATEFORMAT).date()):
                continue
            rest_days = sched_obj.get_rest_days(self.cr, self.uid, sched.employee_id.id, dt)
            if rest_days != None and dt.weekday() in rest_days:
                if self._working_on_restday(sched.employee_id.id, dt, utcdt, rest_days):
                    otr += 1
                else:
                    res += 1
        self._restday += res
        res_str = otr > 0 and str(res) +'('+ str(otr) + ')' or str(res)
        return ((res or otr) and res_str or '-')

    def _get_leave_ids(self, department_id, codes):
        """Return ids of validated leaves of type(s) ``codes`` overlapping
        the report date for employees of the department."""
        if isinstance(codes, str):
            codes = [codes]
        leave_obj = self.pool.get('hr.holidays')
        user = self.pool.get('res.users').browse(self.cr, self.uid, self.uid)
        if user and user.tz:
            local_tz = timezone(user.tz)
        else:
            local_tz = timezone('Africa/Addis_Ababa')
        dtStart = datetime.strptime(self.date + ' 00:00:00', OE_DATETIMEFORMAT)
        utcdtStart = (local_tz.localize(dtStart, is_dst=False)).astimezone(utc)
        utcdtNextStart = utcdtStart + timedelta(hours= +24)
        leave_ids = leave_obj.search(self.cr, self.uid, [('holiday_status_id.code', 'in', codes),
                                                         ('date_from', '<', utcdtNextStart.strftime(OE_DATETIMEFORMAT)),
                                                         ('date_to', '>=', utcdtStart.strftime(OE_DATETIMEFORMAT)),
                                                         ('type', '=', 'remove'),
                                                         ('state', 'in', ['validate', 'validate1']),
                                                         '|', ('employee_id.department_id.id', '=', department_id),
                                                         ('employee_id.saved_department_id.id', '=', department_id)
                                                         ])
        return leave_ids

    def get_leave(self, department_id, codes):
        """Number of matching leave records (not distinct employees)."""
        leave_ids = self._get_leave_ids(department_id, codes)
        res = len(leave_ids)
        return res

    def get_employees_on_leave(self, department_id, codes):
        """Distinct employee ids having a matching leave on the report date."""
        leave_ids = self._get_leave_ids(department_id, codes)
        employee_ids = []
        data = self.pool.get('hr.holidays').read(self.cr, self.uid, leave_ids, ['employee_id'])
        for d in data:
            if d.get('employee_id', False) and d['employee_id'][0] not in employee_ids:
                employee_ids.append(d['employee_id'][0])
        return employee_ids

    def get_al(self, department_id):
        """Annual leave count, '-' when zero."""
        res = self.get_leave(department_id, 'LVANNUAL')
        self._al += res
        return (res and res or '-')

    def get_sl(self, department_id):
        """Sick leave count (all sick-pay tiers), '-' when zero."""
        res = self.get_leave(department_id, ['LVSICK', 'LVSICK50', 'LVSICK00'])
        self._sl += res
        return (res and res or '-')

    def get_ml(self, department_id):
        """Maternity leave count, '-' when zero."""
        res = self.get_leave(department_id, 'LVMATERNITY')
        self._ml += res
        return (res and res or '-')

    def get_ol(self, department_id):
        """Other-leave count; on a public holiday the non-attendances are
        appended in parentheses, e.g. "2(5)". '-' when both are zero."""
        holiday_obj = self.pool.get('hr.holidays.public')
        dt = datetime.strptime(self.date + ' 00:00:00', OE_DATETIMEFORMAT)
        public_holiday = holiday_obj.is_public_holiday(self.cr, self.uid, dt)
        codes = ['LVBEREAVEMENT', 'LVWEDDING', 'LVMMEDICAL', 'LVPTO', 'LVCIVIC']
        res = self.get_leave(department_id, codes)
        self._ol += res
        absent_holiday = 0
        if public_holiday:
            absent, absent_holiday = self._get_absent(department_id, show_holiday=True)
        res_str = absent_holiday > 0 and str(res) +'('+ str(absent_holiday) + ')' or str(res)
        return ((res or absent_holiday) and res_str or '-')

    def get_terminated(self, department_id):
        """Employees of the department terminated on the report date."""
        res = 0
        seen_ids = []
        term_obj = self.pool.get('hr.employee.termination')
        term_ids = term_obj.search(self.cr, self.uid, [('name', '=', self.date)])
        for term in term_obj.browse(self.cr, self.uid, term_ids):
            if term.employee_id.department_id:
                dept_id = term.employee_id.department_id.id
            elif term.employee_id.saved_department_id:
                dept_id = term.employee_id.saved_department_id.id
            else:
                dept_id = False
            if term.employee_id.id not in seen_ids and dept_id == department_id:
                res += 1
                seen_ids.append(term.employee_id.id)
        self._terminated += res
        return (res and res or '-')

    def get_sum_present(self):
        """Grand total of present employees."""
        return self._present

    def get_sum_absent(self):
        """Grand total of absent employees."""
        return self._absent

    def get_sum_restday(self):
        """Grand total of employees on rest day (not working)."""
        return self._restday

    def get_sum_al(self):
        """Grand total of annual leaves."""
        return self._al

    def get_sum_sl(self):
        """Grand total of sick leaves."""
        return self._sl

    def get_sum_ml(self):
        """Grand total of maternity leaves."""
        return self._ml

    def get_sum_ol(self):
        """Grand total of other leaves, holiday non-attendances in parens."""
        res_str = self._hol_absent > 0 and str(self._ol) +'('+ str(self._hol_absent) + ')' or str(self._ol)
        return ((self._ol or self._hol_absent) and res_str or '-')

    def get_sum_terminated(self):
        """Grand total of terminated employees."""
        return self._terminated
| agpl-3.0 |
maohongyuan/kbengine | kbe/res/scripts/common/Lib/test/test_generators.py | 72 | 50910 | import gc
import sys
import unittest
import weakref
from test import support
class FinalizationTest(unittest.TestCase):
    """Tests for generator finalization (close/GC) semantics."""

    def test_frame_resurrect(self):
        # A generator frame can be resurrected by a generator's finalization.
        def gen():
            nonlocal frame
            try:
                yield
            finally:
                # Stash the live frame on the enclosing scope while the
                # generator is being finalized.
                frame = sys._getframe()
        g = gen()
        wr = weakref.ref(g)
        next(g)
        del g
        support.gc_collect()
        # The generator object itself is gone...
        self.assertIs(wr(), None)
        # ...but its frame survived via the reference taken in `finally`.
        self.assertTrue(frame)
        del frame
        support.gc_collect()

    def test_refcycle(self):
        # A generator caught in a refcycle gets finalized anyway.
        old_garbage = gc.garbage[:]
        finalized = False
        def gen():
            nonlocal finalized
            try:
                g = yield
                yield 1
            finally:
                finalized = True
        g = gen()
        next(g)
        # Sending the generator to itself creates the reference cycle.
        g.send(g)
        self.assertGreater(sys.getrefcount(g), 2)
        self.assertFalse(finalized)
        del g
        support.gc_collect()
        self.assertTrue(finalized)
        # Cycle collection must not leave uncollectable garbage behind.
        self.assertEqual(gc.garbage, old_garbage)
# Doctest corpus: introductory generator examples (run by the doctest suite).
# NOTE(review): blank lines inside this string appear to have been lost in
# extraction; content is preserved byte-for-byte as found.
tutorial_tests = """
Let's try a simple generator:
>>> def f():
... yield 1
... yield 2
>>> for i in f():
... print(i)
1
2
>>> g = f()
>>> next(g)
1
>>> next(g)
2
"Falling off the end" stops the generator:
>>> next(g)
Traceback (most recent call last):
File "<stdin>", line 1, in ?
File "<stdin>", line 2, in g
StopIteration
"return" also stops the generator:
>>> def f():
... yield 1
... return
... yield 2 # never reached
...
>>> g = f()
>>> next(g)
1
>>> next(g)
Traceback (most recent call last):
File "<stdin>", line 1, in ?
File "<stdin>", line 3, in f
StopIteration
>>> next(g) # once stopped, can't be resumed
Traceback (most recent call last):
File "<stdin>", line 1, in ?
StopIteration
"raise StopIteration" stops the generator too:
>>> def f():
... yield 1
... raise StopIteration
... yield 2 # never reached
...
>>> g = f()
>>> next(g)
1
>>> next(g)
Traceback (most recent call last):
File "<stdin>", line 1, in ?
StopIteration
>>> next(g)
Traceback (most recent call last):
File "<stdin>", line 1, in ?
StopIteration
However, they are not exactly equivalent:
>>> def g1():
... try:
... return
... except:
... yield 1
...
>>> list(g1())
[]
>>> def g2():
... try:
... raise StopIteration
... except:
... yield 42
>>> print(list(g2()))
[42]
This may be surprising at first:
>>> def g3():
... try:
... return
... finally:
... yield 1
...
>>> list(g3())
[1]
Let's create an alternate range() function implemented as a generator:
>>> def yrange(n):
... for i in range(n):
... yield i
...
>>> list(yrange(5))
[0, 1, 2, 3, 4]
Generators always return to the most recent caller:
>>> def creator():
... r = yrange(5)
... print("creator", next(r))
... return r
...
>>> def caller():
... r = creator()
... for i in r:
... print("caller", i)
...
>>> caller()
creator 0
caller 1
caller 2
caller 3
caller 4
Generators can call other generators:
>>> def zrange(n):
... for i in yrange(n):
... yield i
...
>>> list(zrange(5))
[0, 1, 2, 3, 4]
"""
# The examples from PEP 255.
# Doctest corpus (run by the doctest suite); content preserved byte-for-byte.
pep_tests = """
Specification: Yield
Restriction: A generator cannot be resumed while it is actively
running:
>>> def g():
... i = next(me)
... yield i
>>> me = g()
>>> next(me)
Traceback (most recent call last):
...
File "<string>", line 2, in g
ValueError: generator already executing
Specification: Return
Note that return isn't always equivalent to raising StopIteration: the
difference lies in how enclosing try/except constructs are treated.
For example,
>>> def f1():
... try:
... return
... except:
... yield 1
>>> print(list(f1()))
[]
because, as in any function, return simply exits, but
>>> def f2():
... try:
... raise StopIteration
... except:
... yield 42
>>> print(list(f2()))
[42]
because StopIteration is captured by a bare "except", as is any
exception.
Specification: Generators and Exception Propagation
>>> def f():
... return 1//0
>>> def g():
... yield f() # the zero division exception propagates
... yield 42 # and we'll never get here
>>> k = g()
>>> next(k)
Traceback (most recent call last):
File "<stdin>", line 1, in ?
File "<stdin>", line 2, in g
File "<stdin>", line 2, in f
ZeroDivisionError: integer division or modulo by zero
>>> next(k) # and the generator cannot be resumed
Traceback (most recent call last):
File "<stdin>", line 1, in ?
StopIteration
>>>
Specification: Try/Except/Finally
>>> def f():
... try:
... yield 1
... try:
... yield 2
... 1//0
... yield 3 # never get here
... except ZeroDivisionError:
... yield 4
... yield 5
... raise
... except:
... yield 6
... yield 7 # the "raise" above stops this
... except:
... yield 8
... yield 9
... try:
... x = 12
... finally:
... yield 10
... yield 11
>>> print(list(f()))
[1, 2, 4, 5, 8, 9, 10, 11]
>>>
Guido's binary tree example.
>>> # A binary tree class.
>>> class Tree:
...
... def __init__(self, label, left=None, right=None):
... self.label = label
... self.left = left
... self.right = right
...
... def __repr__(self, level=0, indent=" "):
... s = level*indent + repr(self.label)
... if self.left:
... s = s + "\\n" + self.left.__repr__(level+1, indent)
... if self.right:
... s = s + "\\n" + self.right.__repr__(level+1, indent)
... return s
...
... def __iter__(self):
... return inorder(self)
>>> # Create a Tree from a list.
>>> def tree(list):
... n = len(list)
... if n == 0:
... return []
... i = n // 2
... return Tree(list[i], tree(list[:i]), tree(list[i+1:]))
>>> # Show it off: create a tree.
>>> t = tree("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
>>> # A recursive generator that generates Tree labels in in-order.
>>> def inorder(t):
... if t:
... for x in inorder(t.left):
... yield x
... yield t.label
... for x in inorder(t.right):
... yield x
>>> # Show it off: create a tree.
>>> t = tree("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
>>> # Print the nodes of the tree in in-order.
>>> for x in t:
... print(' '+x, end='')
A B C D E F G H I J K L M N O P Q R S T U V W X Y Z
>>> # A non-recursive generator.
>>> def inorder(node):
... stack = []
... while node:
... while node.left:
... stack.append(node)
... node = node.left
... yield node.label
... while not node.right:
... try:
... node = stack.pop()
... except IndexError:
... return
... yield node.label
... node = node.right
>>> # Exercise the non-recursive generator.
>>> for x in t:
... print(' '+x, end='')
A B C D E F G H I J K L M N O P Q R S T U V W X Y Z
"""
# Examples from Iterator-List and Python-Dev and c.l.py.
email_tests = """
The difference between yielding None and returning it.
>>> def g():
... for i in range(3):
... yield None
... yield None
... return
>>> list(g())
[None, None, None, None]
Ensure that explicitly raising StopIteration acts like any other exception
in try/except, not like a return.
>>> def g():
... yield 1
... try:
... raise StopIteration
... except:
... yield 2
... yield 3
>>> list(g())
[1, 2, 3]
Next one was posted to c.l.py.
>>> def gcomb(x, k):
... "Generate all combinations of k elements from list x."
...
... if k > len(x):
... return
... if k == 0:
... yield []
... else:
... first, rest = x[0], x[1:]
... # A combination does or doesn't contain first.
... # If it does, the remainder is a k-1 comb of rest.
... for c in gcomb(rest, k-1):
... c.insert(0, first)
... yield c
... # If it doesn't contain first, it's a k comb of rest.
... for c in gcomb(rest, k):
... yield c
>>> seq = list(range(1, 5))
>>> for k in range(len(seq) + 2):
... print("%d-combs of %s:" % (k, seq))
... for c in gcomb(seq, k):
... print(" ", c)
0-combs of [1, 2, 3, 4]:
[]
1-combs of [1, 2, 3, 4]:
[1]
[2]
[3]
[4]
2-combs of [1, 2, 3, 4]:
[1, 2]
[1, 3]
[1, 4]
[2, 3]
[2, 4]
[3, 4]
3-combs of [1, 2, 3, 4]:
[1, 2, 3]
[1, 2, 4]
[1, 3, 4]
[2, 3, 4]
4-combs of [1, 2, 3, 4]:
[1, 2, 3, 4]
5-combs of [1, 2, 3, 4]:
From the Iterators list, about the types of these things.
>>> def g():
... yield 1
...
>>> type(g)
<class 'function'>
>>> i = g()
>>> type(i)
<class 'generator'>
>>> [s for s in dir(i) if not s.startswith('_')]
['close', 'gi_code', 'gi_frame', 'gi_running', 'send', 'throw']
>>> from test.support import HAVE_DOCSTRINGS
>>> print(i.__next__.__doc__ if HAVE_DOCSTRINGS else 'Implement next(self).')
Implement next(self).
>>> iter(i) is i
True
>>> import types
>>> isinstance(i, types.GeneratorType)
True
And more, added later.
>>> i.gi_running
0
>>> type(i.gi_frame)
<class 'frame'>
>>> i.gi_running = 42
Traceback (most recent call last):
...
AttributeError: readonly attribute
>>> def g():
... yield me.gi_running
>>> me = g()
>>> me.gi_running
0
>>> next(me)
1
>>> me.gi_running
0
A clever union-find implementation from c.l.py, due to David Eppstein.
Sent: Friday, June 29, 2001 12:16 PM
To: python-list@python.org
Subject: Re: PEP 255: Simple Generators
>>> class disjointSet:
... def __init__(self, name):
... self.name = name
... self.parent = None
... self.generator = self.generate()
...
... def generate(self):
... while not self.parent:
... yield self
... for x in self.parent.generator:
... yield x
...
... def find(self):
... return next(self.generator)
...
... def union(self, parent):
... if self.parent:
... raise ValueError("Sorry, I'm not a root!")
... self.parent = parent
...
... def __str__(self):
... return self.name
>>> names = "ABCDEFGHIJKLM"
>>> sets = [disjointSet(name) for name in names]
>>> roots = sets[:]
>>> import random
>>> gen = random.Random(42)
>>> while 1:
... for s in sets:
... print(" %s->%s" % (s, s.find()), end='')
... print()
... if len(roots) > 1:
... s1 = gen.choice(roots)
... roots.remove(s1)
... s2 = gen.choice(roots)
... s1.union(s2)
... print("merged", s1, "into", s2)
... else:
... break
A->A B->B C->C D->D E->E F->F G->G H->H I->I J->J K->K L->L M->M
merged K into B
A->A B->B C->C D->D E->E F->F G->G H->H I->I J->J K->B L->L M->M
merged A into F
A->F B->B C->C D->D E->E F->F G->G H->H I->I J->J K->B L->L M->M
merged E into F
A->F B->B C->C D->D E->F F->F G->G H->H I->I J->J K->B L->L M->M
merged D into C
A->F B->B C->C D->C E->F F->F G->G H->H I->I J->J K->B L->L M->M
merged M into C
A->F B->B C->C D->C E->F F->F G->G H->H I->I J->J K->B L->L M->C
merged J into B
A->F B->B C->C D->C E->F F->F G->G H->H I->I J->B K->B L->L M->C
merged B into C
A->F B->C C->C D->C E->F F->F G->G H->H I->I J->C K->C L->L M->C
merged F into G
A->G B->C C->C D->C E->G F->G G->G H->H I->I J->C K->C L->L M->C
merged L into C
A->G B->C C->C D->C E->G F->G G->G H->H I->I J->C K->C L->C M->C
merged G into I
A->I B->C C->C D->C E->I F->I G->I H->H I->I J->C K->C L->C M->C
merged I into H
A->H B->C C->C D->C E->H F->H G->H H->H I->H J->C K->C L->C M->C
merged C into H
A->H B->H C->H D->H E->H F->H G->H H->H I->H J->H K->H L->H M->H
"""
# Emacs turd '
# Fun tests (for sufficiently warped notions of "fun").
fun_tests = """
Build up to a recursive Sieve of Eratosthenes generator.
>>> def firstn(g, n):
... return [next(g) for i in range(n)]
>>> def intsfrom(i):
... while 1:
... yield i
... i += 1
>>> firstn(intsfrom(5), 7)
[5, 6, 7, 8, 9, 10, 11]
>>> def exclude_multiples(n, ints):
... for i in ints:
... if i % n:
... yield i
>>> firstn(exclude_multiples(3, intsfrom(1)), 6)
[1, 2, 4, 5, 7, 8]
>>> def sieve(ints):
... prime = next(ints)
... yield prime
... not_divisible_by_prime = exclude_multiples(prime, ints)
... for p in sieve(not_divisible_by_prime):
... yield p
>>> primes = sieve(intsfrom(2))
>>> firstn(primes, 20)
[2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71]
Another famous problem: generate all integers of the form
2**i * 3**j * 5**k
in increasing order, where i,j,k >= 0. Trickier than it may look at first!
Try writing it without generators, and correctly, and without generating
3 internal results for each result output.
>>> def times(n, g):
... for i in g:
... yield n * i
>>> firstn(times(10, intsfrom(1)), 10)
[10, 20, 30, 40, 50, 60, 70, 80, 90, 100]
>>> def merge(g, h):
... ng = next(g)
... nh = next(h)
... while 1:
... if ng < nh:
... yield ng
... ng = next(g)
... elif ng > nh:
... yield nh
... nh = next(h)
... else:
... yield ng
... ng = next(g)
... nh = next(h)
The following works, but is doing a whale of a lot of redundant work --
it's not clear how to get the internal uses of m235 to share a single
generator. Note that me_times2 (etc) each need to see every element in the
result sequence. So this is an example where lazy lists are more natural
(you can look at the head of a lazy list any number of times).
>>> def m235():
... yield 1
... me_times2 = times(2, m235())
... me_times3 = times(3, m235())
... me_times5 = times(5, m235())
... for i in merge(merge(me_times2,
... me_times3),
... me_times5):
... yield i
Don't print "too many" of these -- the implementation above is extremely
inefficient: each call of m235() leads to 3 recursive calls, and in
turn each of those 3 more, and so on, and so on, until we've descended
enough levels to satisfy the print stmts. Very odd: when I printed 5
lines of results below, this managed to screw up Win98's malloc in "the
usual" way, i.e. the heap grew over 4Mb so Win98 started fragmenting
address space, and it *looked* like a very slow leak.
>>> result = m235()
>>> for i in range(3):
... print(firstn(result, 15))
[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
Heh. Here's one way to get a shared list, complete with an excruciating
namespace renaming trick. The *pretty* part is that the times() and merge()
functions can be reused as-is, because they only assume their stream
arguments are iterable -- a LazyList is the same as a generator to times().
>>> class LazyList:
... def __init__(self, g):
... self.sofar = []
... self.fetch = g.__next__
...
... def __getitem__(self, i):
... sofar, fetch = self.sofar, self.fetch
... while i >= len(sofar):
... sofar.append(fetch())
... return sofar[i]
>>> def m235():
... yield 1
... # Gack: m235 below actually refers to a LazyList.
... me_times2 = times(2, m235)
... me_times3 = times(3, m235)
... me_times5 = times(5, m235)
... for i in merge(merge(me_times2,
... me_times3),
... me_times5):
... yield i
Print as many of these as you like -- *this* implementation is memory-
efficient.
>>> m235 = LazyList(m235())
>>> for i in range(5):
... print([m235[j] for j in range(15*i, 15*(i+1))])
[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
[200, 216, 225, 240, 243, 250, 256, 270, 288, 300, 320, 324, 360, 375, 384]
[400, 405, 432, 450, 480, 486, 500, 512, 540, 576, 600, 625, 640, 648, 675]
Ye olde Fibonacci generator, LazyList style.
>>> def fibgen(a, b):
...
... def sum(g, h):
... while 1:
... yield next(g) + next(h)
...
... def tail(g):
... next(g) # throw first away
... for x in g:
... yield x
...
... yield a
... yield b
... for s in sum(iter(fib),
... tail(iter(fib))):
... yield s
>>> fib = LazyList(fibgen(1, 2))
>>> firstn(iter(fib), 17)
[1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584]
Running after your tail with itertools.tee (new in version 2.4)
The algorithms "m235" (Hamming) and Fibonacci presented above are both
examples of a whole family of FP (functional programming) algorithms
where a function produces and returns a list while the production algorithm
presupposes the list to be already produced, by recursively calling itself.
For these algorithms to work, they must:
- produce at least a first element without presupposing the existence of
the rest of the list
- produce their elements in a lazy manner
To work efficiently, the beginning of the list must not be recomputed over
and over again. This is ensured in most FP languages as a built-in feature.
In python, we have to explicitly maintain a list of already computed results
and abandon genuine recursivity.
This is what had been attempted above with the LazyList class. One problem
with that class is that it keeps a list of all of the generated results and
therefore continually grows. This partially defeats the goal of the generator
concept, viz. produce the results only as needed instead of producing them
all and thereby wasting memory.
Thanks to itertools.tee, it is now clear "how to get the internal uses of
m235 to share a single generator".
>>> from itertools import tee
>>> def m235():
... def _m235():
... yield 1
... for n in merge(times(2, m2),
... merge(times(3, m3),
... times(5, m5))):
... yield n
... m1 = _m235()
... m2, m3, m5, mRes = tee(m1, 4)
... return mRes
>>> it = m235()
>>> for i in range(5):
... print(firstn(it, 15))
[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
[200, 216, 225, 240, 243, 250, 256, 270, 288, 300, 320, 324, 360, 375, 384]
[400, 405, 432, 450, 480, 486, 500, 512, 540, 576, 600, 625, 640, 648, 675]
The "tee" function does just what we want. It internally keeps a generated
result for as long as it has not been "consumed" from all of the duplicated
iterators, whereupon it is deleted. You can therefore print the hamming
sequence during hours without increasing memory usage, or very little.
The beauty of it is that recursive running-after-their-tail FP algorithms
are quite straightforwardly expressed with this Python idiom.
Ye olde Fibonacci generator, tee style.
>>> def fib():
...
... def _isum(g, h):
... while 1:
... yield next(g) + next(h)
...
... def _fib():
... yield 1
... yield 2
... next(fibTail) # throw first away
... for res in _isum(fibHead, fibTail):
... yield res
...
... realfib = _fib()
... fibHead, fibTail, fibRes = tee(realfib, 3)
... return fibRes
>>> firstn(fib(), 17)
[1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584]
"""
# syntax_tests mostly provokes SyntaxErrors. Also fiddling with #if 0
# hackery.
syntax_tests = """
These are fine:
>>> def f():
... yield 1
... return
>>> def f():
... try:
... yield 1
... finally:
... pass
>>> def f():
... try:
... try:
... 1//0
... except ZeroDivisionError:
... yield 666
... except:
... pass
... finally:
... pass
>>> def f():
... try:
... try:
... yield 12
... 1//0
... except ZeroDivisionError:
... yield 666
... except:
... try:
... x = 12
... finally:
... yield 12
... except:
... return
>>> list(f())
[12, 666]
>>> def f():
... yield
>>> type(f())
<class 'generator'>
>>> def f():
... if 0:
... yield
>>> type(f())
<class 'generator'>
>>> def f():
... if 0:
... yield 1
>>> type(f())
<class 'generator'>
>>> def f():
... if "":
... yield None
>>> type(f())
<class 'generator'>
>>> def f():
... return
... try:
... if x==4:
... pass
... elif 0:
... try:
... 1//0
... except SyntaxError:
... pass
... else:
... if 0:
... while 12:
... x += 1
... yield 2 # don't blink
... f(a, b, c, d, e)
... else:
... pass
... except:
... x = 1
... return
>>> type(f())
<class 'generator'>
>>> def f():
... if 0:
... def g():
... yield 1
...
>>> type(f())
<class 'NoneType'>
>>> def f():
... if 0:
... class C:
... def __init__(self):
... yield 1
... def f(self):
... yield 2
>>> type(f())
<class 'NoneType'>
>>> def f():
... if 0:
... return
... if 0:
... yield 2
>>> type(f())
<class 'generator'>
This one caused a crash (see SF bug 567538):
>>> def f():
... for i in range(3):
... try:
... continue
... finally:
... yield i
...
>>> g = f()
>>> print(next(g))
0
>>> print(next(g))
1
>>> print(next(g))
2
>>> print(next(g))
Traceback (most recent call last):
StopIteration
Test the gi_code attribute
>>> def f():
... yield 5
...
>>> g = f()
>>> g.gi_code is f.__code__
True
>>> next(g)
5
>>> next(g)
Traceback (most recent call last):
StopIteration
>>> g.gi_code is f.__code__
True
Test the __name__ attribute and the repr()
>>> def f():
... yield 5
...
>>> g = f()
>>> g.__name__
'f'
>>> repr(g) # doctest: +ELLIPSIS
'<generator object f at ...>'
Lambdas shouldn't have their usual return behavior.
>>> x = lambda: (yield 1)
>>> list(x())
[1]
>>> x = lambda: ((yield 1), (yield 2))
>>> list(x())
[1, 2]
"""
# conjoin is a simple backtracking generator, named in honor of Icon's
# "conjunction" control structure. Pass a list of no-argument functions
# that return iterable objects. Easiest to explain by example: assume the
# function list [x, y, z] is passed. Then conjoin acts like:
#
# def g():
# values = [None] * 3
# for values[0] in x():
# for values[1] in y():
# for values[2] in z():
# yield values
#
# So some 3-lists of values *may* be generated, each time we successfully
# get into the innermost loop. If an iterator fails (is exhausted) before
# then, it "backtracks" to get the next value from the nearest enclosing
# iterator (the one "to the left"), and starts all over again at the next
# slot (pumps a fresh iterator). Of course this is most useful when the
# iterators have side-effects, so that which values *can* be generated at
# each slot depend on the values iterated at previous slots.
def simple_conjoin(gs):
    """Backtracking cross-product generator.

    ``gs`` is a list of no-argument callables, each returning an iterable.
    Yields one list per combination, with slot ``i`` holding the current
    item from ``gs[i]()``.  Note that the *same* list object is yielded
    every time, so callers wanting to keep results must copy it.
    """
    slots = [None] * len(gs)

    def descend(depth):
        # A full assignment is reached once every slot has been filled.
        if depth == len(gs):
            yield slots
            return
        # Pump slot `depth`, recursing into the remaining slots for each
        # value it produces.
        for slots[depth] in gs[depth]():
            for result in descend(depth + 1):
                yield result

    for result in descend(0):
        yield result
# That works fine, but recursing a level and checking i against len(gs) for
# each item produced is inefficient. By doing manual loop unrolling across
# generator boundaries, it's possible to eliminate most of that overhead.
# This isn't worth the bother *in general* for generators, but conjoin() is
# a core building block for some CPU-intensive generator applications.
def conjoin(gs):
    """Backtracking cross-product generator, loop-unrolled for speed.

    Semantically identical to ``simple_conjoin``:  ``gs`` is a list of
    no-argument callables returning iterables, and the same shared
    ``values`` list is yielded for each combination.  Three loop nests
    are peeled off at a time (in ``_gen3``) to cut the per-item cost of
    recursing across generator boundaries.
    """
    n = len(gs)
    values = [None] * n

    # Do one loop nest at time recursively, until the # of loop nests
    # remaining is divisible by 3.
    def gen(i):
        if i >= n:
            yield values
        elif (n-i) % 3:
            ip1 = i+1
            # `for values[i] in ...` writes each item straight into the
            # shared result list.
            for values[i] in gs[i]():
                for x in gen(ip1):
                    yield x
        else:
            for x in _gen3(i):
                yield x

    # Do three loop nests at a time, recursing only if at least three more
    # remain.  Don't call directly: this is an internal optimization for
    # gen's use.
    def _gen3(i):
        assert i < n and (n-i) % 3 == 0
        ip1, ip2, ip3 = i+1, i+2, i+3
        g, g1, g2 = gs[i : ip3]

        if ip3 >= n:
            # These are the last three, so we can yield values directly.
            for values[i] in g():
                for values[ip1] in g1():
                    for values[ip2] in g2():
                        yield values
        else:
            # At least 6 loop nests remain; peel off 3 and recurse for the
            # rest.
            for values[i] in g():
                for values[ip1] in g1():
                    for values[ip2] in g2():
                        for x in _gen3(ip3):
                            yield x

    for x in gen(0):
        yield x
# And one more approach: For backtracking apps like the Knight's Tour
# solver below, the number of backtracking levels can be enormous (one
# level per square, for the Knight's Tour, so that e.g. a 100x100 board
# needs 10,000 levels). In such cases Python is likely to run out of
# stack space due to recursion. So here's a recursion-free version of
# conjoin too.
# NOTE WELL: This allows large problems to be solved with only trivial
# demands on stack space. Without explicitly resumable generators, this is
# much harder to achieve. OTOH, this is much slower (up to a factor of 2)
# than the fancy unrolled recursive conjoin.
def flat_conjoin(gs):  # rename to conjoin to run tests with this instead
    """Recursion-free conjoin: explicit backtracking over an iterator stack.

    Behaves like ``conjoin`` (yields the same shared ``values`` list once
    per combination), but keeps its own stack of live iterators instead of
    recursing, so the number of backtracking levels is limited only by
    memory rather than by Python's recursion limit.
    """
    n = len(gs)
    values = [None] * n
    iters = [None] * n
    _StopIteration = StopIteration  # make local because caught a *lot*
    i = 0
    while 1:
        # Descend: pump a fresh iterator into each remaining slot.  We
        # store the bound __next__ method so resuming a level is one call.
        try:
            while i < n:
                it = iters[i] = gs[i]().__next__
                values[i] = it()
                i += 1
        except _StopIteration:
            pass
        else:
            assert i == n
            yield values

        # Backtrack until an older iterator can be resumed.
        i -= 1
        while i >= 0:
            try:
                values[i] = iters[i]()
                # Success!  Start fresh at next level.
                i += 1
                break
            except _StopIteration:
                # Continue backtracking.
                i -= 1
        else:
            # Even slot 0 is exhausted -- all combinations produced.
            assert i < 0
            break
# A conjoin-based N-Queens solver.
class Queens:
    """N-Queens solver built on conjoin(): one backtracking slot per row."""

    def __init__(self, n):
        self.n = n
        rangen = range(n)

        # Assign a unique int to each column and diagonal.
        # columns:  n of those, range(n).
        # NW-SE diagonals: 2n-1 of these, i-j unique and invariant along
        # each, smallest i-j is 0-(n-1) = 1-n, so add n-1 to shift to 0-
        # based.
        # NE-SW diagonals: 2n-1 of these, i+j unique and invariant along
        # each, smallest i+j is 0, largest is 2n-2.

        # For each square, compute a bit vector of the columns and
        # diagonals it covers, and for each row compute a function that
        # generates the possibilities for the columns in that row.
        self.rowgenerators = []
        for i in rangen:
            rowuses = [(1 << j) |                  # column ordinal
                       (1 << (n + i-j + n-1)) |    # NW-SE ordinal
                       (1 << (n + 2*n-1 + i+j))    # NE-SW ordinal
                       for j in rangen]

            def rowgen(rowuses=rowuses):
                # Yield each column j of this row not attacked under the
                # current board state (self.used); marks the square's
                # columns/diagonals while the slot is held, and unmarks
                # them when backtracked past (after the yield resumes).
                for j in rangen:
                    uses = rowuses[j]
                    if uses & self.used == 0:
                        self.used |= uses
                        yield j
                        self.used &= ~uses

            self.rowgenerators.append(rowgen)

    # Generate solutions.
    def solve(self):
        """Yield solutions as row->column lists (the list is shared; copy it)."""
        self.used = 0
        for row2col in conjoin(self.rowgenerators):
            yield row2col

    def printsolution(self, row2col):
        """Print one solution as an ASCII board, 'Q' marking each queen."""
        n = self.n
        assert n == len(row2col)
        sep = "+" + "-+" * n
        print(sep)
        for i in range(n):
            squares = [" " for j in range(n)]
            squares[row2col[i]] = "Q"
            print("|" + "|".join(squares) + "|")
        print(sep)
# A conjoin-based Knight's Tour solver. This is pretty sophisticated
# (e.g., when used with flat_conjoin above, and passing hard=1 to the
# constructor, a 200x200 Knight's Tour was found quickly -- note that we're
# creating 10s of thousands of generators then!), and is lengthy.
class Knights:
    """Closed Knight's Tour solver on an m x n board, driven by conjoin().

    One backtracking slot per move; each slot's generator prunes the shared
    successor lists (``self.succs``) while its square is occupied and
    restores them on backtrack.  ``hard=1`` selects a stronger (slower)
    move-ordering heuristic for very large boards.
    """

    def __init__(self, m, n, hard=0):
        self.m, self.n = m, n

        # solve() will set up succs[i] to be a list of square #i's
        # successors.
        succs = self.succs = []

        # Remove i0 from each of its successor's successor lists, i.e.
        # successors can't go back to i0 again.  Return 0 if we can
        # detect this makes a solution impossible, else return 1.

        def remove_from_successors(i0, len=len):
            # If we remove all exits from a free square, we're dead:
            # even if we move to it next, we can't leave it again.
            # If we create a square with one exit, we must visit it next;
            # else somebody else will have to visit it, and since there's
            # only one adjacent, there won't be a way to leave it again.
            # Finally, if we create more than one free square with a
            # single exit, we can only move to one of them next, leaving
            # the other one a dead end.
            ne0 = ne1 = 0
            for i in succs[i0]:
                s = succs[i]
                s.remove(i0)
                e = len(s)
                if e == 0:
                    ne0 += 1
                elif e == 1:
                    ne1 += 1
            return ne0 == 0 and ne1 < 2

        # Put i0 back in each of its successor's successor lists.

        def add_to_successors(i0):
            for i in succs[i0]:
                succs[i].append(i0)

        # Generate the first move.
        def first():
            if m < 1 or n < 1:
                return

            # Since we're looking for a cycle, it doesn't matter where we
            # start.  Starting in a corner makes the 2nd move easy.
            corner = self.coords2index(0, 0)
            remove_from_successors(corner)
            self.lastij = corner
            yield corner
            add_to_successors(corner)

        # Generate the second moves.
        def second():
            corner = self.coords2index(0, 0)
            assert self.lastij == corner  # i.e., we started in the corner
            if m < 3 or n < 3:
                return
            assert len(succs[corner]) == 2
            assert self.coords2index(1, 2) in succs[corner]
            assert self.coords2index(2, 1) in succs[corner]
            # Only two choices.  Whichever we pick, the other must be the
            # square picked on move m*n, as it's the only way to get back
            # to (0, 0).  Save its index in self.final so that moves before
            # the last know it must be kept free.
            for i, j in (1, 2), (2, 1):
                this = self.coords2index(i, j)
                final = self.coords2index(3-i, 3-j)
                self.final = final

                remove_from_successors(this)
                # Temporarily let the reserved final square reach the
                # corner again so the last move can close the cycle.
                succs[final].append(corner)
                self.lastij = this
                yield this
                succs[final].remove(corner)
                add_to_successors(this)

        # Generate moves 3 thru m*n-1.
        def advance(len=len):
            # If some successor has only one exit, must take it.
            # Else favor successors with fewer exits.
            candidates = []
            for i in succs[self.lastij]:
                e = len(succs[i])
                assert e > 0, "else remove_from_successors() pruning flawed"
                if e == 1:
                    # Forced move: discard anything collected so far.
                    candidates = [(e, i)]
                    break
                candidates.append((e, i))
            else:
                candidates.sort()

            for e, i in candidates:
                if i != self.final:
                    if remove_from_successors(i):
                        self.lastij = i
                        yield i
                    add_to_successors(i)

        # Generate moves 3 thru m*n-1.  Alternative version using a
        # stronger (but more expensive) heuristic to order successors.
        # Since the # of backtracking levels is m*n, a poor move early on
        # can take eons to undo.  Smallest square board for which this
        # matters a lot is 52x52.
        def advance_hard(vmid=(m-1)/2.0, hmid=(n-1)/2.0, len=len):
            # If some successor has only one exit, must take it.
            # Else favor successors with fewer exits.
            # Break ties via max distance from board centerpoint (favor
            # corners and edges whenever possible).
            candidates = []
            for i in succs[self.lastij]:
                e = len(succs[i])
                assert e > 0, "else remove_from_successors() pruning flawed"
                if e == 1:
                    candidates = [(e, 0, i)]
                    break
                i1, j1 = self.index2coords(i)
                d = (i1 - vmid)**2 + (j1 - hmid)**2
                candidates.append((e, -d, i))
            else:
                candidates.sort()

            for e, d, i in candidates:
                if i != self.final:
                    if remove_from_successors(i):
                        self.lastij = i
                        yield i
                    add_to_successors(i)

        # Generate the last move.
        def last():
            assert self.final in succs[self.lastij]
            yield self.final

        if m*n < 4:
            self.squaregenerators = [first]
        else:
            self.squaregenerators = [first, second] + \
                [hard and advance_hard or advance] * (m*n - 3) + \
                [last]

    def coords2index(self, i, j):
        """Map board coordinates (row i, column j) to a flat square index."""
        assert 0 <= i < self.m
        assert 0 <= j < self.n
        return i * self.n + j

    def index2coords(self, index):
        """Inverse of coords2index: flat index -> (row, column) pair."""
        assert 0 <= index < self.m * self.n
        return divmod(index, self.n)

    def _init_board(self):
        """(Re)build self.succs: each square's list of knight-move successors."""
        succs = self.succs
        del succs[:]
        m, n = self.m, self.n
        c2i = self.coords2index
        # The eight knight-move offsets.
        offsets = [( 1,  2), ( 2,  1), ( 2, -1), ( 1, -2),
                   (-1, -2), (-2, -1), (-2,  1), (-1,  2)]
        rangen = range(n)
        for i in range(m):
            for j in rangen:
                s = [c2i(i+io, j+jo) for io, jo in offsets
                     if 0 <= i+io < m and
                        0 <= j+jo < n]
                succs.append(s)

    # Generate solutions.
    def solve(self):
        """Yield tours as lists of square indices in visit order (shared list)."""
        self._init_board()
        for x in conjoin(self.squaregenerators):
            yield x

    def printsolution(self, x):
        """Print a tour as an ASCII board numbering squares in visit order."""
        m, n = self.m, self.n
        assert len(x) == m*n
        w = len(str(m*n))
        format = "%" + str(w) + "d"

        squares = [[None] * n for i in range(m)]
        k = 1
        for i in x:
            i1, j1 = self.index2coords(i)
            squares[i1][j1] = format % k
            k += 1

        sep = "+" + ("-" * w + "+") * n
        print(sep)
        for i in range(m):
            row = squares[i]
            print("|" + "|".join(row) + "|")
        print(sep)
conjoin_tests = """
Generate the 3-bit binary numbers in order. This illustrates dumbest-
possible use of conjoin, just to generate the full cross-product.
>>> for c in conjoin([lambda: iter((0, 1))] * 3):
... print(c)
[0, 0, 0]
[0, 0, 1]
[0, 1, 0]
[0, 1, 1]
[1, 0, 0]
[1, 0, 1]
[1, 1, 0]
[1, 1, 1]
For efficiency in typical backtracking apps, conjoin() yields the same list
object each time. So if you want to save away a full account of its
generated sequence, you need to copy its results.
>>> def gencopy(iterator):
... for x in iterator:
... yield x[:]
>>> for n in range(10):
... all = list(gencopy(conjoin([lambda: iter((0, 1))] * n)))
... print(n, len(all), all[0] == [0] * n, all[-1] == [1] * n)
0 1 True True
1 2 True True
2 4 True True
3 8 True True
4 16 True True
5 32 True True
6 64 True True
7 128 True True
8 256 True True
9 512 True True
And run an 8-queens solver.
>>> q = Queens(8)
>>> LIMIT = 2
>>> count = 0
>>> for row2col in q.solve():
... count += 1
... if count <= LIMIT:
... print("Solution", count)
... q.printsolution(row2col)
Solution 1
+-+-+-+-+-+-+-+-+
|Q| | | | | | | |
+-+-+-+-+-+-+-+-+
| | | | |Q| | | |
+-+-+-+-+-+-+-+-+
| | | | | | | |Q|
+-+-+-+-+-+-+-+-+
| | | | | |Q| | |
+-+-+-+-+-+-+-+-+
| | |Q| | | | | |
+-+-+-+-+-+-+-+-+
| | | | | | |Q| |
+-+-+-+-+-+-+-+-+
| |Q| | | | | | |
+-+-+-+-+-+-+-+-+
| | | |Q| | | | |
+-+-+-+-+-+-+-+-+
Solution 2
+-+-+-+-+-+-+-+-+
|Q| | | | | | | |
+-+-+-+-+-+-+-+-+
| | | | | |Q| | |
+-+-+-+-+-+-+-+-+
| | | | | | | |Q|
+-+-+-+-+-+-+-+-+
| | |Q| | | | | |
+-+-+-+-+-+-+-+-+
| | | | | | |Q| |
+-+-+-+-+-+-+-+-+
| | | |Q| | | | |
+-+-+-+-+-+-+-+-+
| |Q| | | | | | |
+-+-+-+-+-+-+-+-+
| | | | |Q| | | |
+-+-+-+-+-+-+-+-+
>>> print(count, "solutions in all.")
92 solutions in all.
And run a Knight's Tour on a 10x10 board. Note that there are about
20,000 solutions even on a 6x6 board, so don't dare run this to exhaustion.
>>> k = Knights(10, 10)
>>> LIMIT = 2
>>> count = 0
>>> for x in k.solve():
... count += 1
... if count <= LIMIT:
... print("Solution", count)
... k.printsolution(x)
... else:
... break
Solution 1
+---+---+---+---+---+---+---+---+---+---+
| 1| 58| 27| 34| 3| 40| 29| 10| 5| 8|
+---+---+---+---+---+---+---+---+---+---+
| 26| 35| 2| 57| 28| 33| 4| 7| 30| 11|
+---+---+---+---+---+---+---+---+---+---+
| 59|100| 73| 36| 41| 56| 39| 32| 9| 6|
+---+---+---+---+---+---+---+---+---+---+
| 74| 25| 60| 55| 72| 37| 42| 49| 12| 31|
+---+---+---+---+---+---+---+---+---+---+
| 61| 86| 99| 76| 63| 52| 47| 38| 43| 50|
+---+---+---+---+---+---+---+---+---+---+
| 24| 75| 62| 85| 54| 71| 64| 51| 48| 13|
+---+---+---+---+---+---+---+---+---+---+
| 87| 98| 91| 80| 77| 84| 53| 46| 65| 44|
+---+---+---+---+---+---+---+---+---+---+
| 90| 23| 88| 95| 70| 79| 68| 83| 14| 17|
+---+---+---+---+---+---+---+---+---+---+
| 97| 92| 21| 78| 81| 94| 19| 16| 45| 66|
+---+---+---+---+---+---+---+---+---+---+
| 22| 89| 96| 93| 20| 69| 82| 67| 18| 15|
+---+---+---+---+---+---+---+---+---+---+
Solution 2
+---+---+---+---+---+---+---+---+---+---+
| 1| 58| 27| 34| 3| 40| 29| 10| 5| 8|
+---+---+---+---+---+---+---+---+---+---+
| 26| 35| 2| 57| 28| 33| 4| 7| 30| 11|
+---+---+---+---+---+---+---+---+---+---+
| 59|100| 73| 36| 41| 56| 39| 32| 9| 6|
+---+---+---+---+---+---+---+---+---+---+
| 74| 25| 60| 55| 72| 37| 42| 49| 12| 31|
+---+---+---+---+---+---+---+---+---+---+
| 61| 86| 99| 76| 63| 52| 47| 38| 43| 50|
+---+---+---+---+---+---+---+---+---+---+
| 24| 75| 62| 85| 54| 71| 64| 51| 48| 13|
+---+---+---+---+---+---+---+---+---+---+
| 87| 98| 89| 80| 77| 84| 53| 46| 65| 44|
+---+---+---+---+---+---+---+---+---+---+
| 90| 23| 92| 95| 70| 79| 68| 83| 14| 17|
+---+---+---+---+---+---+---+---+---+---+
| 97| 88| 21| 78| 81| 94| 19| 16| 45| 66|
+---+---+---+---+---+---+---+---+---+---+
| 22| 91| 96| 93| 20| 69| 82| 67| 18| 15|
+---+---+---+---+---+---+---+---+---+---+
"""
weakref_tests = """\
Generators are weakly referencable:
>>> import weakref
>>> def gen():
... yield 'foo!'
...
>>> wr = weakref.ref(gen)
>>> wr() is gen
True
>>> p = weakref.proxy(gen)
Generator-iterators are weakly referencable as well:
>>> gi = gen()
>>> wr = weakref.ref(gi)
>>> wr() is gi
True
>>> p = weakref.proxy(gi)
>>> list(p)
['foo!']
"""
coroutine_tests = """\
Sending a value into a started generator:
>>> def f():
... print((yield 1))
... yield 2
>>> g = f()
>>> next(g)
1
>>> g.send(42)
42
2
Sending a value into a new generator produces a TypeError:
>>> f().send("foo")
Traceback (most recent call last):
...
TypeError: can't send non-None value to a just-started generator
Yield by itself yields None:
>>> def f(): yield
>>> list(f())
[None]
An obscene abuse of a yield expression within a generator expression:
>>> list((yield 21) for i in range(4))
[21, None, 21, None, 21, None, 21, None]
And a more sane, but still weird usage:
>>> def f(): list(i for i in [(yield 26)])
>>> type(f())
<class 'generator'>
A yield expression with augmented assignment.
>>> def coroutine(seq):
... count = 0
... while count < 200:
... count += yield
... seq.append(count)
>>> seq = []
>>> c = coroutine(seq)
>>> next(c)
>>> print(seq)
[]
>>> c.send(10)
>>> print(seq)
[10]
>>> c.send(10)
>>> print(seq)
[10, 20]
>>> c.send(10)
>>> print(seq)
[10, 20, 30]
Check some syntax errors for yield expressions:
>>> f=lambda: (yield 1),(yield 2)
Traceback (most recent call last):
...
SyntaxError: 'yield' outside function
>>> def f(): x = yield = y
Traceback (most recent call last):
...
SyntaxError: assignment to yield expression not possible
>>> def f(): (yield bar) = y
Traceback (most recent call last):
...
SyntaxError: can't assign to yield expression
>>> def f(): (yield bar) += y
Traceback (most recent call last):
...
SyntaxError: can't assign to yield expression
Now check some throw() conditions:
>>> def f():
... while True:
... try:
... print((yield))
... except ValueError as v:
... print("caught ValueError (%s)" % (v))
>>> import sys
>>> g = f()
>>> next(g)
>>> g.throw(ValueError) # type only
caught ValueError ()
>>> g.throw(ValueError("xyz")) # value only
caught ValueError (xyz)
>>> g.throw(ValueError, ValueError(1)) # value+matching type
caught ValueError (1)
>>> g.throw(ValueError, TypeError(1)) # mismatched type, rewrapped
caught ValueError (1)
>>> g.throw(ValueError, ValueError(1), None) # explicit None traceback
caught ValueError (1)
>>> g.throw(ValueError(1), "foo") # bad args
Traceback (most recent call last):
...
TypeError: instance exception may not have a separate value
>>> g.throw(ValueError, "foo", 23) # bad args
Traceback (most recent call last):
...
TypeError: throw() third argument must be a traceback object
>>> g.throw("abc")
Traceback (most recent call last):
...
TypeError: exceptions must be classes or instances deriving from BaseException, not str
>>> g.throw(0)
Traceback (most recent call last):
...
TypeError: exceptions must be classes or instances deriving from BaseException, not int
>>> g.throw(list)
Traceback (most recent call last):
...
TypeError: exceptions must be classes or instances deriving from BaseException, not type
>>> def throw(g,exc):
... try:
... raise exc
... except:
... g.throw(*sys.exc_info())
>>> throw(g,ValueError) # do it with traceback included
caught ValueError ()
>>> g.send(1)
1
>>> throw(g,TypeError) # terminate the generator
Traceback (most recent call last):
...
TypeError
>>> print(g.gi_frame)
None
>>> g.send(2)
Traceback (most recent call last):
...
StopIteration
>>> g.throw(ValueError,6) # throw on closed generator
Traceback (most recent call last):
...
ValueError: 6
>>> f().throw(ValueError,7) # throw on just-opened generator
Traceback (most recent call last):
...
ValueError: 7
Plain "raise" inside a generator should preserve the traceback (#13188).
The traceback should have 3 levels:
- g.throw()
- f()
- 1/0
>>> def f():
... try:
... yield
... except:
... raise
>>> g = f()
>>> try:
... 1/0
... except ZeroDivisionError as v:
... try:
... g.throw(v)
... except Exception as w:
... tb = w.__traceback__
>>> levels = 0
>>> while tb:
... levels += 1
... tb = tb.tb_next
>>> levels
3
Now let's try closing a generator:
>>> def f():
... try: yield
... except GeneratorExit:
... print("exiting")
>>> g = f()
>>> next(g)
>>> g.close()
exiting
>>> g.close() # should be no-op now
>>> f().close() # close on just-opened generator should be fine
>>> def f(): yield # an even simpler generator
>>> f().close() # close before opening
>>> g = f()
>>> next(g)
>>> g.close() # close normally
And finalization:
>>> def f():
... try: yield
... finally:
... print("exiting")
>>> g = f()
>>> next(g)
>>> del g
exiting
GeneratorExit is not caught by except Exception:
>>> def f():
... try: yield
... except Exception:
... print('except')
... finally:
... print('finally')
>>> g = f()
>>> next(g)
>>> del g
finally
Now let's try some ill-behaved generators:
>>> def f():
... try: yield
... except GeneratorExit:
... yield "foo!"
>>> g = f()
>>> next(g)
>>> g.close()
Traceback (most recent call last):
...
RuntimeError: generator ignored GeneratorExit
>>> g.close()
Our ill-behaved code should be invoked during GC:
>>> import sys, io
>>> old, sys.stderr = sys.stderr, io.StringIO()
>>> g = f()
>>> next(g)
>>> del g
>>> "RuntimeError: generator ignored GeneratorExit" in sys.stderr.getvalue()
True
>>> sys.stderr = old
And errors thrown during closing should propagate:
>>> def f():
... try: yield
... except GeneratorExit:
... raise TypeError("fie!")
>>> g = f()
>>> next(g)
>>> g.close()
Traceback (most recent call last):
...
TypeError: fie!
Ensure that various yield expression constructs make their
enclosing function a generator:
>>> def f(): x += yield
>>> type(f())
<class 'generator'>
>>> def f(): x = yield
>>> type(f())
<class 'generator'>
>>> def f(): lambda x=(yield): 1
>>> type(f())
<class 'generator'>
>>> def f(): x=(i for i in (yield) if (yield))
>>> type(f())
<class 'generator'>
>>> def f(d): d[(yield "a")] = d[(yield "b")] = 27
>>> data = [1,2]
>>> g = f(data)
>>> type(g)
<class 'generator'>
>>> g.send(None)
'a'
>>> data
[1, 2]
>>> g.send(0)
'b'
>>> data
[27, 2]
>>> try: g.send(1)
... except StopIteration: pass
>>> data
[27, 27]
"""
refleaks_tests = """
Prior to adding cycle-GC support to itertools.tee, this code would leak
references. We add it to the standard suite so the routine refleak-tests
would trigger if it starts being uncleanable again.
>>> import itertools
>>> def leak():
... class gen:
... def __iter__(self):
... return self
... def __next__(self):
... return self.item
... g = gen()
... head, tail = itertools.tee(g)
... g.item = head
... return head
>>> it = leak()
Make sure to also test the involvement of the tee-internal teedataobject,
which stores returned items.
>>> item = next(it)
This test leaked at one point due to generator finalization/destruction.
It was copied from Lib/test/leakers/test_generator_cycle.py before the file
was removed.
>>> def leak():
... def gen():
... while True:
... yield g
... g = gen()
>>> leak()
This test isn't really generator related, but rather exception-in-cleanup
related. The coroutine tests (above) just happen to cause an exception in
the generator's __del__ (tp_del) method. We can also test for this
explicitly, without generators. We do have to redirect stderr to avoid
printing warnings and to doublecheck that we actually tested what we wanted
to test.
>>> import sys, io
>>> old = sys.stderr
>>> try:
... sys.stderr = io.StringIO()
... class Leaker:
... def __del__(self):
... def invoke(message):
... raise RuntimeError(message)
... invoke("test")
...
... l = Leaker()
... del l
... err = sys.stderr.getvalue().strip()
... "Exception ignored in" in err
... "RuntimeError: test" in err
... "Traceback" in err
... "in invoke" in err
... finally:
... sys.stderr = old
True
True
True
True
These refleak tests should perhaps be in a testfile of their own,
test_generators just happened to be the test that drew these out.
"""
# Doctest suites collected under the module-level __test__ protocol, which
# doctest.testmod()/regrtest discover automatically.
__test__ = dict(
    tut=tutorial_tests,
    pep=pep_tests,
    email=email_tests,
    fun=fun_tests,
    syntax=syntax_tests,
    conjoin=conjoin_tests,
    weakref=weakref_tests,
    coroutine=coroutine_tests,
    refleaks=refleaks_tests,
)
# Magic test name that regrtest.py invokes *after* importing this module.
# This worms around a bootstrap problem.
# Note that doctest and regrtest both look in sys.argv for a "-v" argument,
# so this works as expected in both ways of running regrtest.
def test_main(verbose=None):
    """Run this module's unittest suite, then its doctest suites."""
    from test import support
    from test import test_generators
    support.run_unittest(__name__)
    support.run_doctest(test_generators, verbose)


# This part isn't needed for regrtest, but for running the test directly.
if __name__ == "__main__":
    test_main(1)
# -*- coding: utf-8 -*-
"""Tests for account creation"""
import json
from unittest import skipIf, skipUnless
from datetime import datetime
import ddt
import httpretty
import mock
import six
from six.moves import range
from django.conf import settings
from django.contrib.auth.models import User
from django.core import mail
from django.test import TransactionTestCase
from django.test.client import RequestFactory
from django.test.utils import override_settings
from django.urls import reverse
from pytz import UTC
from social_django.models import Partial, UserSocialAuth
from openedx.core.djangoapps.site_configuration.helpers import get_value
from openedx.core.djangoapps.site_configuration.tests.test_util import with_site_configuration
from openedx.core.djangoapps.user_api.accounts import (
EMAIL_BAD_LENGTH_MSG,
EMAIL_CONFLICT_MSG,
EMAIL_INVALID_MSG,
EMAIL_MAX_LENGTH,
EMAIL_MIN_LENGTH,
NAME_MAX_LENGTH,
REQUIRED_FIELD_CONFIRM_EMAIL_MSG,
USERNAME_MAX_LENGTH,
USERNAME_MIN_LENGTH,
USERNAME_BAD_LENGTH_MSG,
USERNAME_CONFLICT_MSG,
USERNAME_INVALID_CHARS_ASCII,
USERNAME_INVALID_CHARS_UNICODE,
)
from openedx.core.djangoapps.user_api.accounts.api import get_account_settings
from openedx.core.djangoapps.user_api.accounts.tests import testutils
from openedx.core.djangoapps.user_api.accounts.tests.retirement_helpers import ( # pylint: disable=unused-import
RetirementTestCase,
fake_requested_retirement,
setup_retirement_states,
)
from openedx.core.djangoapps.user_api.tests.test_helpers import TestCaseForm
from openedx.core.djangoapps.user_api.tests.test_constants import SORTED_COUNTRIES
from openedx.core.djangoapps.user_api.tests.test_views import UserAPITestCase
from openedx.core.djangoapps.user_authn.views.register import RegistrationValidationThrottle, \
REGISTRATION_FAILURE_LOGGING_FLAG
from openedx.core.djangoapps.waffle_utils.testutils import override_waffle_flag
from openedx.core.djangolib.testing.utils import CacheIsolationTestCase, skip_unless_lms
from openedx.core.lib.api import test_utils
from student.helpers import authenticate_new_user
from student.tests.factories import UserFactory
from third_party_auth.tests.testutil import ThirdPartyAuthTestMixin, simulate_running_pipeline
from third_party_auth.tests.utils import (
ThirdPartyOAuthTestMixin,
ThirdPartyOAuthTestMixinFacebook,
ThirdPartyOAuthTestMixinGoogle
)
from util.password_policy_validators import (
DEFAULT_MAX_PASSWORD_LENGTH,
create_validator_config,
password_validators_instruction_texts,
password_validators_restrictions
)
@ddt.ddt
@skip_unless_lms
class RegistrationViewValidationErrorTest(ThirdPartyAuthTestMixin, UserAPITestCase, RetirementTestCase):
    """
    Tests for catching duplicate email and username validation errors within
    the registration end-points of the User API.

    Every test registers one user and then attempts a second, conflicting
    registration; the endpoint must answer 409 with a per-field error body.
    """
    maxDiff = None

    USERNAME = "bob"
    EMAIL = "bob@example.com"
    PASSWORD = "password"
    NAME = "Bob Smith"
    EDUCATION = "m"
    YEAR_OF_BIRTH = "1998"
    ADDRESS = "123 Fake Street"
    CITY = "Springfield"
    COUNTRY = "us"
    GOALS = "Learn all the things!"

    # Error text the endpoint returns for a conflicting email / username.
    DUPLICATE_EMAIL_MSG = (
        u"It looks like {} belongs to an existing account. "
        "Try again with a different email address."
    )
    DUPLICATE_USERNAME_MSG = (
        u"It looks like {} belongs to an existing account. "
        "Try again with a different username."
    )

    def setUp(self):  # pylint: disable=arguments-differ
        super(RegistrationViewValidationErrorTest, self).setUp()
        self.url = reverse("user_api_registration")

    def _register(self, **overrides):
        """POST the canonical registration payload, with ``overrides`` applied."""
        payload = {
            "email": self.EMAIL,
            "name": self.NAME,
            "username": self.USERNAME,
            "password": self.PASSWORD,
            "honor_code": "true",
        }
        payload.update(overrides)
        return self.client.post(self.url, payload)

    def _email_conflict(self):
        """Expected JSON error fragment for a duplicate email address."""
        return [{"user_message": self.DUPLICATE_EMAIL_MSG.format(self.EMAIL)}]

    def _username_conflict(self):
        """Expected JSON error fragment for a duplicate username."""
        return [{"user_message": self.DUPLICATE_USERNAME_MSG.format(self.USERNAME)}]

    def _assert_conflict(self, response, expected_errors):
        """Assert a 409 response whose JSON body equals ``expected_errors``."""
        self.assertEqual(response.status_code, 409)
        response_json = json.loads(response.content.decode('utf-8'))
        self.assertDictEqual(response_json, expected_errors)

    def test_register_retired_email_validation_error(self):
        """A retired account's email must still block new registrations."""
        self.assertHttpOK(self._register())
        # Initiate retirement for the above user.
        fake_requested_retirement(User.objects.get(username=self.USERNAME))
        # Try to create a second user with the same email address as the retired user.
        response = self._register(name="Someone Else", username="someone_else")
        self._assert_conflict(response, {"email": self._email_conflict()})

    def test_register_duplicate_retired_username_account_validation_error(self):
        """A retired account's username must still block new registrations."""
        self.assertHttpOK(self._register())
        # Initiate retirement for the above user.
        fake_requested_retirement(User.objects.get(username=self.USERNAME))
        with mock.patch('openedx.core.djangoapps.user_authn.views.register.do_create_account') as dummy_do_create_acct:
            # do_create_account should *not* be called - the duplicate retired username
            # should be detected before account creation is called.
            dummy_do_create_acct.side_effect = Exception('do_create_account should *not* have been called!')
            # Try to create a second user with the same username.
            response = self._register(email="someone+else@example.com", name="Someone Else")
        self._assert_conflict(response, {"username": self._username_conflict()})

    def test_register_duplicate_email_validation_error(self):
        """Re-using an active account's email yields a 409 email conflict."""
        self.assertHttpOK(self._register())
        # Try to create a second user with the same email address.
        response = self._register(name="Someone Else", username="someone_else")
        self._assert_conflict(response, {"email": self._email_conflict()})

    @override_waffle_flag(REGISTRATION_FAILURE_LOGGING_FLAG, True)
    def test_registration_failure_logging(self):
        """The conflict response is unchanged when failure logging is enabled."""
        self.assertHttpOK(self._register())
        # Try to create a second user with the same email address.
        response = self._register(name="Someone Else", username="someone_else")
        self._assert_conflict(response, {"email": self._email_conflict()})

    def test_register_duplicate_username_account_validation_error(self):
        """Re-using an active account's username yields a 409 username conflict."""
        self.assertHttpOK(self._register())
        # Try to create a second user with the same username.
        response = self._register(email="someone+else@example.com", name="Someone Else")
        self._assert_conflict(response, {"username": self._username_conflict()})

    def test_register_duplicate_username_and_email_validation_errors(self):
        """Re-using both username and email reports both conflicts at once."""
        self.assertHttpOK(self._register())
        # Try to create a second user with the same username and email.
        response = self._register(name="Someone Else")
        self._assert_conflict(response, {
            "username": self._username_conflict(),
            "email": self._email_conflict(),
        })
@ddt.ddt
@skip_unless_lms
class RegistrationViewTest(ThirdPartyAuthTestMixin, UserAPITestCase):
"""Tests for the registration end-points of the User API. """
maxDiff = None
USERNAME = "bob"
EMAIL = "bob@example.com"
PASSWORD = "password"
NAME = "Bob Smith"
EDUCATION = "m"
YEAR_OF_BIRTH = "1998"
ADDRESS = "123 Fake Street"
CITY = "Springfield"
COUNTRY = "US"
GOALS = "Learn all the things!"
PROFESSION_OPTIONS = [
{
"name": u'--',
"value": u'',
"default": True
},
{
"value": u'software engineer',
"name": u'Software Engineer',
"default": False
},
{
"value": u'teacher',
"name": u'Teacher',
"default": False
},
{
"value": u'other',
"name": u'Other',
"default": False
}
]
SPECIALTY_OPTIONS = [
{
"name": u'--',
"value": u'',
"default": True
},
{
"value": "aerospace",
"name": "Aerospace",
"default": False
},
{
"value": u'early education',
"name": u'Early Education',
"default": False
},
{
"value": u'n/a',
"name": u'N/A',
"default": False
}
]
link_template = u"<a href='/honor' rel='noopener' target='_blank'>{link_label}</a>"
    def setUp(self):  # pylint: disable=arguments-differ
        """Point ``self.url`` at the registration endpoint before each test."""
        super(RegistrationViewTest, self).setUp()
        self.url = reverse("user_api_registration")
    @ddt.data("get", "post")
    def test_auth_disabled(self, method):
        """The endpoint must be reachable without prior authentication."""
        self.assertAuthDisabled(method, self.url)
    def test_allowed_methods(self):
        """Only GET/POST (plus HEAD/OPTIONS) are advertised by the endpoint."""
        self.assertAllowedMethods(self.url, ["GET", "POST", "HEAD", "OPTIONS"])
def test_put_not_allowed(self):
response = self.client.put(self.url)
self.assertHttpMethodNotAllowed(response)
def test_delete_not_allowed(self):
response = self.client.delete(self.url)
self.assertHttpMethodNotAllowed(response)
def test_patch_not_allowed(self):
response = self.client.patch(self.url)
self.assertHttpMethodNotAllowed(response)
    def test_register_form_default_fields(self):
        """With no extra fields configured, the form exposes the four core
        fields: email, name, username, and password."""
        no_extra_fields_setting = {}
        self._assert_reg_field(
            no_extra_fields_setting,
            {
                u"name": u"email",
                u"type": u"email",
                u"required": True,
                u"label": u"Email",
                u"instructions": u"This is what you will use to login.",
                u"restrictions": {
                    "min_length": EMAIL_MIN_LENGTH,
                    "max_length": EMAIL_MAX_LENGTH
                },
            }
        )
        self._assert_reg_field(
            no_extra_fields_setting,
            {
                u"name": u"name",
                u"type": u"text",
                u"required": True,
                u"label": u"Full Name",
                u"instructions": u"This name will be used on any certificates that you earn.",
                u"restrictions": {
                    # NOTE(review): literal 255 here instead of NAME_MAX_LENGTH —
                    # presumably equal; confirm against the accounts constants.
                    "max_length": 255
                },
            }
        )
        self._assert_reg_field(
            no_extra_fields_setting,
            {
                u"name": u"username",
                u"type": u"text",
                u"required": True,
                u"label": u"Public Username",
                u"instructions": u"The name that will identify you in your courses. "
                                 u"It cannot be changed later.",
                u"restrictions": {
                    "min_length": USERNAME_MIN_LENGTH,
                    "max_length": USERNAME_MAX_LENGTH
                },
            }
        )
        self._assert_reg_field(
            no_extra_fields_setting,
            {
                u"placeholder": "",
                u"name": u"password",
                u"type": u"password",
                u"required": True,
                u"label": u"Password",
                u"instructions": password_validators_instruction_texts(),
                u"restrictions": password_validators_restrictions(),
            }
        )
    @override_settings(AUTH_PASSWORD_VALIDATORS=[
        create_validator_config('util.password_policy_validators.MinimumLengthValidator', {'min_length': 2}),
        create_validator_config('util.password_policy_validators.UppercaseValidator', {'min_upper': 3}),
        create_validator_config('util.password_policy_validators.SymbolValidator', {'min_symbol': 1}),
    ])
    def test_register_form_password_complexity(self):
        """The password field's instructions must reflect the overridden
        validator configuration."""
        no_extra_fields_setting = {}
        # First assert using the helper-computed instruction text (which reads
        # the overridden validators) ...
        self._assert_reg_field(
            no_extra_fields_setting,
            {
                u'name': u'password',
                u'label': u'Password',
                u"instructions": password_validators_instruction_texts(),
                u"restrictions": password_validators_restrictions(),
            }
        )
        # ... then pin the literal text expected for this exact validator set,
        # so a regression in the instruction builder itself is caught.
        msg = u'Your password must contain at least 2 characters, including '\
              u'3 uppercase letters & 1 symbol.'
        self._assert_reg_field(
            no_extra_fields_setting,
            {
                u'name': u'password',
                u'label': u'Password',
                u'instructions': msg,
                u"restrictions": password_validators_restrictions(),
            }
        )
    @override_settings(REGISTRATION_EXTENSION_FORM='openedx.core.djangoapps.user_api.tests.test_helpers.TestCaseForm')
    def test_extension_form_fields(self):
        """Fields contributed by a REGISTRATION_EXTENSION_FORM appear in the
        form description alongside the default fields."""
        no_extra_fields_setting = {}
        # Verify other fields didn't disappear for some reason.
        self._assert_reg_field(
            no_extra_fields_setting,
            {
                u"name": u"email",
                u"type": u"email",
                u"required": True,
                u"label": u"Email",
                u"instructions": u"This is what you will use to login.",
                u"restrictions": {
                    "min_length": EMAIL_MIN_LENGTH,
                    "max_length": EMAIL_MAX_LENGTH
                },
            }
        )
        # Select field from the extension form, with placeholder and default.
        self._assert_reg_field(
            no_extra_fields_setting,
            {
                u"name": u"favorite_editor",
                u"type": u"select",
                u"required": False,
                u"label": u"Favorite Editor",
                u"placeholder": u"cat",
                u"defaultValue": u"vim",
                u"errorMessages": {
                    u'required': u'This field is required.',
                    u'invalid_choice': u'Select a valid choice. %(value)s is not one of the available choices.',
                }
            }
        )
        # Required text field from the extension form with custom messages.
        self._assert_reg_field(
            no_extra_fields_setting,
            {
                u"name": u"favorite_movie",
                u"type": u"text",
                u"required": True,
                u"label": u"Fav Flick",
                u"placeholder": None,
                u"defaultValue": None,
                u"errorMessages": {
                    u'required': u'Please tell us your favorite movie.',
                    u'invalid': u"We're pretty sure you made that movie up."
                },
                u"restrictions": {
                    "min_length": TestCaseForm.MOVIE_MIN_LEN,
                    "max_length": TestCaseForm.MOVIE_MAX_LEN,
                }
            }
        )
@ddt.data(
('pk', 'PK', 'Bob123', 'Bob123'),
('Pk', 'PK', None, ''),
('pK', 'PK', 'Bob123@edx.org', 'Bob123_edx_org'),
('PK', 'PK', 'Bob123123123123123123123123123123123123', 'Bob123123123123123123123123123'),
('us', 'US', 'Bob-1231231&23123+1231(2312312312@3123123123', 'Bob-1231231_23123_1231_2312312'),
)
@ddt.unpack
def test_register_form_third_party_auth_running_google(
self,
input_country_code,
expected_country_code,
input_username,
expected_username):
no_extra_fields_setting = {}
country_options = (
[
{
"name": "--",
"value": "",
"default": False
}
] + [
{
"value": country_code,
"name": six.text_type(country_name),
"default": True if country_code == expected_country_code else False
}
for country_code, country_name in SORTED_COUNTRIES
]
)
provider = self.configure_google_provider(enabled=True)
with simulate_running_pipeline(
"openedx.core.djangoapps.user_authn.views.login_form.third_party_auth.pipeline", "google-oauth2",
email="bob@example.com",
fullname="Bob",
username=input_username,
country=input_country_code
):
self._assert_password_field_hidden(no_extra_fields_setting)
self._assert_social_auth_provider_present(no_extra_fields_setting, provider)
# Email should be filled in
self._assert_reg_field(
no_extra_fields_setting,
{
u"name": u"email",
u"defaultValue": u"bob@example.com",
u"type": u"email",
u"required": True,
u"label": u"Email",
u"instructions": u"This is what you will use to login.",
u"restrictions": {
"min_length": EMAIL_MIN_LENGTH,
"max_length": EMAIL_MAX_LENGTH
},
}
)
# Full Name should be filled in
self._assert_reg_field(
no_extra_fields_setting,
{
u"name": u"name",
u"defaultValue": u"Bob",
u"type": u"text",
u"required": True,
u"label": u"Full Name",
u"instructions": u"This name will be used on any certificates that you earn.",
u"restrictions": {
"max_length": NAME_MAX_LENGTH,
}
}
)
# Username should be filled in
self._assert_reg_field(
no_extra_fields_setting,
{
u"name": u"username",
u"defaultValue": expected_username,
u"type": u"text",
u"required": True,
u"label": u"Public Username",
u"instructions": u"The name that will identify you in your courses. "
u"It cannot be changed later.",
u"restrictions": {
"min_length": USERNAME_MIN_LENGTH,
"max_length": USERNAME_MAX_LENGTH
}
}
)
# Country should be filled in.
self._assert_reg_field(
{u"country": u"required"},
{
u"label": u"Country or Region of Residence",
u"name": u"country",
u"defaultValue": expected_country_code,
u"type": u"select",
u"required": True,
u"options": country_options,
u"instructions": u"The country or region where you live.",
u"errorMessages": {
u"required": u"Select your country or region of residence."
},
}
)
    def test_register_form_level_of_education(self):
        """An optional level_of_education field renders as a select with the
        full list of education levels and a blank default."""
        self._assert_reg_field(
            {"level_of_education": "optional"},
            {
                "name": "level_of_education",
                "type": "select",
                "required": False,
                "label": "Highest level of education completed",
                "options": [
                    {"value": "", "name": "--", "default": True},
                    {"value": "p", "name": "Doctorate", "default": False},
                    {"value": "m", "name": "Master's or professional degree", "default": False},
                    {"value": "b", "name": "Bachelor's degree", "default": False},
                    {"value": "a", "name": "Associate degree", "default": False},
                    {"value": "hs", "name": "Secondary/high school", "default": False},
                    {"value": "jhs", "name": "Junior secondary/junior high/middle school", "default": False},
                    {"value": "el", "name": "Elementary/primary school", "default": False},
                    {"value": "none", "name": "No formal education", "default": False},
                    {"value": "other", "name": "Other education", "default": False},
                ],
                "errorMessages": {
                    "required": "Select the highest level of education you have completed."
                }
            }
        )
    @mock.patch('openedx.core.djangoapps.user_authn.views.registration_form._')
    def test_register_form_level_of_education_translations(self, fake_gettext):
        """Option labels (but not stored values) pass through gettext."""
        # Make translation visible by appending a marker to every string.
        fake_gettext.side_effect = lambda text: text + ' TRANSLATED'
        self._assert_reg_field(
            {"level_of_education": "optional"},
            {
                "name": "level_of_education",
                "type": "select",
                "required": False,
                "label": "Highest level of education completed TRANSLATED",
                "options": [
                    {"value": "", "name": "--", "default": True},
                    {"value": "p", "name": "Doctorate TRANSLATED", "default": False},
                    {"value": "m", "name": "Master's or professional degree TRANSLATED", "default": False},
                    {"value": "b", "name": "Bachelor's degree TRANSLATED", "default": False},
                    {"value": "a", "name": "Associate degree TRANSLATED", "default": False},
                    {"value": "hs", "name": "Secondary/high school TRANSLATED", "default": False},
                    {"value": "jhs", "name": "Junior secondary/junior high/middle school TRANSLATED", "default": False},
                    {"value": "el", "name": "Elementary/primary school TRANSLATED", "default": False},
                    {"value": "none", "name": "No formal education TRANSLATED", "default": False},
                    {"value": "other", "name": "Other education TRANSLATED", "default": False},
                ],
                "errorMessages": {
                    # NOTE(review): the error message is asserted untranslated here —
                    # presumably translated elsewhere; confirm intent.
                    "required": "Select the highest level of education you have completed."
                }
            }
        )
    def test_register_form_gender(self):
        """An optional gender field renders as a select with a blank default."""
        self._assert_reg_field(
            {"gender": "optional"},
            {
                "name": "gender",
                "type": "select",
                "required": False,
                "label": "Gender",
                "options": [
                    {"value": "", "name": "--", "default": True},
                    {"value": "m", "name": "Male", "default": False},
                    {"value": "f", "name": "Female", "default": False},
                    {"value": "o", "name": "Other/Prefer Not to Say", "default": False},
                ],
            }
        )
    @mock.patch('openedx.core.djangoapps.user_authn.views.registration_form._')
    def test_register_form_gender_translations(self, fake_gettext):
        """Gender option labels (but not stored values) pass through gettext."""
        # Make translation visible by appending a marker to every string.
        fake_gettext.side_effect = lambda text: text + ' TRANSLATED'
        self._assert_reg_field(
            {"gender": "optional"},
            {
                "name": "gender",
                "type": "select",
                "required": False,
                "label": "Gender TRANSLATED",
                "options": [
                    {"value": "", "name": "--", "default": True},
                    {"value": "m", "name": "Male TRANSLATED", "default": False},
                    {"value": "f", "name": "Female TRANSLATED", "default": False},
                    {"value": "o", "name": "Other/Prefer Not to Say TRANSLATED", "default": False},
                ],
            }
        )
def test_register_form_year_of_birth(self):
this_year = datetime.now(UTC).year
year_options = (
[
{
"value": "",
"name": "--",
"default": True
}
] + [
{
"value": six.text_type(year),
"name": six.text_type(year),
"default": False
}
for year in range(this_year, this_year - 120, -1)
]
)
self._assert_reg_field(
{"year_of_birth": "optional"},
{
"name": "year_of_birth",
"type": "select",
"required": False,
"label": "Year of birth",
"options": year_options,
}
)
    def test_register_form_profession_without_profession_options(self):
        """Without site-configured options, profession is a free-text field."""
        self._assert_reg_field(
            {"profession": "required"},
            {
                "name": "profession",
                "type": "text",
                "required": True,
                "label": "Profession",
                "errorMessages": {
                    "required": "Enter your profession."
                }
            }
        )
    @with_site_configuration(
        configuration={
            "EXTRA_FIELD_OPTIONS": {"profession": ["Software Engineer", "Teacher", "Other"]}
        }
    )
    def test_register_form_profession_with_profession_options(self):
        """With site-configured options, profession becomes a select whose
        options match PROFESSION_OPTIONS."""
        self._assert_reg_field(
            {"profession": "required"},
            {
                "name": "profession",
                "type": "select",
                "required": True,
                "label": "Profession",
                "options": self.PROFESSION_OPTIONS,
                "errorMessages": {
                    "required": "Select your profession."
                },
            }
        )
    def test_register_form_specialty_without_specialty_options(self):
        """Without site-configured options, specialty is a free-text field."""
        self._assert_reg_field(
            {"specialty": "required"},
            {
                "name": "specialty",
                "type": "text",
                "required": True,
                "label": "Specialty",
                "errorMessages": {
                    "required": "Enter your specialty."
                }
            }
        )
    @with_site_configuration(
        configuration={
            "EXTRA_FIELD_OPTIONS": {"specialty": ["Aerospace", "Early Education", "N/A"]}
        }
    )
    def test_register_form_specialty_with_specialty_options(self):
        """With site-configured options, specialty becomes a select whose
        options match SPECIALTY_OPTIONS."""
        self._assert_reg_field(
            {"specialty": "required"},
            {
                "name": "specialty",
                "type": "select",
                "required": True,
                "label": "Specialty",
                "options": self.SPECIALTY_OPTIONS,
                "errorMessages": {
                    "required": "Select your specialty."
                },
            }
        )
    def test_registration_form_mailing_address(self):
        """An optional mailing_address field renders as a textarea."""
        self._assert_reg_field(
            {"mailing_address": "optional"},
            {
                "name": "mailing_address",
                "type": "textarea",
                "required": False,
                "label": "Mailing address",
                "errorMessages": {
                    "required": "Enter your mailing address."
                }
            }
        )
    def test_registration_form_goals(self):
        """An optional goals field renders as a textarea whose label embeds
        the configured platform name."""
        self._assert_reg_field(
            {"goals": "optional"},
            {
                "name": "goals",
                "type": "textarea",
                "required": False,
                "label": u"Tell us why you're interested in {platform_name}".format(
                    platform_name=settings.PLATFORM_NAME
                ),
                "errorMessages": {
                    "required": "Tell us your goals."
                }
            }
        )
    def test_registration_form_city(self):
        """An optional city field renders as a plain text input."""
        self._assert_reg_field(
            {"city": "optional"},
            {
                "name": "city",
                "type": "text",
                "required": False,
                "label": "City",
                "errorMessages": {
                    "required": "Enter your city."
                }
            }
        )
    def test_registration_form_state(self):
        """An optional state field renders as a plain text input."""
        self._assert_reg_field(
            {"state": "optional"},
            {
                "name": "state",
                "type": "text",
                "required": False,
                "label": "State/Province/Region",
            }
        )
def test_registration_form_country(self):
country_options = (
[
{
"name": "--",
"value": "",
"default": True
}
] + [
{
"value": country_code,
"name": six.text_type(country_name),
"default": False
}
for country_code, country_name in SORTED_COUNTRIES
]
)
self._assert_reg_field(
{"country": "required"},
{
"label": "Country or Region of Residence",
"name": "country",
"type": "select",
"instructions": "The country or region where you live.",
"required": True,
"options": country_options,
"errorMessages": {
"required": "Select your country or region of residence."
},
}
)
    def test_registration_form_confirm_email(self):
        """A required confirm_email field renders as a text input."""
        self._assert_reg_field(
            {"confirm_email": "required"},
            {
                "name": "confirm_email",
                "type": "text",
                "required": True,
                "label": "Confirm Email",
                "errorMessages": {
                    # NOTE(review): the "required" message reads like a mismatch
                    # message — presumably intentional upstream; confirm.
                    "required": "The email addresses do not match.",
                }
            }
        )
    @override_settings(
        MKTG_URLS={"ROOT": "https://www.test.com/", "HONOR": "honor"},
    )
    @mock.patch.dict(settings.FEATURES, {"ENABLE_MKTG_SITE": True})
    def test_registration_honor_code_mktg_site_enabled(self):
        """With the marketing site enabled, the combined honor-code field
        links ToS/Honor Code to the marketing site and Privacy Policy to '#'."""
        link_template = "<a href='https://www.test.com/honor' rel='noopener' target='_blank'>{link_label}</a>"
        link_template2 = u"<a href='#' rel='noopener' target='_blank'>{link_label}</a>"
        link_label = "Terms of Service and Honor Code"
        link_label2 = "Privacy Policy"
        self._assert_reg_field(
            {"honor_code": "required"},
            {
                "label": (u"By creating an account, you agree to the {spacing}"
                          u"{link_label} {spacing}"
                          u"and you acknowledge that {platform_name} and each Member process your "
                          u"personal data in accordance {spacing}"
                          u"with the {link_label2}.").format(
                    platform_name=settings.PLATFORM_NAME,
                    link_label=link_template.format(link_label=link_label),
                    link_label2=link_template2.format(link_label=link_label2),
                    # The rendered label contains runs of 18 spaces between segments.
                    spacing=' ' * 18
                ),
                "name": "honor_code",
                "defaultValue": False,
                "type": "plaintext",
                "required": True,
                "errorMessages": {
                    "required": u"You must agree to the {platform_name} {link_label}".format(
                        platform_name=settings.PLATFORM_NAME,
                        link_label=link_label
                    )
                }
            }
        )
    @override_settings(MKTG_URLS_LINK_MAP={"HONOR": "honor"})
    @mock.patch.dict(settings.FEATURES, {"ENABLE_MKTG_SITE": False})
    def test_registration_honor_code_mktg_site_disabled(self):
        """With the marketing site disabled, the combined honor-code field
        links ToS/Honor Code to local '/honor' and Privacy Policy to '/privacy'."""
        # NOTE(review): despite its name, this local template is the *privacy*
        # link; the honor-code link uses self.link_template ('/honor') below.
        link_template = "<a href='/privacy' rel='noopener' target='_blank'>{link_label}</a>"
        link_label = "Terms of Service and Honor Code"
        link_label2 = "Privacy Policy"
        self._assert_reg_field(
            {"honor_code": "required"},
            {
                "label": (u"By creating an account, you agree to the {spacing}"
                          u"{link_label} {spacing}"
                          u"and you acknowledge that {platform_name} and each Member process your "
                          u"personal data in accordance {spacing}"
                          u"with the {link_label2}.").format(
                    platform_name=settings.PLATFORM_NAME,
                    link_label=self.link_template.format(link_label=link_label),
                    link_label2=link_template.format(link_label=link_label2),
                    spacing=' ' * 18
                ),
                "name": "honor_code",
                "defaultValue": False,
                "type": "plaintext",
                "required": True,
                "errorMessages": {
                    "required": u"You must agree to the {platform_name} {link_label}".format(
                        platform_name=settings.PLATFORM_NAME,
                        link_label=link_label
                    )
                }
            }
        )
    @override_settings(MKTG_URLS={
        "ROOT": "https://www.test.com/",
        "HONOR": "honor",
        "TOS": "tos",
    })
    @mock.patch.dict(settings.FEATURES, {"ENABLE_MKTG_SITE": True})
    def test_registration_separate_terms_of_service_mktg_site_enabled(self):
        """When terms_of_service is configured separately (marketing site on),
        honor code and ToS render as two distinct checkboxes."""
        # Honor code field should say ONLY honor code,
        # not "terms of service and honor code"
        link_label = 'Honor Code'
        link_template = u"<a href='https://www.test.com/honor' rel='noopener' target='_blank'>{link_label}</a>"
        self._assert_reg_field(
            {"honor_code": "required", "terms_of_service": "required"},
            {
                "label": u"I agree to the {platform_name} {link_label}".format(
                    platform_name=settings.PLATFORM_NAME,
                    link_label=link_template.format(link_label=link_label)
                ),
                "name": "honor_code",
                "defaultValue": False,
                "type": "checkbox",
                "required": True,
                "errorMessages": {
                    "required": u"You must agree to the {platform_name} {link_label}".format(
                        platform_name=settings.PLATFORM_NAME,
                        link_label=link_label
                    )
                }
            }
        )
        # Terms of service field should also be present
        link_label = "Terms of Service"
        link_template = u"<a href='https://www.test.com/tos' rel='noopener' target='_blank'>{link_label}</a>"
        self._assert_reg_field(
            {"honor_code": "required", "terms_of_service": "required"},
            {
                "label": u"I agree to the {platform_name} {link_label}".format(
                    platform_name=settings.PLATFORM_NAME,
                    link_label=link_template.format(link_label=link_label)
                ),
                "name": "terms_of_service",
                "defaultValue": False,
                "type": "checkbox",
                "required": True,
                "errorMessages": {
                    "required": u"You must agree to the {platform_name} {link_label}".format(
                        platform_name=settings.PLATFORM_NAME,
                        link_label=link_label
                    )
                }
            }
        )
    @override_settings(MKTG_URLS_LINK_MAP={"HONOR": "honor", "TOS": "tos"})
    @mock.patch.dict(settings.FEATURES, {"ENABLE_MKTG_SITE": False})
    def test_registration_separate_terms_of_service_mktg_site_disabled(self):
        """When terms_of_service is configured separately (marketing site off),
        honor code and ToS render as two checkboxes with local links."""
        # Honor code field should say ONLY honor code,
        # not "terms of service and honor code"
        link_label = 'Honor Code'
        self._assert_reg_field(
            {"honor_code": "required", "terms_of_service": "required"},
            {
                "label": u"I agree to the {platform_name} {link_label}".format(
                    platform_name=settings.PLATFORM_NAME,
                    link_label=self.link_template.format(link_label=link_label)
                ),
                "name": "honor_code",
                "defaultValue": False,
                "type": "checkbox",
                "required": True,
                "errorMessages": {
                    "required": u"You must agree to the {platform_name} Honor Code".format(
                        platform_name=settings.PLATFORM_NAME
                    )
                }
            }
        )
        link_label = 'Terms of Service'
        # Terms of service field should also be present
        link_template = u"<a href='/tos' rel='noopener' target='_blank'>{link_label}</a>"
        self._assert_reg_field(
            {"honor_code": "required", "terms_of_service": "required"},
            {
                "label": u"I agree to the {platform_name} {link_label}".format(
                    platform_name=settings.PLATFORM_NAME,
                    link_label=link_template.format(link_label=link_label)
                ),
                "name": "terms_of_service",
                "defaultValue": False,
                "type": "checkbox",
                "required": True,
                "errorMessages": {
                    "required": u"You must agree to the {platform_name} Terms of Service".format(
                        platform_name=settings.PLATFORM_NAME
                    )
                }
            }
        )
    @override_settings(
        REGISTRATION_EXTRA_FIELDS={
            "level_of_education": "optional",
            "gender": "optional",
            "year_of_birth": "optional",
            "mailing_address": "optional",
            "goals": "optional",
            "city": "optional",
            "state": "optional",
            "country": "required",
            "honor_code": "required",
            "confirm_email": "required",
        },
        REGISTRATION_EXTENSION_FORM='openedx.core.djangoapps.user_api.tests.test_helpers.TestCaseForm',
    )
    def test_field_order(self):
        """Without REGISTRATION_FIELD_ORDER, fields render in the default
        order: core fields, extension-form fields, then extra fields."""
        response = self.client.get(self.url)
        self.assertHttpOK(response)
        # Verify that all fields render in the correct order
        form_desc = json.loads(response.content.decode('utf-8'))
        field_names = [field["name"] for field in form_desc["fields"]]
        self.assertEqual(field_names, [
            "email",
            "name",
            "username",
            "password",
            "favorite_movie",
            "favorite_editor",
            "confirm_email",
            "city",
            "state",
            "country",
            "gender",
            "year_of_birth",
            "level_of_education",
            "mailing_address",
            "goals",
            "honor_code",
        ])
    @override_settings(
        REGISTRATION_EXTRA_FIELDS={
            "level_of_education": "optional",
            "gender": "optional",
            "year_of_birth": "optional",
            "mailing_address": "optional",
            "goals": "optional",
            "city": "optional",
            "state": "optional",
            "country": "required",
            "honor_code": "required",
            "confirm_email": "required",
        },
        REGISTRATION_FIELD_ORDER=[
            "name",
            "username",
            "email",
            "confirm_email",
            "password",
            "first_name",
            "last_name",
            "city",
            "state",
            "country",
            "gender",
            "year_of_birth",
            "level_of_education",
            "company",
            "title",
            "job_title",
            "mailing_address",
            "goals",
            "honor_code",
            "terms_of_service",
            "specialty",
            "profession",
        ],
    )
    def test_field_order_override(self):
        """A complete REGISTRATION_FIELD_ORDER reorders the rendered fields;
        names in the ordering that are not enabled are simply skipped."""
        response = self.client.get(self.url)
        self.assertHttpOK(response)
        # Verify that all fields render in the correct order
        form_desc = json.loads(response.content.decode('utf-8'))
        field_names = [field["name"] for field in form_desc["fields"]]
        self.assertEqual(field_names, [
            "name",
            "username",
            "email",
            "confirm_email",
            "password",
            "city",
            "state",
            "country",
            "gender",
            "year_of_birth",
            "level_of_education",
            "mailing_address",
            "goals",
            "honor_code",
        ])
    @override_settings(
        REGISTRATION_EXTRA_FIELDS={
            "level_of_education": "optional",
            "gender": "optional",
            "year_of_birth": "optional",
            "mailing_address": "optional",
            "goals": "optional",
            "city": "optional",
            "state": "optional",
            "country": "required",
            "honor_code": "required",
            "confirm_email": "required",
        },
        REGISTRATION_EXTENSION_FORM='openedx.core.djangoapps.user_api.tests.test_helpers.TestCaseForm',
        REGISTRATION_FIELD_ORDER=[
            "name",
            "confirm_email",
            "password",
            "first_name",
            "last_name",
            "gender",
            "year_of_birth",
            "level_of_education",
            "company",
            "title",
            "mailing_address",
            "goals",
            "honor_code",
            "terms_of_service",
        ],
    )
    def test_field_order_invalid_override(self):
        """An incomplete REGISTRATION_FIELD_ORDER (missing enabled fields) is
        ignored and the default ordering is used instead."""
        response = self.client.get(self.url)
        self.assertHttpOK(response)
        # Verify that all fields render in the correct order
        form_desc = json.loads(response.content.decode('utf-8'))
        field_names = [field["name"] for field in form_desc["fields"]]
        self.assertEqual(field_names, [
            "email",
            "name",
            "username",
            "password",
            "favorite_movie",
            "favorite_editor",
            "confirm_email",
            "city",
            "state",
            "country",
            "gender",
            "year_of_birth",
            "level_of_education",
            "mailing_address",
            "goals",
            "honor_code",
        ])
def test_register(self):
# Create a new registration
response = self.client.post(self.url, {
"email": self.EMAIL,
"name": self.NAME,
"username": self.USERNAME,
"password": self.PASSWORD,
"honor_code": "true",
})
self.assertHttpOK(response)
self.assertIn(settings.EDXMKTG_LOGGED_IN_COOKIE_NAME, self.client.cookies)
self.assertIn(settings.EDXMKTG_USER_INFO_COOKIE_NAME, self.client.cookies)
user = User.objects.get(username=self.USERNAME)
request = RequestFactory().get('/url')
request.user = user
account_settings = get_account_settings(request)[0]
self.assertEqual(self.USERNAME, account_settings["username"])
self.assertEqual(self.EMAIL, account_settings["email"])
self.assertFalse(account_settings["is_active"])
self.assertEqual(self.NAME, account_settings["name"])
# Verify that we've been logged in
# by trying to access a page that requires authentication
response = self.client.get(reverse("dashboard"))
self.assertHttpOK(response)
    @override_settings(REGISTRATION_EXTRA_FIELDS={
        "level_of_education": "optional",
        "gender": "optional",
        "year_of_birth": "optional",
        "mailing_address": "optional",
        "goals": "optional",
        "country": "required",
    })
    def test_register_with_profile_info(self):
        """
        Registering with the optional demographic fields populated stores each
        value on the user's account settings.
        """
        # Register, providing lots of demographic info
        response = self.client.post(self.url, {
            "email": self.EMAIL,
            "name": self.NAME,
            "username": self.USERNAME,
            "password": self.PASSWORD,
            "level_of_education": self.EDUCATION,
            "mailing_address": self.ADDRESS,
            "year_of_birth": self.YEAR_OF_BIRTH,
            "goals": self.GOALS,
            "country": self.COUNTRY,
            "honor_code": "true",
        })
        self.assertHttpOK(response)
        # Verify the user's account
        user = User.objects.get(username=self.USERNAME)
        request = RequestFactory().get('/url')
        request.user = user
        account_settings = get_account_settings(request)[0]
        self.assertEqual(account_settings["level_of_education"], self.EDUCATION)
        self.assertEqual(account_settings["mailing_address"], self.ADDRESS)
        # year_of_birth is posted as a string but stored as an integer.
        self.assertEqual(account_settings["year_of_birth"], int(self.YEAR_OF_BIRTH))
        self.assertEqual(account_settings["goals"], self.GOALS)
        self.assertEqual(account_settings["country"], self.COUNTRY)
    @override_settings(REGISTRATION_EXTENSION_FORM='openedx.core.djangoapps.user_api.tests.test_helpers.TestCaseForm')
    @mock.patch('openedx.core.djangoapps.user_api.tests.test_helpers.TestCaseForm.DUMMY_STORAGE', new_callable=dict)
    @mock.patch(
        'openedx.core.djangoapps.user_api.tests.test_helpers.DummyRegistrationExtensionModel',
    )
    def test_with_extended_form(self, dummy_model, storage_dict):
        """
        Registration with an extension form saves the extra fields through the
        form's storage and associates the extension model with the new user.

        Note: mock.patch decorators are applied bottom-up, so ``dummy_model``
        comes from the DummyRegistrationExtensionModel patch and
        ``storage_dict`` from the DUMMY_STORAGE patch.
        """
        dummy_model_instance = mock.Mock()
        dummy_model.return_value = dummy_model_instance
        # Create a new registration
        self.assertEqual(storage_dict, {})
        response = self.client.post(self.url, {
            "email": self.EMAIL,
            "name": self.NAME,
            "username": self.USERNAME,
            "password": self.PASSWORD,
            "honor_code": "true",
            "favorite_movie": "Inception",
            "favorite_editor": "cat",
        })
        self.assertHttpOK(response)
        self.assertIn(settings.EDXMKTG_LOGGED_IN_COOKIE_NAME, self.client.cookies)
        self.assertIn(settings.EDXMKTG_USER_INFO_COOKIE_NAME, self.client.cookies)
        user = User.objects.get(username=self.USERNAME)
        request = RequestFactory().get('/url')
        request.user = user
        account_settings = get_account_settings(request)[0]
        self.assertEqual(self.USERNAME, account_settings["username"])
        self.assertEqual(self.EMAIL, account_settings["email"])
        self.assertFalse(account_settings["is_active"])
        self.assertEqual(self.NAME, account_settings["name"])
        # The extension form's save() must have written the custom fields.
        self.assertEqual(storage_dict, {'favorite_movie': "Inception", "favorite_editor": "cat"})
        self.assertEqual(dummy_model_instance.user, user)
        # Verify that we've been logged in
        # by trying to access a page that requires authentication
        response = self.client.get(reverse("dashboard"))
        self.assertHttpOK(response)
    def test_activation_email(self):
        """
        A successful registration sends exactly one activation email to the
        registered address, with the platform name in subject and body.
        """
        # Register, which should trigger an activation email
        response = self.client.post(self.url, {
            "email": self.EMAIL,
            "name": self.NAME,
            "username": self.USERNAME,
            "password": self.PASSWORD,
            "honor_code": "true",
        })
        self.assertHttpOK(response)
        # Verify that the activation email was sent
        self.assertEqual(len(mail.outbox), 1)
        sent_email = mail.outbox[0]
        self.assertEqual(sent_email.to, [self.EMAIL])
        self.assertEqual(
            sent_email.subject,
            u"Action Required: Activate your {platform} account".format(platform=settings.PLATFORM_NAME)
        )
        self.assertIn(
            u"high-quality {platform} courses".format(platform=settings.PLATFORM_NAME),
            sent_email.body
        )
@ddt.data(
{"email": ""},
{"email": "invalid"},
{"name": ""},
{"username": ""},
{"username": "a"},
{"password": ""},
)
def test_register_invalid_input(self, invalid_fields):
# Initially, the field values are all valid
data = {
"email": self.EMAIL,
"name": self.NAME,
"username": self.USERNAME,
"password": self.PASSWORD,
}
# Override the valid fields, making the input invalid
data.update(invalid_fields)
# Attempt to create the account, expecting an error response
response = self.client.post(self.url, data)
self.assertHttpBadRequest(response)
@override_settings(REGISTRATION_EXTRA_FIELDS={"country": "required"})
@ddt.data("email", "name", "username", "password", "country")
def test_register_missing_required_field(self, missing_field):
data = {
"email": self.EMAIL,
"name": self.NAME,
"username": self.USERNAME,
"password": self.PASSWORD,
"country": self.COUNTRY,
}
del data[missing_field]
# Send a request missing a field
response = self.client.post(self.url, data)
self.assertHttpBadRequest(response)
def test_register_duplicate_email(self):
# Register the first user
response = self.client.post(self.url, {
"email": self.EMAIL,
"name": self.NAME,
"username": self.USERNAME,
"password": self.PASSWORD,
"honor_code": "true",
})
self.assertHttpOK(response)
# Try to create a second user with the same email address
response = self.client.post(self.url, {
"email": self.EMAIL,
"name": "Someone Else",
"username": "someone_else",
"password": self.PASSWORD,
"honor_code": "true",
})
self.assertEqual(response.status_code, 409)
response_json = json.loads(response.content.decode('utf-8'))
self.assertDictEqual(
response_json,
{
"email": [{
"user_message": (
u"It looks like {} belongs to an existing account. "
"Try again with a different email address."
).format(
self.EMAIL
)
}]
}
)
def test_register_duplicate_username(self):
# Register the first user
response = self.client.post(self.url, {
"email": self.EMAIL,
"name": self.NAME,
"username": self.USERNAME,
"password": self.PASSWORD,
"honor_code": "true",
})
self.assertHttpOK(response)
# Try to create a second user with the same username
response = self.client.post(self.url, {
"email": "someone+else@example.com",
"name": "Someone Else",
"username": self.USERNAME,
"password": self.PASSWORD,
"honor_code": "true",
})
self.assertEqual(response.status_code, 409)
response_json = json.loads(response.content.decode('utf-8'))
self.assertDictEqual(
response_json,
{
"username": [{
"user_message": (
u"It looks like {} belongs to an existing account. "
"Try again with a different username."
).format(
self.USERNAME
)
}]
}
)
def test_register_duplicate_username_and_email(self):
# Register the first user
response = self.client.post(self.url, {
"email": self.EMAIL,
"name": self.NAME,
"username": self.USERNAME,
"password": self.PASSWORD,
"honor_code": "true",
})
self.assertHttpOK(response)
# Try to create a second user with the same username
response = self.client.post(self.url, {
"email": self.EMAIL,
"name": "Someone Else",
"username": self.USERNAME,
"password": self.PASSWORD,
"honor_code": "true",
})
self.assertEqual(response.status_code, 409)
response_json = json.loads(response.content.decode('utf-8'))
self.assertDictEqual(
response_json,
{
"username": [{
"user_message": (
u"It looks like {} belongs to an existing account. "
"Try again with a different username."
).format(
self.USERNAME
)
}],
"email": [{
"user_message": (
u"It looks like {} belongs to an existing account. "
"Try again with a different email address."
).format(
self.EMAIL
)
}]
}
)
@override_settings(REGISTRATION_EXTRA_FIELDS={"honor_code": "hidden", "terms_of_service": "hidden"})
def test_register_hidden_honor_code_and_terms_of_service(self):
response = self.client.post(self.url, {
"email": self.EMAIL,
"name": self.NAME,
"username": self.USERNAME,
"password": self.PASSWORD,
})
self.assertHttpOK(response)
def test_missing_fields(self):
response = self.client.post(
self.url,
{
"email": self.EMAIL,
"name": self.NAME,
"honor_code": "true",
}
)
self.assertEqual(response.status_code, 400)
response_json = json.loads(response.content.decode('utf-8'))
self.assertDictEqual(
response_json,
{
u"username": [{u"user_message": USERNAME_BAD_LENGTH_MSG}],
u"password": [{u"user_message": u"This field is required."}],
}
)
    def test_country_overrides(self):
        """Test that overridden countries are available in country list."""
        # Retrieve the registration form description
        # NOTE: the assertions stay inside the override context so the
        # country list is rendered with the overridden settings in effect.
        with override_settings(REGISTRATION_EXTRA_FIELDS={"country": "required"}):
            response = self.client.get(self.url)
            self.assertHttpOK(response)
            # 'Kosovo' is presumably one of the overridden country names —
            # TODO confirm against the country-override configuration.
            self.assertContains(response, 'Kosovo')
    def test_password_with_spaces(self):
        """Test that spaces are stripped correctly from password while creating an account."""
        unstripped_password = self.PASSWORD + ' '
        # Wrap (not replace) the real authenticate_new_user so registration
        # still works while the call arguments are recorded.
        with mock.patch(
            'openedx.core.djangoapps.user_authn.views.register.authenticate_new_user',
            wraps=authenticate_new_user
        ) as mock_authenticate_new_user:
            self.client.post(self.url, {
                "email": self.EMAIL,
                "name": self.NAME,
                "username": self.USERNAME,
                "password": unstripped_password,
                "honor_code": "true",
            })
            # Authentication must receive the password with whitespace removed.
            mock_authenticate_new_user.assert_called_with(
                mock_authenticate_new_user.call_args[0][0],  # get request object from mock
                self.USERNAME,
                unstripped_password.strip()
            )
    def test_create_account_not_allowed(self):
        """
        Test case to check user creation is forbidden when ALLOW_PUBLIC_ACCOUNT_CREATION feature flag is turned off
        """
        def _side_effect_for_get_value(value, default=None):
            """
            returns a side_effect with given return value for a given value
            """
            # Only the ALLOW_PUBLIC_ACCOUNT_CREATION flag is overridden; every
            # other setting falls through to the real get_value.
            if value == 'ALLOW_PUBLIC_ACCOUNT_CREATION':
                return False
            else:
                return get_value(value, default)
        with mock.patch('openedx.core.djangoapps.site_configuration.helpers.get_value') as mock_get_value:
            mock_get_value.side_effect = _side_effect_for_get_value
            response = self.client.post(self.url, {"email": self.EMAIL, "username": self.USERNAME})
            self.assertEqual(response.status_code, 403)
def _assert_fields_match(self, actual_field, expected_field):
"""
Assert that the actual field and the expected field values match.
"""
self.assertIsNot(
actual_field, None,
msg=u"Could not find field {name}".format(name=expected_field["name"])
)
for key in expected_field:
self.assertEqual(
actual_field[key], expected_field[key],
msg=u"Expected {expected} for {key} but got {actual} instead".format(
key=key,
actual=actual_field[key],
expected=expected_field[key]
)
)
def _populate_always_present_fields(self, field):
"""
Populate field dictionary with keys and values that are always present.
"""
defaults = [
("label", ""),
("instructions", ""),
("placeholder", ""),
("defaultValue", ""),
("restrictions", {}),
("errorMessages", {}),
]
field.update({
key: value
for key, value in defaults if key not in field
})
def _assert_reg_field(self, extra_fields_setting, expected_field):
"""
Retrieve the registration form description from the server and
verify that it contains the expected field.
Args:
extra_fields_setting (dict): Override the Django setting controlling
which extra fields are displayed in the form.
expected_field (dict): The field definition we expect to find in the form.
Raises:
AssertionError
"""
# Add in fields that are always present
self._populate_always_present_fields(expected_field)
# Retrieve the registration form description
with override_settings(REGISTRATION_EXTRA_FIELDS=extra_fields_setting):
response = self.client.get(self.url)
self.assertHttpOK(response)
# Verify that the form description matches what we'd expect
form_desc = json.loads(response.content.decode('utf-8'))
actual_field = None
for field in form_desc["fields"]:
if field["name"] == expected_field["name"]:
actual_field = field
break
self._assert_fields_match(actual_field, expected_field)
def _assert_password_field_hidden(self, field_settings):
self._assert_reg_field(field_settings, {
"name": "password",
"type": "hidden",
"required": False
})
def _assert_social_auth_provider_present(self, field_settings, backend):
self._assert_reg_field(field_settings, {
"name": "social_auth_provider",
"type": "hidden",
"required": False,
"defaultValue": backend.name
})
@httpretty.activate
@ddt.ddt
class ThirdPartyRegistrationTestMixin(ThirdPartyOAuthTestMixin, CacheIsolationTestCase):
    """
    Tests for the User API registration endpoint with 3rd party authentication.
    """
    # Subclasses supply the concrete OAuth backend; no user exists up front.
    CREATE_USER = False
    ENABLED_CACHES = ['default']
    # Mixin only: concrete subclasses opt in by setting __test__ = True.
    __test__ = False
    def setUp(self):
        super(ThirdPartyRegistrationTestMixin, self).setUp()
        self.url = reverse('user_api_registration')
    def tearDown(self):
        # Clear any partial social-auth pipeline state between tests.
        super(ThirdPartyRegistrationTestMixin, self).tearDown()
        Partial.objects.all().delete()
    def data(self, user=None):
        """Returns the request data for the endpoint."""
        return {
            "provider": self.BACKEND,
            "access_token": self.access_token,
            "client_id": self.client_id,
            "honor_code": "true",
            "country": "US",
            "username": user.username if user else "test_username",
            "name": user.first_name if user else "test name",
            "email": user.email if user else "test@test.com"
        }
    def _assert_existing_user_error(self, response):
        """Assert that the given response was an error with the given status_code and error code."""
        self.assertEqual(response.status_code, 409)
        errors = json.loads(response.content.decode('utf-8'))
        # Both the username and email conflicts must be reported.
        for conflict_attribute in ["username", "email"]:
            self.assertIn(conflict_attribute, errors)
            self.assertIn("belongs to an existing account", errors[conflict_attribute][0]["user_message"])
    def _assert_access_token_error(self, response, expected_error_message):
        """Assert that the given response was an error for the access_token field with the given error message."""
        self.assertEqual(response.status_code, 400)
        response_json = json.loads(response.content.decode('utf-8'))
        self.assertDictEqual(
            response_json,
            {
                "access_token": [{"user_message": expected_error_message}],
            }
        )
    def _assert_third_party_session_expired_error(self, response, expected_error_message):
        """Assert that given response is an error due to third party session expiry"""
        self.assertEqual(response.status_code, 400)
        response_json = json.loads(response.content.decode('utf-8'))
        self.assertDictEqual(
            response_json,
            {
                "session_expired": [{"user_message": expected_error_message}],
            }
        )
    def _verify_user_existence(self, user_exists, social_link_exists, user_is_active=None, username=None):
        """Verifies whether the user object exists."""
        users = User.objects.filter(username=(username if username else "test_username"))
        self.assertEqual(users.exists(), user_exists)
        if user_exists:
            self.assertEqual(users[0].is_active, user_is_active)
            self.assertEqual(
                UserSocialAuth.objects.filter(user=users[0], provider=self.BACKEND).exists(),
                social_link_exists
            )
        else:
            # No user implies no social-auth link may exist either.
            self.assertEqual(UserSocialAuth.objects.count(), 0)
    def test_success(self):
        """A valid provider token creates an inactive, socially-linked user."""
        self._verify_user_existence(user_exists=False, social_link_exists=False)
        self._setup_provider_response(success=True)
        response = self.client.post(self.url, self.data())
        self.assertEqual(response.status_code, 200)
        self._verify_user_existence(user_exists=True, social_link_exists=True, user_is_active=False)
    def test_unlinked_active_user(self):
        """An existing active account without a social link yields a conflict."""
        user = UserFactory()
        response = self.client.post(self.url, self.data(user))
        self._assert_existing_user_error(response)
        self._verify_user_existence(
            user_exists=True, social_link_exists=False, user_is_active=True, username=user.username
        )
    def test_unlinked_inactive_user(self):
        """An existing inactive account without a social link also conflicts."""
        user = UserFactory(is_active=False)
        response = self.client.post(self.url, self.data(user))
        self._assert_existing_user_error(response)
        self._verify_user_existence(
            user_exists=True, social_link_exists=False, user_is_active=False, username=user.username
        )
    def test_user_already_registered(self):
        """Re-registering an already-linked user's own data yields a conflict."""
        self._setup_provider_response(success=True)
        user = UserFactory()
        UserSocialAuth.objects.create(user=user, provider=self.BACKEND, uid=self.social_uid)
        response = self.client.post(self.url, self.data(user))
        self._assert_existing_user_error(response)
        self._verify_user_existence(
            user_exists=True, social_link_exists=True, user_is_active=True, username=user.username
        )
    def test_social_user_conflict(self):
        """A token already linked to another user's account is rejected."""
        self._setup_provider_response(success=True)
        user = UserFactory()
        UserSocialAuth.objects.create(user=user, provider=self.BACKEND, uid=self.social_uid)
        response = self.client.post(self.url, self.data())
        self._assert_access_token_error(response, "The provided access_token is already associated with another user.")
        self._verify_user_existence(
            user_exists=True, social_link_exists=True, user_is_active=True, username=user.username
        )
    def test_invalid_token(self):
        """A token the provider refuses to validate creates no user."""
        self._setup_provider_response(success=False)
        response = self.client.post(self.url, self.data())
        self._assert_access_token_error(response, "The provided access_token is not valid.")
        self._verify_user_existence(user_exists=False, social_link_exists=False)
    def test_missing_token(self):
        """Sending a provider without an access_token is a field error."""
        data = self.data()
        data.pop("access_token")
        response = self.client.post(self.url, data)
        self._assert_access_token_error(
            response,
            u"An access_token is required when passing value ({}) for provider.".format(self.BACKEND)
        )
        self._verify_user_existence(user_exists=False, social_link_exists=False)
    def test_expired_pipeline(self):
        """
        Test that there is an error and account is not created
        when request is made for account creation using third (Google, Facebook etc) party with pipeline
        getting expired using browser (not mobile application).
        NOTE: We are NOT using actual pipeline here so pipeline is always expired in this environment.
        we don't have to explicitly expire pipeline.
        """
        data = self.data()
        # provider is sent along request when request is made from mobile application
        data.pop("provider")
        # to identify that request is made using browser
        data.update({"social_auth_provider": "Google"})
        response = self.client.post(self.url, data)
        self._assert_third_party_session_expired_error(
            response,
            u"Registration using {provider} has timed out.".format(provider="Google")
        )
        self._verify_user_existence(user_exists=False, social_link_exists=False)
@skipUnless(settings.FEATURES.get("ENABLE_THIRD_PARTY_AUTH"), "third party auth not enabled")
class TestFacebookRegistrationView(
    ThirdPartyRegistrationTestMixin, ThirdPartyOAuthTestMixinFacebook, TransactionTestCase
):
    """Tests the User API registration endpoint with Facebook authentication."""
    # Enable the inherited mixin tests for this concrete backend.
    __test__ = True
    def test_social_auth_exception(self):
        """
        According to the do_auth method in social_core.backends.facebook.py,
        the Facebook API sometimes responds back a JSON with just False as value.
        """
        # Simulate Facebook returning HTTP 200 with a bare "false" body.
        self._setup_provider_response_with_body(200, json.dumps("false"))
        response = self.client.post(self.url, self.data())
        self._assert_access_token_error(response, "The provided access_token is not valid.")
        self._verify_user_existence(user_exists=False, social_link_exists=False)
@skipUnless(settings.FEATURES.get("ENABLE_THIRD_PARTY_AUTH"), "third party auth not enabled")
class TestGoogleRegistrationView(
    ThirdPartyRegistrationTestMixin, ThirdPartyOAuthTestMixinGoogle, TransactionTestCase
):
    """Tests the User API registration endpoint with Google authentication."""
    # All test cases come from the mixin; just enable them for this backend.
    __test__ = True
@ddt.ddt
class RegistrationValidationViewTests(test_utils.ApiTestCase):
    """
    Tests for validity of user data in registration forms.
    """
    endpoint_name = 'registration_validation'
    path = reverse(endpoint_name)
    def get_validation_decision(self, data):
        """POST the given form data and return the validation_decisions dict."""
        response = self.client.post(self.path, data)
        return response.data.get('validation_decisions', {})
    def assertValidationDecision(self, data, decision):
        """Assert the endpoint's validation decision for ``data`` equals ``decision``."""
        self.assertEqual(
            self.get_validation_decision(data),
            decision
        )
    def assertNotValidationDecision(self, data, decision):
        """Assert the endpoint's validation decision for ``data`` differs from ``decision``."""
        self.assertNotEqual(
            self.get_validation_decision(data),
            decision
        )
    def test_no_decision_for_empty_request(self):
        """An empty request produces no validation decisions at all."""
        self.assertValidationDecision(
            {},
            {}
        )
    def test_no_decision_for_invalid_request(self):
        """Unknown field names are ignored rather than validated."""
        self.assertValidationDecision(
            {'invalid_field': 'random_user_data'},
            {}
        )
    @ddt.data(
        ['name', [name for name in testutils.VALID_NAMES]],
        ['email', [email for email in testutils.VALID_EMAILS]],
        ['password', [password for password in testutils.VALID_PASSWORDS]],
        ['username', [username for username in testutils.VALID_USERNAMES]],
        ['country', [country for country in testutils.VALID_COUNTRIES]]
    )
    @ddt.unpack
    def test_positive_validation_decision(self, form_field_name, user_data):
        """
        Test if {0} as any item in {1} gives a positive validation decision.
        """
        # An empty-string decision means "no validation error" for the field.
        self.assertValidationDecision(
            {form_field_name: user_data},
            {form_field_name: ''}
        )
    @ddt.data(
        # Skip None type for invalidity checks.
        ['name', [name for name in testutils.INVALID_NAMES[1:]]],
        ['email', [email for email in testutils.INVALID_EMAILS[1:]]],
        ['password', [password for password in testutils.INVALID_PASSWORDS[1:]]],
        ['username', [username for username in testutils.INVALID_USERNAMES[1:]]],
        ['country', [country for country in testutils.INVALID_COUNTRIES[1:]]]
    )
    @ddt.unpack
    def test_negative_validation_decision(self, form_field_name, user_data):
        """
        Test if {0} as any item in {1} gives a negative validation decision.
        """
        self.assertNotValidationDecision(
            {form_field_name: user_data},
            {form_field_name: ''}
        )
    @ddt.data(
        ['username', 'username@email.com'],  # No conflict
        ['user', 'username@email.com'],  # Username conflict
        ['username', 'user@email.com'],  # Email conflict
        ['user', 'user@email.com']  # Both conflict
    )
    @ddt.unpack
    def test_existence_conflict(self, username, email):
        """
        Test if username '{0}' and email '{1}' have conflicts with
        username 'user' and email 'user@email.com'.
        """
        user = User.objects.create_user(username='user', email='user@email.com')
        self.assertValidationDecision(
            {
                'username': username,
                'email': email
            },
            {
                # pylint: disable=no-member
                "username": USERNAME_CONFLICT_MSG.format(
                    username=user.username
                ) if username == user.username else '',
                # pylint: disable=no-member
                "email": EMAIL_CONFLICT_MSG.format(
                    email_address=user.email
                ) if email == user.email else ''
            }
        )
    @ddt.data('', ('e' * EMAIL_MAX_LENGTH) + '@email.com')
    def test_email_bad_length_validation_decision(self, email):
        """Empty and over-long emails are both flagged with the length message."""
        self.assertValidationDecision(
            {'email': email},
            {'email': EMAIL_BAD_LENGTH_MSG}
        )
    def test_email_generically_invalid_validation_decision(self):
        """A string without '@' is rejected with the generic invalid-email message."""
        email = 'email'
        self.assertValidationDecision(
            {'email': email},
            # pylint: disable=no-member
            {'email': EMAIL_INVALID_MSG.format(email=email)}
        )
    def test_confirm_email_matches_email(self):
        """Matching email and confirm_email produce no validation errors."""
        email = 'user@email.com'
        self.assertValidationDecision(
            {'email': email, 'confirm_email': email},
            {'email': '', 'confirm_email': ''}
        )
    @ddt.data('', 'users@other.email')
    def test_confirm_email_doesnt_equal_email(self, confirm_email):
        """A mismatched confirm_email is flagged; the email itself stays valid."""
        self.assertValidationDecision(
            {'email': 'user@email.com', 'confirm_email': confirm_email},
            {'email': '', 'confirm_email': six.text_type(REQUIRED_FIELD_CONFIRM_EMAIL_MSG)}
        )
    @ddt.data(
        'u' * (USERNAME_MIN_LENGTH - 1),
        'u' * (USERNAME_MAX_LENGTH + 1)
    )
    def test_username_bad_length_validation_decision(self, username):
        """Usernames one character outside either length bound are rejected."""
        self.assertValidationDecision(
            {'username': username},
            {'username': six.text_type(USERNAME_BAD_LENGTH_MSG)}
        )
    @skipUnless(settings.FEATURES.get("ENABLE_UNICODE_USERNAME"), "Unicode usernames disabled.")
    @ddt.data(*testutils.INVALID_USERNAMES_UNICODE)
    def test_username_invalid_unicode_validation_decision(self, username):
        """With unicode usernames enabled, invalid unicode names get the unicode message."""
        self.assertValidationDecision(
            {'username': username},
            {'username': six.text_type(USERNAME_INVALID_CHARS_UNICODE)}
        )
    @skipIf(settings.FEATURES.get("ENABLE_UNICODE_USERNAME"), "Unicode usernames enabled.")
    @ddt.data(*testutils.INVALID_USERNAMES_ASCII)
    def test_username_invalid_ascii_validation_decision(self, username):
        """With unicode usernames disabled, invalid ASCII names get the ASCII message."""
        self.assertValidationDecision(
            {'username': username},
            {"username": six.text_type(USERNAME_INVALID_CHARS_ASCII)}
        )
    def test_password_empty_validation_decision(self):
        """An empty password is reported as too short, not as missing."""
        # 2 is the default setting for minimum length found in lms/envs/common.py
        # under AUTH_PASSWORD_VALIDATORS.MinimumLengthValidator
        msg = u'This password is too short. It must contain at least 2 characters.'
        self.assertValidationDecision(
            {'password': ''},
            {"password": msg}
        )
    def test_password_bad_min_length_validation_decision(self):
        """A one-character password violates the minimum-length validator."""
        password = 'p'
        # 2 is the default setting for minimum length found in lms/envs/common.py
        # under AUTH_PASSWORD_VALIDATORS.MinimumLengthValidator
        msg = u'This password is too short. It must contain at least 2 characters.'
        self.assertValidationDecision(
            {'password': password},
            {"password": msg}
        )
    def test_password_bad_max_length_validation_decision(self):
        """A password at DEFAULT_MAX_PASSWORD_LENGTH violates the maximum-length validator."""
        password = 'p' * DEFAULT_MAX_PASSWORD_LENGTH
        # 75 is the default setting for maximum length found in lms/envs/common.py
        # under AUTH_PASSWORD_VALIDATORS.MaximumLengthValidator
        msg = u'This password is too long. It must contain no more than 75 characters.'
        self.assertValidationDecision(
            {'password': password},
            {"password": msg}
        )
    def test_password_equals_username_validation_decision(self):
        """A password identical to the username is rejected as too similar."""
        self.assertValidationDecision(
            {"username": "somephrase", "password": "somephrase"},
            {"username": "", "password": u"The password is too similar to the username."}
        )
    @override_settings(
        CACHES={
            'default': {
                'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
                'LOCATION': 'registration_proxy',
            }
        }
    )
    def test_rate_limiting_registration_view(self):
        """
        Confirm rate limits work as expected for registration
        end point /api/user/v1/validation/registration/. Note
        that drf's rate limiting makes use of the default cache
        to enforce limits; that's why this test needs a "real"
        default cache (as opposed to the usual-for-tests DummyCache)
        """
        # Exhaust the throttle's quota, then expect 429 on the next request.
        for _ in range(RegistrationValidationThrottle().num_requests):
            self.request_without_auth('post', self.path)
        response = self.request_without_auth('post', self.path)
        self.assertEqual(response.status_code, 429)
| agpl-3.0 |
sebdelsol/pyload | module/plugins/hoster/CzshareCom.py | 1 | 5871 | # -*- coding: utf-8 -*-
#
# Test links:
# http://czshare.com/5278880/random.bin
import re
from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
from module.utils import parseFileSize
class CzshareCom(SimpleHoster):
    # NOTE: this plugin is written for Python 2 (pyLoad); it uses the
    # `except Exception, e:` syntax throughout.
    __name__ = "CzshareCom"
    __type__ = "hoster"
    __version__ = "0.95"
    __pattern__ = r'http://(?:www\.)?(czshare|sdilej)\.(com|cz)/(\d+/|download\.php\?).*'
    __description__ = """CZshare.com hoster plugin, now Sdilej.cz"""
    __license__ = "GPLv3"
    __authors__ = [("zoidberg", "zoidberg@mujmail.cz")]
    # Regexes used to scrape file metadata and forms from the site's HTML.
    NAME_PATTERN = r'<div class="tab" id="parameters">\s*<p>\s*Cel. n.zev: <a href=[^>]*>(?P<N>[^<]+)</a>'
    SIZE_PATTERN = r'<div class="tab" id="category">(?:\s*<p>[^\n]*</p>)*\s*Velikost:\s*(?P<S>[\d .,]+)(?P<U>[\w^_]+)\s*</div>'
    OFFLINE_PATTERN = r'<div class="header clearfix">\s*<h2 class="red">'
    SIZE_REPLACEMENTS = [(' ', '')]
    URL_REPLACEMENTS = [(r'http://[^/]*/download.php\?.*?id=(\w+).*', r'http://sdilej.cz/\1/x/')]
    FORCE_CHECK_TRAFFIC = True
    FREE_URL_PATTERN = r'<a href="([^"]+)" class="page-download">[^>]*alt="([^"]+)" /></a>'
    FREE_FORM_PATTERN = r'<form action="download\.php" method="post">\s*<img src="captcha\.php" id="captcha" />(.*?)</form>'
    PREMIUM_FORM_PATTERN = r'<form action="/profi_down\.php" method="post">(.*?)</form>'
    FORM_INPUT_PATTERN = r'<input[^>]* name="([^"]+)" value="([^"]+)"[^>]*/>'
    MULTIDL_PATTERN = r'<p><font color=\'red\'>Z[^<]*PROFI.</font></p>'
    USER_CREDIT_PATTERN = r'<div class="credit">\s*kredit: <strong>([\d .,]+)(\w+)</strong>\s*</div><!-- .credit -->'
    def checkTrafficLeft(self):
        """Return True if the logged-in account has enough credit for this file."""
        # check if user logged in
        m = re.search(self.USER_CREDIT_PATTERN, self.html)
        if m is None:
            # No credit box on the page: the session likely expired; re-login
            # and re-fetch the page once before giving up.
            self.account.relogin(self.user)
            self.html = self.load(self.pyfile.url, cookies=True, decode=True)
            m = re.search(self.USER_CREDIT_PATTERN, self.html)
            if m is None:
                return False
        # check user credit
        try:
            credit = parseFileSize(m.group(1).replace(' ', ''), m.group(2))
            self.logInfo(_("Premium download for %i KiB of Credit") % (self.pyfile.size / 1024))
            self.logInfo(_("User %s has %i KiB left") % (self.user, credit / 1024))
            if credit < self.pyfile.size:
                self.logInfo(_("Not enough credit to download file: %s") % self.pyfile.name)
                return False
        except Exception, e:
            # let's continue and see what happens...
            self.logError(e)
        return True
    def handlePremium(self):
        """Download via the premium (credit-based) form."""
        # parse download link
        try:
            form = re.search(self.PREMIUM_FORM_PATTERN, self.html, re.S).group(1)
            inputs = dict(re.findall(self.FORM_INPUT_PATTERN, form))
        except Exception, e:
            # NOTE(review): if the form is missing, `inputs` is left unbound
            # here; this relies on resetAccount() aborting the plugin before
            # the download call below — TODO confirm.
            self.logError(e)
            self.resetAccount()
        # download the file, destination is determined by pyLoad
        self.download("http://sdilej.cz/profi_down.php", post=inputs, disposition=True)
        self.checkDownloadedFile()
    def handleFree(self):
        """Download via the free (captcha + wait) flow."""
        # get free url
        m = re.search(self.FREE_URL_PATTERN, self.html)
        if m is None:
            self.error(_("FREE_URL_PATTERN not found"))
        parsed_url = "http://sdilej.cz" + m.group(1)
        self.logDebug("PARSED_URL:" + parsed_url)
        # get download ticket and parse html
        self.html = self.load(parsed_url, cookies=True, decode=True)
        if re.search(self.MULTIDL_PATTERN, self.html):
            # Another download is already running; wait up to 12 x 5 minutes.
            self.longWait(5 * 60, 12)
        try:
            form = re.search(self.FREE_FORM_PATTERN, self.html, re.S).group(1)
            inputs = dict(re.findall(self.FORM_INPUT_PATTERN, form))
            self.pyfile.size = int(inputs['size'])
        except Exception, e:
            self.logError(e)
            self.error(_("Form"))
        # get and decrypt captcha
        captcha_url = 'http://sdilej.cz/captcha.php'
        # Up to 5 captcha attempts before failing the download.
        for _i in xrange(5):
            inputs['captchastring2'] = self.decryptCaptcha(captcha_url)
            self.html = self.load(parsed_url, cookies=True, post=inputs, decode=True)
            if u"<li>Zadaný ověřovací kód nesouhlasí!</li>" in self.html:
                self.invalidCaptcha()
            elif re.search(self.MULTIDL_PATTERN, self.html):
                self.longWait(5 * 60, 12)
            else:
                self.correctCaptcha()
                break
        else:
            self.fail(_("No valid captcha code entered"))
        # Honor the site's countdown timer; default to 50s if not found.
        m = re.search("countdown_number = (\d+);", self.html)
        self.setWait(int(m.group(1)) if m else 50)
        # download the file, destination is determined by pyLoad
        self.logDebug("WAIT URL", self.req.lastEffectiveURL)
        m = re.search("free_wait.php\?server=(.*?)&(.*)", self.req.lastEffectiveURL)
        if m is None:
            self.error(_("Download URL not found"))
        url = "http://%s/download.php?%s" % (m.group(1), m.group(2))
        self.wait()
        self.download(url)
        self.checkDownloadedFile()
    def checkDownloadedFile(self):
        """Inspect the downloaded content for known error pages and react."""
        # check download
        check = self.checkDownload({
            "temp_offline": re.compile(r"^Soubor je do.*asn.* nedostupn.*$"),
            "credit": re.compile(r"^Nem.*te dostate.*n.* kredit.$"),
            "multi_dl": re.compile(self.MULTIDL_PATTERN),
            "captcha_err": "<li>Zadaný ověřovací kód nesouhlasí!</li>"
        })
        if check == "temp_offline":
            self.fail(_("File not available - try later"))
        if check == "credit":
            self.resetAccount()
        elif check == "multi_dl":
            self.longWait(5 * 60, 12)
        elif check == "captcha_err":
            self.invalidCaptcha()
            self.retry()
getInfo = create_getInfo(CzshareCom)
| gpl-3.0 |
"""Module that defines indexed objects
The classes ``IndexedBase``, ``Indexed``, and ``Idx`` represent a
matrix element ``M[i, j]`` as in the following diagram::
1) The Indexed class represents the entire indexed object.
|
___|___
' '
M[i, j]
/ \__\______
| |
| |
| 2) The Idx class represents indices; each Idx can
| optionally contain information about its range.
|
3) IndexedBase represents the 'stem' of an indexed object, here `M`.
The stem used by itself is usually taken to represent the entire
array.
There can be any number of indices on an Indexed object. No
transformation properties are implemented in these Base objects, but
implicit contraction of repeated indices is supported.
Note that the support for complicated (i.e. non-atomic) integer
expressions as indices is limited. (This should be improved in
future releases.)
Examples
========
To express the above matrix element example you would write:
>>> from sympy import symbols, IndexedBase, Idx
>>> M = IndexedBase('M')
>>> i, j = symbols('i j', cls=Idx)
>>> M[i, j]
M[i, j]
Repeated indices in a product implies a summation, so to express a
matrix-vector product in terms of Indexed objects:
>>> x = IndexedBase('x')
>>> M[i, j]*x[j]
M[i, j]*x[j]
If the indexed objects will be converted to component based arrays, e.g.
with the code printers or the autowrap framework, you also need to provide
(symbolic or numerical) dimensions. This can be done by passing an
optional shape parameter to IndexedBase upon construction:
>>> dim1, dim2 = symbols('dim1 dim2', integer=True)
>>> A = IndexedBase('A', shape=(dim1, 2*dim1, dim2))
>>> A.shape
(dim1, 2*dim1, dim2)
>>> A[i, j, 3].shape
(dim1, 2*dim1, dim2)
If an IndexedBase object has no shape information, it is assumed that the
array is as large as the ranges of its indices:
>>> n, m = symbols('n m', integer=True)
>>> i = Idx('i', m)
>>> j = Idx('j', n)
>>> M[i, j].shape
(m, n)
>>> M[i, j].ranges
[(0, m - 1), (0, n - 1)]
The above can be compared with the following:
>>> A[i, 2, j].shape
(dim1, 2*dim1, dim2)
>>> A[i, 2, j].ranges
[(0, m - 1), None, (0, n - 1)]
To analyze the structure of indexed expressions, you can use the methods
get_indices() and get_contraction_structure():
>>> from sympy.tensor import get_indices, get_contraction_structure
>>> get_indices(A[i, j, j])
({i}, {})
>>> get_contraction_structure(A[i, j, j])
{(j,): {A[i, j, j]}}
See the appropriate docstrings for a detailed explanation of the output.
"""
# TODO: (some ideas for improvement)
#
# o test and guarantee numpy compatibility
# - implement full support for broadcasting
# - strided arrays
#
# o more functions to analyze indexed expressions
# - identify standard constructs, e.g matrix-vector product in a subexpression
#
# o functions to generate component based arrays (numpy and sympy.Matrix)
# - generate a single array directly from Indexed
# - convert simple sub-expressions
#
# o sophisticated indexing (possibly in subclasses to preserve simplicity)
# - Idx with range smaller than dimension of Indexed
# - Idx with stepsize != 1
# - Idx with step determined by function call
from __future__ import print_function, division
import collections
from sympy.core.sympify import _sympify
from sympy.functions.special.tensor_functions import KroneckerDelta
from sympy.core import Expr, Tuple, Symbol, sympify, S
from sympy.core.compatibility import is_sequence, string_types, NotIterable, range
class IndexException(Exception):
    """Raised for index-related errors: missing indices, rank mismatch, or
    shapes/ranges that cannot be inferred."""
    pass
class Indexed(Expr):
    """Represents a mathematical object with indices.
    >>> from sympy import Indexed, IndexedBase, Idx, symbols
    >>> i, j = symbols('i j', cls=Idx)
    >>> Indexed('A', i, j)
    A[i, j]
    It is recommended that ``Indexed`` objects be created via ``IndexedBase``:
    >>> A = IndexedBase('A')
    >>> Indexed('A', i, j) == A[i, j]
    True
    """
    is_commutative = True
    is_Indexed = True
    # Treated like an atomic symbol by most of sympy's machinery.
    is_Symbol = True
    is_symbol = True
    is_Atom = True
    def __new__(cls, base, *args, **kw_args):
        # ``base`` is the stem (e.g. ``A``), ``args`` are the indices.
        from sympy.utilities.misc import filldedent
        from sympy.tensor.array.ndim_array import NDimArray
        from sympy.matrices.matrices import MatrixBase
        if not args:
            raise IndexException("Indexed needs at least one index.")
        # Promote a bare string or Symbol to an IndexedBase stem.
        if isinstance(base, (string_types, Symbol)):
            base = IndexedBase(base)
        elif not hasattr(base, '__getitem__') and not isinstance(base, IndexedBase):
            raise TypeError(filldedent("""
                Indexed expects string, Symbol, or IndexedBase as base."""))
        args = list(map(sympify, args))
        # Indexing a concrete container (array/matrix/iterable) with purely
        # numeric indices collapses to the actual element instead of creating
        # a symbolic Indexed object.
        # NOTE(review): ``collections.Iterable`` is removed in Python 3.10+;
        # presumably kept for py2/early-py3 compatibility -- confirm.
        if isinstance(base, (NDimArray, collections.Iterable, Tuple, MatrixBase)) and all([i.is_number for i in args]):
            return base[args]
        return Expr.__new__(cls, base, *args, **kw_args)
    @property
    def _diff_wrt(self):
        """Allow derivatives with respect to an ``Indexed`` object."""
        return True
    def _eval_derivative(self, wrt):
        from sympy.tensor.array.ndim_array import NDimArray
        if isinstance(wrt, Indexed) and wrt.base == self.base:
            if len(self.indices) != len(wrt.indices):
                msg = "Different # of indices: d({!s})/d({!s})".format(self,
                                                                       wrt)
                raise IndexException(msg)
            # d(A[i])/d(A[j]) == KroneckerDelta(i, j); with several indices
            # the deltas multiply.
            result = S.One
            for index1, index2 in zip(self.indices, wrt.indices):
                result *= KroneckerDelta(index1, index2)
            return result
        elif isinstance(self.base, NDimArray):
            # Element of a concrete array: differentiate the whole array and
            # re-index the result.
            from sympy.tensor.array import derive_by_array
            return Indexed(derive_by_array(self.base, wrt), *self.args[1:])
        else:
            # A derivative w.r.t. something appearing in the indices is
            # undefined (NaN); otherwise the element is a constant.
            if Tuple(self.indices).has(wrt):
                return S.NaN
            return S.Zero
    @property
    def base(self):
        """Returns the ``IndexedBase`` of the ``Indexed`` object.
        Examples
        ========
        >>> from sympy import Indexed, IndexedBase, Idx, symbols
        >>> i, j = symbols('i j', cls=Idx)
        >>> Indexed('A', i, j).base
        A
        >>> B = IndexedBase('B')
        >>> B == B[i, j].base
        True
        """
        return self.args[0]
    @property
    def indices(self):
        """
        Returns the indices of the ``Indexed`` object.
        Examples
        ========
        >>> from sympy import Indexed, Idx, symbols
        >>> i, j = symbols('i j', cls=Idx)
        >>> Indexed('A', i, j).indices
        (i, j)
        """
        return self.args[1:]
    @property
    def rank(self):
        """
        Returns the rank of the ``Indexed`` object.
        Examples
        ========
        >>> from sympy import Indexed, Idx, symbols
        >>> i, j, k, l, m = symbols('i:m', cls=Idx)
        >>> Indexed('A', i, j).rank
        2
        >>> q = Indexed('A', i, j, k, l, m)
        >>> q.rank
        5
        >>> q.rank == len(q.indices)
        True
        """
        # args[0] is the base, so the number of indices is len(args) - 1.
        return len(self.args) - 1
    @property
    def shape(self):
        """Returns a list with dimensions of each index.
        Dimensions is a property of the array, not of the indices.  Still, if
        the ``IndexedBase`` does not define a shape attribute, it is assumed
        that the ranges of the indices correspond to the shape of the array.
        >>> from sympy import IndexedBase, Idx, symbols
        >>> n, m = symbols('n m', integer=True)
        >>> i = Idx('i', m)
        >>> j = Idx('j', m)
        >>> A = IndexedBase('A', shape=(n, n))
        >>> B = IndexedBase('B')
        >>> A[i, j].shape
        (n, n)
        >>> B[i, j].shape
        (m, m)
        """
        from sympy.utilities.misc import filldedent
        # An explicit shape on the base takes precedence over index ranges.
        if self.base.shape:
            return self.base.shape
        try:
            return Tuple(*[i.upper - i.lower + 1 for i in self.indices])
        except AttributeError:
            # An index without .lower/.upper attributes (e.g. a plain Symbol).
            raise IndexException(filldedent("""
                Range is not defined for all indices in: %s""" % self))
        except TypeError:
            # An Idx whose bounds are None (no range given).
            raise IndexException(filldedent("""
                Shape cannot be inferred from Idx with
                undefined range: %s""" % self))
    @property
    def ranges(self):
        """Returns a list of tuples with lower and upper range of each index.
        If an index does not define the data members upper and lower, the
        corresponding slot in the list contains ``None`` instead of a tuple.
        Examples
        ========
        >>> from sympy import Indexed,Idx, symbols
        >>> Indexed('A', Idx('i', 2), Idx('j', 4), Idx('k', 8)).ranges
        [(0, 1), (0, 3), (0, 7)]
        >>> Indexed('A', Idx('i', 3), Idx('j', 3), Idx('k', 3)).ranges
        [(0, 2), (0, 2), (0, 2)]
        >>> x, y, z = symbols('x y z', integer=True)
        >>> Indexed('A', x, y, z).ranges
        [None, None, None]
        """
        ranges = []
        for i in self.indices:
            try:
                ranges.append(Tuple(i.lower, i.upper))
            except AttributeError:
                # Index carries no bound information.
                ranges.append(None)
        return ranges
    def _sympystr(self, p):
        # Printed as base[i, j, ...] by the str printer.
        indices = list(map(p.doprint, self.indices))
        return "%s[%s]" % (p.doprint(self.base), ", ".join(indices))
    # @property
    # def free_symbols(self):
    #     return {self.base}
class IndexedBase(Expr, NotIterable):
    """Represent the base or stem of an indexed object
    The IndexedBase class represent an array that contains elements. The main purpose
    of this class is to allow the convenient creation of objects of the Indexed
    class.  The __getitem__ method of IndexedBase returns an instance of
    Indexed.  Alone, without indices, the IndexedBase class can be used as a
    notation for e.g. matrix equations, resembling what you could do with the
    Symbol class.  But, the IndexedBase class adds functionality that is not
    available for Symbol instances:
    -  An IndexedBase object can optionally store shape information.  This can
       be used in to check array conformance and conditions for numpy
       broadcasting.  (TODO)
    -  An IndexedBase object implements syntactic sugar that allows easy symbolic
       representation of array operations, using implicit summation of
       repeated indices.
    -  The IndexedBase object symbolizes a mathematical structure equivalent
       to arrays, and is recognized as such for code generation and automatic
       compilation and wrapping.
    >>> from sympy.tensor import IndexedBase, Idx
    >>> from sympy import symbols
    >>> A = IndexedBase('A'); A
    A
    >>> type(A)
    <class 'sympy.tensor.indexed.IndexedBase'>
    When an IndexedBase object receives indices, it returns an array with named
    axes, represented by an Indexed object:
    >>> i, j = symbols('i j', integer=True)
    >>> A[i, j, 2]
    A[i, j, 2]
    >>> type(A[i, j, 2])
    <class 'sympy.tensor.indexed.Indexed'>
    The IndexedBase constructor takes an optional shape argument.  If given,
    it overrides any shape information in the indices. (But not the index
    ranges!)
    >>> m, n, o, p = symbols('m n o p', integer=True)
    >>> i = Idx('i', m)
    >>> j = Idx('j', n)
    >>> A[i, j].shape
    (m, n)
    >>> B = IndexedBase('B', shape=(o, p))
    >>> B[i, j].shape
    (o, p)
    """
    is_commutative = True
    # Behaves like an atomic symbol for most of sympy's machinery.
    is_Symbol = True
    is_symbol = True
    is_Atom = True
    def __new__(cls, label, shape=None, **kw_args):
        # Normalize the label to a sympy object (strings become Symbols).
        if isinstance(label, string_types):
            label = Symbol(label)
        elif isinstance(label, Symbol):
            pass
        else:
            label = _sympify(label)
        # Normalize shape: sequences become Tuple, a single value becomes a
        # 1-tuple; None means "no shape information".
        if is_sequence(shape):
            shape = Tuple(*shape)
        elif shape is not None:
            shape = Tuple(shape)
        # The shape participates in .args (and therefore hashing/equality)
        # only when it was given; it is also cached on _shape for fast access.
        if shape is not None:
            obj = Expr.__new__(cls, label, shape, **kw_args)
        else:
            obj = Expr.__new__(cls, label, **kw_args)
        obj._shape = shape
        return obj
    def __getitem__(self, indices, **kw_args):
        if is_sequence(indices):
            # Special case needed because M[*my_tuple] is a syntax error.
            if self.shape and len(self.shape) != len(indices):
                raise IndexException("Rank mismatch.")
            return Indexed(self, *indices, **kw_args)
        else:
            # Single index: a declared shape must then be one-dimensional.
            if self.shape and len(self.shape) != 1:
                raise IndexException("Rank mismatch.")
            return Indexed(self, indices, **kw_args)
    @property
    def shape(self):
        """Returns the shape of the ``IndexedBase`` object.
        Examples
        ========
        >>> from sympy import IndexedBase, Idx, Symbol
        >>> from sympy.abc import x, y
        >>> IndexedBase('A', shape=(x, y)).shape
        (x, y)
        Note: If the shape of the ``IndexedBase`` is specified, it will override
        any shape information given by the indices.
        >>> A = IndexedBase('A', shape=(x, y))
        >>> B = IndexedBase('B')
        >>> i = Idx('i', 2)
        >>> j = Idx('j', 1)
        >>> A[i, j].shape
        (x, y)
        >>> B[i, j].shape
        (2, 1)
        """
        return self._shape
    @property
    def label(self):
        """Returns the label of the ``IndexedBase`` object.
        Examples
        ========
        >>> from sympy import IndexedBase
        >>> from sympy.abc import x, y
        >>> IndexedBase('A', shape=(x, y)).label
        A
        """
        return self.args[0]
    def _sympystr(self, p):
        # Printed as its bare label by the str printer.
        return p.doprint(self.label)
class Idx(Expr):
    """Represents an integer index as an ``Integer`` or integer expression.
    There are a number of ways to create an ``Idx`` object.  The constructor
    takes two arguments:
    ``label``
        An integer or a symbol that labels the index.
    ``range``
        Optionally you can specify a range as either
        * ``Symbol`` or integer: This is interpreted as a dimension. Lower and
          upper bounds are set to ``0`` and ``range - 1``, respectively.
        * ``tuple``: The two elements are interpreted as the lower and upper
          bounds of the range, respectively.
    Note: the ``Idx`` constructor is rather pedantic in that it only accepts
    integer arguments.  The only exception is that you can use ``-oo`` and
    ``oo`` to specify an unbounded range.  For all other cases, both label and
    bounds must be declared as integers, e.g. if ``n`` is given as an argument
    then ``n.is_integer`` must return ``True``.
    For convenience, if the label is given as a string it is automatically
    converted to an integer symbol.  (Note: this conversion is not done for
    range or dimension arguments.)
    Examples
    ========
    >>> from sympy import IndexedBase, Idx, symbols, oo
    >>> n, i, L, U = symbols('n i L U', integer=True)
    If a string is given for the label an integer ``Symbol`` is created and the
    bounds are both ``None``:
    >>> idx = Idx('qwerty'); idx
    qwerty
    >>> idx.lower, idx.upper
    (None, None)
    Both upper and lower bounds can be specified:
    >>> idx = Idx(i, (L, U)); idx
    i
    >>> idx.lower, idx.upper
    (L, U)
    When only a single bound is given it is interpreted as the dimension
    and the lower bound defaults to 0:
    >>> idx = Idx(i, n); idx.lower, idx.upper
    (0, n - 1)
    >>> idx = Idx(i, 4); idx.lower, idx.upper
    (0, 3)
    >>> idx = Idx(i, oo); idx.lower, idx.upper
    (0, oo)
    """
    # An index is always a finite real integer as far as assumptions go.
    is_integer = True
    is_finite = True
    is_real = True
    is_Symbol = True
    is_symbol = True
    is_Atom = True
    _diff_wrt = True
    def __new__(cls, label, range=None, **kw_args):
        from sympy.utilities.misc import filldedent
        # A string label becomes an integer Symbol; other labels must already
        # be integer-valued sympy expressions.
        if isinstance(label, string_types):
            label = Symbol(label, integer=True)
        label, range = list(map(sympify, (label, range)))
        if label.is_Number:
            # A literal number is returned as-is (no Idx wrapper needed).
            if not label.is_integer:
                raise TypeError("Index is not an integer number.")
            return label
        if not label.is_integer:
            raise TypeError("Idx object requires an integer label.")
        elif is_sequence(range):
            # Explicit (lower, upper) bounds.
            if len(range) != 2:
                raise ValueError(filldedent("""
                    Idx range tuple must have length 2, but got %s""" % len(range)))
            for bound in range:
                # Only integers and +/-oo are acceptable bounds.
                if not (bound.is_integer or abs(bound) is S.Infinity):
                    raise TypeError("Idx object requires integer bounds.")
            args = label, Tuple(*range)
        elif isinstance(range, Expr):
            # A single expression is a dimension: bounds become (0, range-1).
            if not (range.is_integer or range is S.Infinity):
                raise TypeError("Idx object requires an integer dimension.")
            args = label, Tuple(0, range - 1)
        elif range:
            raise TypeError(filldedent("""
                The range must be an ordered iterable or
                integer SymPy expression."""))
        else:
            # No range: the index is unbounded (lower/upper are None).
            args = label,
        obj = Expr.__new__(cls, *args, **kw_args)
        # NOTE(review): writes directly into the assumptions cache rather
        # than going through the assumptions system -- presumably so the
        # label's own assumptions are preserved; confirm before changing.
        obj._assumptions["finite"] = True
        obj._assumptions["real"] = True
        return obj
    @property
    def label(self):
        """Returns the label (Integer or integer expression) of the Idx object.
        Examples
        ========
        >>> from sympy import Idx, Symbol
        >>> x = Symbol('x', integer=True)
        >>> Idx(x).label
        x
        >>> j = Symbol('j', integer=True)
        >>> Idx(j).label
        j
        >>> Idx(j + 1).label
        j + 1
        """
        return self.args[0]
    @property
    def lower(self):
        """Returns the lower bound of the ``Idx``.
        Examples
        ========
        >>> from sympy import Idx
        >>> Idx('j', 2).lower
        0
        >>> Idx('j', 5).lower
        0
        >>> Idx('j').lower is None
        True
        """
        try:
            return self.args[1][0]
        except IndexError:
            # No range stored: bound is unknown.
            return
    @property
    def upper(self):
        """Returns the upper bound of the ``Idx``.
        Examples
        ========
        >>> from sympy import Idx
        >>> Idx('j', 2).upper
        1
        >>> Idx('j', 5).upper
        4
        >>> Idx('j').upper is None
        True
        """
        try:
            return self.args[1][1]
        except IndexError:
            # No range stored: bound is unknown.
            return
    def _sympystr(self, p):
        # Printed as its bare label by the str printer.
        return p.doprint(self.label)
    @property
    def free_symbols(self):
        return {self}
    # The rich comparisons below try to decide the relation from the known
    # bounds of both sides; when the bounds are inconclusive they fall back
    # to Expr's default (possibly unevaluated) comparison.
    def __le__(self, other):
        if isinstance(other, Idx):
            other_upper = other if other.upper is None else other.upper
            other_lower = other if other.lower is None else other.lower
        else:
            other_upper = other
            other_lower = other
        # self <= other is certain if self's maximum is <= other's minimum.
        if self.upper is not None and (self.upper <= other_lower) == True:
            return True
        # ... and impossible if self's minimum exceeds other's maximum.
        if self.lower is not None and (self.lower > other_upper) == True:
            return False
        return super(Idx, self).__le__(other)
    def __ge__(self, other):
        if isinstance(other, Idx):
            other_upper = other if other.upper is None else other.upper
            other_lower = other if other.lower is None else other.lower
        else:
            other_upper = other
            other_lower = other
        if self.lower is not None and (self.lower >= other_upper) == True:
            return True
        if self.upper is not None and (self.upper < other_lower) == True:
            return False
        return super(Idx, self).__ge__(other)
    def __lt__(self, other):
        if isinstance(other, Idx):
            other_upper = other if other.upper is None else other.upper
            other_lower = other if other.lower is None else other.lower
        else:
            other_upper = other
            other_lower = other
        if self.upper is not None and (self.upper < other_lower) == True:
            return True
        if self.lower is not None and (self.lower >= other_upper) == True:
            return False
        return super(Idx, self).__lt__(other)
    def __gt__(self, other):
        if isinstance(other, Idx):
            other_upper = other if other.upper is None else other.upper
            other_lower = other if other.lower is None else other.lower
        else:
            other_upper = other
            other_lower = other
        if self.lower is not None and (self.lower > other_upper) == True:
            return True
        if self.upper is not None and (self.upper <= other_lower) == True:
            return False
        return super(Idx, self).__gt__(other)
| bsd-3-clause |
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2010-2012 OpenERP s.a. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.modules.registry import RegistryManager
from openerp.osv import osv, fields
import openerp.exceptions
from openerp import tools
import utils
class res_users(osv.osv):
    # Extends the core user model with OpenID credentials so users can
    # authenticate via OpenID instead of a password.
    _inherit = 'res.users'
    # TODO create helper fields for autofill openid_url and openid_email -> http://pad.openerp.com/web-openid
    _columns = {
        'openid_url': fields.char('OpenID URL', size=1024, copy=False),
        'openid_email': fields.char('OpenID Email', size=256, copy=False,
                                    help="Used for disambiguation in case of a shared OpenID URL"),
        # Server-generated secret used in place of a password for OpenID logins.
        'openid_key': fields.char('OpenID Key', size=utils.KEY_LENGTH,
                                  readonly=True, copy=False),
    }
    def _check_openid_url_email(self, cr, uid, ids, context=None):
        # Constraint helper: at most one *active* user may share the same
        # (openid_url, openid_email) pair.
        return all(self.search_count(cr, uid, [('active', '=', True), ('openid_url', '=', u.openid_url), ('openid_email', '=', u.openid_email)]) == 1 \
                   for u in self.browse(cr, uid, ids, context) if u.active and u.openid_url)
    def _check_openid_url_email_msg(self, cr, uid, ids, context):
        # Error message shown when the constraint above is violated.
        return "There is already an active user with this OpenID Email for this OpenID URL"
    _constraints = [
        (_check_openid_url_email, lambda self, *a, **kw: self._check_openid_url_email_msg(*a, **kw), ['active', 'openid_url', 'openid_email']),
    ]
    def _login(self, db, login, password):
        # Try the regular password login first; on failure, treat ``password``
        # as an openid_key and match it directly in SQL (parameterized, so no
        # injection risk).  A match also refreshes login_date.
        result = super(res_users, self)._login(db, login, password)
        if result:
            return result
        else:
            with RegistryManager.get(db).cursor() as cr:
                cr.execute("""UPDATE res_users
                                SET login_date=now() AT TIME ZONE 'UTC'
                                WHERE login=%s AND openid_key=%s AND active=%s RETURNING id""",
                           (tools.ustr(login), tools.ustr(password), True))
                # beware: record cache may be invalid
                res = cr.fetchone()
                cr.commit()
                return res[0] if res else False
    def check(self, db, uid, passwd):
        # Credential check used by RPC: fall back to openid_key when the
        # normal password check denies access.
        try:
            return super(res_users, self).check(db, uid, passwd)
        except openerp.exceptions.AccessDenied:
            if not passwd:
                raise
            with RegistryManager.get(db).cursor() as cr:
                cr.execute('''SELECT COUNT(1)
                                FROM res_users
                               WHERE id=%s
                                 AND openid_key=%s
                                 AND active=%s''',
                           (int(uid), passwd, True))
                if not cr.fetchone()[0]:
                    raise
                # Cache the validated key so subsequent checks are cheap.
                self._uid_cache.setdefault(db, {})[uid] = passwd
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
import os
import vtkAll as vtk
import math
import time
import re
import numpy as np
from director.timercallback import TimerCallback
from director import objectmodel as om
from director.simpletimer import SimpleTimer
from director.utime import getUtime
from director import robotstate
import copy
import pickle
import scipy.interpolate
def asRobotPlan(msg):
    '''
    Unwrap the plan contained in a robot_plan_with_supports_t message.
    Any other message type (or an environment without the drc lcmtypes)
    leaves the message unchanged.
    '''
    try:
        import drc as lcmdrc
    except ImportError:
        return msg
    if isinstance(msg, lcmdrc.robot_plan_with_supports_t):
        return msg.plan
    return msg
class PlanPlayback(object):
    """Interpolates and plays back robot plan messages.

    A plan message is converted to (poseTimes, poses) samples which are
    interpolated (via scipy) and streamed to a joint controller, either
    once or as a real-time animation driven by a TimerCallback.
    """

    def __init__(self):
        self.animationCallback = None         # optional hook called after each animation frame
        self.animationTimer = None            # TimerCallback driving playback, if any
        self.interpolationMethod = 'slinear'  # see getPoseInterpolator()
        self.playbackSpeed = 1.0              # time scale factor for animation
        self.jointNameRegex = ''              # optional joint filter for plotPoses()

    @staticmethod
    def getPlanPoses(msgOrList):
        """Return (poseTimes, poses) for a plan message or a list of them.

        For a list, plans are concatenated end-to-end; the first sample of
        each subsequent plan is dropped because it coincides with the last
        sample of the previous one.
        """
        if isinstance(msgOrList, list):
            messages = msgOrList
            allPoseTimes, allPoses = PlanPlayback.getPlanPoses(messages[0])
            for msg in messages[1:]:
                poseTimes, poses = PlanPlayback.getPlanPoses(msg)
                # Shift this plan's timeline to start where the previous ended.
                poseTimes += allPoseTimes[-1]
                allPoseTimes = np.hstack((allPoseTimes, poseTimes[1:]))
                allPoses += poses[1:]
            return allPoseTimes, allPoses
        else:
            msg = asRobotPlan(msgOrList)
            poses = []
            poseTimes = []
            for plan in msg.plan:
                pose = robotstate.convertStateMessageToDrakePose(plan)
                poseTimes.append(plan.utime / 1e6)  # utime is microseconds
                poses.append(pose)
            return np.array(poseTimes), poses

    @staticmethod
    def getPlanElapsedTime(msg):
        """Return the duration of a plan message in seconds."""
        msg = asRobotPlan(msg)
        startTime = msg.plan[0].utime
        endTime = msg.plan[-1].utime
        return (endTime - startTime) / 1e6

    @staticmethod
    def mergePlanMessages(plans):
        """Return a single plan message concatenating *plans*, shifting utimes."""
        msg = copy.deepcopy(plans[0])
        for plan in plans[1:]:
            plan = copy.deepcopy(plan)
            lastTime = msg.plan[-1].utime
            for state in plan.plan:
                state.utime += lastTime
            msg.plan_info += plan.plan_info
            msg.plan += plan.plan
        msg.num_states = len(msg.plan)
        return msg

    @staticmethod
    def isPlanInfoFeasible(info):
        """Return True if a single plan info code is in the feasible range [0, 10)."""
        return 0 <= info < 10

    @staticmethod
    def isPlanFeasible(plan):
        """Return True if every info code of the plan is feasible."""
        plan = asRobotPlan(plan)
        return plan is not None and (max(plan.plan_info) < 10 and min(plan.plan_info) >= 0)

    def stopAnimation(self):
        """Stop a running playback animation, if any."""
        if self.animationTimer:
            self.animationTimer.stop()

    def setInterpolationMethod(self, method):
        """Set the interpolation method: 'slinear', 'quadratic', 'cubic' or 'pchip'.

        Bug fix: this method previously lacked the ``self`` parameter, so
        calling it on an instance raised a TypeError.
        """
        self.interpolationMethod = method

    def playPlan(self, msg, jointController):
        """Animate a single plan message on the given joint controller."""
        self.playPlans([msg], jointController)

    def playPlans(self, messages, jointController):
        """Animate a non-empty list of plan messages end-to-end."""
        assert len(messages)
        poseTimes, poses = self.getPlanPoses(messages)
        self.playPoses(poseTimes, poses, jointController)

    def getPoseInterpolatorFromPlan(self, message):
        """Return a pose interpolator built from a plan message."""
        poseTimes, poses = self.getPlanPoses(message)
        return self.getPoseInterpolator(poseTimes, poses)

    def getPoseInterpolator(self, poseTimes, poses, unwrap_rpy=True):
        """Return a callable f(t) interpolating *poses* over *poseTimes*.

        When *unwrap_rpy* is True, columns 3:6 (assumed to be base rpy --
        TODO confirm against robotstate pose layout) are unwrapped to avoid
        spurious jumps across the +/-pi boundary.
        """
        if unwrap_rpy:
            poses = np.array(poses, copy=True)
            poses[:, 3:6] = np.unwrap(poses[:, 3:6], axis=0)
        if self.interpolationMethod in ['slinear', 'quadratic', 'cubic']:
            f = scipy.interpolate.interp1d(poseTimes, poses, axis=0, kind=self.interpolationMethod)
        elif self.interpolationMethod == 'pchip':
            f = scipy.interpolate.PchipInterpolator(poseTimes, poses, axis=0)
        return f

    def getPlanPoseMeshes(self, messages, jointController, robotModel, numberOfSamples):
        """Sample the plan uniformly and return a list of robot mesh snapshots."""
        poseTimes, poses = self.getPlanPoses(messages)
        f = self.getPoseInterpolator(poseTimes, poses)
        # int(): np.linspace requires an integral sample count.
        sampleTimes = np.linspace(poseTimes[0], poseTimes[-1], int(numberOfSamples))
        meshes = []
        for sampleTime in sampleTimes:
            pose = f(sampleTime)
            jointController.setPose('plan_playback', pose)
            polyData = vtk.vtkPolyData()
            robotModel.model.getModelMesh(polyData)
            meshes.append(polyData)
        return meshes

    def showPoseAtTime(self, time, jointController, poseInterpolator):
        """Push the interpolated pose at the given time to the controller."""
        pose = poseInterpolator(time)
        jointController.setPose('plan_playback', pose)

    def playPoses(self, poseTimes, poses, jointController):
        """Animate the given pose samples in (speed-scaled) real time."""
        f = self.getPoseInterpolator(poseTimes, poses)
        timer = SimpleTimer()

        def updateAnimation():
            tNow = timer.elapsed() * self.playbackSpeed
            if tNow > poseTimes[-1]:
                # Past the end: snap to the final pose and stop the timer.
                pose = poses[-1]
                jointController.setPose('plan_playback', pose)
                if self.animationCallback:
                    self.animationCallback()
                return False
            pose = f(tNow)
            jointController.setPose('plan_playback', pose)
            if self.animationCallback:
                self.animationCallback()

        self.animationTimer = TimerCallback()
        self.animationTimer.targetFps = 60
        self.animationTimer.callback = updateAnimation
        self.animationTimer.start()
        updateAnimation()

    def picklePlan(self, filename, msg):
        """Serialize (poseTimes, poses) of a plan message to *filename*.

        Bug fix: the file is now opened in binary mode (required by pickle
        on Python 3) and closed deterministically.
        """
        poseTimes, poses = self.getPlanPoses(msg)
        with open(filename, 'wb') as f:
            pickle.dump((poseTimes, poses), f)

    def getMovingJointNames(self, msg):
        """Return the names of joints whose values change over the plan."""
        poseTimes, poses = self.getPlanPoses(msg)
        diffs = np.diff(poses, axis=0)
        jointIds = np.unique(np.where(diffs != 0.0)[1])
        jointNames = [robotstate.getDrakePoseJointNames()[jointId] for jointId in jointIds]
        return jointNames

    def plotPlan(self, msg):
        """Plot the joint trajectories of a plan message."""
        poseTimes, poses = self.getPlanPoses(msg)
        self.plotPoses(poseTimes, poses)

    def plotPoses(self, poseTimes, poses):
        """Plot joint trajectories with the configured interpolation overlaid.

        When jointNameRegex is set, only matching joints are shown;
        otherwise only joints that actually move are plotted.
        """
        import matplotlib.pyplot as plt
        poses = np.array(poses)
        if self.jointNameRegex:
            jointIds = range(poses.shape[1])
        else:
            diffs = np.diff(poses, axis=0)
            jointIds = np.unique(np.where(diffs != 0.0)[1])
        jointNames = [robotstate.getDrakePoseJointNames()[jointId] for jointId in jointIds]
        jointTrajectories = [poses[:, jointId] for jointId in jointIds]
        seriesNames = []
        sampleResolutionInSeconds = 0.01
        # int(): np.linspace requires an integral sample count.
        numberOfSamples = int((poseTimes[-1] - poseTimes[0]) / sampleResolutionInSeconds)
        xnew = np.linspace(poseTimes[0], poseTimes[-1], numberOfSamples)
        fig = plt.figure()
        ax = fig.add_subplot(111)
        for jointId, jointName, jointTrajectory in zip(jointIds, jointNames, jointTrajectories):
            if self.jointNameRegex and not re.match(self.jointNameRegex, jointName):
                continue
            x = poseTimes
            y = np.rad2deg(jointTrajectory)
            if self.interpolationMethod in ['slinear', 'quadratic', 'cubic']:
                f = scipy.interpolate.interp1d(x, y, kind=self.interpolationMethod)
            elif self.interpolationMethod == 'pchip':
                f = scipy.interpolate.PchipInterpolator(x, y)
            ax.plot(x, y, 'ko')
            seriesNames.append(jointName + ' points')
            ax.plot(xnew, f(xnew), '-')
            seriesNames.append(jointName + ' ' + self.interpolationMethod)
        ax.legend(seriesNames, loc='upper right').draggable()
        ax.set_xlabel('time (s)')
        ax.set_ylabel('joint angle (deg)')
        ax.set_title('joint trajectories')
        plt.show()
| bsd-3-clause |
# Copyright (C) 2014 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Network Controller interface to BGP.
Network controller w.r.t BGPS for APGW Automation project is named as APGW
Agent and Route Server.
"""
import logging
import socket
import traceback
from ryu.services.protocols.bgp import api
from ryu.services.protocols.bgp.api.base import ApiException
from ryu.services.protocols.bgp.api.base import NEXT_HOP
from ryu.services.protocols.bgp.api.base import ORIGIN_RD
from ryu.services.protocols.bgp.api.base import PREFIX
from ryu.services.protocols.bgp.api.base import ROUTE_DISTINGUISHER
from ryu.services.protocols.bgp.api.base import VPN_LABEL
from ryu.services.protocols.bgp.base import Activity
from ryu.services.protocols.bgp.base import add_bgp_error_metadata
from ryu.services.protocols.bgp.base import BGPSException
from ryu.services.protocols.bgp.base import FlexinetPeer
from ryu.services.protocols.bgp.base import NET_CTRL_ERROR_CODE
from ryu.services.protocols.bgp.constants import VRF_TABLE
from ryu.services.protocols.bgp.rtconf.vrfs import VRF_RF
from ryu.services.protocols.bgp.rtconf.vrfs import VrfConf
from ryu.services.protocols.bgp.utils.validation import is_valid_ipv4
# Logger instance for this module.
LOG = logging.getLogger('bgpspeaker.net_ctrl')
# Network controller service socket constants.
NC_RPC_BIND_IP = 'apgw_rpc_bind_ip'
NC_RPC_BIND_PORT = 'apgw_rpc_bind_port'
# Notification symbols
NOTF_ADD_REMOTE_PREFX = 'prefix.add_remote'
NOTF_DELETE_REMOTE_PREFX = 'prefix.delete_remote'
NOTF_ADD_LOCAL_PREFX = 'prefix.add_local'
NOTF_DELETE_LOCAL_PREFX = 'prefix.delete_local'
NOTF_LOG = 'logging'
# MessagePackRPC message type constants
RPC_MSG_REQUEST = 0
RPC_MSG_RESPONSE = 1
RPC_MSG_NOTIFY = 2
#
# Indexes for various RPC message types.
# Wire layouts: request = [type, msgid, method, params],
# response = [type, msgid, error, result], notification = [type, method, params].
#
RPC_IDX_MSG_TYP = 0
RPC_IDX_MSG_ID = 1
RPC_IDX_REQ_SYM = 2
RPC_IDX_REQ_PARAM = 3
RPC_IDX_RES_ERR = 2
RPC_IDX_RES_RST = 3
RPC_IDX_NTF_SYM = 1
RPC_IDX_NTF_PARAM = 2
# RPC socket receive buffer size in bytes.
RPC_SOCK_BUFF_SIZE = 4096
# Registered with BGPS error metadata so it can be mapped to a wire-level
# error code when reported.
@add_bgp_error_metadata(code=NET_CTRL_ERROR_CODE,
                        sub_code=1,
                        def_desc='Unknown Network controller exception')
class NetworkControllerError(BGPSException):
    """Common base class for exceptions related to RPC calls.
    """
    pass
class RpcSession(Activity):
"""Provides message-pack RPC abstraction for one session.
It contains message-pack packer, un-packer, message ID sequence
and utilities that use these. It also cares about socket communication w/
RPC peer.
"""
    def __init__(self, socket, outgoing_msg_sink_iter):
        """Create a session over *socket*.

        *outgoing_msg_sink_iter* is an iterator yielding outgoing messages
        (e.g. route updates) that this session will serialize and send.
        """
        super(RpcSession, self).__init__("RpcSession(%s)" % socket)
        # Imported lazily so the module can be loaded without msgpack.
        import msgpack
        self._packer = msgpack.Packer()
        self._unpacker = msgpack.Unpacker()
        # Monotonically increasing ID for request messages.
        self._next_msgid = 0
        self._socket = socket
        self._outgoing_msg_sink_iter = outgoing_msg_sink_iter
    def stop(self):
        """Stop the underlying activity, then log which peer session ended."""
        super(RpcSession, self).stop()
        LOG.critical(
            'RPC Session to %s stopped', str(self._socket.getpeername())
        )
    def _run(self):
        """Run the session: one green thread per direction, until both end."""
        # Process outgoing messages in new thread.
        green_out = self._spawn('net_ctrl._process_outgoing',
                                self._process_outgoing_msg,
                                self._outgoing_msg_sink_iter)
        # Process incoming messages in new thread.
        green_in = self._spawn('net_ctrl._process_incoming',
                               self._process_incoming_msgs)
        LOG.critical(
            'RPC Session to %s started', str(self._socket.getpeername())
        )
        # Block until both directions have finished.
        green_in.wait()
        green_out.wait()
def _next_msg_id(self):
this_id = self._next_msgid
self._next_msgid += 1
return this_id
def create_request(self, method, params):
msgid = self._next_msg_id()
return self._packer.pack([RPC_MSG_REQUEST, msgid, method, params])
    def create_error_response(self, msgid, error):
        """Pack an RPC error response: [RPC_MSG_RESPONSE, msgid, error, None].

        Raises NetworkControllerError if *error* is None.
        """
        if error is None:
            raise NetworkControllerError(desc='Creating error without body!')
        return self._packer.pack([RPC_MSG_RESPONSE, msgid, error, None])
    def create_success_response(self, msgid, result):
        """Pack an RPC success response: [RPC_MSG_RESPONSE, msgid, None, result].

        Raises NetworkControllerError if *result* is None.
        """
        if result is None:
            raise NetworkControllerError(desc='Creating response without '
                                         'body!')
        return self._packer.pack([RPC_MSG_RESPONSE, msgid, None, result])
def create_notification(self, method, params):
return self._packer.pack([RPC_MSG_NOTIFY, method, params])
def feed_and_get_messages(self, data):
self._unpacker.feed(data)
messages = []
for msg in self._unpacker:
messages.append(msg)
return messages
def feed_and_get_first_message(self, data):
self._unpacker.feed(data)
for msg in self._unpacker:
return msg
def send_notification(self, method, params):
    """Serialize and send a notification message to the peer."""
    rpc_msg = self.create_notification(method, params)
    return self._sendall(rpc_msg)
def _process_incoming_msgs(self):
    """Read from the socket forever, dispatching each decoded RPC message.

    Requests are executed via the BGPS API and answered; responses and
    notifications go to the module-level handlers. Returns when the
    peer disconnects (empty read).
    """
    LOG.debug('NetworkController started processing incoming messages')
    assert self._socket
    while True:
        # Wait for request/response/notification from peer.
        msg_buff = self._recv()
        if len(msg_buff) == 0:
            LOG.info('Peer %r disconnected.' % self._socket)
            break
        messages = self.feed_and_get_messages(msg_buff)
        for msg in messages:
            if msg[0] == RPC_MSG_REQUEST:
                try:
                    result = _handle_request(msg)
                    _send_success_response(self, self._socket, msg, result)
                except BGPSException as e:
                    # NOTE: e.message is Python 2 only; this module is py2.
                    _send_error_response(self, self._socket, msg,
                                         e.message)
            elif msg[0] == RPC_MSG_RESPONSE:
                _handle_response(msg)
            elif msg[0] == RPC_MSG_NOTIFY:
                _handle_notification(msg)
            else:
                LOG.error('Invalid message type: %r' % msg)
            # Yield to other green threads between messages.
            self.pause(0)
def _process_outgoing_msg(self, sink_iter):
    """For every message we construct a corresponding RPC message to be
    sent over the given socket inside given RPC session.

    This function should be launched in a new green thread as
    it loops forever.
    """
    LOG.debug('NetworkController processing outgoing request list.')
    # TODO(Team): handle un-expected exception breaking the loop in
    # graceful manner. Discuss this with other component developers.
    # TODO(PH): We should try not to sent routes from bgp peer that is not
    # in established state.
    # Local import avoids a circular dependency with the model module.
    from ryu.services.protocols.bgp.model import \
        FlexinetOutgoingRoute
    while True:
        # sink iter is Sink instance and next is blocking so this isn't
        # active wait.
        for outgoing_msg in sink_iter:
            if isinstance(outgoing_msg, FlexinetOutgoingRoute):
                rpc_msg = _create_prefix_notif(outgoing_msg, self)
            else:
                raise NotImplementedError(
                    'Do not handle out going message'
                    ' of type %s' %
                    outgoing_msg.__class__)
            if rpc_msg:
                self._sendall(rpc_msg)
        self.pause(0)
def _recv(self):
    """Read up to RPC_SOCK_BUFF_SIZE bytes; socket errors end the session."""
    return self._sock_wrap(self._socket.recv)(RPC_SOCK_BUFF_SIZE)
def _sendall(self, msg):
    """Send all of `msg` to the peer; socket errors end the session."""
    return self._sock_wrap(self._socket.sendall)(msg)
def _sock_wrap(self, func):
    """Wrap a socket call so that socket.error tears down this session.

    Returns a callable that forwards to `func`; on socket.error it logs
    the traceback, stops the session and returns None.
    """
    def wrapper(*args, **kwargs):
        try:
            ret = func(*args, **kwargs)
        except socket.error:
            LOG.error(traceback.format_exc())
            self._socket_error()
            return
        return ret

    return wrapper
def _socket_error(self):
if self.started:
self.stop()
def _create_prefix_notif(outgoing_msg, rpc_session):
    """Constructs prefix notification with data from given outgoing message.

    Given RPC session is used to create RPC notification message.
    Paths not sourced from a VRF table become remote-prefix
    notifications; VRF-sourced paths become local-prefix notifications.
    Withdrawn paths map to the matching delete notification.
    """
    assert(outgoing_msg)
    path = outgoing_msg.path
    assert(path)
    vpn_nlri = path.nlri

    rpc_msg = None
    assert path.source is not None
    if path.source != VRF_TABLE:
        # Extract relevant info for update-add/update-delete.
        params = [{ROUTE_DISTINGUISHER: outgoing_msg.route_dist,
                   PREFIX: vpn_nlri.prefix,
                   NEXT_HOP: path.nexthop,
                   VPN_LABEL: path.label_list[0],
                   VRF_RF: VrfConf.rf_2_vrf_rf(path.route_family)}]
        if not path.is_withdraw:
            # Create notification to NetworkController.
            rpc_msg = rpc_session.create_notification(NOTF_ADD_REMOTE_PREFX,
                                                      params)
        else:
            # Create update-delete request to NetworkController.
            rpc_msg = rpc_session.create_notification(NOTF_DELETE_REMOTE_PREFX,
                                                      params)
    else:
        # Extract relevant info for update-add/update-delete.
        params = [{ROUTE_DISTINGUISHER: outgoing_msg.route_dist,
                   PREFIX: vpn_nlri.prefix,
                   NEXT_HOP: path.nexthop,
                   VRF_RF: VrfConf.rf_2_vrf_rf(path.route_family),
                   ORIGIN_RD: path.origin_rd}]
        if not path.is_withdraw:
            # Create notification to NetworkController.
            rpc_msg = rpc_session.create_notification(NOTF_ADD_LOCAL_PREFX,
                                                      params)
        else:
            # Create update-delete request to NetworkController.
            rpc_msg = rpc_session.create_notification(NOTF_DELETE_LOCAL_PREFX,
                                                      params)
    return rpc_msg
def _validate_rpc_ip(rpc_server_ip):
    """Validates given ip for use as rpc host bind address.

    Raises NetworkControllerError if the address is not valid IPv4;
    returns it unchanged otherwise.
    """
    if not is_valid_ipv4(rpc_server_ip):
        raise NetworkControllerError(desc='Invalid rpc ip address.')
    return rpc_server_ip
def _validate_rpc_port(port):
    """Validates give port for use as rpc server port.

    Accepts ints (or Python 2 longs) and numeric strings; returns the
    port as an int. Raises NetworkControllerError for falsy or
    non-positive values.
    """
    if not port:
        raise NetworkControllerError(desc='Invalid rpc port number.')
    # NOTE: `long` only exists on Python 2; this module is Python 2 code.
    if not isinstance(port, (int, long)) and isinstance(port, str):
        port = int(port)

    if port <= 0:
        raise NetworkControllerError(desc='Invalid rpc port number %s' % port)
    return port
class _NetworkController(FlexinetPeer, Activity):
    """Network controller peer.

    Provides MessagePackRPC interface for flexinet peers like Network
    controller to peer and have RPC session with BGPS process. This RPC
    interface provides access to BGPS API.
    """

    def __init__(self):
        FlexinetPeer.__init__(self)
        Activity.__init__(self, name='NETWORK_CONTROLLER')
        # Outstanding requests, i.e. requests for which we are yet to receive
        # response from peer. We currently do not have any requests going out.
        self._outstanding_reqs = {}
        # At most one live RPC session at a time; replaced on re-connect.
        self._rpc_session = None

    def _run(self, *args, **kwargs):
        """Runs RPC server.

        Wait for peer to connect and start rpc session with it.
        For every connection we start and new rpc session.
        """
        apgw_rpc_bind_ip = _validate_rpc_ip(kwargs.pop(NC_RPC_BIND_IP))
        apgw_rpc_bind_port = _validate_rpc_port(kwargs.pop(NC_RPC_BIND_PORT))
        sock_addr = (apgw_rpc_bind_ip, apgw_rpc_bind_port)
        LOG.debug('NetworkController started listening for connections...')

        server_thread, socket = self._listen_tcp(sock_addr,
                                                 self._start_rpc_session)
        self.pause(0)
        server_thread.wait()

    def _start_rpc_session(self, socket):
        """Starts a new RPC session with given connection.

        Any previously running session is stopped first.
        """
        if self._rpc_session and self._rpc_session.started:
            self._rpc_session.stop()

        self._rpc_session = RpcSession(socket, self)
        self._rpc_session.start()

    def send_rpc_notification(self, method, params):
        # Silently drops the notification when no live session exists.
        if (self.started and self._rpc_session is not None and
                self._rpc_session.started):
            return self._rpc_session.send_notification(method, params)
def _handle_response(response):
raise NotImplementedError('BGPS is not making any request hence should not'
' get any response. Response: %s' % response)
def _handle_notification(notification):
    """Dispatch a peer notification [type, method, params] to the BGPS API."""
    LOG.debug('Notification from NetworkController<<: %s %s' %
              (notification[RPC_IDX_NTF_SYM], notification[RPC_IDX_NTF_PARAM]))
    operation, params = notification[1], notification[2]
    # params is a one-element list holding the kwargs dict.
    return api.base.call(operation, **params[0])
def _handle_request(request):
    """Execute a peer request [type, msgid, method, params] via the BGPS API.

    Raises ApiException when the supplied parameters do not match the
    operation's signature.
    """
    LOG.debug('Request from NetworkController<<: %s %s' %
              (request[RPC_IDX_REQ_SYM], request[RPC_IDX_REQ_PARAM]))
    operation, params = request[2], request[3]
    kwargs = {}
    if len(params) > 0:
        # params is a one-element list holding the kwargs dict.
        kwargs = params[0]
    try:
        return api.base.call(operation, **kwargs)
    except TypeError:
        LOG.error(traceback.format_exc())
        raise ApiException(desc='Invalid type for RPC parameter.')
def _send_success_response(rpc_session, socket, request, result):
    """Pack a success response for `request` and send it on `socket`."""
    response = rpc_session.create_success_response(request[RPC_IDX_MSG_ID],
                                                   result)
    socket.sendall(response)
def _send_error_response(rpc_session, socket, request, emsg):
    """Pack an error response for `request` and send it on `socket`."""
    response = rpc_session.create_error_response(request[RPC_IDX_MSG_ID],
                                                 str(emsg))
    socket.sendall(response)
# Network controller singleton, shared by the whole BGPS process.
NET_CONTROLLER = _NetworkController()
| apache-2.0 |
jrha/release | src/documentation_builder/lib/quattordocbuild/builder.py | 2 | 5872 | """Build documentation from quattor sources."""
import os
import sys
import re
import codecs
from multiprocessing import Pool
from vsc.utils import fancylogger
from sourcehandler import get_source_files
from rsthandler import generate_rst_from_repository
from config import build_repository_map
logger = fancylogger.getLogger()

# Repositories returned by workers; filled via the log_result() callback.
RESULTS = []
def build_documentation(repository_location, output_location, singlet=False):
    """Build the whole documentation from quattor repositories.

    When `singlet` is true repositories are processed sequentially in
    this process; otherwise a multiprocessing Pool is used and results
    are gathered through the log_result callback. Exits the process on
    invalid input or missing build commands.
    """
    if not check_input(repository_location, output_location):
        sys.exit(1)
    if not check_commands():
        sys.exit(1)
    repository_map = build_repository_map(repository_location)
    if not repository_map:
        sys.exit(1)

    if singlet:
        for repository in repository_map:
            repository = build_docs(repository)
            RESULTS.append(repository)
    else:
        pool = Pool()
        for repository in repository_map:
            logger.debug("Starting worker for %s.", repository.name)
            pool.apply_async(build_docs, args=(repository, ), callback=log_result)
        pool.close()
        pool.join()

    site_pages = build_site_structure(RESULTS)
    # site_pages = make_interlinks(site_pages) # disabled for now
    write_site(site_pages, output_location, "docs")
    return True
def log_result(repository):
    """Catch results given by subprocesses.

    Pool.apply_async callback: runs in the parent process and appends
    each worker's repository to the module-level RESULTS list.
    """
    logger.info('Received %s from worker.', repository.name)
    RESULTS.append(repository)
def build_docs(repository):
    """Collect source files and generate RST for a single repository."""
    logger.info("Building documentation for %s.", repository.name)
    logger.debug(repository)
    repository = get_source_files(repository)
    logger.debug("Repository: %s", repository)
    repository = generate_rst_from_repository(repository)
    return repository
def which(command):
    """Check if given command is available for the current user on this system.

    Scans every directory on $PATH for a file named `command` and
    returns a bool. An unset or empty PATH yields False instead of
    raising (os.getenv would return None, and None.split crashed the
    original implementation).
    """
    path = os.getenv("PATH") or ""
    # any() short-circuits on the first hit instead of scanning every dir.
    return any(os.path.exists(os.path.join(direct, command))
               for direct in path.split(':'))
def check_input(sourceloc, outputloc):
    """Check input and locations.

    Both locations must be specified and exist on disk, and the output
    location must be an empty directory. Returns True when every check
    passes; logs and returns False otherwise.
    """
    logger.info("Checking if the given paths exist.")
    if not sourceloc:
        logger.error("Repo location not specified.")
        return False
    if not outputloc:
        logger.error("output location not specified")
        return False
    if not os.path.exists(sourceloc):
        logger.error("Repo location %s does not exist", sourceloc)
        return False
    if not os.path.exists(outputloc):
        logger.error("Output location %s does not exist", outputloc)
        return False
    if not os.listdir(outputloc) == []:
        logger.error("Output location %s is not empty.", outputloc)
        return False
    return True
def check_commands():
    """Check required binaries.

    Both maven (`mvn`) and `pod2rst` must be on $PATH; returns True only
    when both are available.
    """
    if not which("mvn"):
        logger.error("The command mvn is not available on this system, please install maven.")
        return False
    if not which("pod2rst"):
        logger.error("The command pod2rst is not available on this system, please install pod2rst.")
        return False
    return True
def build_site_structure(repository_map):
    """Make a mapping of files with their new names for the website.

    Returns {site_section: {filename.rst: rst_content}}.
    """
    sitepages = {}
    for repo in repository_map:
        sitepages[repo.sitesection] = {}

        for sourcepage in repo.sources:
            filename = '%s.rst' % sourcepage.title
            # NOTE(review): '\::' is a literal backslash followed by '::';
            # presumably titles carry pod2rst-escaped '\::' sequences —
            # confirm, otherwise a plain '::' would never be replaced.
            filename = filename.replace('\::', '_')
            filename = filename.replace(' - ', '_')
            logger.debug("filename will be: %s", filename)
            sitepages[repo.sitesection][filename] = sourcepage.rstcontent
    logger.debug("sitepages: %s", sitepages)
    return sitepages
def make_interlinks(pages):
    """Make links in the content based on pagenames.

    Currently unused (its call in build_documentation is commented out).
    For every page, builds a list of regexes that match references to
    that page and rewrites matches into Markdown links via
    replace_regex_link.
    """
    logger.info("Creating interlinks.")
    newpages = pages
    for subdir in pages:
        for page in pages[subdir]:
            basename = os.path.splitext(page)[0]
            link = '../%s/%s' % (subdir, page)

            regxs = []
            regxs.append("`%s`" % basename)
            regxs.append("`%s::%s`" % (subdir, basename))
            cpans = "https://metacpan.org/pod/"
            # Section-specific metacpan-style link patterns.
            if subdir == 'CCM':
                regxs.append(r"\[{2}::{0}\]\({1}{2}::{0}\)".format(basename, cpans, "EDG::WP4::CCM"))
            if subdir == 'Unittest':
                regxs.append(r"\[{2}::{0}\]\({1}{2}::{0}\)".format(basename, cpans, "Test"))
            if subdir in ['components', 'components-grid']:
                regxs.append(r"\[{2}::{0}\]\({1}{2}::{0}\)".format(basename, cpans, "NCM::Component"))
                regxs.append(r"`ncm-%s`" % basename)
                regxs.append(r"ncm-%s" % basename)

            for regex in regxs:
                newpages = replace_regex_link(newpages, regex, basename, link)
    return newpages
def replace_regex_link(pages, regex, basename, link):
    """Replace links in a bunch of pages based on a regex.

    A match must stand alone: preceded by start-of-string/line or a
    space, and followed by ',', '.', ' ' or '$'. Pages whose own name
    contains `basename` are left untouched, except the special
    "Quattor" page.
    """
    pattern = r'( |^|\n)%s([,. $])' % regex
    replacement = r"\g<1>[%s](%s)\g<2>" % (basename, link)
    for section in pages:
        for pagename in pages[section]:
            text = pages[section][pagename]
            self_reference = basename in pagename and basename != "Quattor"
            if not self_reference and basename in text:
                pages[section][pagename] = re.sub(pattern, replacement, text)
    return pages
def write_site(sitepages, location, docsdir):
    """Write the pages for the website to disk.

    Creates <location>/<docsdir>/<section>/ directories as needed and
    writes every page UTF-8 encoded.
    """
    # NOTE: iteritems() is Python 2 only; this module targets Python 2.
    for subdir, pages in sitepages.iteritems():
        fullsubdir = os.path.join(location, docsdir, subdir)
        if not os.path.exists(fullsubdir):
            os.makedirs(fullsubdir)
        for pagename, content in pages.iteritems():
            with codecs.open(os.path.join(fullsubdir, pagename), 'w', encoding='utf-8') as fih:
                fih.write(content)
| apache-2.0 |
nelango/ViralityAnalysis | model/lib/pandas/tests/test_msgpack/test_except.py | 15 | 1043 | #!/usr/bin/env python
# coding: utf-8
import unittest
import nose
import datetime
from pandas.msgpack import packb, unpackb
class DummyException(Exception):
    """Sentinel exception raised from unpack hooks in the tests below."""
    pass
class TestExceptions(unittest.TestCase):
    """Error-propagation tests for pandas.msgpack packb/unpackb."""

    def test_raise_on_find_unsupported_value(self):
        # datetime objects have no default msgpack encoding.
        import datetime
        self.assertRaises(TypeError, packb, datetime.datetime.now())

    def test_raise_from_object_hook(self):
        # Exceptions raised inside object/pairs hooks must propagate,
        # including from nested maps.
        def hook(obj):
            raise DummyException

        self.assertRaises(DummyException, unpackb, packb({}), object_hook=hook)
        self.assertRaises(DummyException, unpackb, packb({'fizz': 'buzz'}), object_hook=hook)
        self.assertRaises(DummyException, unpackb, packb({'fizz': 'buzz'}), object_pairs_hook=hook)
        self.assertRaises(DummyException, unpackb, packb({'fizz': {'buzz': 'spam'}}), object_hook=hook)
        self.assertRaises(DummyException, unpackb, packb({'fizz': {'buzz': 'spam'}}), object_pairs_hook=hook)

    def test_invalidvalue(self):
        # A str8 header declaring more bytes than remain -> ValueError.
        self.assertRaises(ValueError, unpackb, b'\xd9\x97#DL_')
| mit |
chandolia/python-social-auth | social/tests/backends/test_steam.py | 80 | 5412 | import json
import datetime
from httpretty import HTTPretty
from social.p3 import urlencode
from social.exceptions import AuthFailed
from social.tests.backends.open_id import OpenIdTest
# Steam Web API endpoint used by the backend to fetch player details.
INFO_URL = 'http://api.steampowered.com/ISteamUser/GetPlayerSummaries/v0002/?'
# Nonce in the format python-openid's janrain layer embeds in return_to.
JANRAIN_NONCE = datetime.datetime.now().strftime('%Y-%m-%dT%H:%M:%SZ')
class SteamOpenIdTest(OpenIdTest):
    """End-to-end login test for the Steam OpenID backend.

    All HTTP traffic (discovery, login POST, player-details API) is
    stubbed out with httpretty fixtures.
    """

    backend_path = 'social.backends.steam.SteamOpenId'
    expected_username = 'foobar'
    # XRDS document returned by server-level OpenID discovery.
    discovery_body = ''.join([
        '<?xml version="1.0" encoding="UTF-8"?>',
        '<xrds:XRDS xmlns:xrds="xri://$xrds" xmlns="xri://$xrd*($v*2.0)">',
        '<XRD>',
        '<Service priority="0">',
        '<Type>http://specs.openid.net/auth/2.0/server</Type>',
        '<URI>https://steamcommunity.com/openid/login</URI>',
        '</Service>',
        '</XRD>',
        '</xrds:XRDS>'
    ])
    # XRDS document returned by discovery on the claimed user identity URL.
    user_discovery_body = ''.join([
        '<?xml version="1.0" encoding="UTF-8"?>',
        '<xrds:XRDS xmlns:xrds="xri://$xrds" xmlns="xri://$xrd*($v*2.0)">',
        '<XRD>',
        '<Service priority="0">',
        '<Type>http://specs.openid.net/auth/2.0/signon</Type> ',
        '<URI>https://steamcommunity.com/openid/login</URI>',
        '</Service>',
        '</XRD>',
        '</xrds:XRDS>'
    ])
    # Positive OpenID id_res assertion; claimed_id ends in the steamid 123.
    server_response = urlencode({
        'janrain_nonce': JANRAIN_NONCE,
        'openid.ns': 'http://specs.openid.net/auth/2.0',
        'openid.mode': 'id_res',
        'openid.op_endpoint': 'https://steamcommunity.com/openid/login',
        'openid.claimed_id': 'https://steamcommunity.com/openid/id/123',
        'openid.identity': 'https://steamcommunity.com/openid/id/123',
        'openid.return_to': 'http://myapp.com/complete/steam/?'
                            'janrain_nonce=' + JANRAIN_NONCE,
        'openid.response_nonce':
            JANRAIN_NONCE + 'oD4UZ3w9chOAiQXk0AqDipqFYRA=',
        'openid.assoc_handle': '1234567890',
        'openid.signed': 'signed,op_endpoint,claimed_id,identity,return_to,'
                         'response_nonce,assoc_handle',
        'openid.sig': '1az53vj9SVdiBwhk8%2BFQ68R2plo=',
    })
    # Canned GetPlayerSummaries payload for steamid 123.
    player_details = json.dumps({
        'response': {
            'players': [{
                'steamid': '123',
                'primaryclanid': '1234',
                'timecreated': 1360768416,
                'personaname': 'foobar',
                'personastate': 0,
                'communityvisibilitystate': 3,
                'profileurl': 'http://steamcommunity.com/profiles/123/',
                'avatar': 'http://media.steampowered.com/steamcommunity/'
                          'public/images/avatars/fe/fef49e7fa7e1997310d7'
                          '05b2a6158ff8dc1cdfeb.jpg',
                'avatarfull': 'http://media.steampowered.com/steamcommunity/'
                              'public/images/avatars/fe/fef49e7fa7e1997310d7'
                              '05b2a6158ff8dc1cdfeb_full.jpg',
                'avatarmedium': 'http://media.steampowered.com/steamcommunity/'
                                'public/images/avatars/fe/fef49e7fa7e1997310d7'
                                '05b2a6158ff8dc1cdfeb_medium.jpg',
                'lastlogoff': 1360790014
            }]
        }
    })

    def _login_setup(self, user_url=None):
        """Register the httpretty fixtures needed for a login attempt."""
        self.strategy.set_settings({
            'SOCIAL_AUTH_STEAM_API_KEY': '123abc'
        })
        HTTPretty.register_uri(HTTPretty.POST,
                               'https://steamcommunity.com/openid/login',
                               status=200,
                               body=self.server_response)
        HTTPretty.register_uri(
            HTTPretty.GET,
            user_url or 'https://steamcommunity.com/openid/id/123',
            status=200,
            body=self.user_discovery_body
        )
        HTTPretty.register_uri(HTTPretty.GET,
                               INFO_URL,
                               status=200,
                               body=self.player_details)

    def test_login(self):
        self._login_setup()
        self.do_login()

    def test_partial_pipeline(self):
        self._login_setup()
        self.do_partial_pipeline()
class SteamOpenIdMissingSteamIdTest(SteamOpenIdTest):
    """Login must fail with AuthFailed when the claimed_id carries no
    numeric steamid (the '/id/<digits>' suffix is replaced by 'BROKEN')."""

    server_response = urlencode({
        'janrain_nonce': JANRAIN_NONCE,
        'openid.ns': 'http://specs.openid.net/auth/2.0',
        'openid.mode': 'id_res',
        'openid.op_endpoint': 'https://steamcommunity.com/openid/login',
        'openid.claimed_id': 'https://steamcommunity.com/openid/BROKEN',
        'openid.identity': 'https://steamcommunity.com/openid/BROKEN',
        'openid.return_to': 'http://myapp.com/complete/steam/?'
                            'janrain_nonce=' + JANRAIN_NONCE,
        'openid.response_nonce':
            JANRAIN_NONCE + 'oD4UZ3w9chOAiQXk0AqDipqFYRA=',
        'openid.assoc_handle': '1234567890',
        'openid.signed': 'signed,op_endpoint,claimed_id,identity,return_to,'
                         'response_nonce,assoc_handle',
        'openid.sig': '1az53vj9SVdiBwhk8%2BFQ68R2plo=',
    })

    def test_login(self):
        self._login_setup(user_url='https://steamcommunity.com/openid/BROKEN')
        with self.assertRaises(AuthFailed):
            self.do_login()

    def test_partial_pipeline(self):
        self._login_setup(user_url='https://steamcommunity.com/openid/BROKEN')
        with self.assertRaises(AuthFailed):
            self.do_partial_pipeline()
| bsd-3-clause |
40223136/w11-2 | static/Brython3.1.0-20150301-090019/Lib/site-packages/pygame/sprite.py | 603 | 55779 | ## pygame - Python Game Library
## Copyright (C) 2000-2003, 2007 Pete Shinners
## (C) 2004 Joe Wreschnig
## This library is free software; you can redistribute it and/or
## modify it under the terms of the GNU Library General Public
## License as published by the Free Software Foundation; either
## version 2 of the License, or (at your option) any later version.
##
## This library is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## Library General Public License for more details.
##
## You should have received a copy of the GNU Library General Public
## License along with this library; if not, write to the Free
## Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##
## Pete Shinners
## pete@shinners.org
"""pygame module with basic game object classes
This module contains several simple classes to be used within games. There
are the main Sprite class and several Group classes that contain Sprites.
The use of these classes is entirely optional when using Pygame. The classes
are fairly lightweight and only provide a starting place for the code
that is common to most games.
The Sprite class is intended to be used as a base class for the different
types of objects in the game. There is also a base Group class that simply
stores sprites. A game could create new types of Group classes that operate
on specially customized Sprite instances they contain.
The basic Sprite class can draw the Sprites it contains to a Surface. The
Group.draw() method requires that each Sprite have a Surface.image attribute
and a Surface.rect. The Group.clear() method requires these same attributes
and can be used to erase all the Sprites with background. There are also
more advanced Groups: pygame.sprite.RenderUpdates() and
pygame.sprite.OrderedUpdates().
Lastly, this module contains several collision functions. These help find
sprites inside multiple groups that have intersecting bounding rectangles.
To find the collisions, the Sprites are required to have a Surface.rect
attribute assigned.
The groups are designed for high efficiency in removing and adding Sprites
to them. They also allow cheap testing to see if a Sprite already exists in
a Group. A given Sprite can exist in any number of groups. A game could use
some groups to control object rendering, and a completely separate set of
groups to control interaction or player movement. Instead of adding type
attributes or bools to a derived Sprite class, consider keeping the
Sprites inside organized Groups. This will allow for easier lookup later
in the game.
Sprites and Groups manage their relationships with the add() and remove()
methods. These methods can accept a single or multiple group arguments for
membership. The default initializers for these classes also take a
single group or list of groups as argments for initial membership. It is safe
to repeatedly add and remove the same Sprite from a Group.
While it is possible to design sprite and group classes that don't derive
from the Sprite and AbstractGroup classes below, it is strongly recommended
that you extend those when you create a new Sprite or Group class.
Sprites are not thread safe, so lock them yourself if using threads.
"""
##todo
## a group that holds only the 'n' most recent elements.
## sort of like the GroupSingle class, but holding more
## than one sprite
##
## drawing groups that can 'automatically' store the area
## underneath so they can "clear" without needing a background
## function. obviously a little slower than normal, but nice
## to use in many situations. (also remember it must "clear"
## in the reverse order that it draws :])
##
## the drawing groups should also be able to take a background
## function, instead of just a background surface. the function
## would take a surface and a rectangle on that surface to erase.
##
## perhaps more types of collision functions? the current two
## should handle just about every need, but perhaps more optimized
## specific ones that aren't quite so general but fit into common
## specialized cases.
import pygame
from pygame.rect import Rect
from pygame.time import get_ticks
from operator import truth
# Python 3 does not have the callable function, but an equivalent can be made
# with the hasattr function.
#if 'callable' not in dir(__builtins__):
callable = lambda obj: hasattr(obj, '__call__')

# Don't depend on pygame.mask if it's not there...
try:
    from pygame.mask import from_surface
except:
    pass
class Sprite(object):
    """simple base class for visible game objects
    pygame.sprite.Sprite(*groups): return Sprite

    Subclasses are expected to assign 'image' and 'rect' attributes and
    override update(). Any number of Group instances may be passed to
    the constructor; the new sprite becomes a member of each of them.
    When subclassing, call this base initializer before adding the
    sprite to any group.
    """

    def __init__(self, *groups):
        self.__g = {}  # groups containing this sprite (dict used as a set)
        if groups:
            self.add(*groups)

    def add(self, *groups):
        """add the sprite to groups
        Sprite.add(*groups): return None

        Accepts any number of Group instances, or iterables of them;
        the sprite joins every group it is not already a member of.
        """
        for group in groups:
            if hasattr(group, '_spritegroup'):
                if group not in self.__g:
                    group.add_internal(self)
                    self.add_internal(group)
            else:
                # Not a group: treat it as an iterable of groups.
                self.add(*group)

    def remove(self, *groups):
        """remove the sprite from groups
        Sprite.remove(*groups): return None

        Accepts any number of Group instances, or iterables of them;
        the sprite leaves every group it currently belongs to.
        """
        for group in groups:
            if hasattr(group, '_spritegroup'):
                if group in self.__g:
                    group.remove_internal(self)
                    self.remove_internal(group)
            else:
                self.remove(*group)

    def add_internal(self, group):
        # Bookkeeping half of add(); does not notify the group.
        self.__g[group] = 0

    def remove_internal(self, group):
        # Bookkeeping half of remove(); does not notify the group.
        del self.__g[group]

    def update(self, *args):
        """method to control sprite behavior
        Sprite.update(*args):

        Intentionally a no-op hook; Group.update() forwards its
        arguments here for every member sprite.
        """
        pass

    def kill(self):
        """remove the Sprite from all Groups
        Sprite.kill(): return None

        The sprite itself stays usable afterwards and may be re-added.
        """
        for group in self.__g:
            group.remove_internal(self)
        self.__g.clear()

    def groups(self):
        """list of Groups that contain this Sprite
        Sprite.groups(): return group_list
        """
        return list(self.__g)

    def alive(self):
        """does the sprite belong to any groups
        Sprite.alive(): return bool
        """
        return truth(self.__g)

    def __repr__(self):
        return "<%s sprite(in %d groups)>" % (self.__class__.__name__, len(self.__g))
class DirtySprite(Sprite):
    """a more featureful subclass of Sprite with more attributes
    pygame.sprite.DirtySprite(*groups): return DirtySprite

    Extra DirtySprite attributes with their default values:

    dirty = 1
        If set to 1, it is repainted and then set to 0 again.
        If set to 2, it is always dirty (repainted each frame;
        flag is not reset).
        If set to 0, it is not dirty and therefore not repainted again.

    blendmode = 0
        It's the special_flags argument of Surface.blit; see the blendmodes in
        the Surface.blit documentation

    source_rect = None
        This is the source rect to use. Remember that it is relative to the top
        left corner (0, 0) of self.image.

    visible = 1
        Normally this is 1. If set to 0, it will not be repainted. (If you
        change visible to 1, you must set dirty to 1 for it to be erased from
        the screen.)

    _layer = 0
        A READ ONLY value, it is read when adding it to the LayeredUpdates
        group. For details see documentation of sprite.LayeredUpdates.
    """

    def __init__(self, *groups):
        self.dirty = 1  # start dirty so the first frame paints the sprite
        self.blendmode = 0  # pygame 1.8, referred to as special_flags in
                            # the documentation of Surface.blit
        self._visible = 1
        self._layer = 0    # READ ONLY by LayeredUpdates or LayeredDirty
        self.source_rect = None
        # Join groups only after all attributes above exist.
        Sprite.__init__(self, *groups)

    def _set_visible(self, val):
        """set the visible value (0 or 1) and makes the sprite dirty"""
        self._visible = val
        if self.dirty < 2:
            self.dirty = 1

    def _get_visible(self):
        """return the visible value of that sprite"""
        return self._visible

    visible = property(lambda self: self._get_visible(),
                       lambda self, value: self._set_visible(value),
                       doc="you can make this sprite disappear without "
                           "removing it from the group,\n"
                           "assign 0 for invisible and 1 for visible")

    def __repr__(self):
        return "<%s DirtySprite(in %d groups)>" % \
            (self.__class__.__name__, len(self.groups()))
class AbstractGroup(object):
    """base class for containers of sprites

    AbstractGroup does everything needed to behave as a normal group. You can
    easily subclass a new group class from this or the other groups below if
    you want to add more features.

    Any AbstractGroup-derived sprite groups act like sequences and support
    iteration, len, and so on.
    """

    # dummy val to identify sprite groups, and avoid infinite recursion
    _spritegroup = True

    def __init__(self):
        # sprite -> last drawn rect (or 0 before the first draw)
        self.spritedict = {}
        # rects of sprites removed since the last draw; used by clear()
        self.lostsprites = []

    def sprites(self):
        """get a list of sprites in the group
        Group.sprite(): return list

        Returns an object that can be looped over with a 'for' loop. (For now,
        it is always a list, but this could change in a future version of
        pygame.) Alternatively, you can get the same information by iterating
        directly over the sprite group, e.g. 'for sprite in group'.
        """
        return list(self.spritedict)

    def add_internal(self, sprite):
        # Bookkeeping half of add(); does not notify the sprite.
        self.spritedict[sprite] = 0

    def remove_internal(self, sprite):
        # Remember the sprite's last drawn rect so clear() can erase it.
        r = self.spritedict[sprite]
        if r:
            self.lostsprites.append(r)
        del self.spritedict[sprite]

    def has_internal(self, sprite):
        return sprite in self.spritedict

    def copy(self):
        """copy a group with all the same sprites
        Group.copy(): return Group

        Returns a copy of the group that is an instance of the same class
        and has the same sprites in it.
        """
        return self.__class__(self.sprites())

    def __iter__(self):
        return iter(self.sprites())

    def __contains__(self, sprite):
        return self.has(sprite)

    def add(self, *sprites):
        """add sprite(s) to group
        Group.add(sprite, list, group, ...): return None

        Adds a sprite or sequence of sprites to a group.
        """
        for sprite in sprites:
            # It's possible that some sprite is also an iterator.
            # If this is the case, we should add the sprite itself,
            # and not the iterator object.
            if isinstance(sprite, Sprite):
                if not self.has_internal(sprite):
                    self.add_internal(sprite)
                    sprite.add_internal(self)
            else:
                try:
                    # See if sprite is an iterator, like a list or sprite
                    # group.
                    self.add(*sprite)
                except (TypeError, AttributeError):
                    # Not iterable. This is probably a sprite that is not an
                    # instance of the Sprite class or is not an instance of a
                    # subclass of the Sprite class. Alternately, it could be an
                    # old-style sprite group.
                    if hasattr(sprite, '_spritegroup'):
                        for spr in sprite.sprites():
                            if not self.has_internal(spr):
                                self.add_internal(spr)
                                spr.add_internal(self)
                    elif not self.has_internal(sprite):
                        self.add_internal(sprite)
                        sprite.add_internal(self)

    def remove(self, *sprites):
        """remove sprite(s) from group
        Group.remove(sprite, list, or group, ...): return None

        Removes a sprite or sequence of sprites from a group.
        """
        # This function behaves essentially the same as Group.add. It first
        # tries to handle each argument as an instance of the Sprite class. If
        # that failes, then it tries to handle the argument as an iterable
        # object. If that failes, then it tries to handle the argument as an
        # old-style sprite group. Lastly, if that fails, it assumes that the
        # normal Sprite methods should be used.
        for sprite in sprites:
            if isinstance(sprite, Sprite):
                if self.has_internal(sprite):
                    self.remove_internal(sprite)
                    sprite.remove_internal(self)
            else:
                try:
                    self.remove(*sprite)
                except (TypeError, AttributeError):
                    if hasattr(sprite, '_spritegroup'):
                        for spr in sprite.sprites():
                            if self.has_internal(spr):
                                self.remove_internal(spr)
                                spr.remove_internal(self)
                    elif self.has_internal(sprite):
                        self.remove_internal(sprite)
                        sprite.remove_internal(self)

    def has(self, *sprites):
        """ask if group has a sprite or sprites
        Group.has(sprite or group, ...): return bool

        Returns True if the given sprite or sprites are contained in the
        group. Alternatively, you can get the same information using the
        'in' operator, e.g. 'sprite in group', 'subgroup in group'.
        """
        return_value = False

        for sprite in sprites:
            if isinstance(sprite, Sprite):
                # Check for Sprite instance's membership in this group
                if self.has_internal(sprite):
                    return_value = True
                else:
                    return False
            else:
                try:
                    if self.has(*sprite):
                        return_value = True
                    else:
                        return False
                except (TypeError, AttributeError):
                    if hasattr(sprite, '_spritegroup'):
                        for spr in sprite.sprites():
                            if self.has_internal(spr):
                                return_value = True
                            else:
                                return False
                    else:
                        if self.has_internal(sprite):
                            return_value = True
                        else:
                            return False

        return return_value

    def update(self, *args):
        """call the update method of every member sprite
        Group.update(*args): return None

        Calls the update method of every member sprite. All arguments that
        were passed to this method are passed to the Sprite update function.
        """
        for s in self.sprites():
            s.update(*args)

    def draw(self, surface):
        """draw all sprites onto the surface
        Group.draw(surface): return None

        Draws all of the member sprites onto the given surface.
        """
        #from javascript import console
        sprites = self.sprites()
        surface_blit = surface.blit
        for spr in sprites:
            #console.log(spr.image, spr.rect)
            #console.log(spr.image._canvas.width, spr.image._canvas.height)
            self.spritedict[spr] = surface_blit(spr.image, spr.rect)
        self.lostsprites = []

    def clear(self, surface, bgd):
        """erase the previous position of all sprites
        Group.clear(surface, bgd): return None

        Clears the area under every drawn sprite in the group. The bgd
        argument should be Surface which is the same dimensions as the
        screen surface. The bgd could also be a function which accepts
        the given surface and the area to be cleared as arguments.
        """
        if callable(bgd):
            for r in self.lostsprites:
                bgd(surface, r)
            for r in self.spritedict.values():
                if r:
                    bgd(surface, r)
        else:
            surface_blit = surface.blit
            for r in self.lostsprites:
                surface_blit(bgd, r, r)
            for r in self.spritedict.values():
                if r:
                    surface_blit(bgd, r, r)

    def empty(self):
        """remove all sprites
        Group.empty(): return None

        Removes all the sprites from the group.
        """
        for s in self.sprites():
            self.remove_internal(s)
            s.remove_internal(self)

    def __nonzero__(self):
        # Python 2 truth protocol (this module targets py2/brython).
        return truth(self.sprites())

    def __len__(self):
        """return number of sprites in group
        Group.len(group): return int

        Returns the number of sprites contained in the group.
        """
        return len(self.sprites())

    def __repr__(self):
        return "<%s(%d sprites)>" % (self.__class__.__name__, len(self))
class Group(AbstractGroup):
    """container class for many Sprites
    pygame.sprite.Group(*sprites): return Group
    A plain container for Sprite objects, intended as the common base for
    containers with more specialised behaviour.  Any number of sprites
    may be handed to the constructor; they are added immediately.  The
    standard Python protocols are supported:
    in test if a Sprite is contained
    len the number of Sprites contained
    bool test if any Sprites are contained
    iter iterate through all the Sprites
    Membership is unordered: drawing and iteration visit the sprites in
    no particular order.
    """
    def __init__(self, *sprites):
        AbstractGroup.__init__(self)
        self.add(*sprites)
# Backwards-compatible aliases: older pygame programs used these names for
# plain sprite groups; they are kept as straight synonyms of Group.
RenderPlain = Group
RenderClear = Group
class RenderUpdates(Group):
    """Group class that tracks dirty updates
    pygame.sprite.RenderUpdates(*sprites): return RenderUpdates
    Derived from pygame.sprite.Group(); its draw method additionally
    returns the list of screen rectangles that changed, so callers can
    pass them to pygame.display.update() instead of flipping the whole
    screen.
    """
    def draw(self, surface):
        """blit the sprites and return the list of changed screen rects"""
        dirty = self.lostsprites
        self.lostsprites = []
        for sprite in self.sprites():
            old_rect = self.spritedict[sprite]
            new_rect = surface.blit(sprite.image, sprite.rect)
            if not old_rect:
                # first time this sprite is drawn
                dirty.append(new_rect)
            elif new_rect.colliderect(old_rect):
                # old and new positions overlap: one merged rect suffices
                dirty.append(new_rect.union(old_rect))
            else:
                # disjoint: both the new and the vacated area changed
                dirty.append(new_rect)
                dirty.append(old_rect)
            self.spritedict[sprite] = new_rect
        return dirty
class OrderedUpdates(RenderUpdates):
    """RenderUpdates variant that renders sprites in insertion order
    pygame.sprite.OrderedUpdates(*spites): return OrderedUpdates
    Behaves like pygame.sprite.RenderUpdates() but remembers the order in
    which sprites were added and draws them in that order.  The extra
    bookkeeping makes adding and removing sprites a little slower than in
    a regular Group.
    """
    def __init__(self, *sprites):
        self._spritelist = []
        RenderUpdates.__init__(self, *sprites)
    def sprites(self):
        # hand out a copy so callers may mutate the group while iterating
        return list(self._spritelist)
    def add_internal(self, sprite):
        RenderUpdates.add_internal(self, sprite)
        self._spritelist.append(sprite)
    def remove_internal(self, sprite):
        RenderUpdates.remove_internal(self, sprite)
        self._spritelist.remove(sprite)
class LayeredUpdates(AbstractGroup):
    """LayeredUpdates Group handles layers, which are drawn like OrderedUpdates
    pygame.sprite.LayeredUpdates(*spites, **kwargs): return LayeredUpdates
    This group is fully compatible with pygame.sprite.Sprite.
    New in pygame 1.8.0
    """
    # Sentinel rect marking "never drawn yet" entries in self.spritedict.
    # Identity comparison against this object distinguishes fresh sprites.
    _init_rect = Rect(0, 0, 0, 0)
    def __init__(self, *sprites, **kwargs):
        """initialize an instance of LayeredUpdates with the given attributes
        You can set the default layer through kwargs using 'default_layer'
        and an integer for the layer. The default layer is 0.
        If the sprite you add has an attribute _layer, then that layer will be
        used. If **kwarg contains 'layer', then the passed sprites will be
        added to that layer (overriding the sprite._layer attribute). If
        neither the sprite nor **kwarg has a 'layer', then the default layer is
        used to add the sprites.
        """
        # maps sprite -> layer number
        self._spritelayers = {}
        # sprites kept sorted by layer, back (low layer) to front (high)
        self._spritelist = []
        AbstractGroup.__init__(self)
        self._default_layer = kwargs.get('default_layer', 0)
        self.add(*sprites, **kwargs)
    def add_internal(self, sprite, layer=None):
        """Do not use this method directly.
        It is used by the group to add a sprite internally.
        """
        self.spritedict[sprite] = self._init_rect
        if layer is None:
            # no explicit layer: take the sprite's own, or assign the default
            try:
                layer = sprite._layer
            except AttributeError:
                layer = sprite._layer = self._default_layer
        elif hasattr(sprite, '_layer'):
            # explicit layer overrides and is written back onto the sprite
            sprite._layer = layer
        sprites = self._spritelist # speedup
        sprites_layers = self._spritelayers
        sprites_layers[sprite] = layer
        # add the sprite at the right position
        # bisect algorithmus
        # binary search narrows to the insertion region ...
        leng = len(sprites)
        low = mid = 0
        high = leng - 1
        while low <= high:
            mid = low + (high - low) // 2
            if sprites_layers[sprites[mid]] <= layer:
                low = mid + 1
            else:
                high = mid - 1
        # linear search to find final position
        # ... then a linear scan puts the sprite AFTER its layer peers,
        # preserving insertion order within a layer
        while mid < leng and sprites_layers[sprites[mid]] <= layer:
            mid += 1
        sprites.insert(mid, sprite)
    def add(self, *sprites, **kwargs):
        """add a sprite or sequence of sprites to a group
        LayeredUpdates.add(*sprites, **kwargs): return None
        If the sprite you add has an attribute _layer, then that layer will be
        used. If **kwarg contains 'layer', then the passed sprites will be
        added to that layer (overriding the sprite._layer attribute). If
        neither the sprite nor **kwarg has a 'layer', then the default layer is
        used to add the sprites.
        """
        if not sprites:
            return
        if 'layer' in kwargs:
            layer = kwargs['layer']
        else:
            layer = None
        for sprite in sprites:
            # It's possible that some sprite is also an iterator.
            # If this is the case, we should add the sprite itself,
            # and not the iterator object.
            if isinstance(sprite, Sprite):
                if not self.has_internal(sprite):
                    self.add_internal(sprite, layer)
                    sprite.add_internal(self)
            else:
                try:
                    # See if sprite is an iterator, like a list or sprite
                    # group.
                    self.add(*sprite, **kwargs)
                except (TypeError, AttributeError):
                    # Not iterable. This is probably a sprite that is not an
                    # instance of the Sprite class or is not an instance of a
                    # subclass of the Sprite class. Alternately, it could be an
                    # old-style sprite group.
                    if hasattr(sprite, '_spritegroup'):
                        for spr in sprite.sprites():
                            if not self.has_internal(spr):
                                self.add_internal(spr, layer)
                                spr.add_internal(self)
                    elif not self.has_internal(sprite):
                        self.add_internal(sprite, layer)
                        sprite.add_internal(self)
    def remove_internal(self, sprite):
        """Do not use this method directly.
        The group uses it to add a sprite.
        """
        self._spritelist.remove(sprite)
        # these dirty rects are suboptimal for one frame
        r = self.spritedict[sprite]
        # only record a dirty rect if the sprite was actually drawn once
        if r is not self._init_rect:
            self.lostsprites.append(r) # dirty rect
            if hasattr(sprite, 'rect'):
                self.lostsprites.append(sprite.rect) # dirty rect
        del self.spritedict[sprite]
        del self._spritelayers[sprite]
    def sprites(self):
        """return a ordered list of sprites (first back, last top).
        LayeredUpdates.sprites(): return sprites
        """
        # copy, so callers can mutate the group during iteration
        return list(self._spritelist)
    def draw(self, surface):
        """draw all sprites in the right order onto the passed surface
        LayeredUpdates.draw(surface): return Rect_list
        """
        spritedict = self.spritedict
        surface_blit = surface.blit
        dirty = self.lostsprites
        self.lostsprites = []
        dirty_append = dirty.append
        init_rect = self._init_rect
        for spr in self.sprites():
            rec = spritedict[spr]
            newrect = surface_blit(spr.image, spr.rect)
            if rec is init_rect:
                # first draw of this sprite: only the new area is dirty
                dirty_append(newrect)
            else:
                if newrect.colliderect(rec):
                    # old/new areas overlap: merge them into one rect
                    dirty_append(newrect.union(rec))
                else:
                    dirty_append(newrect)
                    dirty_append(rec)
            spritedict[spr] = newrect
        return dirty
    def get_sprites_at(self, pos):
        """return a list with all sprites at that position
        LayeredUpdates.get_sprites_at(pos): return colliding_sprites
        Bottom sprites are listed first; the top ones are listed last.
        """
        _sprites = self._spritelist
        # a zero-size rect at pos still collides with rects containing pos
        rect = Rect(pos, (0, 0))
        colliding_idx = rect.collidelistall(_sprites)
        colliding = [_sprites[i] for i in colliding_idx]
        return colliding
    def get_sprite(self, idx):
        """return the sprite at the index idx from the groups sprites
        LayeredUpdates.get_sprite(idx): return sprite
        Raises IndexOutOfBounds if the idx is not within range.
        """
        return self._spritelist[idx]
    def remove_sprites_of_layer(self, layer_nr):
        """remove all sprites from a layer and return them as a list
        LayeredUpdates.remove_sprites_of_layer(layer_nr): return sprites
        """
        sprites = self.get_sprites_from_layer(layer_nr)
        self.remove(*sprites)
        return sprites
    #---# layer methods
    def layers(self):
        """return a list of unique defined layers defined.
        LayeredUpdates.layers(): return layers
        """
        return sorted(set(self._spritelayers.values()))
    def change_layer(self, sprite, new_layer):
        """change the layer of the sprite
        LayeredUpdates.change_layer(sprite, new_layer): return None
        The sprite must have been added to the renderer already. This is not
        checked.
        """
        sprites = self._spritelist # speedup
        sprites_layers = self._spritelayers # speedup
        sprites.remove(sprite)
        sprites_layers.pop(sprite)
        # add the sprite at the right position
        # bisect algorithmus
        # same bisect-then-scan insertion as add_internal: binary search
        # finds the layer region, linear scan appends after layer peers
        leng = len(sprites)
        low = mid = 0
        high = leng - 1
        while low <= high:
            mid = low + (high - low) // 2
            if sprites_layers[sprites[mid]] <= new_layer:
                low = mid + 1
            else:
                high = mid - 1
        # linear search to find final position
        while mid < leng and sprites_layers[sprites[mid]] <= new_layer:
            mid += 1
        sprites.insert(mid, sprite)
        # NOTE(review): this checks 'layer' while add_internal writes
        # '_layer'; a plain Sprite with only _layer will not get its
        # attribute updated here -- confirm whether that asymmetry is
        # intentional (DirtySprite exposes a 'layer' property).
        if hasattr(sprite, 'layer'):
            sprite.layer = new_layer
        # add layer info
        sprites_layers[sprite] = new_layer
    def get_layer_of_sprite(self, sprite):
        """return the layer that sprite is currently in
        If the sprite is not found, then it will return the default layer.
        """
        return self._spritelayers.get(sprite, self._default_layer)
    def get_top_layer(self):
        """return the top layer
        LayeredUpdates.get_top_layer(): return layer
        """
        # _spritelist is sorted by layer, so the last sprite is top-most
        return self._spritelayers[self._spritelist[-1]]
    def get_bottom_layer(self):
        """return the bottom layer
        LayeredUpdates.get_bottom_layer(): return layer
        """
        return self._spritelayers[self._spritelist[0]]
    def move_to_front(self, sprite):
        """bring the sprite to front layer
        LayeredUpdates.move_to_front(sprite): return None
        Brings the sprite to front by changing the sprite layer to the top-most
        layer. The sprite is added at the end of the list of sprites in that
        top-most layer.
        """
        self.change_layer(sprite, self.get_top_layer())
    def move_to_back(self, sprite):
        """move the sprite to the bottom layer
        LayeredUpdates.move_to_back(sprite): return None
        Moves the sprite to the bottom layer by moving it to a new layer below
        the current bottom layer.
        """
        self.change_layer(sprite, self.get_bottom_layer() - 1)
    def get_top_sprite(self):
        """return the topmost sprite
        LayeredUpdates.get_top_sprite(): return Sprite
        """
        return self._spritelist[-1]
    def get_sprites_from_layer(self, layer):
        """return all sprites from a layer ordered as they where added
        LayeredUpdates.get_sprites_from_layer(layer): return sprites
        Returns all sprites from a layer. The sprites are ordered in the
        sequence that they where added. (The sprites are not removed from the
        layer.
        """
        sprites = []
        sprites_append = sprites.append
        sprite_layers = self._spritelayers
        for spr in self._spritelist:
            if sprite_layers[spr] == layer:
                sprites_append(spr)
            elif sprite_layers[spr] > layer:# break after because no other will
                                            # follow with same layer
                # early exit is valid because _spritelist is layer-sorted
                break
        return sprites
    def switch_layer(self, layer1_nr, layer2_nr):
        """switch the sprites from layer1_nr to layer2_nr
        LayeredUpdates.switch_layer(layer1_nr, layer2_nr): return None
        The layers number must exist. This method does not check for the
        existence of the given layers.
        """
        # pull layer1's sprites out first so the two moves don't collide
        sprites1 = self.remove_sprites_of_layer(layer1_nr)
        for spr in self.get_sprites_from_layer(layer2_nr):
            self.change_layer(spr, layer1_nr)
        self.add(layer=layer2_nr, *sprites1)
class LayeredDirty(LayeredUpdates):
    """LayeredDirty Group is for DirtySprites; subclasses LayeredUpdates
    pygame.sprite.LayeredDirty(*spites, **kwargs): return LayeredDirty
    This group requires pygame.sprite.DirtySprite or any sprite that
    has the following attributes:
    image, rect, dirty, visible, blendmode (see doc of DirtySprite).
    It uses the dirty flag technique and is therefore faster than
    pygame.sprite.RenderUpdates if you have many static sprites. It
    also switches automatically between dirty rect updating and full
    screen drawing, so you do no have to worry which would be faster.
    As with the pygame.sprite.Group, you can specify some additional attributes
    through kwargs:
    _use_update: True/False (default is False)
    _default_layer: default layer where the sprites without a layer are
    added
    _time_threshold: treshold time for switching between dirty rect mode
    and fullscreen mode; defaults to updating at 80 frames per second,
    which is equal to 1000.0 / 80.0
    New in pygame 1.8.0
    """
    def __init__(self, *sprites, **kwargs):
        """initialize group.
        pygame.sprite.LayeredDirty(*spites, **kwargs): return LayeredDirty
        You can specify some additional attributes through kwargs:
        _use_update: True/False (default is False)
        _default_layer: default layer where the sprites without a layer are
        added
        _time_threshold: treshold time for switching between dirty rect
        mode and fullscreen mode; defaults to updating at 80 frames per
        second, which is equal to 1000.0 / 80.0
        """
        LayeredUpdates.__init__(self, *sprites, **kwargs)
        self._clip = None
        self._use_update = False
        self._time_threshold = 1000.0 / 80.0 # 1000.0 / fps
        self._bgd = None
        # apply whitelisted kwargs AFTER the defaults above so callers can
        # override them ('_default_layer' was set by LayeredUpdates.__init__)
        for key, val in kwargs.items():
            if key in ['_use_update', '_time_threshold', '_default_layer']:
                if hasattr(self, key):
                    setattr(self, key, val)
    def add_internal(self, sprite, layer=None):
        """Do not use this method directly.
        It is used by the group to add a sprite internally.
        """
        # check if all needed attributes are set
        if not hasattr(sprite, 'dirty'):
            raise AttributeError()
        if not hasattr(sprite, 'visible'):
            raise AttributeError()
        if not hasattr(sprite, 'blendmode'):
            raise AttributeError()
        if not isinstance(sprite, DirtySprite):
            raise TypeError()
        if sprite.dirty == 0: # set it dirty if it is not
            sprite.dirty = 1
        LayeredUpdates.add_internal(self, sprite, layer)
    def draw(self, surface, bgd=None):
        """draw all sprites in the right order onto the given surface
        LayeredDirty.draw(surface, bgd=None): return Rect_list
        You can pass the background too. If a self.bgd is already set to some
        value that is not None, then the bgd argument has no effect.

        Runs in one of two modes: dirty-rect updating (only changed areas
        are repainted) or full-screen redraw; the mode for the NEXT call is
        chosen at the end based on how long this call took.
        """
        # speedups
        _orig_clip = surface.get_clip()
        _clip = self._clip
        if _clip is None:
            _clip = _orig_clip
        _surf = surface
        _sprites = self._spritelist
        _old_rect = self.spritedict
        _update = self.lostsprites
        _update_append = _update.append
        _ret = None
        _surf_blit = _surf.blit
        _rect = Rect
        if bgd is not None:
            self._bgd = bgd
        _bgd = self._bgd
        init_rect = self._init_rect
        _surf.set_clip(_clip)
        # -------
        # 0. decide whether to render with update or flip
        start_time = get_ticks()
        if self._use_update: # dirty rects mode
            # 1. find dirty area on screen and put the rects into _update
            # still not happy with that part
            for spr in _sprites:
                if 0 < spr.dirty:
                    # chose the right rect
                    if spr.source_rect:
                        _union_rect = _rect(spr.rect.topleft,
                                            spr.source_rect.size)
                    else:
                        _union_rect = _rect(spr.rect)
                    # merge the sprite's new area with every overlapping
                    # rect already collected, collapsing them into one
                    _union_rect_collidelist = _union_rect.collidelist
                    _union_rect_union_ip = _union_rect.union_ip
                    i = _union_rect_collidelist(_update)
                    while -1 < i:
                        _union_rect_union_ip(_update[i])
                        del _update[i]
                        i = _union_rect_collidelist(_update)
                    _update_append(_union_rect.clip(_clip))
                    # the previously drawn area is dirty too (sprite moved)
                    if _old_rect[spr] is not init_rect:
                        _union_rect = _rect(_old_rect[spr])
                        _union_rect_collidelist = _union_rect.collidelist
                        _union_rect_union_ip = _union_rect.union_ip
                        i = _union_rect_collidelist(_update)
                        while -1 < i:
                            _union_rect_union_ip(_update[i])
                            del _update[i]
                            i = _union_rect_collidelist(_update)
                        _update_append(_union_rect.clip(_clip))
            # can it be done better? because that is an O(n**2) algorithm in
            # worst case
            # clear using background
            if _bgd is not None:
                for rec in _update:
                    _surf_blit(_bgd, rec, rec)
            # 2. draw
            # NOTE(review): this reads spr._visible (the backing field)
            # while add_internal validated a 'visible' attribute -- confirm
            # DirtySprite keeps the two in sync via a property.
            for spr in _sprites:
                if 1 > spr.dirty:
                    if spr._visible:
                        # sprite not dirty; blit only the intersecting part
                        _spr_rect = spr.rect
                        if spr.source_rect is not None:
                            _spr_rect = Rect(spr.rect.topleft,
                                             spr.source_rect.size)
                        _spr_rect_clip = _spr_rect.clip
                        for idx in _spr_rect.collidelistall(_update):
                            # clip
                            clip = _spr_rect_clip(_update[idx])
                            _surf_blit(spr.image,
                                       clip,
                                       (clip[0] - _spr_rect[0],
                                        clip[1] - _spr_rect[1],
                                        clip[2],
                                        clip[3]),
                                       spr.blendmode)
                else: # dirty sprite
                    if spr._visible:
                        _old_rect[spr] = _surf_blit(spr.image,
                                                    spr.rect,
                                                    spr.source_rect,
                                                    spr.blendmode)
                    # dirty == 2 means "always dirty" and is left untouched
                    if spr.dirty == 1:
                        spr.dirty = 0
            _ret = list(_update)
        else: # flip, full screen mode
            if _bgd is not None:
                _surf_blit(_bgd, (0, 0))
            for spr in _sprites:
                if spr._visible:
                    _old_rect[spr] = _surf_blit(spr.image,
                                                spr.rect,
                                                spr.source_rect,
                                                spr.blendmode)
            _ret = [_rect(_clip)] # return only the part of the screen changed
        # timing for switching modes
        # How may a good threshold be found? It depends on the hardware.
        end_time = get_ticks()
        # too slow this frame -> fall back to full-screen mode next frame
        if end_time-start_time > self._time_threshold:
            self._use_update = False
        else:
            self._use_update = True
        # empty dirty rects list
        _update[:] = []
        # -------
        # restore original clip
        _surf.set_clip(_orig_clip)
        return _ret
    def clear(self, surface, bgd):
        """use to set background
        Group.clear(surface, bgd): return None

        Unlike AbstractGroup.clear, this only stores the background; the
        actual erasing happens inside draw().
        """
        self._bgd = bgd
    def repaint_rect(self, screen_rect):
        """repaint the given area
        LayeredDirty.repaint_rect(screen_rect): return None
        screen_rect is in screen coordinates.
        """
        if self._clip:
            self.lostsprites.append(screen_rect.clip(self._clip))
        else:
            self.lostsprites.append(Rect(screen_rect))
    def set_clip(self, screen_rect=None):
        """clip the area where to draw; pass None (default) to reset the clip
        LayeredDirty.set_clip(screen_rect=None): return None
        """
        if screen_rect is None:
            self._clip = pygame.display.get_surface().get_rect()
        else:
            self._clip = screen_rect
        # force one full-screen redraw so the new clip takes effect
        self._use_update = False
    def get_clip(self):
        """get the area where drawing will occur
        LayeredDirty.get_clip(): return Rect
        """
        return self._clip
    def change_layer(self, sprite, new_layer):
        """change the layer of the sprite
        LayeredUpdates.change_layer(sprite, new_layer): return None
        The sprite must have been added to the renderer already. This is not
        checked.
        """
        LayeredUpdates.change_layer(self, sprite, new_layer)
        # moving between layers changes stacking, so force a redraw
        if sprite.dirty == 0:
            sprite.dirty = 1
    def set_timing_treshold(self, time_ms):
        """set the treshold in milliseconds
        set_timing_treshold(time_ms): return None
        Defaults to 1000.0 / 80.0. This means that the screen will be painted
        using the flip method rather than the update method if the update
        method is taking so long to update the screen that the frame rate falls
        below 80 frames per second.
        """
        self._time_threshold = time_ms
class GroupSingle(AbstractGroup):
    """A group container that holds a single most recent item.
    This class works just like a regular group, but it only keeps a single
    sprite in the group. Whatever sprite has been added to the group last will
    be the only sprite in the group.
    You can access its one sprite as the .sprite attribute. Assigning to this
    attribute will properly remove the old sprite and then add the new one.
    """
    def __init__(self, sprite=None):
        AbstractGroup.__init__(self)
        self.__sprite = None
        if sprite is not None:
            self.add(sprite)
    def copy(self):
        return GroupSingle(self.__sprite)
    def sprites(self):
        if self.__sprite is not None:
            return [self.__sprite]
        else:
            return []
    def add_internal(self, sprite):
        # Adding displaces the previous sprite, if any: membership links
        # are torn down on both sides before the new sprite is stored.
        if self.__sprite is not None:
            self.__sprite.remove_internal(self)
            self.remove_internal(self.__sprite)
        self.__sprite = sprite
    def __nonzero__(self):
        return self.__sprite is not None
    # Python 3 consults __bool__ (not the Python 2 name __nonzero__) for
    # truth testing; without this alias an empty GroupSingle would still
    # evaluate as True there.
    __bool__ = __nonzero__
    def _get_sprite(self):
        return self.__sprite
    def _set_sprite(self, sprite):
        self.add_internal(sprite)
        sprite.add_internal(self)
        return sprite
    sprite = property(_get_sprite,
                      _set_sprite,
                      None,
                      "The sprite contained in this group")
    def remove_internal(self, sprite):
        if sprite is self.__sprite:
            self.__sprite = None
        if sprite in self.spritedict:
            AbstractGroup.remove_internal(self, sprite)
    def has_internal(self, sprite):
        return self.__sprite is sprite
    # Optimizations...
    def __contains__(self, sprite):
        return self.__sprite is sprite
# Some different collision detection functions that could be used.
def collide_rect(left, right):
    """collision detection between two sprites, using rects.
    pygame.sprite.collide_rect(left, right): return bool
    Reports whether the two sprites' bounding rectangles overlap, via
    pygame.Rect.colliderect.  Meant to be handed to the *collide functions
    as their 'collided' callback; both sprites need a "rect" attribute.
    New in pygame 1.8.0
    """
    lrect = left.rect
    rrect = right.rect
    return lrect.colliderect(rrect)
class collide_rect_ratio:
    """A callable class that checks for collisions using scaled rects
    Collision between two sprites is tested with copies of their rects that
    have been grown (or shrunk) by a fixed ratio.  Construct it with the
    ratio and pass the instance as the 'collided' callback to the *collide
    functions.
    New in pygame 1.8.1
    """
    def __init__(self, ratio):
        """create a new collide_rect_ratio callable
        *ratio* is a floating point factor applied to each sprite rect's
        width and height before the overlap test.
        """
        self.ratio = ratio
    def _scaled(self, rect):
        # return a copy of *rect* inflated about its centre by self.ratio
        factor = self.ratio
        return rect.inflate(rect.width * factor - rect.width,
                            rect.height * factor - rect.height)
    def __call__(self, left, right):
        """detect collision between two sprites using scaled rects
        pygame.sprite.collide_rect_ratio(ratio)(left, right): return bool
        Uses pygame.Rect.colliderect on the two scaled rects.  Sprites must
        have "rect" attributes.
        """
        return self._scaled(left.rect).colliderect(self._scaled(right.rect))
def collide_circle(left, right):
    """detect collision between two sprites using circles
    pygame.sprite.collide_circle(left, right): return bool
    Two circles centred on the sprites are tested for overlap.  A sprite's
    "radius" attribute is used when present; otherwise a radius big enough
    to enclose the sprite's rect (half its diagonal) is computed and cached
    on the sprite.  Intended as a 'collided' callback for the *collide
    functions; sprites need a "rect" and an optional "radius" attribute.
    New in pygame 1.8.0
    """
    def _radius_of(sprite):
        # use an explicit radius when present; otherwise enclose the rect
        # with half its diagonal and cache that value on the sprite (may
        # over-approximate long thin rects)
        try:
            return sprite.radius
        except AttributeError:
            rect = sprite.rect
            radius = 0.5 * ((rect.width ** 2 + rect.height ** 2) ** 0.5)
            sprite.radius = radius
            return radius
    dx = left.rect.centerx - right.rect.centerx
    dy = left.rect.centery - right.rect.centery
    reach = _radius_of(left) + _radius_of(right)
    return dx * dx + dy * dy <= reach ** 2
class collide_circle_ratio(object):
    """detect collision between two sprites using scaled circles
    This callable class checks for collisions between two sprites using a
    scaled version of a sprite's radius. It is created with a ratio as the
    argument to the constructor. The instance is then intended to be passed as
    a collided callback function to the *collide functions.
    New in pygame 1.8.1
    """
    def __init__(self, ratio):
        """creates a new collide_circle_ratio callable instance
        The given ratio is expected to be a floating point value used to scale
        the underlying sprite radius before checking for collisions.
        When the ratio is ratio=1.0, then it behaves exactly like the
        collide_circle method.
        """
        self.ratio = ratio
    def __call__(self, left, right):
        """detect collision between two sprites using scaled circles
        pygame.sprite.collide_circle_radio(ratio)(left, right): return bool
        Tests for collision between two sprites by testing whether two circles
        centered on the sprites overlap after scaling the circle's radius by
        the stored ratio. If the sprites have a "radius" attribute, that is
        used to create the circle; otherwise, a circle is created that is big
        enough to completely enclose the sprite's rect as given by the "rect"
        attribute. Intended to be passed as a collided callback function to the
        *collide functions. Sprites must have a "rect" and an optional "radius"
        attribute.
        """
        ratio = self.ratio
        xdistance = left.rect.centerx - right.rect.centerx
        ydistance = left.rect.centery - right.rect.centery
        distancesquared = xdistance ** 2 + ydistance ** 2
        if hasattr(left, "radius"):
            leftradius = left.radius
        else:
            leftrect = left.rect
            leftradius = 0.5 * ((leftrect.width ** 2 + leftrect.height ** 2) ** 0.5)
            # Cache the UNSCALED radius on the sprite.  Caching the scaled
            # value (as this code previously did) made later calls with a
            # different ratio -- or plain collide_circle -- apply the ratio
            # twice.
            setattr(left, 'radius', leftradius)
        leftradius *= ratio
        if hasattr(right, "radius"):
            rightradius = right.radius
        else:
            rightrect = right.rect
            rightradius = 0.5 * ((rightrect.width ** 2 + rightrect.height ** 2) ** 0.5)
            # store the unscaled radius on the sprite for next time
            setattr(right, 'radius', rightradius)
        rightradius *= ratio
        return distancesquared <= (leftradius + rightradius) ** 2
def collide_mask(left, right):
    """collision detection between two sprites, using masks.
    pygame.sprite.collide_mask(SpriteLeft, SpriteRight): bool
    Reports whether the sprites' bitmasks overlap.  A sprite's "mask"
    attribute is used when present; otherwise a mask is derived from its
    image.  Intended as a 'collided' callback for the *collide functions;
    sprites need a "rect" and an optional "mask" attribute.
    New in pygame 1.8.0
    """
    def _mask_of(sprite):
        # prefer a precomputed mask; fall back to building one on the fly
        try:
            return sprite.mask
        except AttributeError:
            return from_surface(sprite.image)
    offset = (right.rect[0] - left.rect[0],
              right.rect[1] - left.rect[1])
    return _mask_of(left).overlap(_mask_of(right), offset)
def spritecollide(sprite, group, dokill, collided=None):
    """find Sprites in a Group that intersect another Sprite
    pygame.sprite.spritecollide(sprite, group, dokill, collided=None):
        return Sprite_list
    Returns the list of all sprites in *group* that collide with *sprite*.
    With dokill=True every colliding group member is also kill()ed.
    *collided*, when given, is a callback taking the two sprites and
    returning a bool; otherwise both sprites must expose a "rect"
    attribute and pygame.Rect.colliderect decides.
    """
    if not dokill:
        if collided:
            return [member for member in group if collided(sprite, member)]
        rect_collide = sprite.rect.colliderect
        return [member for member in group if rect_collide(member.rect)]
    crashed = []
    if collided:
        for member in group.sprites():
            if collided(sprite, member):
                member.kill()
                crashed.append(member)
    else:
        rect_collide = sprite.rect.colliderect
        for member in group.sprites():
            if rect_collide(member.rect):
                member.kill()
                crashed.append(member)
    return crashed
def groupcollide(groupa, groupb, dokilla, dokillb, collided=None):
    """detect collision between a group and another group
    pygame.sprite.groupcollide(groupa, groupb, dokilla, dokillb):
        return dict
    Finds every intersection between the sprites of the two groups and
    returns a dict mapping each colliding sprite of *groupa* to the list of
    *groupb* sprites it hit.  dokilla/dokillb control whether colliding
    sprites are removed from all their groups.  *collided* is the optional
    per-pair callback; without it both sprites need a "rect" attribute.
    """
    crashed = {}
    if dokilla:
        for member in groupa.sprites():
            hits = spritecollide(member, groupb, dokillb, collided)
            if hits:
                crashed[member] = hits
                member.kill()
    else:
        for member in groupa:
            hits = spritecollide(member, groupb, dokillb, collided)
            if hits:
                crashed[member] = hits
    return crashed
def spritecollideany(sprite, group, collided=None):
    """finds any sprites in a group that collide with the given sprite
    pygame.sprite.spritecollideany(sprite, group): return sprite
    Returns the first group member found to collide with *sprite*, or None
    when nothing collides.  Cheaper than spritecollide when only a yes/no
    answer (plus one witness) is needed.
    *collided* is the optional pair-wise callback; without it both sprites
    must expose a "rect" attribute and pygame.Rect.colliderect decides.
    """
    if collided:
        for member in group:
            if collided(sprite, member):
                return member
        return None
    # Special-case the plain rect test for speed.
    rect_collide = sprite.rect.colliderect
    for member in group:
        if rect_collide(member.rect):
            return member
    return None
| gpl-3.0 |
s20121035/rk3288_android5.1_repo | external/chromium_org/tools/resources/find_used_resources.py | 55 | 1754 | #!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import re
import sys
usage = """find_used_resources.py
Prints out (to stdout) the sorted list of resource ids that are part of unknown
pragma warning in the given build log (via stdin).
This script is used to find the resources that are actually compiled in Chrome
in order to only include the needed strings/images in Chrome PAK files. The
script parses out the list of used resource ids. These resource ids show up in
the build output after building Chrome with gyp variable
enable_resource_whitelist_generation set to 1. This gyp flag causes the compiler
to print out a UnknownPragma message every time a resource id is used. E.g.:
foo.cc:22:0: warning: ignoring #pragma whitelisted_resource_12345
[-Wunknown-pragmas]
On Windows, the message is simply a message via __pragma(message(...)).
"""
def GetResourceIdsInPragmaWarnings(input):
    """Return the sorted list of resource ids found in pragma warnings.

    Scans each line of *input* (any iterable of strings, e.g. a file) for
    the compiler's 'whitelisted_resource_<id>' unknown-pragma warning and
    collects the numeric ids; duplicates are folded via a set.
    """
    pattern = re.compile('whitelisted_resource_(?P<resource_id>[0-9]+)')
    found = set()
    for line in input:
        match = pattern.search(line)
        if match:
            found.add(int(match.group('resource_id')))
    return sorted(found)
def Main():
    """Entry point: reject any arguments, then print the resource ids
    parsed from stdin, one per line."""
    if len(sys.argv) != 1:
        sys.stderr.write(usage)
        sys.exit(1)
    for resource_id in GetResourceIdsInPragmaWarnings(sys.stdin):
        sys.stdout.write('%d\n' % resource_id)
if __name__ == '__main__':
    Main()
| gpl-3.0 |
bart-h/linux-kernel-wm8505 | Documentation/networking/cxacru-cf.py | 14668 | 1626 | #!/usr/bin/env python
# Copyright 2009 Simon Arlott
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 2 of the License, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 59
# Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# Usage: cxacru-cf.py < cxacru-cf.bin
# Output: values string suitable for the sysfs adsl_config attribute
#
# Warning: cxacru-cf.bin with MD5 hash cdbac2689969d5ed5d4850f117702110
# contains mis-aligned values which will stop the modem from being able
# to make a connection. If the first and last two bytes are removed then
# the values become valid, but the modulation will be forced to ANSI
# T1.413 only which may not be appropriate.
#
# The original binary format is a packed list of le32 values.
import sys
import struct
# Stream stdin four bytes at a time; each little-endian u32 becomes one
# "index=value" pair (index printed in hex), space-separated on one line.
# NOTE(review): this script targets Python 2 — sys.stdin.read() must yield
# raw byte strings for struct.unpack() to accept them; confirm before
# running under Python 3.
index = 0
while True:
    chunk = sys.stdin.read(4)
    if not chunk:
        # Clean EOF on a 4-byte boundary: done.
        break
    if len(chunk) != 4:
        # Truncated trailing value: terminate the output line, complain, bail.
        sys.stdout.write("\n")
        sys.stderr.write("Error: read {0} not 4 bytes\n".format(len(chunk)))
        sys.exit(1)
    if index:
        sys.stdout.write(" ")
    sys.stdout.write("{0:x}={1}".format(index, struct.unpack("<I", chunk)[0]))
    index += 1
sys.stdout.write("\n")
| gpl-2.0 |
bsmelo/soletta | data/scripts/sol-flow-node-type-validate.py | 9 | 10615 | #!/usr/bin/env python3
# This file is part of the Soletta (TM) Project
#
# Copyright (C) 2015 Intel Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import jsonschema
import json
import re
import sys
# NOTE: this function is replicated in other files, update all copies!
def json_load_and_check(data_file, schema_file, context_lines=3, schema_max_depth=2, check_schema=True):
    """Full check of JSON, with meaningful error messages.

    This function will open data_file and schema_file, doing full
    validation and returning the object on success, or raising an
    exception on failure.

    On failures, a meaningful message is printed taking previous
    context_lines in log messages, making people able to figure out
    what was wrong.

    Messages are printed in the standard format:

        file:line:column: message

    making it possible for other tools (ie: editors) to locate it for
    users.

    :param data_file: open file object with the JSON document to validate.
    :param schema_file: open file object with the JSON Schema.
    :param context_lines: number of source lines to echo before an error.
    :param schema_max_depth: how deep to print the violated schema fragment
        on validation errors (0 disables the dump, negative means unlimited).
    :param check_schema: also validate the schema against its meta-schema.
    :return: the parsed data document on success.
    :raises ValueError: on malformed JSON (after printing diagnostics).
    :raises jsonschema.SchemaError: on an invalid schema.
    :raises jsonschema.ValidationError: on data not matching the schema.
    """
    # json is bad and does not provide location as exception fields,
    # but at least it uses 2 fixed formats in its error messages
    # see json/decoder.py errmsg()
    re_single_loc = re.compile(r"^(?P<msg>.*): line (?P<line>\d+) column (?P<column>\d+) [(]char .*[)]$")
    re_range_loc = re.compile(r"^(?P<msg>.*): line (?P<line_start>\d+) column (?P<column_start>\d+) - line (?P<line_end>\d+) column (?P<column_end>\d+) [(]char .*[)]$")

    def show_file_context(lines, filename, lineno, colno, linefmt_size=0):
        # Echo up to context_lines source lines before (and including)
        # lineno, then a "----^" caret line pointing at colno. All output
        # goes to stderr in "file:line: text" format.
        if linefmt_size < 1:
            linefmt_size = len("%d" % (lineno,))
        for i in range(max(lineno - context_lines, 0), lineno):
            sys.stderr.write("%s:%0*d: %s\n" %
                             (filename, linefmt_size, i + 1, lines[i]))
        sys.stderr.write("%s:%0*d: %s^\n" %
                         (filename, linefmt_size, lineno, '-' * (colno - 1)))

    def show_json_load_exception(exc, contents, filename):
        # Translate a json.loads() ValueError into editor-friendly
        # "file:line:column: message" diagnostics, matching the two fixed
        # message formats produced by json/decoder.py errmsg().
        excstr = str(exc)
        re_match = re_range_loc.match(excstr)
        lines = contents.split('\n')
        if re_match:
            # Range-style message: report both start and end locations,
            # each with its own source context.
            lineno_start = int(re_match.group("line_start"))
            colno_start = int(re_match.group("column_start"))
            lineno_end = int(re_match.group("line_end"))
            colno_end = int(re_match.group("column_end"))
            colfmt_size = len("%d" % (max(colno_start, colno_end),))
            linefmt_size = len("%d" % (lineno_end,))
            msg = re_match.group("msg")
            show_file_context(lines, filename, lineno_start, colno_start,
                              linefmt_size=linefmt_size)
            sys.stderr.write("%s:%0*d:%0*d: error: start of %s\n" % (
                filename,
                linefmt_size, lineno_start,
                colfmt_size, colno_start, msg))
            show_file_context(lines, filename, lineno_end, colno_end,
                              linefmt_size=linefmt_size)
            sys.stderr.write("%s:%0*d:%0*d: error: end of %s\n" % (
                filename,
                linefmt_size, lineno_end,
                colfmt_size, colno_end, msg))
            return
        re_match = re_single_loc.match(excstr)
        if re_match:
            # Single-location message.
            lineno = int(re_match.group("line"))
            colno = int(re_match.group("column"))
            location = "%s:%d:%d" % (filename, lineno, colno)
            msg = re_match.group("msg")
            char = lines[lineno - 1][colno - 1]
            show_file_context(lines, filename, lineno, colno)
            sys.stderr.write("%s: error: %s\n" % (location, msg))
            # Common mistake: a trailing ',' right before a closing '}' or
            # ']' -- add a friendlier hint for that specific case.
            if (msg == "Expecting property name enclosed in double quotes" and char == '}') \
               or (msg == "Expecting value" and char == ']'):
                sys.stderr.write("%s: error: maybe trailing ',' is dangling prior to closing braces?\n" % (location))
            return
        else:
            # Message format we do not recognize: print it verbatim,
            # without a line/column location.
            sys.stderr.write("%s: error: %s\n" % (filename, excstr))
            return

    def load_json(file):
        # Parse the whole file, printing friendly diagnostics before
        # re-raising the original ValueError on malformed JSON.
        contents = file.read()
        try:
            return json.loads(contents)
        except ValueError as e:
            show_json_load_exception(e, contents, file.name)
            raise

    def show_schema_exception(exc, filename):
        # Pretty-print a jsonschema error: dump the faulty object (or its
        # parent container, for scalars), optionally the schema fragment it
        # violated, then a final "file: error: path: message" line.
        if not exc.context:
            # No sub-errors to detail; just print the top-level message.
            sys.stderr.write("%s: %s\n" % (filename, exc.message))
            return

        def path_to_str(path, varname="json"):
            # Render a jsonschema path as a subscript chain,
            # e.g. json['nodes'][3]['name'].
            s = "%s" % (varname,)
            for p in path:
                s += '[%r]' % p
            return s

        def show_obj(msg, obj, abspath):
            # Dump the offending object to stderr: dicts and lists are
            # listed entry-by-entry with values truncated around 50 chars;
            # for a scalar, recurse on its parent container instead so some
            # surrounding structure is visible.
            abspathstr = path_to_str(abspath)
            if isinstance(obj, dict):
                sys.stderr.write("%s: %s at %s = {\n" %
                                 (filename, msg, abspathstr))
                for k in sorted(obj.keys()):
                    klen = len(k)
                    val = json.dumps(obj[k], sort_keys=True)
                    if len(val) + klen > 50:
                        # Keep key + value roughly within one line; always
                        # preserve the closing bracket/quote of the value.
                        maxlen = max(50 - klen, 10)
                        val = "%s...%s" % (val[:maxlen], val[-1])
                    sys.stderr.write("%s: %r: %s\n" % (filename, k, val))
                sys.stderr.write("%s: }\n" % (filename,))
            elif isinstance(obj, list):
                sys.stderr.write("%s: %s at %s = [\n" %
                                 (filename, msg, abspathstr))
                fmtlen = len("%d" % len(obj))
                for i, val in enumerate(obj):
                    val = json.dumps(val, sort_keys=True)
                    if len(val) > 50:
                        val = "%s...%s" % (val[:50], val[-1])
                    sys.stderr.write("%s: %0*d: %s\n" %
                                     (filename, fmtlen, i, val))
                sys.stderr.write("%s: ]\n" % (filename,))
            else:
                # Scalar: walk from the document root down to the parent
                # container and dump that instead.
                parent_path = list(abspath)[:-1]
                parent_obj = exc.instance
                for p in parent_path:
                    parent_obj = parent_obj[p]
                show_obj("parent of " + msg, parent_obj, parent_path)

        def show_schema(schemaobj, abspath):
            # Dump the schema fragment that was violated, down to
            # schema_max_depth nesting levels (non-positive depth guard
            # below means "unlimited" for 0/negative values).
            abspathstr = path_to_str(abspath)
            sys.stderr.write("%s: schema at %s:\n" % (filename, abspathstr))

            def show_list(lst, indent=1):
                if schema_max_depth > 0 and indent > schema_max_depth:
                    return
                indentstr = " " * indent
                for i, v in enumerate(lst):
                    if isinstance(v, dict):
                        show_dict(v, indent + 1)
                    elif isinstance(v, list):
                        show_list(v, indent + 1)
                    else:
                        sys.stderr.write("%s: %s%r\n" % (filename, indentstr, v))

            def show_dict(obj, indent=1):
                if schema_max_depth > 0 and indent > schema_max_depth:
                    return
                indentstr = " " * indent
                for k in sorted(obj.keys()):
                    sys.stderr.write("%s: %s%s: " % (filename, indentstr, k))
                    v = obj[k]
                    if isinstance(v, str) and k == "$ref":
                        # Follow "$ref" indirections so the user sees the
                        # referenced definition, not just its name.
                        with validator.resolver.resolving(v) as resolved:
                            sys.stderr.write("%s (expanded below)\n" % (v,))
                            show_dict(resolved, indent + 1)
                    elif isinstance(v, dict):
                        sys.stderr.write("\n")
                        show_dict(v, indent + 1)
                    elif isinstance(v, list):
                        sys.stderr.write("\n")
                        show_list(v, indent + 1)
                    else:
                        sys.stderr.write("%r\n" % (v,))

            for k in sorted(schemaobj.keys()):
                v = schemaobj[k]
                sys.stderr.write("%s: %s: " % (filename, k))
                if isinstance(v, list):
                    sys.stderr.write("\n")
                    show_list(v)
                elif isinstance(v, dict):
                    sys.stderr.write("\n")
                    show_dict(v)
                else:
                    sys.stderr.write("%s\n" % (v,))

        # Detail only the last sub-error from the context list.
        ctx = exc.context[-1]
        abspathstr = path_to_str(ctx.absolute_path)
        obj = ctx.instance
        show_obj("faulty object", obj, ctx.absolute_path)
        if schema_max_depth != 0:
            show_schema(ctx.schema, ctx.absolute_schema_path)
        sys.stderr.write("%s: error: %s: %s\n" % (filename, abspathstr, ctx.message))

    data = load_json(data_file)
    schema = load_json(schema_file)

    # Pick the validator class matching the schema's declared draft.
    validator_cls = jsonschema.validators.validator_for(schema)
    try:
        if check_schema:
            # Validate the schema itself against its meta-schema first.
            validator_cls.check_schema(schema)
    except jsonschema.SchemaError as e:
        show_schema_exception(e, schema_file.name)
        raise

    validator = validator_cls(schema)
    e = None
    # descend() yields every validation error instead of stopping at the
    # first; report them all (sorted by schema path), then raise the last
    # one so callers still observe a failure.
    for e in sorted(validator.descend(data, schema), key=lambda e: e.schema_path):
        show_schema_exception(e, data_file.name)
    if e:
        raise e
    return data
if __name__ == "__main__":
    import argparse

    # Command-line front end: validate one JSON document against one schema.
    argp = argparse.ArgumentParser()
    argp.add_argument("--context-lines", type=int, default=3,
                      help="How many lines of context to show on JSON parsing errors")
    argp.add_argument("--schema-max-depth", type=int, default=2,
                      help="Depth to print on JSON Schema validation errors. 0 disables, -1 shows all.")
    argp.add_argument("schema", type=argparse.FileType('r'),
                      help="JSON Schema to use for validation")
    argp.add_argument("input", type=argparse.FileType('r'),
                      help="Input description file in JSON format")
    opts = argp.parse_args()

    # Diagnostics are printed by json_load_and_check() itself; here we only
    # translate a failure into a non-zero exit status.
    try:
        json_load_and_check(opts.input, opts.schema,
                            opts.context_lines, opts.schema_max_depth)
    except (ValueError, jsonschema.ValidationError, jsonschema.SchemaError):
        exit(1)
| apache-2.0 |
chhao91/QGIS | python/ext-libs/pygments/lexers/_lassobuiltins.py | 77 | 137633 | # -*- coding: utf-8 -*-
"""
pygments.lexers._lassobuiltins
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Built-in Lasso types, traits, and methods.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
BUILTINS = {
'Types': [
'null',
'void',
'tag',
'trait',
'integer',
'decimal',
'boolean',
'capture',
'string',
'bytes',
'keyword',
'custom',
'staticarray',
'signature',
'memberstream',
'dsinfo',
'sourcefile',
'array',
'pair',
'opaque',
'filedesc',
'dirdesc',
'locale',
'ucal',
'xml_domimplementation',
'xml_node',
'xml_characterdata',
'xml_document',
'xml_element',
'xml_attr',
'xml_text',
'xml_cdatasection',
'xml_entityreference',
'xml_entity',
'xml_processinginstruction',
'xml_comment',
'xml_documenttype',
'xml_documentfragment',
'xml_notation',
'xml_nodelist',
'xml_namednodemap',
'xml_namednodemap_ht',
'xml_namednodemap_attr',
'xmlstream',
'sqlite3',
'sqlite3_stmt',
'mime_reader',
'curltoken',
'regexp',
'zip_impl',
'zip_file_impl',
'library_thread_loader_thread$',
'library_thread_loader',
'generateforeachunkeyed',
'generateforeachkeyed',
'eacher',
'queriable_where',
'queriable_select',
'queriable_selectmany',
'queriable_groupby',
'queriable_join',
'queriable_groupjoin',
'queriable_orderby',
'queriable_orderbydescending',
'queriable_thenby',
'queriable_thenbydescending',
'queriable_skip',
'queriable_take',
'queriable_grouping',
'generateseries',
'tie',
'pairup',
'delve',
'repeat',
'pair_compare',
'serialization_object_identity_compare',
'serialization_element',
'serialization_writer_standin',
'serialization_writer_ref',
'serialization_writer',
'serialization_reader',
'tree_nullnode',
'tree_node',
'tree_base',
'map_node',
'map',
'file',
'dir',
'magick_image',
'ldap',
'os_process',
'java_jnienv',
'jobject',
'jmethodid',
'jfieldid',
'database_registry',
'sqlite_db',
'sqlite_results',
'sqlite_currentrow',
'sqlite_table',
'sqlite_column',
'curl',
'date',
'debugging_stack',
'dbgp_server',
'dbgp_packet',
'duration',
'inline_type',
'json_literal',
'json_object',
'list_node',
'list',
'jchar',
'jchararray',
'jbyte',
'jbytearray',
'jfloat',
'jint',
'jshort',
'currency',
'scientific',
'percent',
'dateandtime',
'timeonly',
'net_tcp',
'net_tcpssl',
'net_named_pipe',
'net_udppacket',
'net_udp',
'pdf_typebase',
'pdf_doc',
'pdf_color',
'pdf_barcode',
'pdf_font',
'pdf_image',
'pdf_list',
'pdf_read',
'pdf_table',
'pdf_text',
'pdf_hyphenator',
'pdf_chunk',
'pdf_phrase',
'pdf_paragraph',
'queue',
'set',
'sys_process',
'worker_pool',
'zip_file',
'zip',
'cache_server_element',
'cache_server',
'dns_response',
'component_render_state',
'component',
'component_container',
'document_base',
'document_body',
'document_header',
'text_document',
'data_document',
'email_compose',
'email_pop',
'email_parse',
'email_queue_impl_base',
'email_stage_impl_base',
'fcgi_record',
'web_request_impl',
'fcgi_request',
'include_cache_thread$',
'include_cache',
'atbegin_thread$',
'atbegin',
'fastcgi_each_fcgi_param',
'fastcgi_server',
'filemaker_datasource',
'http_document',
'http_document_header',
'http_header_field',
'html_document_head',
'html_document_body',
'raw_document_body',
'bytes_document_body',
'html_attr',
'html_atomic_element',
'html_container_element',
'http_error',
'html_script',
'html_text',
'html_raw',
'html_binary',
'html_json',
'html_cdata',
'html_eol',
'html_div',
'html_span',
'html_br',
'html_hr',
'html_h1',
'html_h2',
'html_h3',
'html_h4',
'html_h5',
'html_h6',
'html_meta',
'html_link',
'html_object',
'html_style',
'html_base',
'html_table',
'html_tr',
'html_td',
'html_th',
'html_img',
'html_form',
'html_fieldset',
'html_legend',
'html_input',
'html_label',
'html_option',
'html_select',
'http_server_web_connection',
'http_server',
'http_server_connection_handler',
'image',
'lassoapp_installer',
'lassoapp_content_rep_halt',
'lassoapp_dirsrc_fileresource',
'lassoapp_dirsrc_appsource',
'lassoapp_livesrc_fileresource',
'lassoapp_livesrc_appsource',
'lassoapp_long_expiring_bytes',
'lassoapp_zip_file_server_thread$',
'lassoapp_zip_file_server',
'lassoapp_zipsrc_fileresource',
'lassoapp_zipsrc_appsource',
'lassoapp_compiledsrc_fileresource',
'lassoapp_compiledsrc_appsource',
'lassoapp_manualsrc_appsource',
'log_impl_base',
'portal_impl',
'security_registry',
'memory_session_driver_impl_entry',
'memory_session_driver_impl',
'sqlite_session_driver_impl_entry',
'sqlite_session_driver_impl',
'mysql_session_driver_impl',
'odbc_session_driver_impl',
'session_delete_expired_thread_thread$',
'session_delete_expired_thread',
'email_smtp',
'client_address',
'client_ip',
'web_node_base',
'web_node_root',
'web_node_content_representation_xhr_container',
'web_node_content_representation_html_specialized',
'web_node_content_representation_css_specialized',
'web_node_content_representation_js_specialized',
'web_node_echo',
'web_error_atend',
'web_response_impl',
'web_router'
],
'Traits': [
'trait_asstring',
'any',
'trait_generator',
'trait_decompose_assignment',
'trait_foreach',
'trait_generatorcentric',
'trait_foreachtextelement',
'trait_finite',
'trait_finiteforeach',
'trait_keyed',
'trait_keyedfinite',
'trait_keyedforeach',
'trait_frontended',
'trait_backended',
'trait_doubleended',
'trait_positionallykeyed',
'trait_expandable',
'trait_frontexpandable',
'trait_backexpandable',
'trait_contractible',
'trait_frontcontractible',
'trait_backcontractible',
'trait_fullymutable',
'trait_keyedmutable',
'trait_endedfullymutable',
'trait_setoperations',
'trait_searchable',
'trait_positionallysearchable',
'trait_pathcomponents',
'trait_readbytes',
'trait_writebytes',
'trait_setencoding',
'trait_readstring',
'trait_writestring',
'trait_hashable',
'trait_each_sub',
'trait_stack',
'trait_list',
'trait_array',
'trait_map',
'trait_close',
'trait_file',
'trait_scalar',
'trait_queriablelambda',
'trait_queriable',
'queriable_asstring',
'trait_serializable',
'trait_treenode',
'trait_json_serialize',
'formattingbase',
'trait_net',
'trait_xml_elementcompat',
'trait_xml_nodecompat',
'web_connection',
'html_element_coreattrs',
'html_element_i18nattrs',
'html_element_eventsattrs',
'html_attributed',
'lassoapp_resource',
'lassoapp_source',
'lassoapp_capabilities',
'session_driver',
'web_node_content_json_specialized',
'web_node',
'web_node_container',
'web_node_content_representation',
'web_node_content',
'web_node_content_document',
'web_node_postable',
'web_node_content_html_specialized',
'web_node_content_css_specialized',
'web_node_content_js_specialized'
],
'Methods': [
'fail_now',
'staticarray',
'integer',
'decimal',
'string',
'bytes',
'keyword',
'signature',
'register',
'register_thread',
'escape_tag',
'handle',
'handle_failure',
'protect_now',
'threadvar_get',
'threadvar_set',
'threadvar_set_asrt',
'threadvar_find',
'abort_now',
'abort_clear',
'failure_clear',
'var_keys',
'var_values',
'null',
'trait',
'staticarray_join',
'suspend',
'main_thread_only',
'split_thread',
'capture_nearestloopcount',
'capture_nearestloopcontinue',
'capture_nearestloopabort',
'pair',
'io_file_o_rdonly',
'io_file_o_wronly',
'io_file_o_rdwr',
'io_file_o_nonblock',
'io_file_o_sync',
'io_file_o_shlock',
'io_file_o_exlock',
'io_file_o_async',
'io_file_o_fsync',
'io_file_o_nofollow',
'io_file_s_irwxu',
'io_file_s_irusr',
'io_file_s_iwusr',
'io_file_s_ixusr',
'io_file_s_irwxg',
'io_file_s_irgrp',
'io_file_s_iwgrp',
'io_file_s_ixgrp',
'io_file_s_irwxo',
'io_file_s_iroth',
'io_file_s_iwoth',
'io_file_s_ixoth',
'io_file_s_isuid',
'io_file_s_isgid',
'io_file_s_isvtx',
'io_file_s_ifmt',
'io_file_s_ifchr',
'io_file_s_ifdir',
'io_file_s_ifreg',
'io_file_o_append',
'io_file_o_creat',
'io_file_o_trunc',
'io_file_o_excl',
'io_file_seek_set',
'io_file_seek_cur',
'io_file_seek_end',
'io_file_s_ififo',
'io_file_s_ifblk',
'io_file_s_iflnk',
'io_file_s_ifsock',
'io_net_shut_rd',
'io_net_shut_wr',
'io_net_shut_rdwr',
'io_net_sock_stream',
'io_net_sock_dgram',
'io_net_sock_raw',
'io_net_sock_rdm',
'io_net_sock_seqpacket',
'io_net_so_debug',
'io_net_so_acceptconn',
'io_net_so_reuseaddr',
'io_net_so_keepalive',
'io_net_so_dontroute',
'io_net_so_broadcast',
'io_net_so_useloopback',
'io_net_so_linger',
'io_net_so_oobinline',
'io_net_so_timestamp',
'io_net_so_sndbuf',
'io_net_so_rcvbuf',
'io_net_so_sndlowat',
'io_net_so_rcvlowat',
'io_net_so_sndtimeo',
'io_net_so_rcvtimeo',
'io_net_so_error',
'io_net_so_type',
'io_net_sol_socket',
'io_net_af_unix',
'io_net_af_inet',
'io_net_af_inet6',
'io_net_ipproto_ip',
'io_net_ipproto_udp',
'io_net_msg_peek',
'io_net_msg_oob',
'io_net_msg_waitall',
'io_file_fioclex',
'io_file_fionclex',
'io_file_fionread',
'io_file_fionbio',
'io_file_fioasync',
'io_file_fiosetown',
'io_file_fiogetown',
'io_file_fiodtype',
'io_file_f_dupfd',
'io_file_f_getfd',
'io_file_f_setfd',
'io_file_f_getfl',
'io_file_f_setfl',
'io_file_f_getlk',
'io_file_f_setlk',
'io_file_f_setlkw',
'io_file_fd_cloexec',
'io_file_f_rdlck',
'io_file_f_unlck',
'io_file_f_wrlck',
'io_dir_dt_unknown',
'io_dir_dt_fifo',
'io_dir_dt_chr',
'io_dir_dt_blk',
'io_dir_dt_reg',
'io_dir_dt_sock',
'io_dir_dt_wht',
'io_dir_dt_lnk',
'io_dir_dt_dir',
'io_file_access',
'io_file_chdir',
'io_file_getcwd',
'io_file_chown',
'io_file_lchown',
'io_file_truncate',
'io_file_link',
'io_file_pipe',
'io_file_rmdir',
'io_file_symlink',
'io_file_unlink',
'io_file_remove',
'io_file_rename',
'io_file_tempnam',
'io_file_mkstemp',
'io_file_dirname',
'io_file_realpath',
'io_file_chmod',
'io_file_mkdir',
'io_file_mkfifo',
'io_file_umask',
'io_net_socket',
'io_net_bind',
'io_net_connect',
'io_net_listen',
'io_net_recv',
'io_net_recvfrom',
'io_net_accept',
'io_net_send',
'io_net_sendto',
'io_net_shutdown',
'io_net_getpeername',
'io_net_getsockname',
'io_net_ssl_begin',
'io_net_ssl_end',
'io_net_ssl_shutdown',
'io_net_ssl_setverifylocations',
'io_net_ssl_usecertificatechainfile',
'io_net_ssl_useprivatekeyfile',
'io_net_ssl_connect',
'io_net_ssl_accept',
'io_net_ssl_error',
'io_net_ssl_errorstring',
'io_net_ssl_liberrorstring',
'io_net_ssl_funcerrorstring',
'io_net_ssl_reasonerrorstring',
'io_net_ssl_setconnectstate',
'io_net_ssl_setacceptstate',
'io_net_ssl_read',
'io_net_ssl_write',
'io_file_stat_size',
'io_file_stat_mode',
'io_file_stat_mtime',
'io_file_stat_atime',
'io_file_lstat_size',
'io_file_lstat_mode',
'io_file_lstat_mtime',
'io_file_lstat_atime',
'io_file_readlink',
'io_file_lockf',
'io_file_f_ulock',
'io_file_f_tlock',
'io_file_f_test',
'dirdesc',
'io_file_stdin',
'io_file_stdout',
'io_file_stderr',
'filedesc',
'uchar_alphabetic',
'uchar_ascii_hex_digit',
'uchar_bidi_control',
'uchar_bidi_mirrored',
'uchar_dash',
'uchar_default_ignorable_code_point',
'uchar_deprecated',
'uchar_diacritic',
'uchar_extender',
'uchar_full_composition_exclusion',
'uchar_grapheme_base',
'uchar_grapheme_extend',
'uchar_grapheme_link',
'uchar_hex_digit',
'uchar_hyphen',
'uchar_id_continue',
'uchar_ideographic',
'uchar_ids_binary_operator',
'uchar_ids_trinary_operator',
'uchar_join_control',
'uchar_logical_order_exception',
'uchar_lowercase',
'uchar_math',
'uchar_noncharacter_code_point',
'uchar_quotation_mark',
'uchar_radical',
'uchar_soft_dotted',
'uchar_terminal_punctuation',
'uchar_unified_ideograph',
'uchar_uppercase',
'uchar_white_space',
'uchar_xid_continue',
'uchar_case_sensitive',
'uchar_s_term',
'uchar_variation_selector',
'uchar_nfd_inert',
'uchar_nfkd_inert',
'uchar_nfc_inert',
'uchar_nfkc_inert',
'uchar_segment_starter',
'uchar_pattern_syntax',
'uchar_pattern_white_space',
'uchar_posix_alnum',
'uchar_posix_blank',
'uchar_posix_graph',
'uchar_posix_print',
'uchar_posix_xdigit',
'uchar_bidi_class',
'uchar_block',
'uchar_canonical_combining_class',
'uchar_decomposition_type',
'uchar_east_asian_width',
'uchar_general_category',
'uchar_joining_group',
'uchar_joining_type',
'uchar_line_break',
'uchar_numeric_type',
'uchar_script',
'uchar_hangul_syllable_type',
'uchar_nfd_quick_check',
'uchar_nfkd_quick_check',
'uchar_nfc_quick_check',
'uchar_nfkc_quick_check',
'uchar_lead_canonical_combining_class',
'uchar_trail_canonical_combining_class',
'uchar_grapheme_cluster_break',
'uchar_sentence_break',
'uchar_word_break',
'uchar_general_category_mask',
'uchar_numeric_value',
'uchar_age',
'uchar_bidi_mirroring_glyph',
'uchar_case_folding',
'uchar_iso_comment',
'uchar_lowercase_mapping',
'uchar_name',
'uchar_simple_case_folding',
'uchar_simple_lowercase_mapping',
'uchar_simple_titlecase_mapping',
'uchar_simple_uppercase_mapping',
'uchar_titlecase_mapping',
'uchar_unicode_1_name',
'uchar_uppercase_mapping',
'u_wb_other',
'u_wb_aletter',
'u_wb_format',
'u_wb_katakana',
'u_wb_midletter',
'u_wb_midnum',
'u_wb_numeric',
'u_wb_extendnumlet',
'u_sb_other',
'u_sb_aterm',
'u_sb_close',
'u_sb_format',
'u_sb_lower',
'u_sb_numeric',
'u_sb_oletter',
'u_sb_sep',
'u_sb_sp',
'u_sb_sterm',
'u_sb_upper',
'u_lb_unknown',
'u_lb_ambiguous',
'u_lb_alphabetic',
'u_lb_break_both',
'u_lb_break_after',
'u_lb_break_before',
'u_lb_mandatory_break',
'u_lb_contingent_break',
'u_lb_close_punctuation',
'u_lb_combining_mark',
'u_lb_carriage_return',
'u_lb_exclamation',
'u_lb_glue',
'u_lb_hyphen',
'u_lb_ideographic',
'u_lb_inseparable',
'u_lb_infix_numeric',
'u_lb_line_feed',
'u_lb_nonstarter',
'u_lb_numeric',
'u_lb_open_punctuation',
'u_lb_postfix_numeric',
'u_lb_prefix_numeric',
'u_lb_quotation',
'u_lb_complex_context',
'u_lb_surrogate',
'u_lb_space',
'u_lb_break_symbols',
'u_lb_zwspace',
'u_lb_next_line',
'u_lb_word_joiner',
'u_lb_h2',
'u_lb_h3',
'u_lb_jl',
'u_lb_jt',
'u_lb_jv',
'u_nt_none',
'u_nt_decimal',
'u_nt_digit',
'u_nt_numeric',
'locale',
'locale_english',
'locale_french',
'locale_german',
'locale_italian',
'locale_japanese',
'locale_korean',
'locale_chinese',
'locale_simplifiedchinese',
'locale_traditionalchinese',
'locale_france',
'locale_germany',
'locale_italy',
'locale_japan',
'locale_korea',
'locale_china',
'locale_prc',
'locale_taiwan',
'locale_uk',
'locale_us',
'locale_canada',
'locale_canadafrench',
'locale_default',
'locale_setdefault',
'locale_isocountries',
'locale_isolanguages',
'locale_availablelocales',
'ucal_listtimezones',
'ucal',
'ucal_era',
'ucal_year',
'ucal_month',
'ucal_weekofyear',
'ucal_weekofmonth',
'ucal_dayofmonth',
'ucal_dayofyear',
'ucal_dayofweek',
'ucal_dayofweekinmonth',
'ucal_ampm',
'ucal_hour',
'ucal_hourofday',
'ucal_minute',
'ucal_second',
'ucal_millisecond',
'ucal_zoneoffset',
'ucal_dstoffset',
'ucal_yearwoy',
'ucal_dowlocal',
'ucal_extendedyear',
'ucal_julianday',
'ucal_millisecondsinday',
'ucal_lenient',
'ucal_firstdayofweek',
'ucal_daysinfirstweek',
'xml_domimplementation',
'sys_sigalrm',
'sys_sighup',
'sys_sigkill',
'sys_sigpipe',
'sys_sigquit',
'sys_sigusr1',
'sys_sigusr2',
'sys_sigchld',
'sys_sigcont',
'sys_sigstop',
'sys_sigtstp',
'sys_sigttin',
'sys_sigttou',
'sys_sigbus',
'sys_sigprof',
'sys_sigsys',
'sys_sigtrap',
'sys_sigurg',
'sys_sigvtalrm',
'sys_sigxcpu',
'sys_sigxfsz',
'sys_wcontinued',
'sys_wnohang',
'sys_wuntraced',
'sys_sigabrt',
'sys_sigfpe',
'sys_sigill',
'sys_sigint',
'sys_sigsegv',
'sys_sigterm',
'sys_exit',
'sys_fork',
'sys_kill',
'sys_waitpid',
'sys_getegid',
'sys_geteuid',
'sys_getgid',
'sys_getlogin',
'sys_getpid',
'sys_getppid',
'sys_getuid',
'sys_setuid',
'sys_setgid',
'sys_setsid',
'sys_errno',
'sys_strerror',
'sys_time',
'sys_difftime',
'sys_getpwuid',
'sys_getpwnam',
'sys_getgrnam',
'sys_drand48',
'sys_erand48',
'sys_jrand48',
'sys_lcong48',
'sys_lrand48',
'sys_mrand48',
'sys_nrand48',
'sys_srand48',
'sys_random',
'sys_srandom',
'sys_seed48',
'sys_rand',
'sys_srand',
'sys_environ',
'sys_getenv',
'sys_setenv',
'sys_unsetenv',
'sys_uname',
'uuid_compare',
'uuid_copy',
'uuid_generate',
'uuid_generate_random',
'uuid_generate_time',
'uuid_is_null',
'uuid_parse',
'uuid_unparse',
'uuid_unparse_lower',
'uuid_unparse_upper',
'sys_credits',
'sleep',
'sys_dll_ext',
'sys_listtypes',
'sys_listtraits',
'sys_listunboundmethods',
'sys_getthreadcount',
'sys_growheapby',
'sys_getheapsize',
'sys_getheapfreebytes',
'sys_getbytessincegc',
'sys_garbagecollect',
'sys_clock',
'sys_getstartclock',
'sys_clockspersec',
'sys_pointersize',
'sys_loadlibrary',
'sys_getchar',
'sys_chroot',
'sys_exec',
'sys_kill_exec',
'sys_wait_exec',
'sys_test_exec',
'sys_detach_exec',
'sys_pid_exec',
'wifexited',
'wexitstatus',
'wifsignaled',
'wtermsig',
'wifstopped',
'wstopsig',
'wifcontinued',
'sys_eol',
'sys_iswindows',
'sys_is_windows',
'sys_isfullpath',
'sys_is_full_path',
'lcapi_loadmodule',
'lcapi_listdatasources',
'dsinfo',
'encrypt_blowfish',
'decrypt_blowfish',
'cipher_digest',
'cipher_encrypt',
'cipher_decrypt',
'cipher_list',
'cipher_keylength',
'cipher_hmac',
'cipher_seal',
'cipher_open',
'cipher_sign',
'cipher_verify',
'cipher_decrypt_private',
'cipher_decrypt_public',
'cipher_encrypt_private',
'cipher_encrypt_public',
'cipher_generate_key',
'xmlstream',
'sourcefile',
'tag',
'tag_exists',
'mime_reader',
'curl_easy_init',
'curl_easy_duphandle',
'curl_easy_cleanup',
'curl_easy_getinfo',
'curl_multi_perform',
'curl_multi_result',
'curl_easy_reset',
'curl_easy_setopt',
'curl_easy_strerror',
'curl_getdate',
'curl_version',
'curl_version_info',
'curlinfo_effective_url',
'curlinfo_content_type',
'curlinfo_response_code',
'curlinfo_header_size',
'curlinfo_request_size',
'curlinfo_ssl_verifyresult',
'curlinfo_filetime',
'curlinfo_redirect_count',
'curlinfo_http_connectcode',
'curlinfo_httpauth_avail',
'curlinfo_proxyauth_avail',
'curlinfo_os_errno',
'curlinfo_num_connects',
'curlinfo_total_time',
'curlinfo_namelookup_time',
'curlinfo_connect_time',
'curlinfo_pretransfer_time',
'curlinfo_size_upload',
'curlinfo_size_download',
'curlinfo_speed_download',
'curlinfo_speed_upload',
'curlinfo_content_length_download',
'curlinfo_content_length_upload',
'curlinfo_starttransfer_time',
'curlinfo_redirect_time',
'curlinfo_ssl_engines',
'curlopt_url',
'curlopt_postfields',
'curlopt_cainfo',
'curlopt_capath',
'curlopt_cookie',
'curlopt_cookiefile',
'curlopt_cookiejar',
'curlopt_customrequest',
'curlopt_egdsocket',
'curlopt_encoding',
'curlopt_ftp_account',
'curlopt_ftpport',
'curlopt_interface',
'curlopt_krb4level',
'curlopt_netrc_file',
'curlopt_proxy',
'curlopt_proxyuserpwd',
'curlopt_random_file',
'curlopt_range',
'curlopt_readdata',
'curlopt_referer',
'curlopt_ssl_cipher_list',
'curlopt_sslcert',
'curlopt_sslcerttype',
'curlopt_sslengine',
'curlopt_sslkey',
'curlopt_sslkeypasswd',
'curlopt_sslkeytype',
'curlopt_useragent',
'curlopt_userpwd',
'curlopt_postfieldsize',
'curlopt_autoreferer',
'curlopt_buffersize',
'curlopt_connecttimeout',
'curlopt_cookiesession',
'curlopt_crlf',
'curlopt_dns_use_global_cache',
'curlopt_failonerror',
'curlopt_filetime',
'curlopt_followlocation',
'curlopt_forbid_reuse',
'curlopt_fresh_connect',
'curlopt_ftp_create_missing_dirs',
'curlopt_ftp_response_timeout',
'curlopt_ftp_ssl',
'curlopt_use_ssl',
'curlopt_ftp_use_eprt',
'curlopt_ftp_use_epsv',
'curlopt_ftpappend',
'curlopt_ftplistonly',
'curlopt_ftpsslauth',
'curlopt_header',
'curlopt_http_version',
'curlopt_httpauth',
'curlopt_httpget',
'curlopt_httpproxytunnel',
'curlopt_infilesize',
'curlopt_ipresolve',
'curlopt_low_speed_limit',
'curlopt_low_speed_time',
'curlopt_maxconnects',
'curlopt_maxfilesize',
'curlopt_maxredirs',
'curlopt_netrc',
'curlopt_nobody',
'curlopt_noprogress',
'curlopt_port',
'curlopt_post',
'curlopt_proxyauth',
'curlopt_proxyport',
'curlopt_proxytype',
'curlopt_put',
'curlopt_resume_from',
'curlopt_ssl_verifyhost',
'curlopt_ssl_verifypeer',
'curlopt_sslengine_default',
'curlopt_sslversion',
'curlopt_tcp_nodelay',
'curlopt_timecondition',
'curlopt_timeout',
'curlopt_timevalue',
'curlopt_transfertext',
'curlopt_unrestricted_auth',
'curlopt_upload',
'curlopt_verbose',
'curlopt_infilesize_large',
'curlopt_maxfilesize_large',
'curlopt_postfieldsize_large',
'curlopt_resume_from_large',
'curlopt_http200aliases',
'curlopt_httpheader',
'curlopt_postquote',
'curlopt_prequote',
'curlopt_quote',
'curlopt_httppost',
'curlopt_writedata',
'curl_version_ipv6',
'curl_version_kerberos4',
'curl_version_ssl',
'curl_version_libz',
'curl_version_ntlm',
'curl_version_gssnegotiate',
'curl_version_debug',
'curl_version_asynchdns',
'curl_version_spnego',
'curl_version_largefile',
'curl_version_idn',
'curl_netrc_ignored',
'curl_netrc_optional',
'curl_netrc_required',
'curl_http_version_none',
'curl_http_version_1_0',
'curl_http_version_1_1',
'curl_ipresolve_whatever',
'curl_ipresolve_v4',
'curl_ipresolve_v6',
'curlftpssl_none',
'curlftpssl_try',
'curlftpssl_control',
'curlftpssl_all',
'curlftpssl_last',
'curlftpauth_default',
'curlftpauth_ssl',
'curlftpauth_tls',
'curlauth_none',
'curlauth_basic',
'curlauth_digest',
'curlauth_gssnegotiate',
'curlauth_ntlm',
'curlauth_any',
'curlauth_anysafe',
'curlproxy_http',
'curlproxy_socks4',
'curlproxy_socks5',
'curle_ok',
'curle_unsupported_protocol',
'curle_failed_init',
'curle_url_malformat',
'curle_url_malformat_user',
'curle_couldnt_resolve_proxy',
'curle_couldnt_resolve_host',
'curle_couldnt_connect',
'curle_ftp_weird_server_reply',
'curle_ftp_access_denied',
'curle_ftp_user_password_incorrect',
'curle_ftp_weird_pass_reply',
'curle_ftp_weird_user_reply',
'curle_ftp_weird_pasv_reply',
'curle_ftp_weird_227_format',
'curle_ftp_cant_get_host',
'curle_ftp_cant_reconnect',
'curle_ftp_couldnt_set_binary',
'curle_partial_file',
'curle_ftp_couldnt_retr_file',
'curle_ftp_write_error',
'curle_ftp_quote_error',
'curle_http_returned_error',
'curle_write_error',
'curle_malformat_user',
'curle_read_error',
'curle_out_of_memory',
'curle_operation_timeouted',
'curle_ftp_couldnt_set_ascii',
'curle_ftp_port_failed',
'curle_ftp_couldnt_use_rest',
'curle_ftp_couldnt_get_size',
'curle_http_range_error',
'curle_http_post_error',
'curle_ssl_connect_error',
'curle_bad_download_resume',
'curle_file_couldnt_read_file',
'curle_ldap_cannot_bind',
'curle_ldap_search_failed',
'curle_library_not_found',
'curle_function_not_found',
'curle_aborted_by_callback',
'curle_bad_function_argument',
'curle_bad_calling_order',
'curle_interface_failed',
'curle_bad_password_entered',
'curle_too_many_redirects',
'curle_unknown_telnet_option',
'curle_telnet_option_syntax',
'curle_obsolete',
'curle_ssl_peer_certificate',
'curle_got_nothing',
'curle_ssl_engine_notfound',
'curle_ssl_engine_setfailed',
'curle_send_error',
'curle_recv_error',
'curle_share_in_use',
'curle_ssl_certproblem',
'curle_ssl_cipher',
'curle_ssl_cacert',
'curle_bad_content_encoding',
'curle_ldap_invalid_url',
'curle_filesize_exceeded',
'curle_ftp_ssl_failed',
'curle_send_fail_rewind',
'curle_ssl_engine_initfailed',
'curle_login_denied',
'curlmsg_done',
'regexp',
'array',
'boolean',
'zip_open',
'zip_name_locate',
'zip_fopen',
'zip_fopen_index',
'zip_fread',
'zip_fclose',
'zip_close',
'zip_stat',
'zip_stat_index',
'zip_get_archive_comment',
'zip_get_file_comment',
'zip_get_name',
'zip_get_num_files',
'zip_add',
'zip_replace',
'zip_add_dir',
'zip_set_file_comment',
'zip_rename',
'zip_delete',
'zip_unchange',
'zip_unchange_all',
'zip_unchange_archive',
'zip_set_archive_comment',
'zip_error_to_str',
'zip_file_strerror',
'zip_strerror',
'zip_error_get',
'zip_file_error_get',
'zip_error_get_sys_type',
'zlib_version',
'fastcgi_initiate_request',
'debugging_enabled',
'debugging_stop',
'evdns_resolve_ipv4',
'evdns_resolve_ipv6',
'evdns_resolve_reverse',
'evdns_resolve_reverse_ipv6',
'library_thread_loader',
'stdout',
'stdoutnl',
'fail',
'fail_if',
'fail_ifnot',
'error_code',
'error_msg',
'error_obj',
'error_stack',
'error_push',
'error_pop',
'error_reset',
'error_msg_invalidparameter',
'error_code_invalidparameter',
'error_msg_networkerror',
'error_code_networkerror',
'error_msg_runtimeassertion',
'error_code_runtimeassertion',
'error_msg_methodnotfound',
'error_code_methodnotfound',
'error_msg_resnotfound',
'error_code_resnotfound',
'error_msg_filenotfound',
'error_code_filenotfound',
'error_msg_aborted',
'error_code_aborted',
'error_msg_dividebyzero',
'error_code_dividebyzero',
'error_msg_noerror',
'error_code_noerror',
'abort',
'protect',
'trait_asstring',
'any',
'trait_generator',
'trait_decompose_assignment',
'trait_foreach',
'trait_generatorcentric',
'generateforeach',
'generateforeachunkeyed',
'generateforeachkeyed',
'trait_foreachtextelement',
'trait_finite',
'trait_finiteforeach',
'trait_keyed',
'trait_keyedfinite',
'trait_keyedforeach',
'trait_frontended',
'trait_backended',
'trait_doubleended',
'trait_positionallykeyed',
'trait_expandable',
'trait_frontexpandable',
'trait_backexpandable',
'trait_contractible',
'trait_frontcontractible',
'trait_backcontractible',
'trait_fullymutable',
'trait_keyedmutable',
'trait_endedfullymutable',
'trait_setoperations',
'trait_searchable',
'trait_positionallysearchable',
'trait_pathcomponents',
'trait_readbytes',
'trait_writebytes',
'trait_setencoding',
'trait_readstring',
'trait_writestring',
'trait_hashable',
'eacher',
'trait_each_sub',
'trait_stack',
'trait_list',
'trait_array',
'trait_map',
'trait_close',
'trait_file',
'trait_scalar',
'method_name',
'trait_queriablelambda',
'trait_queriable',
'queriable_asstring',
'queriable_where',
'queriable_do',
'queriable_sum',
'queriable_average',
'queriable_min',
'queriable_max',
'queriable_select',
'queriable_selectmany',
'queriable_groupby',
'queriable_join',
'queriable_groupjoin',
'queriable_orderby',
'queriable_orderbydescending',
'queriable_thenby',
'queriable_thenbydescending',
'queriable_skip',
'queriable_take',
'queriable_grouping',
'queriable_internal_combinebindings',
'queriable_defaultcompare',
'queriable_reversecompare',
'queriable_qsort',
'generateseries',
'timer',
'tie',
'pairup',
'delve',
'repeat',
'thread_var_push',
'thread_var_pop',
'thread_var_get',
'loop_value',
'loop_value_push',
'loop_value_pop',
'loop_key',
'loop_key_push',
'loop_key_pop',
'loop_push',
'loop_pop',
'loop_count',
'loop_continue',
'loop_abort',
'loop',
'sys_while',
'sys_iterate',
'pair_compare',
'serialization_object_identity_compare',
'serialization_element',
'trait_serializable',
'serialization_writer_standin',
'serialization_writer_ref',
'serialization_writer',
'serialization_reader',
'string_validcharset',
'eol',
'encoding_utf8',
'encoding_iso88591',
'trait_treenode',
'tree_nullnode',
'tree_node',
'tree_base',
'map_node',
'map',
'integer_random',
'integer_bitor',
'millis',
'micros',
'max',
'min',
'range',
'median',
'decimal_random',
'pi',
'lcapi_datasourceinit',
'lcapi_datasourceterm',
'lcapi_datasourcenames',
'lcapi_datasourcetablenames',
'lcapi_datasourcesearch',
'lcapi_datasourceadd',
'lcapi_datasourceupdate',
'lcapi_datasourcedelete',
'lcapi_datasourceinfo',
'lcapi_datasourceexecsql',
'lcapi_datasourcerandom',
'lcapi_datasourceschemanames',
'lcapi_datasourcecloseconnection',
'lcapi_datasourcetickle',
'lcapi_datasourceduplicate',
'lcapi_datasourcescripts',
'lcapi_datasourceimage',
'lcapi_datasourcefindall',
'lcapi_datasourcematchesname',
'lcapi_datasourcepreparesql',
'lcapi_datasourceunpreparesql',
'lcapi_datasourcenothing',
'lcapi_fourchartointeger',
'lcapi_datasourcetypestring',
'lcapi_datasourcetypeinteger',
'lcapi_datasourcetypeboolean',
'lcapi_datasourcetypeblob',
'lcapi_datasourcetypedecimal',
'lcapi_datasourcetypedate',
'lcapi_datasourceprotectionnone',
'lcapi_datasourceprotectionreadonly',
'lcapi_datasourceopgt',
'lcapi_datasourceopgteq',
'lcapi_datasourceopeq',
'lcapi_datasourceopneq',
'lcapi_datasourceoplt',
'lcapi_datasourceoplteq',
'lcapi_datasourceopbw',
'lcapi_datasourceopew',
'lcapi_datasourceopct',
'lcapi_datasourceopnct',
'lcapi_datasourceopnbw',
'lcapi_datasourceopnew',
'lcapi_datasourceopand',
'lcapi_datasourceopor',
'lcapi_datasourceopnot',
'lcapi_datasourceopno',
'lcapi_datasourceopany',
'lcapi_datasourceopin',
'lcapi_datasourceopnin',
'lcapi_datasourceopft',
'lcapi_datasourceoprx',
'lcapi_datasourceopnrx',
'lcapi_datasourcesortascending',
'lcapi_datasourcesortdescending',
'lcapi_datasourcesortcustom',
'lcapi_loadmodules',
'lasso_version',
'lasso_uniqueid',
'usage',
'file_defaultencoding',
'file_copybuffersize',
'file_modeline',
'file_modechar',
'file_forceroot',
'file_tempfile',
'file',
'file_stdin',
'file_stdout',
'file_stderr',
'lasso_tagexists',
'lasso_methodexists',
'output',
'if_empty',
'if_null',
'if_true',
'if_false',
'process',
'treemap',
'locale_format',
'compress',
'uncompress',
'decompress',
'tag_name',
'series',
'nslookup',
'all',
'bw',
'cn',
'eq',
'ew',
'ft',
'gt',
'gte',
'lt',
'lte',
'neq',
'nrx',
'rx',
'none',
'minimal',
'full',
'output_none',
'lasso_executiontimelimit',
'namespace_global',
'namespace_using',
'namespace_import',
'site_id',
'site_name',
'sys_homepath',
'sys_masterhomepath',
'sys_supportpath',
'sys_librariespath',
'sys_databasespath',
'sys_usercapimodulepath',
'sys_appspath',
'sys_userstartuppath',
'dir',
'magick_image',
'ldap',
'ldap_scope_base',
'ldap_scope_onelevel',
'ldap_scope_subtree',
'mysqlds',
'os_process',
'odbc',
'sqliteconnector',
'sqlite_createdb',
'sqlite_setsleepmillis',
'sqlite_setsleeptries',
'java_jvm_getenv',
'java_jvm_create',
'java_jdbc_load',
'database_database',
'database_table_datasources',
'database_table_datasource_hosts',
'database_table_datasource_databases',
'database_table_database_tables',
'database_table_table_fields',
'database_qs',
'database_initialize',
'database_util_cleanpath',
'database_adddefaultsqlitehost',
'database_registry',
'sqlite_ok',
'sqlite_error',
'sqlite_internal',
'sqlite_perm',
'sqlite_abort',
'sqlite_busy',
'sqlite_locked',
'sqlite_nomem',
'sqlite_readonly',
'sqlite_interrupt',
'sqlite_ioerr',
'sqlite_corrupt',
'sqlite_notfound',
'sqlite_full',
'sqlite_cantopen',
'sqlite_protocol',
'sqlite_empty',
'sqlite_schema',
'sqlite_toobig',
'sqlite_constraint',
'sqlite_mismatch',
'sqlite_misuse',
'sqlite_nolfs',
'sqlite_auth',
'sqlite_format',
'sqlite_range',
'sqlite_notadb',
'sqlite_row',
'sqlite_done',
'sqlite_integer',
'sqlite_float',
'sqlite_blob',
'sqlite_null',
'sqlite_text',
'sqlite3',
'sqlite_db',
'sqlite_results',
'sqlite_currentrow',
'sqlite_table',
'sqlite_column',
'bom_utf16be',
'bom_utf16le',
'bom_utf32be',
'bom_utf32le',
'bom_utf8',
'curl',
'include_url',
'ftp_getdata',
'ftp_getfile',
'ftp_getlisting',
'ftp_putdata',
'ftp_putfile',
'ftp_deletefile',
'date',
'debugging_step_in',
'debugging_get_stack',
'debugging_get_context',
'debugging_detach',
'debugging_step_over',
'debugging_step_out',
'debugging_run',
'debugging_break',
'debugging_breakpoint_set',
'debugging_breakpoint_get',
'debugging_breakpoint_remove',
'debugging_breakpoint_list',
'debugging_breakpoint_update',
'debugging_terminate',
'debugging_context_locals',
'debugging_context_vars',
'debugging_context_self',
'debugging_stack',
'dbgp_stop_stack_name',
'dbgp_server',
'dbgp_packet',
'duration',
'encrypt_md5',
'inline_columninfo_pos',
'inline_resultrows_pos',
'inline_foundcount_pos',
'inline_colinfo_name_pos',
'inline_colinfo_valuelist_pos',
'inline_scopeget',
'inline_scopepush',
'inline_scopepop',
'inline_namedget',
'inline_namedput',
'inline',
'inline_type',
'resultset_count',
'resultset',
'resultsets',
'rows',
'rows_impl',
'records',
'column',
'field',
'column_names',
'field_names',
'column_name',
'field_name',
'found_count',
'shown_count',
'shown_first',
'shown_last',
'action_statement',
'lasso_currentaction',
'maxrecords_value',
'skiprecords_value',
'action_param',
'action_params',
'admin_authorization',
'admin_currentgroups',
'admin_currentuserid',
'admin_currentusername',
'database_name',
'table_name',
'layout_name',
'schema_name',
'keycolumn_name',
'keyfield_name',
'keycolumn_value',
'keyfield_value',
'inline_colinfo_type_pos',
'column_type',
'rows_array',
'records_array',
'records_map',
'trait_json_serialize',
'json_serialize',
'json_consume_string',
'json_consume_token',
'json_consume_array',
'json_consume_object',
'json_deserialize',
'json_literal',
'json_object',
'json_rpccall',
'list_node',
'list',
'jchar',
'jchararray',
'jbyte',
'jbytearray',
'jfloat',
'jint',
'jshort',
'ljapi_initialize',
'formattingbase',
'currency',
'scientific',
'percent',
'dateandtime',
'timeonly',
'locale_format_style_full',
'locale_format_style_long',
'locale_format_style_medium',
'locale_format_style_short',
'locale_format_style_default',
'locale_format_style_none',
'locale_format_style_date_time',
'net_connectinprogress',
'net_connectok',
'net_typessl',
'net_typessltcp',
'net_typessludp',
'net_typetcp',
'net_typeudp',
'net_waitread',
'net_waittimeout',
'net_waitwrite',
'trait_net',
'net_tcp',
'net_tcpssl',
'net_named_pipe',
'net_udppacket',
'net_udp',
'admin_initialize',
'admin_getpref',
'admin_setpref',
'admin_removepref',
'admin_userexists',
'admin_lassoservicepath',
'pdf_package',
'pdf_rectangle',
'pdf_typebase',
'pdf_doc',
'pdf_color',
'pdf_barcode',
'pdf_font',
'pdf_image',
'pdf_list',
'pdf_read',
'pdf_table',
'pdf_text',
'pdf_hyphenator',
'pdf_chunk',
'pdf_phrase',
'pdf_paragraph',
'pdf_serve',
'queue',
'random_seed',
'set',
'sys_process',
'worker_pool',
'xml',
'trait_xml_elementcompat',
'trait_xml_nodecompat',
'xml_transform',
'zip_create',
'zip_excl',
'zip_checkcons',
'zip_fl_nocase',
'zip_fl_nodir',
'zip_fl_compressed',
'zip_fl_unchanged',
'zip_er_ok',
'zip_er_multidisk',
'zip_er_rename',
'zip_er_close',
'zip_er_seek',
'zip_er_read',
'zip_er_write',
'zip_er_crc',
'zip_er_zipclosed',
'zip_er_noent',
'zip_er_exists',
'zip_er_open',
'zip_er_tmpopen',
'zip_er_zlib',
'zip_er_memory',
'zip_er_changed',
'zip_er_compnotsupp',
'zip_er_eof',
'zip_er_inval',
'zip_er_nozip',
'zip_er_internal',
'zip_er_incons',
'zip_er_remove',
'zip_er_deleted',
'zip_et_none',
'zip_et_sys',
'zip_et_zlib',
'zip_cm_default',
'zip_cm_store',
'zip_cm_shrink',
'zip_cm_reduce_1',
'zip_cm_reduce_2',
'zip_cm_reduce_3',
'zip_cm_reduce_4',
'zip_cm_implode',
'zip_cm_deflate',
'zip_cm_deflate64',
'zip_cm_pkware_implode',
'zip_cm_bzip2',
'zip_em_none',
'zip_em_trad_pkware',
'zip_em_des',
'zip_em_rc2_old',
'zip_em_3des_168',
'zip_em_3des_112',
'zip_em_aes_128',
'zip_em_aes_192',
'zip_em_aes_256',
'zip_em_rc2',
'zip_em_rc4',
'zip_em_unknown',
'zip_file',
'zip',
'cache_server_element',
'cache_server',
'dns_response',
'dns_lookup',
'dns_default',
'string_charfromname',
'string_concatenate',
'string_endswith',
'string_extract',
'string_findposition',
'string_findregexp',
'string_getunicodeversion',
'string_insert',
'string_isalpha',
'string_isalphanumeric',
'string_isdigit',
'string_ishexdigit',
'string_islower',
'string_isnumeric',
'string_ispunctuation',
'string_isspace',
'string_isupper',
'string_length',
'string_remove',
'string_removeleading',
'string_removetrailing',
'string_replace',
'string_replaceregexp',
'string_todecimal',
'string_tointeger',
'string_uppercase',
'string_lowercase',
'document',
'component_render_state',
'component',
'component_container',
'document_base',
'document_body',
'document_header',
'text_document',
'data_document',
'email_attachment_mime_type',
'email_translatebreakstocrlf',
'email_findemails',
'email_fix_address',
'email_fix_address_list',
'email_compose',
'email_send',
'email_queue',
'email_immediate',
'email_result',
'email_status',
'email_token',
'email_merge',
'email_batch',
'encode_qheader',
'email_pop',
'email_parse',
'email_safeemail',
'email_extract',
'email_pop_priv_substring',
'email_pop_priv_extract',
'email_digestchallenge',
'email_pop_priv_quote',
'email_digestresponse',
'encrypt_hmac',
'encrypt_crammd5',
'email_queue_impl_base',
'email_fs_error_clean',
'email_stage_impl_base',
'email_initialize',
'email_mxlookup',
'lasso_errorreporting',
'fcgi_version_1',
'fcgi_null_request_id',
'fcgi_begin_request',
'fcgi_abort_request',
'fcgi_end_request',
'fcgi_params',
'fcgi_stdin',
'fcgi_stdout',
'fcgi_stderr',
'fcgi_data',
'fcgi_get_values',
'fcgi_get_values_result',
'fcgi_unknown_type',
'fcgi_keep_conn',
'fcgi_responder',
'fcgi_authorize',
'fcgi_filter',
'fcgi_request_complete',
'fcgi_cant_mpx_conn',
'fcgi_overloaded',
'fcgi_unknown_role',
'fcgi_max_conns',
'fcgi_max_reqs',
'fcgi_mpxs_conns',
'fcgi_read_timeout_seconds',
'fcgi_record',
'fcgi_makeendrequestbody',
'fcgi_bodychunksize',
'fcgi_makestdoutbody',
'fcgi_readparam',
'web_connection',
'web_request_impl',
'web_request',
'fcgi_request',
'include_cache_compare',
'include_cache',
'atbegin',
'fastcgi_initialize',
'fastcgi_handlecon',
'fastcgi_handlereq',
'fastcgi_each_fcgi_param',
'fastcgi_createfcgirequest',
'fastcgi_server',
'web_handlefcgirequest',
'filemaker_datasource',
'filemakerds_initialize',
'filemakerds',
'value_listitem',
'valuelistitem',
'selected',
'checked',
'value_list',
'http_document',
'http_document_header',
'http_header_field',
'html_document_head',
'html_document_body',
'raw_document_body',
'bytes_document_body',
'html_element_coreattrs',
'html_element_i18nattrs',
'html_element_eventsattrs',
'html_attributed',
'html_attr',
'html_atomic_element',
'html_container_element',
'http_error',
'html_script',
'html_text',
'html_raw',
'html_binary',
'html_json',
'html_cdata',
'html_eol',
'html_div',
'html_span',
'html_br',
'html_hr',
'html_h1',
'html_h2',
'html_h3',
'html_h4',
'html_h5',
'html_h6',
'html_meta',
'html_link',
'html_object',
'html_style',
'html_base',
'html_table',
'html_tr',
'html_td',
'html_th',
'html_img',
'html_form',
'html_fieldset',
'html_legend',
'html_input',
'html_label',
'html_option',
'html_select',
'http_char_space',
'http_char_htab',
'http_char_cr',
'http_char_lf',
'http_char_question',
'http_char_colon',
'http_read_timeout_secs',
'http_server_web_connection',
'http_server',
'http_server_connection_handler',
'image',
'jdbc_initialize',
'lassoapp_settingsdb',
'lassoapp_resource',
'lassoapp_format_mod_date',
'lassoapp_include_current',
'lassoapp_include',
'lassoapp_find_missing_file',
'lassoapp_source',
'lassoapp_capabilities',
'lassoapp_get_capabilities_name',
'lassoapp_exists',
'lassoapp_path_to_method_name',
'lassoapp_invoke_resource',
'lassoapp_installer',
'lassoapp_initialize_db',
'lassoapp_initialize',
'lassoapp_content_rep_halt',
'lassoapp_issourcefileextension',
'lassoapp_dirsrc_fileresource',
'lassoapp_dirsrc_appsource',
'lassoapp_livesrc_fileresource',
'lassoapp_livesrc_appsource',
'lassoapp_long_expiring_bytes',
'lassoapp_zip_file_server',
'lassoapp_zipsrc_fileresource',
'lassoapp_zipsrc_appsource',
'lassoapp_compiledsrc_fileresource',
'lassoapp_compiledsrc_appsource',
'lassoapp_manualsrc_appsource',
'lassoapp_current_include',
'lassoapp_current_app',
'lassoapp_do_with_include',
'lassoapp_link',
'lassoapp_load_module',
'lassoapp_mime_type_html',
'lassoapp_mime_type_lasso',
'lassoapp_mime_type_xml',
'lassoapp_mime_type_ppt',
'lassoapp_mime_type_js',
'lassoapp_mime_type_txt',
'lassoapp_mime_type_jpg',
'lassoapp_mime_type_png',
'lassoapp_mime_type_gif',
'lassoapp_mime_type_css',
'lassoapp_mime_type_csv',
'lassoapp_mime_type_tif',
'lassoapp_mime_type_ico',
'lassoapp_mime_type_rss',
'lassoapp_mime_type_xhr',
'lassoapp_mime_type_pdf',
'lassoapp_mime_type_docx',
'lassoapp_mime_type_doc',
'lassoapp_mime_type_zip',
'lassoapp_mime_type_svg',
'lassoapp_mime_type_ttf',
'lassoapp_mime_type_woff',
'lassoapp_mime_get',
'log_level_critical',
'log_level_warning',
'log_level_detail',
'log_level_sql',
'log_level_deprecated',
'log_destination_console',
'log_destination_file',
'log_destination_database',
'log',
'log_setdestination',
'log_always',
'log_critical',
'log_warning',
'log_detail',
'log_sql',
'log_deprecated',
'log_max_file_size',
'log_trim_file_size',
'log_impl_base',
'log_initialize',
'portal_impl',
'portal',
'security_database',
'security_table_groups',
'security_table_users',
'security_table_ug_map',
'security_default_realm',
'security_initialize',
'security_registry',
'session_driver',
'session_initialize',
'session_getdefaultdriver',
'session_setdefaultdriver',
'session_start',
'session_addvar',
'session_removevar',
'session_end',
'session_id',
'session_abort',
'session_result',
'session_deleteexpired',
'memory_session_driver_impl_entry',
'memory_session_driver_impl',
'sqlite_session_driver_impl_entry',
'sqlite_session_driver_impl',
'mysql_session_driver_impl',
'odbc_session_driver_mssql',
'odbc_session_driver_impl',
'session_decorate',
'session_delete_expired_thread',
'email_smtp',
'auth_admin',
'auth_check',
'auth_custom',
'auth_group',
'auth_prompt',
'auth_user',
'client_address',
'client_addr',
'client_authorization',
'client_browser',
'client_contentlength',
'client_contenttype',
'client_cookielist',
'client_cookies',
'client_encoding',
'client_formmethod',
'client_getargs',
'client_getparams',
'client_getparam',
'client_headers',
'client_integertoip',
'client_ip',
'client_iptointeger',
'client_password',
'client_postargs',
'client_postparams',
'client_postparam',
'client_type',
'client_username',
'client_url',
'referer_url',
'referrer_url',
'content_type',
'content_encoding',
'cookie',
'cookie_set',
'include',
'include_currentpath',
'include_filepath',
'include_localpath',
'include_once',
'include_path',
'include_raw',
'includes',
'library',
'library_once',
'response_filepath',
'response_localpath',
'response_path',
'response_realm',
'response_root',
'redirect_url',
'server_admin',
'server_name',
'server_ip',
'server_port',
'server_protocol',
'server_signature',
'server_software',
'server_push',
'token_value',
'wap_isenabled',
'wap_maxbuttons',
'wap_maxhorzpixels',
'wap_maxvertpixels',
'wap_maxcolumns',
'wap_maxrows',
'define_atbegin',
'define_atend',
'content_header',
'content_addheader',
'content_replaceheader',
'content_body',
'html_comment',
'web_node_content_json_specialized',
'web_node',
'web_node_container',
'web_node_content_representation',
'web_node_content',
'web_node_content_document',
'web_node_postable',
'web_node_base',
'web_node_forpath',
'web_nodes_requesthandler',
'web_nodes_normalizeextension',
'web_nodes_processcontentnode',
'web_node_root',
'web_nodes_initialize',
'web_node_content_representation_xhr_container',
'web_node_content_representation_xhr',
'web_node_content_html_specialized',
'web_node_content_representation_html_specialized',
'web_node_content_representation_html',
'web_node_content_css_specialized',
'web_node_content_representation_css_specialized',
'web_node_content_representation_css',
'web_node_content_js_specialized',
'web_node_content_representation_js_specialized',
'web_node_content_representation_js',
'web_node_echo',
'web_response_nodesentry',
'web_error_atend',
'web_response_impl',
'web_response',
'web_router_database',
'web_router_initialize',
'web_router',
'asstring',
'isnota',
'isallof',
'isanyof',
'oncompare',
'isa',
'ascopy',
'ascopydeep',
'type',
'invoke',
'atend',
'decomposeassignment',
'asgenerator',
'foreach',
'eachword',
'eachline',
'eachcharacter',
'foreachwordbreak',
'foreachlinebreak',
'foreachcharacter',
'isempty',
'isnotempty',
'ifempty',
'ifnotempty',
'size',
'values',
'asarray',
'aslist',
'asstaticarray',
'join',
'get',
'keys',
'askeyedgenerator',
'eachpair',
'eachkey',
'foreachpair',
'foreachkey',
'front',
'first',
'back',
'last',
'second',
'insert',
'insertfront',
'insertfirst',
'insertback',
'insertfrom',
'insertlast',
'remove',
'removeall',
'removefront',
'removefirst',
'removeback',
'removelast',
'difference',
'intersection',
'union',
'contains',
'find',
'findposition',
'componentdelimiter',
'extensiondelimiter',
'lastcomponent',
'foreachpathcomponent',
'eachcomponent',
'striplastcomponent',
'firstcomponent',
'stripfirstcomponent',
'splitextension',
'hastrailingcomponent',
'isfullpath',
'findlast',
'sub',
'readsomebytes',
'readbytesfully',
'readbytes',
'writebytes',
'encoding',
'readstring',
'writestring',
'hash',
'foreachsub',
'eachsub',
'push',
'pop',
'top',
'dowithclose',
'close',
'fd',
'do',
'sum',
'average',
'where',
'select',
'selectmany',
'groupby',
'groupjoin',
'orderby',
'orderbydescending',
'thenby',
'thenbydescending',
'skip',
'take',
'serialize',
'serializationelements',
'acceptdeserializedelement',
'left',
'right',
'up',
'value',
'bind',
'listen',
'localaddress',
'remoteaddress',
'shutdownrd',
'shutdownwr',
'shutdownrdwr',
'setname',
'contents',
'tagname',
'foreachchild',
'eachchild',
'foreachmatch',
'eachmatch',
'haschildnodes',
'childnodes',
'extract',
'connection',
'requestparams',
'stdin',
'mimes',
'setstatus',
'getstatus',
'writeheaderline',
'writeheaderbytes',
'writebodybytes',
'id',
'class',
'style',
'title',
'gethtmlattr',
'lang',
'onclick',
'ondblclick',
'onmousedown',
'onmouseup',
'onmouseover',
'onmousemove',
'onmouseout',
'onkeypress',
'onkeydown',
'onkeyup',
'sethtmlattr',
'gethtmlattrstring',
'hashtmlattr',
'addcomponent',
'attributes',
'issourcefile',
'resourceinvokable',
'resourcename',
'fullpath',
'appname',
'srcpath',
'resources',
'foo',
'startup',
'validatesessionstable',
'createtable',
'fetchdata',
'savedata',
'init',
'kill',
'expire',
'jsonlabel',
'jsonhtml',
'jsonisleaf',
'delim',
'name',
'path',
'nodelist',
'subnode',
'subnodes',
'representnoderesult',
'mime',
'extensions',
'representnode',
'defaultcontentrepresentation',
'supportscontentrepresentation',
'acceptpost',
'htmlcontent',
'csscontent',
'jscontent',
'escape_member',
'sameas',
'parent',
'settrait',
'oncreate',
'listmethods',
'hasmethod',
'addtrait',
'gettype',
'istype',
'doccomment',
'requires',
'provides',
'subtraits',
'description',
'hosttonet16',
'hosttonet32',
'nettohost16',
'nettohost32',
'nettohost64',
'hosttonet64',
'bitset',
'bittest',
'bitflip',
'bitclear',
'bitor',
'bitand',
'bitxor',
'bitnot',
'bitshiftleft',
'bitshiftright',
'abs',
'div',
'dereferencepointer',
'asdecimal',
'deg2rad',
'asstringhex',
'asstringoct',
'acos',
'asin',
'atan',
'atan2',
'ceil',
'cos',
'cosh',
'exp',
'fabs',
'floor',
'frexp',
'ldexp',
'log10',
'modf',
'pow',
'sin',
'sinh',
'sqrt',
'tan',
'tanh',
'erf',
'erfc',
'gamma',
'hypot',
'j0',
'j1',
'jn',
'lgamma',
'y0',
'y1',
'yn',
'isnan',
'acosh',
'asinh',
'atanh',
'cbrt',
'expm1',
'nextafter',
'scalb',
'ilogb',
'log1p',
'logb',
'remainder',
'rint',
'asinteger',
'self',
'detach',
'restart',
'resume',
'continuation',
'home',
'callsite_file',
'callsite_line',
'callsite_col',
'callstack',
'splitthread',
'threadreaddesc',
'givenblock',
'autocollectbuffer',
'calledname',
'methodname',
'invokeuntil',
'invokewhile',
'invokeautocollect',
'asasync',
'append',
'appendchar',
'private_find',
'private_findlast',
'length',
'chardigitvalue',
'private_compare',
'charname',
'chartype',
'decompose',
'normalize',
'digit',
'foldcase',
'private_merge',
'unescape',
'trim',
'titlecase',
'reverse',
'getisocomment',
'getnumericvalue',
'totitle',
'toupper',
'tolower',
'lowercase',
'uppercase',
'isalnum',
'isalpha',
'isbase',
'iscntrl',
'isdigit',
'isxdigit',
'islower',
'isprint',
'isspace',
'istitle',
'ispunct',
'isgraph',
'isblank',
'isualphabetic',
'isulowercase',
'isupper',
'isuuppercase',
'isuwhitespace',
'iswhitespace',
'encodehtml',
'decodehtml',
'encodexml',
'decodexml',
'encodehtmltoxml',
'getpropertyvalue',
'hasbinaryproperty',
'asbytes',
'equals',
'compare',
'comparecodepointorder',
'padleading',
'padtrailing',
'merge',
'split',
'removeleading',
'removetrailing',
'beginswith',
'endswith',
'replace',
'eachwordbreak',
'encodesql92',
'encodesql',
'substring',
'setsize',
'reserve',
'getrange',
'private_setrange',
'importas',
'import8bits',
'import32bits',
'import64bits',
'import16bits',
'importbytes',
'importpointer',
'export8bits',
'export16bits',
'export32bits',
'export64bits',
'exportbytes',
'exportsigned8bits',
'exportsigned16bits',
'exportsigned32bits',
'exportsigned64bits',
'marker',
'swapbytes',
'encodeurl',
'decodeurl',
'encodebase64',
'decodebase64',
'encodeqp',
'decodeqp',
'encodemd5',
'encodehex',
'decodehex',
'detectcharset',
'bestcharset',
'crc',
'importstring',
'setrange',
'exportas',
'exportstring',
'exportpointerbits',
'foreachbyte',
'eachbyte',
'typename',
'returntype',
'restname',
'paramdescs',
'action',
'statement',
'inputcolumns',
'keycolumns',
'returncolumns',
'sortcolumns',
'skiprows',
'maxrows',
'rowsfound',
'statementonly',
'lop',
'databasename',
'tablename',
'schemaname',
'hostid',
'hostdatasource',
'hostname',
'hostport',
'hostusername',
'hostpassword',
'hostschema',
'hosttableencoding',
'hostextra',
'hostisdynamic',
'refobj',
'prepared',
'getset',
'addset',
'numsets',
'addrow',
'addcolumninfo',
'forcedrowid',
'makeinheritedcopy',
'filename',
'expose',
'recover',
'count',
'exchange',
'findindex',
'sort',
'family',
'isvalid',
'isssl',
'open',
'read',
'write',
'ioctl',
'seek',
'mode',
'mtime',
'atime',
'dup',
'dup2',
'fchdir',
'fchown',
'fsync',
'ftruncate',
'fchmod',
'sendfd',
'receivefd',
'readobject',
'tryreadobject',
'writeobject',
'leaveopen',
'rewind',
'tell',
'language',
'script',
'country',
'variant',
'displaylanguage',
'displayscript',
'displaycountry',
'displayvariant',
'displayname',
'basename',
'keywords',
'iso3language',
'iso3country',
'formatas',
'formatnumber',
'parsenumber',
'parseas',
'format',
'parse',
'add',
'roll',
'getattr',
'setattr',
'clear',
'isset',
'settimezone',
'timezone',
'time',
'indaylighttime',
'createdocument',
'parsedocument',
'hasfeature',
'createdocumenttype',
'nodename',
'nodevalue',
'nodetype',
'parentnode',
'firstchild',
'lastchild',
'previoussibling',
'nextsibling',
'ownerdocument',
'namespaceuri',
'prefix',
'localname',
'insertbefore',
'replacechild',
'removechild',
'appendchild',
'clonenode',
'issupported',
'hasattributes',
'extractone',
'transform',
'data',
'substringdata',
'appenddata',
'insertdata',
'deletedata',
'replacedata',
'doctype',
'implementation',
'documentelement',
'createelement',
'createdocumentfragment',
'createtextnode',
'createcomment',
'createcdatasection',
'createprocessinginstruction',
'createattribute',
'createentityreference',
'getelementsbytagname',
'importnode',
'createelementns',
'createattributens',
'getelementsbytagnamens',
'getelementbyid',
'getattribute',
'setattribute',
'removeattribute',
'getattributenode',
'setattributenode',
'removeattributenode',
'getattributens',
'setattributens',
'removeattributens',
'getattributenodens',
'setattributenodens',
'hasattribute',
'hasattributens',
'specified',
'ownerelement',
'splittext',
'notationname',
'publicid',
'systemid',
'target',
'entities',
'notations',
'internalsubset',
'item',
'getnameditem',
'getnameditemns',
'setnameditem',
'setnameditemns',
'removenameditem',
'removenameditemns',
'next',
'readattributevalue',
'attributecount',
'baseuri',
'depth',
'hasvalue',
'isemptyelement',
'xmllang',
'getattributenamespace',
'lookupnamespace',
'movetoattribute',
'movetoattributenamespace',
'movetofirstattribute',
'movetonextattribute',
'movetoelement',
'prepare',
'last_insert_rowid',
'total_changes',
'interrupt',
'errcode',
'errmsg',
'addmathfunctions',
'finalize',
'step',
'bind_blob',
'bind_double',
'bind_int',
'bind_null',
'bind_text',
'bind_parameter_index',
'reset',
'column_count',
'column_decltype',
'column_blob',
'column_double',
'column_int64',
'column_text',
'ismultipart',
'gotfileupload',
'setmaxfilesize',
'getparts',
'trackingid',
'currentfile',
'addtobuffer',
'input',
'replacepattern',
'findpattern',
'ignorecase',
'setinput',
'setreplacepattern',
'setfindpattern',
'setignorecase',
'appendreplacement',
'matches',
'private_replaceall',
'appendtail',
'groupcount',
'matchposition',
'matchesstart',
'private_replacefirst',
'private_split',
'matchstring',
'replaceall',
'replacefirst',
'findall',
'findcount',
'findfirst',
'findsymbols',
'loadlibrary',
'getlibrary',
'f',
'r',
'form',
'gen',
'callfirst',
'key',
'by',
'from',
'to',
'd',
't',
'object',
'inneroncompare',
'members',
'writeid',
'addmember',
'refid',
'index',
'objects',
'tabs',
'trunk',
'trace',
'asxml',
'tabstr',
'toxmlstring',
'idmap',
'readidobjects',
'red',
'root',
'getnode',
'firstnode',
'lastnode',
'nextnode',
'private_rebalanceforremove',
'private_rotateleft',
'private_rotateright',
'private_rebalanceforinsert',
'eachnode',
'foreachnode',
'resolvelinks',
'parentdir',
'aslazystring',
'openread',
'openwrite',
'openwriteonly',
'openappend',
'opentruncate',
'exists',
'modificationtime',
'lastaccesstime',
'modificationdate',
'lastaccessdate',
'delete',
'moveto',
'copyto',
'linkto',
'flush',
'chmod',
'chown',
'isopen',
'position',
'setmarker',
'setposition',
'setmode',
'foreachline',
'lock',
'unlock',
'trylock',
'testlock',
'perms',
'islink',
'isdir',
'realpath',
'openwith',
'create',
'setcwd',
'foreachentry',
'eachpath',
'eachfilepath',
'eachdirpath',
'each',
'eachfile',
'eachdir',
'eachpathrecursive',
'eachfilepathrecursive',
'eachdirpathrecursive',
'eachentry',
'makefullpath',
'annotate',
'blur',
'command',
'composite',
'contrast',
'convert',
'crop',
'execute',
'enhance',
'flipv',
'fliph',
'modulate',
'rotate',
'save',
'scale',
'sharpen',
'addcomment',
'comments',
'describe',
'height',
'pixel',
'resolutionv',
'resolutionh',
'width',
'setcolorspace',
'colorspace',
'debug',
'histogram',
'imgptr',
'appendimagetolist',
'fx',
'applyheatcolors',
'authenticate',
'search',
'searchurl',
'readerror',
'readline',
'setencoding',
'closewrite',
'exitcode',
'getversion',
'findclass',
'throw',
'thrownew',
'exceptionoccurred',
'exceptiondescribe',
'exceptionclear',
'fatalerror',
'newglobalref',
'deleteglobalref',
'deletelocalref',
'issameobject',
'allocobject',
'newobject',
'getobjectclass',
'isinstanceof',
'getmethodid',
'callobjectmethod',
'callbooleanmethod',
'callbytemethod',
'callcharmethod',
'callshortmethod',
'callintmethod',
'calllongmethod',
'callfloatmethod',
'calldoublemethod',
'callvoidmethod',
'callnonvirtualobjectmethod',
'callnonvirtualbooleanmethod',
'callnonvirtualbytemethod',
'callnonvirtualcharmethod',
'callnonvirtualshortmethod',
'callnonvirtualintmethod',
'callnonvirtuallongmethod',
'callnonvirtualfloatmethod',
'callnonvirtualdoublemethod',
'callnonvirtualvoidmethod',
'getfieldid',
'getobjectfield',
'getbooleanfield',
'getbytefield',
'getcharfield',
'getshortfield',
'getintfield',
'getlongfield',
'getfloatfield',
'getdoublefield',
'setobjectfield',
'setbooleanfield',
'setbytefield',
'setcharfield',
'setshortfield',
'setintfield',
'setlongfield',
'setfloatfield',
'setdoublefield',
'getstaticmethodid',
'callstaticobjectmethod',
'callstaticbooleanmethod',
'callstaticbytemethod',
'callstaticcharmethod',
'callstaticshortmethod',
'callstaticintmethod',
'callstaticlongmethod',
'callstaticfloatmethod',
'callstaticdoublemethod',
'callstaticvoidmethod',
'getstaticfieldid',
'getstaticobjectfield',
'getstaticbooleanfield',
'getstaticbytefield',
'getstaticcharfield',
'getstaticshortfield',
'getstaticintfield',
'getstaticlongfield',
'getstaticfloatfield',
'getstaticdoublefield',
'setstaticobjectfield',
'setstaticbooleanfield',
'setstaticbytefield',
'setstaticcharfield',
'setstaticshortfield',
'setstaticintfield',
'setstaticlongfield',
'setstaticfloatfield',
'setstaticdoublefield',
'newstring',
'getstringlength',
'getstringchars',
'getarraylength',
'newobjectarray',
'getobjectarrayelement',
'setobjectarrayelement',
'newbooleanarray',
'newbytearray',
'newchararray',
'newshortarray',
'newintarray',
'newlongarray',
'newfloatarray',
'newdoublearray',
'getbooleanarrayelements',
'getbytearrayelements',
'getchararrayelements',
'getshortarrayelements',
'getintarrayelements',
'getlongarrayelements',
'getfloatarrayelements',
'getdoublearrayelements',
'getbooleanarrayregion',
'getbytearrayregion',
'getchararrayregion',
'getshortarrayregion',
'getintarrayregion',
'getlongarrayregion',
'getfloatarrayregion',
'getdoublearrayregion',
'setbooleanarrayregion',
'setbytearrayregion',
'setchararrayregion',
'setshortarrayregion',
'setintarrayregion',
'setlongarrayregion',
'setfloatarrayregion',
'setdoublearrayregion',
'monitorenter',
'monitorexit',
'fromreflectedmethod',
'fromreflectedfield',
'toreflectedmethod',
'toreflectedfield',
'exceptioncheck',
'dbtablestable',
'dstable',
'dsdbtable',
'dshoststable',
'fieldstable',
'sql',
'adddatasource',
'loaddatasourceinfo',
'loaddatasourcehostinfo',
'getdatasource',
'getdatasourceid',
'getdatasourcename',
'listdatasources',
'listactivedatasources',
'removedatasource',
'listdatasourcehosts',
'listhosts',
'adddatasourcehost',
'getdatasourcehost',
'removedatasourcehost',
'getdatabasehost',
'gethostdatabase',
'listalldatabases',
'listdatasourcedatabases',
'listhostdatabases',
'getdatasourcedatabase',
'getdatasourcedatabasebyid',
'getdatabasebyname',
'getdatabasebyid',
'getdatabasebyalias',
'adddatasourcedatabase',
'removedatasourcedatabase',
'listalltables',
'listdatabasetables',
'getdatabasetable',
'getdatabasetablebyalias',
'getdatabasetablebyid',
'gettablebyid',
'adddatabasetable',
'removedatabasetable',
'removefield',
'maybevalue',
'getuniquealiasname',
'makecolumnlist',
'makecolumnmap',
'datasourcecolumns',
'datasourcemap',
'hostcolumns',
'hostmap',
'hostcolumns2',
'hostmap2',
'databasecolumns',
'databasemap',
'tablecolumns',
'tablemap',
'databasecolumnnames',
'hostcolumnnames',
'hostcolumnnames2',
'datasourcecolumnnames',
'tablecolumnnames',
'bindcount',
'db',
'tables',
'hastable',
'tablehascolumn',
'eachrow',
'bindparam',
'foreachrow',
'executelazy',
'executenow',
'lastinsertid',
'table',
'bindone',
'src',
'stat',
'colmap',
'getcolumn',
'locals',
'getcolumns',
'bodybytes',
'headerbytes',
'ready',
'token',
'url',
'done',
'header',
'result',
'statuscode',
'raw',
'version',
'perform',
'performonce',
'asraw',
'rawdiff',
'getformat',
'setformat',
'subtract',
'gmt',
'dst',
'era',
'year',
'month',
'week',
'weekofyear',
'weekofmonth',
'day',
'dayofmonth',
'dayofyear',
'dayofweek',
'dayofweekinmonth',
'ampm',
'am',
'pm',
'hour',
'hourofday',
'hourofampm',
'minute',
'millisecond',
'zoneoffset',
'dstoffset',
'yearwoy',
'dowlocal',
'extendedyear',
'julianday',
'millisecondsinday',
'firstdayofweek',
'fixformat',
'minutesbetween',
'hoursbetween',
'secondsbetween',
'daysbetween',
'businessdaysbetween',
'pdifference',
'getfield',
's',
'linediffers',
'sourceline',
'sourcecolumn',
'continuationpacket',
'continuationpoint',
'continuationstack',
'features',
'lastpoint',
'net',
'running',
'source',
'run',
'pathtouri',
'sendpacket',
'readpacket',
'handlefeatureset',
'handlefeatureget',
'handlestdin',
'handlestdout',
'handlestderr',
'isfirststep',
'handlecontinuation',
'ensurestopped',
'handlestackget',
'handlecontextnames',
'formatcontextelements',
'formatcontextelement',
'bptypetostr',
'bptoxml',
'handlebreakpointlist',
'handlebreakpointget',
'handlebreakpointremove',
'condtoint',
'inttocond',
'handlebreakpointupdate',
'handlebreakpointset',
'handlecontextget',
'handlesource',
'error',
'stoprunning',
'pollide',
'polldbg',
'runonce',
'arguments',
'argumentvalue',
'end',
'start',
'days',
'foreachday',
'padzero',
'actionparams',
'capi',
'doclose',
'isnothing',
'named',
'workinginputcolumns',
'workingkeycolumns',
'workingreturncolumns',
'workingsortcolumns',
'workingkeyfield_name',
'scanfordatasource',
'configureds',
'configuredskeys',
'scrubkeywords',
'closeprepared',
'filterinputcolumn',
'prev',
'head',
'removenode',
'listnode',
'accept',
'connect',
'foreachaccept',
'writeobjecttcp',
'readobjecttcp',
'begintls',
'endtls',
'loadcerts',
'sslerrfail',
'fromname',
'fromport',
'env',
'getclass',
'jobjectisa',
'new',
'callvoid',
'callint',
'callfloat',
'callboolean',
'callobject',
'callstring',
'callstaticobject',
'callstaticstring',
'callstaticint',
'callstaticboolean',
'chk',
'makecolor',
'realdoc',
'addbarcode',
'addchapter',
'addcheckbox',
'addcombobox',
'addhiddenfield',
'addimage',
'addlist',
'addpage',
'addparagraph',
'addpasswordfield',
'addphrase',
'addradiobutton',
'addradiogroup',
'addresetbutton',
'addsection',
'addselectlist',
'addsubmitbutton',
'addtable',
'addtextarea',
'addtextfield',
'addtext',
'arc',
'circle',
'closepath',
'curveto',
'drawtext',
'getcolor',
'getheader',
'getheaders',
'getmargins',
'getpagenumber',
'getsize',
'insertpage',
'line',
'rect',
'setcolor',
'setfont',
'setlinewidth',
'setpagenumber',
'conventionaltop',
'lowagiefont',
'jcolor',
'jbarcode',
'generatechecksum',
'getbarheight',
'getbarmultiplier',
'getbarwidth',
'getbaseline',
'getcode',
'getfont',
'gettextalignment',
'gettextsize',
'setbarheight',
'setbarmultiplier',
'setbarwidth',
'setbaseline',
'setcode',
'setgeneratechecksum',
'setshowchecksum',
'settextalignment',
'settextsize',
'showchecksum',
'showcode39startstop',
'showeanguardbars',
'jfont',
'getencoding',
'getface',
'getfullfontname',
'getpsfontname',
'getsupportedencodings',
'istruetype',
'getstyle',
'getbold',
'getitalic',
'getunderline',
'setface',
'setunderline',
'setbold',
'setitalic',
'textwidth',
'jimage',
'ontop',
'jlist',
'jread',
'addjavascript',
'exportfdf',
'extractimage',
'fieldnames',
'fieldposition',
'fieldtype',
'fieldvalue',
'gettext',
'importfdf',
'javascript',
'pagecount',
'pagerotation',
'pagesize',
'setfieldvalue',
'setpagerange',
'jtable',
'getabswidth',
'getalignment',
'getbordercolor',
'getborderwidth',
'getcolumncount',
'getpadding',
'getrowcount',
'getspacing',
'setalignment',
'setbordercolor',
'setborderwidth',
'setpadding',
'setspacing',
'jtext',
'element',
'foreachspool',
'unspool',
'err',
'in',
'out',
'pid',
'wait',
'testexitcode',
'maxworkers',
'tasks',
'workers',
'startone',
'addtask',
'waitforcompletion',
'scanworkers',
'scantasks',
'z',
'addfile',
'adddir',
'adddirpath',
'foreachfile',
'foreachfilename',
'eachfilename',
'filenames',
'getfile',
'meta',
'criteria',
'valid',
'lazyvalue',
'qdcount',
'qdarray',
'answer',
'bitformat',
'consume_rdata',
'consume_string',
'consume_label',
'consume_domain',
'consume_message',
'errors',
'warnings',
'addwarning',
'adderror',
'renderbytes',
'renderstring',
'components',
'addcomponents',
'body',
'renderdocumentbytes',
'contenttype',
'mime_boundary',
'mime_contenttype',
'mime_hdrs',
'addtextpart',
'addhtmlpart',
'addattachment',
'addpart',
'recipients',
'pop_capa',
'pop_debug',
'pop_err',
'pop_get',
'pop_ids',
'pop_index',
'pop_log',
'pop_mode',
'pop_net',
'pop_res',
'pop_server',
'pop_timeout',
'pop_token',
'pop_cmd',
'user',
'pass',
'apop',
'auth',
'quit',
'rset',
'uidl',
'retr',
'dele',
'noop',
'capa',
'stls',
'authorize',
'retrieve',
'headers',
'uniqueid',
'capabilities',
'cancel',
'results',
'lasterror',
'parse_body',
'parse_boundary',
'parse_charset',
'parse_content_disposition',
'parse_content_transfer_encoding',
'parse_content_type',
'parse_hdrs',
'parse_mode',
'parse_msg',
'parse_parts',
'parse_rawhdrs',
'rawheaders',
'content_transfer_encoding',
'content_disposition',
'boundary',
'charset',
'cc',
'subject',
'bcc',
'pause',
'continue',
'touch',
'refresh',
'status',
'queue_status',
'active_tick',
'getprefs',
'initialize',
'queue_maintenance',
'queue_messages',
'content',
'rectype',
'requestid',
'cachedappprefix',
'cachedroot',
'cookiesary',
'fcgireq',
'fileuploadsary',
'headersmap',
'httpauthorization',
'postparamsary',
'queryparamsary',
'documentroot',
'appprefix',
'httpconnection',
'httpcookie',
'httphost',
'httpuseragent',
'httpcachecontrol',
'httpreferer',
'httpreferrer',
'contentlength',
'pathtranslated',
'remoteaddr',
'remoteport',
'requestmethod',
'requesturi',
'scriptfilename',
'scriptname',
'scripturi',
'scripturl',
'serveraddr',
'serveradmin',
'servername',
'serverport',
'serverprotocol',
'serversignature',
'serversoftware',
'pathinfo',
'gatewayinterface',
'httpaccept',
'httpacceptencoding',
'httpacceptlanguage',
'ishttps',
'cookies',
'rawheader',
'queryparam',
'postparam',
'param',
'queryparams',
'querystring',
'postparams',
'poststring',
'params',
'fileuploads',
'isxhr',
'reqid',
'statusmsg',
'cap',
'n',
'proxying',
'stop',
'printsimplemsg',
'handleevalexpired',
'handlenormalconnection',
'handledevconnection',
'splittoprivatedev',
'getmode',
'novaluelists',
'makeurl',
'choosecolumntype',
'getdatabasetablepart',
'getlcapitype',
'buildquery',
'getsortfieldspart',
'endjs',
'addjs',
'addjstext',
'addendjs',
'addendjstext',
'addcss',
'addfavicon',
'attrs',
'dtdid',
'xhtml',
'code',
'msg',
'scripttype',
'defer',
'httpequiv',
'scheme',
'href',
'hreflang',
'linktype',
'rel',
'rev',
'media',
'declare',
'classid',
'codebase',
'objecttype',
'codetype',
'archive',
'standby',
'usemap',
'tabindex',
'styletype',
'method',
'enctype',
'accept_charset',
'onsubmit',
'onreset',
'accesskey',
'inputtype',
'maxlength',
'for',
'label',
'multiple',
'buff',
'wroteheaders',
'pullrequest',
'pullrawpost',
'shouldclose',
'pullurlpost',
'pullmimepost',
'pullhttpheader',
'pulloneheaderline',
'parseoneheaderline',
'addoneheaderline',
'safeexport8bits',
'writeheader',
'connhandler',
'port',
'connectionhandler',
'acceptconnections',
'gotconnection',
'failnoconnectionhandler',
'splitconnection',
'scriptextensions',
'sendfile',
'probemimetype',
'inits',
'installs',
'rootmap',
'install',
'getappsource',
'preflight',
'splituppath',
'handleresource',
'handledefinitionhead',
'handledefinitionbody',
'handledefinitionresource',
'execinstalls',
'execinits',
'payload',
'eligiblepath',
'eligiblepaths',
'expiresminutes',
'moddatestr',
'zips',
'addzip',
'getzipfilebytes',
'resourcedata',
'zipfile',
'zipname',
'zipfilename',
'rawinvokable',
'route',
'setdestination',
'encodepassword',
'checkuser',
'needinitialization',
'adduser',
'getuserid',
'getuser',
'getuserbykey',
'removeuser',
'listusers',
'listusersbygroup',
'countusersbygroup',
'addgroup',
'updategroup',
'getgroupid',
'getgroup',
'removegroup',
'listgroups',
'listgroupsbyuser',
'addusertogroup',
'removeuserfromgroup',
'removeuserfromallgroups',
'md5hex',
'usercolumns',
'groupcolumns',
'expireminutes',
'lasttouched',
'hasexpired',
'idealinmemory',
'maxinmemory',
'nextprune',
'nextprunedelta',
'sessionsdump',
'prune',
'entry',
'host',
'tb',
'setdefaultstorage',
'getdefaultstorage',
'onconvert',
'send',
'addsubnode',
'removesubnode',
'nodeforpath',
'jsonfornode',
'appmessage',
'appstatus',
'atends',
'chunked',
'cookiesarray',
'didinclude',
'errstack',
'headersarray',
'includestack',
'outputencoding',
'sessionsmap',
'htmlizestacktrace',
'respond',
'sendresponse',
'sendchunk',
'makecookieyumyum',
'includeonce',
'includelibrary',
'includelibraryonce',
'includebytes',
'addatend',
'setcookie',
'addheader',
'replaceheader',
'setheaders',
'rawcontent',
'redirectto',
'htmlizestacktracelink',
'doatbegins',
'handlelassoappcontent',
'handlelassoappresponse',
'domainbody',
'establisherrorstate',
'tryfinderrorfile',
'doatends',
'dosessions',
'makenonrelative',
'pushinclude',
'popinclude',
'findinclude',
'checkdebugging',
'splitdebuggingthread',
'matchtriggers',
'rules',
'shouldabort',
'gettrigger',
'trigger',
'rule'
],
'Lasso 8 Tags': [
'__char',
'__sync_timestamp__',
'_admin_addgroup',
'_admin_adduser',
'_admin_defaultconnector',
'_admin_defaultconnectornames',
'_admin_defaultdatabase',
'_admin_defaultfield',
'_admin_defaultgroup',
'_admin_defaulthost',
'_admin_defaulttable',
'_admin_defaultuser',
'_admin_deleteconnector',
'_admin_deletedatabase',
'_admin_deletefield',
'_admin_deletegroup',
'_admin_deletehost',
'_admin_deletetable',
'_admin_deleteuser',
'_admin_duplicategroup',
'_admin_internaldatabase',
'_admin_listconnectors',
'_admin_listdatabases',
'_admin_listfields',
'_admin_listgroups',
'_admin_listhosts',
'_admin_listtables',
'_admin_listusers',
'_admin_refreshconnector',
'_admin_refreshsecurity',
'_admin_servicepath',
'_admin_updateconnector',
'_admin_updatedatabase',
'_admin_updatefield',
'_admin_updategroup',
'_admin_updatehost',
'_admin_updatetable',
'_admin_updateuser',
'_chartfx_activation_string',
'_chartfx_getchallengestring',
'_chop_args',
'_chop_mimes',
'_client_addr_old',
'_client_address_old',
'_client_ip_old',
'_database_names',
'_datasource_reload',
'_date_current',
'_date_format',
'_date_msec',
'_date_parse',
'_execution_timelimit',
'_file_chmod',
'_initialize',
'_jdbc_acceptsurl',
'_jdbc_debug',
'_jdbc_deletehost',
'_jdbc_driverclasses',
'_jdbc_driverinfo',
'_jdbc_metainfo',
'_jdbc_propertyinfo',
'_jdbc_setdriver',
'_lasso_param',
'_log_helper',
'_proc_noparam',
'_proc_withparam',
'_recursion_limit',
'_request_param',
'_security_binaryexpiration',
'_security_flushcaches',
'_security_isserialized',
'_security_serialexpiration',
'_srand',
'_strict_literals',
'_substring',
'_xmlrpc_exconverter',
'_xmlrpc_inconverter',
'_xmlrpc_xmlinconverter',
'abort',
'accept',
'action_addinfo',
'action_addrecord',
'action_param',
'action_params',
'action_setfoundcount',
'action_setrecordid',
'action_settotalcount',
'action_statement',
'add',
'addattachment',
'addattribute',
'addbarcode',
'addchapter',
'addcheckbox',
'addchild',
'addcombobox',
'addcomment',
'addcontent',
'addhiddenfield',
'addhtmlpart',
'addimage',
'addjavascript',
'addlist',
'addnamespace',
'addnextsibling',
'addpage',
'addparagraph',
'addparenttype',
'addpart',
'addpasswordfield',
'addphrase',
'addprevsibling',
'addradiobutton',
'addradiogroup',
'addresetbutton',
'addsection',
'addselectlist',
'addsibling',
'addsubmitbutton',
'addtable',
'addtext',
'addtextarea',
'addtextfield',
'addtextpart',
'admin_allowedfileroots',
'admin_changeuser',
'admin_createuser',
'admin_currentgroups',
'admin_currentuserid',
'admin_currentusername',
'admin_getpref',
'admin_groupassignuser',
'admin_grouplistusers',
'admin_groupremoveuser',
'admin_lassoservicepath',
'admin_listgroups',
'admin_refreshlicensing',
'admin_refreshsecurity',
'admin_reloaddatasource',
'admin_removepref',
'admin_setpref',
'admin_userexists',
'admin_userlistgroups',
'alarms',
'all',
'and',
'annotate',
'answer',
'append',
'appendreplacement',
'appendtail',
'arc',
'array',
'array_iterator',
'asasync',
'astype',
'atbegin',
'atbottom',
'atend',
'atfarleft',
'atfarright',
'attop',
'attributecount',
'attributes',
'auth',
'auth_admin',
'auth_auth',
'auth_custom',
'auth_group',
'auth_prompt',
'auth_user',
'authenticate',
'authorize',
'backward',
'base64',
'baseuri',
'bcc',
'bean',
'beanproperties',
'beginswith',
'bigint',
'bind',
'bitand',
'bitclear',
'bitflip',
'bitformat',
'bitnot',
'bitor',
'bitset',
'bitshiftleft',
'bitshiftright',
'bittest',
'bitxor',
'blur',
'body',
'bom_utf16be',
'bom_utf16le',
'bom_utf32be',
'bom_utf32le',
'bom_utf8',
'boolean',
'boundary',
'bw',
'bytes',
'cache',
'cache_delete',
'cache_empty',
'cache_exists',
'cache_fetch',
'cache_internal',
'cache_maintenance',
'cache_object',
'cache_preferences',
'cache_store',
'call',
'cancel',
'capabilities',
'case',
'cc',
'chardigitvalue',
'charname',
'charset',
'chartfx',
'chartfx_records',
'chartfx_serve',
'chartype',
'checked',
'children',
'choice_list',
'choice_listitem',
'choicelistitem',
'cipher_decrypt',
'cipher_digest',
'cipher_encrypt',
'cipher_hmac',
'cipher_keylength',
'cipher_list',
'circle',
'click_text',
'client_addr',
'client_address',
'client_authorization',
'client_browser',
'client_contentlength',
'client_contenttype',
'client_cookielist',
'client_cookies',
'client_encoding',
'client_formmethod',
'client_getargs',
'client_getparams',
'client_headers',
'client_ip',
'client_ipfrominteger',
'client_iptointeger',
'client_password',
'client_postargs',
'client_postparams',
'client_type',
'client_url',
'client_username',
'close',
'closepath',
'closewrite',
'cn',
'code',
'colorspace',
'column',
'column_name',
'column_names',
'command',
'comments',
'compare',
'compare_beginswith',
'compare_contains',
'compare_endswith',
'compare_equalto',
'compare_greaterthan',
'compare_greaterthanorequals',
'compare_greaterthanorequls',
'compare_lessthan',
'compare_lessthanorequals',
'compare_notbeginswith',
'compare_notcontains',
'compare_notendswith',
'compare_notequalto',
'compare_notregexp',
'compare_regexp',
'compare_strictequalto',
'compare_strictnotequalto',
'comparecodepointorder',
'compile',
'compiler_removecacheddoc',
'compiler_setdefaultparserflags',
'composite',
'compress',
'connect',
'contains',
'content_body',
'content_disposition',
'content_encoding',
'content_header',
'content_transfer_encoding',
'content_type',
'contents',
'contrast',
'convert',
'cookie',
'cookie_set',
'crop',
'curl_ftp_getfile',
'curl_ftp_getlisting',
'curl_ftp_putfile',
'curl_include_url',
'currency',
'curveto',
'data',
'database_changecolumn',
'database_changefield',
'database_createcolumn',
'database_createfield',
'database_createtable',
'database_fmcontainer',
'database_hostinfo',
'database_inline',
'database_name',
'database_nameitem',
'database_names',
'database_realname',
'database_removecolumn',
'database_removefield',
'database_removetable',
'database_repeating',
'database_repeating_valueitem',
'database_repeatingvalueitem',
'database_schemanameitem',
'database_schemanames',
'database_tablecolumn',
'database_tablenameitem',
'database_tablenames',
'datasource_name',
'datasource_register',
'date',
'date__date_current',
'date__date_format',
'date__date_msec',
'date__date_parse',
'date_add',
'date_date',
'date_difference',
'date_duration',
'date_format',
'date_getcurrentdate',
'date_getday',
'date_getdayofweek',
'date_gethour',
'date_getlocaltimezone',
'date_getminute',
'date_getmonth',
'date_getsecond',
'date_gettime',
'date_getyear',
'date_gmttolocal',
'date_localtogmt',
'date_maximum',
'date_minimum',
'date_msec',
'date_setformat',
'date_subtract',
'day',
'daylights',
'dayofweek',
'dayofyear',
'db_layoutnameitem',
'db_layoutnames',
'db_nameitem',
'db_names',
'db_tablenameitem',
'db_tablenames',
'dbi_column_names',
'dbi_field_names',
'decimal',
'decimal_setglobaldefaultprecision',
'decode_base64',
'decode_bheader',
'decode_hex',
'decode_html',
'decode_json',
'decode_qheader',
'decode_quotedprintable',
'decode_quotedprintablebytes',
'decode_url',
'decode_xml',
'decompress',
'decrement',
'decrypt_blowfish',
'decrypt_blowfish2',
'default',
'define_atbegin',
'define_atend',
'define_constant',
'define_prototype',
'define_tag',
'define_tagp',
'define_type',
'define_typep',
'delete',
'depth',
'describe',
'description',
'deserialize',
'detach',
'detachreference',
'difference',
'digit',
'directory_directorynameitem',
'directory_lister',
'directory_nameitem',
'directorynameitem',
'dns_default',
'dns_lookup',
'dns_response',
'document',
'down',
'drawtext',
'dst',
'dump',
'duration',
'else',
'email_batch',
'email_compose',
'email_digestchallenge',
'email_digestresponse',
'email_extract',
'email_findemails',
'email_immediate',
'email_merge',
'email_mxerror',
'email_mxlookup',
'email_parse',
'email_pop',
'email_queue',
'email_result',
'email_safeemail',
'email_send',
'email_smtp',
'email_status',
'email_token',
'email_translatebreakstocrlf',
'encode_base64',
'encode_bheader',
'encode_break',
'encode_breaks',
'encode_crc32',
'encode_hex',
'encode_html',
'encode_htmltoxml',
'encode_json',
'encode_qheader',
'encode_quotedprintable',
'encode_quotedprintablebytes',
'encode_set',
'encode_smart',
'encode_sql',
'encode_sql92',
'encode_stricturl',
'encode_url',
'encode_xml',
'encrypt_blowfish',
'encrypt_blowfish2',
'encrypt_crammd5',
'encrypt_hmac',
'encrypt_md5',
'endswith',
'enhance',
'eq',
'equals',
'error_adderror',
'error_code',
'error_code_aborted',
'error_code_assert',
'error_code_bof',
'error_code_connectioninvalid',
'error_code_couldnotclosefile',
'error_code_couldnotcreateoropenfile',
'error_code_couldnotdeletefile',
'error_code_couldnotdisposememory',
'error_code_couldnotlockmemory',
'error_code_couldnotreadfromfile',
'error_code_couldnotunlockmemory',
'error_code_couldnotwritetofile',
'error_code_criterianotmet',
'error_code_datasourceerror',
'error_code_directoryfull',
'error_code_diskfull',
'error_code_dividebyzero',
'error_code_eof',
'error_code_failure',
'error_code_fieldrestriction',
'error_code_file',
'error_code_filealreadyexists',
'error_code_filecorrupt',
'error_code_fileinvalid',
'error_code_fileinvalidaccessmode',
'error_code_fileisclosed',
'error_code_fileisopen',
'error_code_filelocked',
'error_code_filenotfound',
'error_code_fileunlocked',
'error_code_httpfilenotfound',
'error_code_illegalinstruction',
'error_code_illegaluseoffrozeninstance',
'error_code_invaliddatabase',
'error_code_invalidfilename',
'error_code_invalidmemoryobject',
'error_code_invalidparameter',
'error_code_invalidpassword',
'error_code_invalidpathname',
'error_code_invalidusername',
'error_code_ioerror',
'error_code_loopaborted',
'error_code_memory',
'error_code_network',
'error_code_nilpointer',
'error_code_noerr',
'error_code_nopermission',
'error_code_outofmemory',
'error_code_outofstackspace',
'error_code_overflow',
'error_code_postconditionfailed',
'error_code_preconditionfailed',
'error_code_resnotfound',
'error_code_resource',
'error_code_streamreaderror',
'error_code_streamwriteerror',
'error_code_syntaxerror',
'error_code_tagnotfound',
'error_code_unknownerror',
'error_code_varnotfound',
'error_code_volumedoesnotexist',
'error_code_webactionnotsupported',
'error_code_webadderror',
'error_code_webdeleteerror',
'error_code_webmodulenotfound',
'error_code_webnosuchobject',
'error_code_webrepeatingrelatedfield',
'error_code_webrequiredfieldmissing',
'error_code_webtimeout',
'error_code_webupdateerror',
'error_columnrestriction',
'error_currenterror',
'error_databaseconnectionunavailable',
'error_databasetimeout',
'error_deleteerror',
'error_fieldrestriction',
'error_filenotfound',
'error_invaliddatabase',
'error_invalidpassword',
'error_invalidusername',
'error_modulenotfound',
'error_msg',
'error_msg_aborted',
'error_msg_assert',
'error_msg_bof',
'error_msg_connectioninvalid',
'error_msg_couldnotclosefile',
'error_msg_couldnotcreateoropenfile',
'error_msg_couldnotdeletefile',
'error_msg_couldnotdisposememory',
'error_msg_couldnotlockmemory',
'error_msg_couldnotreadfromfile',
'error_msg_couldnotunlockmemory',
'error_msg_couldnotwritetofile',
'error_msg_criterianotmet',
'error_msg_datasourceerror',
'error_msg_directoryfull',
'error_msg_diskfull',
'error_msg_dividebyzero',
'error_msg_eof',
'error_msg_failure',
'error_msg_fieldrestriction',
'error_msg_file',
'error_msg_filealreadyexists',
'error_msg_filecorrupt',
'error_msg_fileinvalid',
'error_msg_fileinvalidaccessmode',
'error_msg_fileisclosed',
'error_msg_fileisopen',
'error_msg_filelocked',
'error_msg_filenotfound',
'error_msg_fileunlocked',
'error_msg_httpfilenotfound',
'error_msg_illegalinstruction',
'error_msg_illegaluseoffrozeninstance',
'error_msg_invaliddatabase',
'error_msg_invalidfilename',
'error_msg_invalidmemoryobject',
'error_msg_invalidparameter',
'error_msg_invalidpassword',
'error_msg_invalidpathname',
'error_msg_invalidusername',
'error_msg_ioerror',
'error_msg_loopaborted',
'error_msg_memory',
'error_msg_network',
'error_msg_nilpointer',
'error_msg_noerr',
'error_msg_nopermission',
'error_msg_outofmemory',
'error_msg_outofstackspace',
'error_msg_overflow',
'error_msg_postconditionfailed',
'error_msg_preconditionfailed',
'error_msg_resnotfound',
'error_msg_resource',
'error_msg_streamreaderror',
'error_msg_streamwriteerror',
'error_msg_syntaxerror',
'error_msg_tagnotfound',
'error_msg_unknownerror',
'error_msg_varnotfound',
'error_msg_volumedoesnotexist',
'error_msg_webactionnotsupported',
'error_msg_webadderror',
'error_msg_webdeleteerror',
'error_msg_webmodulenotfound',
'error_msg_webnosuchobject',
'error_msg_webrepeatingrelatedfield',
'error_msg_webrequiredfieldmissing',
'error_msg_webtimeout',
'error_msg_webupdateerror',
'error_noerror',
'error_nopermission',
'error_norecordsfound',
'error_outofmemory',
'error_pop',
'error_push',
'error_reqcolumnmissing',
'error_reqfieldmissing',
'error_requiredcolumnmissing',
'error_requiredfieldmissing',
'error_reset',
'error_seterrorcode',
'error_seterrormessage',
'error_updateerror',
'errors',
'euro',
'eval',
'event_schedule',
'events',
'ew',
'execute',
'export16bits',
'export32bits',
'export64bits',
'export8bits',
'exportfdf',
'exportstring',
'extract',
'extractone',
'fail',
'fail_if',
'false',
'field',
'field_name',
'field_names',
'fieldnames',
'fieldtype',
'fieldvalue',
'file',
'file_autoresolvefullpaths',
'file_chmod',
'file_control',
'file_copy',
'file_create',
'file_creationdate',
'file_currenterror',
'file_delete',
'file_exists',
'file_getlinecount',
'file_getsize',
'file_isdirectory',
'file_listdirectory',
'file_moddate',
'file_modechar',
'file_modeline',
'file_move',
'file_openread',
'file_openreadwrite',
'file_openwrite',
'file_openwriteappend',
'file_openwritetruncate',
'file_probeeol',
'file_processuploads',
'file_read',
'file_readline',
'file_rename',
'file_serve',
'file_setsize',
'file_stream',
'file_streamcopy',
'file_uploads',
'file_waitread',
'file_waittimeout',
'file_waitwrite',
'file_write',
'find',
'find_soap_ops',
'findindex',
'findnamespace',
'findnamespacebyhref',
'findpattern',
'findposition',
'first',
'firstchild',
'fliph',
'flipv',
'flush',
'foldcase',
'foreach',
'form_param',
'format',
'forward',
'found_count',
'freebusies',
'freezetype',
'freezevalue',
'from',
'ft',
'ftp_getfile',
'ftp_getlisting',
'ftp_putfile',
'full',
'fulltype',
'generatechecksum',
'get',
'getabswidth',
'getalignment',
'getattribute',
'getattributenamespace',
'getbarheight',
'getbarmultiplier',
'getbarwidth',
'getbaseline',
'getbordercolor',
'getborderwidth',
'getcode',
'getcolor',
'getcolumncount',
'getencoding',
'getface',
'getfont',
'getformat',
'getfullfontname',
'getheaders',
'getmargins',
'getmethod',
'getnumericvalue',
'getpadding',
'getpagenumber',
'getparams',
'getproperty',
'getpsfontname',
'getrange',
'getrowcount',
'getsize',
'getspacing',
'getsupportedencodings',
'gettextalignment',
'gettextsize',
'gettype',
'global',
'global_defined',
'global_remove',
'global_reset',
'globals',
'gmt',
'groupcount',
'gt',
'gte',
'handle',
'handle_error',
'hasattribute',
'haschildren',
'hasvalue',
'header',
'headers',
'height',
'histogram',
'hosttonet16',
'hosttonet32',
'hour',
'html_comment',
'http_getfile',
'ical_alarm',
'ical_attribute',
'ical_calendar',
'ical_daylight',
'ical_event',
'ical_freebusy',
'ical_item',
'ical_journal',
'ical_parse',
'ical_standard',
'ical_timezone',
'ical_todo',
'id',
'if',
'if_empty',
'if_false',
'if_null',
'if_true',
'ignorecase',
'image',
'image_url',
'img',
'import16bits',
'import32bits',
'import64bits',
'import8bits',
'importfdf',
'importstring',
'include',
'include_cgi',
'include_currentpath',
'include_once',
'include_raw',
'include_url',
'increment',
'inline',
'input',
'insert',
'insertatcurrent',
'insertfirst',
'insertfrom',
'insertlast',
'insertpage',
'integer',
'intersection',
'invoke',
'isa',
'isalnum',
'isalpha',
'isbase',
'iscntrl',
'isdigit',
'isemptyelement',
'islower',
'isopen',
'isprint',
'isspace',
'istitle',
'istruetype',
'isualphabetic',
'isulowercase',
'isupper',
'isuuppercase',
'isuwhitespace',
'iswhitespace',
'iterate',
'iterator',
'java',
'java_bean',
'javascript',
'join',
'journals',
'json_records',
'json_rpccall',
'key',
'keycolumn_name',
'keycolumn_value',
'keyfield_name',
'keyfield_value',
'keys',
'lasso_comment',
'lasso_currentaction',
'lasso_datasourceis',
'lasso_datasourceis4d',
'lasso_datasourceisfilemaker',
'lasso_datasourceisfilemaker7',
'lasso_datasourceisfilemaker9',
'lasso_datasourceisfilemakersa',
'lasso_datasourceisjdbc',
'lasso_datasourceislassomysql',
'lasso_datasourceismysql',
'lasso_datasourceisodbc',
'lasso_datasourceisopenbase',
'lasso_datasourceisoracle',
'lasso_datasourceispostgresql',
'lasso_datasourceisspotlight',
'lasso_datasourceissqlite',
'lasso_datasourceissqlserver',
'lasso_datasourcemodulename',
'lasso_datatype',
'lasso_disableondemand',
'lasso_errorreporting',
'lasso_executiontimelimit',
'lasso_parser',
'lasso_process',
'lasso_sessionid',
'lasso_siteid',
'lasso_siteisrunning',
'lasso_sitename',
'lasso_siterestart',
'lasso_sitestart',
'lasso_sitestop',
'lasso_tagexists',
'lasso_tagmodulename',
'lasso_uniqueid',
'lasso_updatecheck',
'lasso_uptime',
'lasso_version',
'lassoapp_create',
'lassoapp_dump',
'lassoapp_flattendir',
'lassoapp_getappdata',
'lassoapp_link',
'lassoapp_list',
'lassoapp_process',
'lassoapp_unitize',
'last',
'lastchild',
'lasterror',
'layout_name',
'ldap',
'ldap_scope_base',
'ldap_scope_onelevel',
'ldap_scope_subtree',
'ldml',
'ldml_ldml',
'left',
'length',
'library',
'library_once',
'line',
'link',
'link_currentaction',
'link_currentactionparams',
'link_currentactionurl',
'link_currentgroup',
'link_currentgroupparams',
'link_currentgroupurl',
'link_currentrecord',
'link_currentrecordparams',
'link_currentrecordurl',
'link_currentsearch',
'link_currentsearchparams',
'link_currentsearchurl',
'link_detail',
'link_detailparams',
'link_detailurl',
'link_firstgroup',
'link_firstgroupparams',
'link_firstgroupurl',
'link_firstrecord',
'link_firstrecordparams',
'link_firstrecordurl',
'link_lastgroup',
'link_lastgroupparams',
'link_lastgroupurl',
'link_lastrecord',
'link_lastrecordparams',
'link_lastrecordurl',
'link_nextgroup',
'link_nextgroupparams',
'link_nextgroupurl',
'link_nextrecord',
'link_nextrecordparams',
'link_nextrecordurl',
'link_params',
'link_prevgroup',
'link_prevgroupparams',
'link_prevgroupurl',
'link_prevrecord',
'link_prevrecordparams',
'link_prevrecordurl',
'link_setformat',
'link_url',
'list',
'list_additem',
'list_fromlist',
'list_fromstring',
'list_getitem',
'list_itemcount',
'list_iterator',
'list_removeitem',
'list_replaceitem',
'list_reverseiterator',
'list_tostring',
'listen',
'literal',
'ljax_end',
'ljax_hastarget',
'ljax_include',
'ljax_start',
'ljax_target',
'local',
'local_defined',
'local_remove',
'local_reset',
'localaddress',
'locale_format',
'localname',
'locals',
'lock',
'log',
'log_always',
'log_critical',
'log_deprecated',
'log_destination_console',
'log_destination_database',
'log_destination_file',
'log_detail',
'log_level_critical',
'log_level_deprecated',
'log_level_detail',
'log_level_sql',
'log_level_warning',
'log_setdestination',
'log_sql',
'log_warning',
'logicalop_value',
'logicaloperator_value',
'lookupnamespace',
'loop',
'loop_abort',
'loop_continue',
'loop_count',
'lowercase',
'lt',
'lte',
'magick_image',
'map',
'map_iterator',
'marker',
'match_comparator',
'match_notrange',
'match_notregexp',
'match_range',
'match_regexp',
'matches',
'matchesstart',
'matchposition',
'matchstring',
'math_abs',
'math_acos',
'math_add',
'math_asin',
'math_atan',
'math_atan2',
'math_ceil',
'math_converteuro',
'math_cos',
'math_div',
'math_exp',
'math_floor',
'math_internal_rand',
'math_internal_randmax',
'math_internal_srand',
'math_ln',
'math_log',
'math_log10',
'math_max',
'math_min',
'math_mod',
'math_mult',
'math_pow',
'math_random',
'math_range',
'math_rint',
'math_roman',
'math_round',
'math_sin',
'math_sqrt',
'math_sub',
'math_tan',
'maxrecords_value',
'memory_session_driver',
'merge',
'millisecond',
'mime_type',
'minimal',
'minute',
'misc__srand',
'misc_randomnumber',
'misc_roman',
'misc_valid_creditcard',
'mode',
'modulate',
'month',
'moveto',
'movetoattributenamespace',
'movetoelement',
'movetofirstattribute',
'movetonextattribute',
'mysql_session_driver',
'name',
'named_param',
'namespace_current',
'namespace_delimiter',
'namespace_exists',
'namespace_file_fullpathexists',
'namespace_global',
'namespace_import',
'namespace_load',
'namespace_page',
'namespace_unload',
'namespace_using',
'namespaces',
'namespaceuri',
'neq',
'net',
'net_connectinprogress',
'net_connectok',
'net_typessl',
'net_typessltcp',
'net_typessludp',
'net_typetcp',
'net_typeudp',
'net_waitread',
'net_waittimeout',
'net_waitwrite',
'nettohost16',
'nettohost32',
'newchild',
'next',
'nextsibling',
'no_default_output',
'nodetype',
'none',
'noprocess',
'not',
'nrx',
'nslookup',
'null',
'object',
'once',
'oneoff',
'op_logicalvalue',
'open',
'operator_logicalvalue',
'option',
'or',
'os_process',
'output',
'output_none',
'padleading',
'padtrailing',
'pagecount',
'pagesize',
'pair',
'paraminfo',
'params',
'params_up',
'parent',
'path',
'pdf_barcode',
'pdf_color',
'pdf_doc',
'pdf_font',
'pdf_image',
'pdf_list',
'pdf_read',
'pdf_serve',
'pdf_table',
'pdf_text',
'percent',
'pixel',
'portal',
'position',
'postcondition',
'precondition',
'prefix',
'prettyprintingnsmap',
'prettyprintingtypemap',
'previoussibling',
'priorityqueue',
'private',
'proc_convert',
'proc_convertbody',
'proc_convertone',
'proc_extract',
'proc_extractone',
'proc_find',
'proc_first',
'proc_foreach',
'proc_get',
'proc_join',
'proc_lasso',
'proc_last',
'proc_map_entry',
'proc_null',
'proc_regexp',
'proc_xml',
'proc_xslt',
'process',
'properties',
'protect',
'queue',
'rand',
'randomnumber',
'raw',
'rawheaders',
'read',
'readattributevalue',
'readerror',
'readfrom',
'readline',
'readlock',
'readstring',
'readunlock',
'recid_value',
'recipients',
'record_count',
'recordcount',
'recordid_value',
'records',
'records_array',
'records_map',
'rect',
'redirect_url',
'refcount',
'reference',
'referer',
'referer_url',
'referrals',
'referrer',
'referrer_url',
'regexp',
'remoteaddress',
'remove',
'removeall',
'removeattribute',
'removechild',
'removecurrent',
'removefirst',
'removelast',
'removeleading',
'removenamespace',
'removetrailing',
'render',
'repeating',
'repeating_valueitem',
'repeatingvalueitem',
'repetition',
'replace',
'replaceall',
'replacefirst',
'replacepattern',
'replacewith',
'req_column',
'req_field',
'required_column',
'required_field',
'reserve',
'reset',
'resolutionh',
'resolutionv',
'response',
'response_fileexists',
'response_filepath',
'response_localpath',
'response_path',
'response_realm',
'results',
'resultset',
'resultset_count',
'retrieve',
'return',
'return_value',
'returntype',
'reverse',
'reverseiterator',
'right',
'roman',
'rotate',
'row_count',
'rows',
'rows_array',
'run',
'run_children',
'rx',
'save',
'scale',
'schema_name',
'scientific',
'search',
'search_args',
'search_arguments',
'search_columnitem',
'search_fielditem',
'search_operatoritem',
'search_opitem',
'search_valueitem',
'searchfielditem',
'searchoperatoritem',
'searchopitem',
'searchvalueitem',
'second',
'select',
'selected',
'self',
'send',
'serialize',
'series',
'server_date',
'server_day',
'server_ip',
'server_name',
'server_port',
'server_push',
'server_siteisrunning',
'server_sitestart',
'server_sitestop',
'server_time',
'session_abort',
'session_addoutputfilter',
'session_addvar',
'session_addvariable',
'session_deleteexpired',
'session_driver',
'session_end',
'session_id',
'session_removevar',
'session_removevariable',
'session_result',
'session_setdriver',
'session_start',
'set',
'set_iterator',
'set_reverseiterator',
'setalignment',
'setbarheight',
'setbarmultiplier',
'setbarwidth',
'setbaseline',
'setblocking',
'setbordercolor',
'setborderwidth',
'setbytes',
'setcode',
'setcolor',
'setcolorspace',
'setdatatype',
'setencoding',
'setface',
'setfieldvalue',
'setfont',
'setformat',
'setgeneratechecksum',
'setheight',
'setlassodata',
'setlinewidth',
'setmarker',
'setmode',
'setname',
'setpadding',
'setpagenumber',
'setpagerange',
'setposition',
'setproperty',
'setrange',
'setshowchecksum',
'setsize',
'setspacing',
'settemplate',
'settemplatestr',
'settextalignment',
'settextdata',
'settextsize',
'settype',
'setunderline',
'setwidth',
'setxmldata',
'sharpen',
'showchecksum',
'showcode39startstop',
'showeanguardbars',
'shown_count',
'shown_first',
'shown_last',
'signal',
'signalall',
'site_atbegin',
'site_id',
'site_name',
'site_restart',
'size',
'skiprecords_value',
'sleep',
'smooth',
'soap_convertpartstopairs',
'soap_definetag',
'soap_info',
'soap_lastrequest',
'soap_lastresponse',
'soap_stub',
'sort',
'sort_args',
'sort_arguments',
'sort_columnitem',
'sort_fielditem',
'sort_orderitem',
'sortcolumnitem',
'sortfielditem',
'sortorderitem',
'sortwith',
'split',
'sqlite_createdb',
'sqlite_session_driver',
'sqlite_setsleepmillis',
'sqlite_setsleeptries',
'srand',
'stack',
'standards',
'steal',
'stock_quote',
'string',
'string_charfromname',
'string_concatenate',
'string_countfields',
'string_endswith',
'string_extract',
'string_findposition',
'string_findregexp',
'string_fordigit',
'string_getfield',
'string_getunicodeversion',
'string_insert',
'string_isalpha',
'string_isalphanumeric',
'string_isdigit',
'string_ishexdigit',
'string_islower',
'string_isnumeric',
'string_ispunctuation',
'string_isspace',
'string_isupper',
'string_length',
'string_lowercase',
'string_remove',
'string_removeleading',
'string_removetrailing',
'string_replace',
'string_replaceregexp',
'string_todecimal',
'string_tointeger',
'string_uppercase',
'string_validcharset',
'subject',
'substring',
'subtract',
'swapbytes',
'table_name',
'table_realname',
'tag',
'tag_name',
'tags',
'tags_find',
'tags_list',
'tcp_close',
'tcp_open',
'tcp_send',
'tcp_tcp_close',
'tcp_tcp_open',
'tcp_tcp_send',
'textwidth',
'thread_abort',
'thread_atomic',
'thread_event',
'thread_exists',
'thread_getcurrentid',
'thread_getpriority',
'thread_info',
'thread_list',
'thread_lock',
'thread_pipe',
'thread_priority_default',
'thread_priority_high',
'thread_priority_low',
'thread_rwlock',
'thread_semaphore',
'thread_setpriority',
'time',
'timezones',
'titlecase',
'to',
'todos',
'token_value',
'tolower',
'total_records',
'totitle',
'toupper',
'transform',
'treemap',
'treemap_iterator',
'trim',
'true',
'type',
'unescape',
'union',
'uniqueid',
'unlock',
'unserialize',
'up',
'uppercase',
'url_rewrite',
'valid_creditcard',
'valid_date',
'valid_email',
'valid_url',
'value',
'value_list',
'value_listitem',
'valuelistitem',
'values',
'valuetype',
'var',
'var_defined',
'var_remove',
'var_reset',
'var_set',
'variable',
'variable_defined',
'variable_set',
'variables',
'variant_count',
'vars',
'wait',
'wap_isenabled',
'wap_maxbuttons',
'wap_maxcolumns',
'wap_maxhorzpixels',
'wap_maxrows',
'wap_maxvertpixels',
'waskeyword',
'week',
'while',
'width',
'write',
'writelock',
'writeto',
'writeunlock',
'wsdl_extract',
'wsdl_getbinding',
'wsdl_getbindingforoperation',
'wsdl_getbindingoperations',
'wsdl_getmessagenamed',
'wsdl_getmessageparts',
'wsdl_getmessagetriofromporttype',
'wsdl_getopbodystyle',
'wsdl_getopbodyuse',
'wsdl_getoperation',
'wsdl_getoplocation',
'wsdl_getopmessagetypes',
'wsdl_getopsoapaction',
'wsdl_getportaddress',
'wsdl_getportsforservice',
'wsdl_getporttype',
'wsdl_getporttypeoperation',
'wsdl_getservicedocumentation',
'wsdl_getservices',
'wsdl_gettargetnamespace',
'wsdl_issoapoperation',
'wsdl_listoperations',
'wsdl_maketest',
'xml',
'xml_extract',
'xml_rpc',
'xml_rpccall',
'xml_rw',
'xml_serve',
'xml_transform',
'xml_xml',
'xml_xmlstream',
'xmllang',
'xmlschematype',
'xmlstream',
'xsd_attribute',
'xsd_blankarraybase',
'xsd_blankbase',
'xsd_buildtype',
'xsd_cache',
'xsd_checkcardinality',
'xsd_continueall',
'xsd_continueannotation',
'xsd_continueany',
'xsd_continueanyattribute',
'xsd_continueattribute',
'xsd_continueattributegroup',
'xsd_continuechoice',
'xsd_continuecomplexcontent',
'xsd_continuecomplextype',
'xsd_continuedocumentation',
'xsd_continueextension',
'xsd_continuegroup',
'xsd_continuekey',
'xsd_continuelist',
'xsd_continuerestriction',
'xsd_continuesequence',
'xsd_continuesimplecontent',
'xsd_continuesimpletype',
'xsd_continueunion',
'xsd_deserialize',
'xsd_fullyqualifyname',
'xsd_generate',
'xsd_generateblankfromtype',
'xsd_generateblanksimpletype',
'xsd_generatetype',
'xsd_getschematype',
'xsd_issimpletype',
'xsd_loadschema',
'xsd_lookupnamespaceuri',
'xsd_lookuptype',
'xsd_processany',
'xsd_processattribute',
'xsd_processattributegroup',
'xsd_processcomplextype',
'xsd_processelement',
'xsd_processgroup',
'xsd_processimport',
'xsd_processinclude',
'xsd_processschema',
'xsd_processsimpletype',
'xsd_ref',
'xsd_type',
'year'
]
}
| gpl-2.0 |
Tarsbot/pika | tests/unit/frame_tests.py | 11 | 4424 | """
Tests for pika.frame
"""
try:
import unittest2 as unittest
except ImportError:
import unittest
from pika import exceptions
from pika import frame
from pika import spec
class FrameTests(unittest.TestCase):
    """Round-trip tests for pika.frame marshalling and decoding.

    Each constant below is a hand-marshalled AMQP 0-9-1 wire frame
    (frame type, channel, payload size, payload, 0xCE end marker) used
    both as the expected output of marshal() and as input to
    frame.decode_frame().  Method names end in ``_test`` for nose-style
    discovery.
    """
    # Basic.Ack method frame on channel 1 (delivery tag 100).
    BASIC_ACK = (b'\x01\x00\x01\x00\x00\x00\r\x00<\x00P\x00\x00\x00\x00\x00\x00'
                 b'\x00d\x00\xce')
    # Body frame carrying a 20-byte fragment.
    BODY_FRAME = b'\x03\x00\x01\x00\x00\x00\x14I like it that sound\xce'
    BODY_FRAME_VALUE = b'I like it that sound'
    # Content header frame: body size 100, delivery_mode=2.
    CONTENT_HEADER = (b'\x02\x00\x01\x00\x00\x00\x0f\x00<\x00\x00\x00'
                      b'\x00\x00\x00\x00\x00\x00d\x10\x00\x02\xce')
    HEARTBEAT = b'\x08\x00\x00\x00\x00\x00\x00\xce'
    PROTOCOL_HEADER = b'AMQP\x00\x00\t\x01'
    def frame_marshal_not_implemented_test(self):
        # The Frame base class must not be marshallable directly.
        frame_obj = frame.Frame(0x000A000B, 1)
        self.assertRaises(NotImplementedError, frame_obj.marshal)
    def frame_underscore_marshal_test(self):
        basic_ack = frame.Method(1, spec.Basic.Ack(100))
        self.assertEqual(basic_ack.marshal(), self.BASIC_ACK)
    def headers_marshal_test(self):
        header = frame.Header(1, 100, spec.BasicProperties(delivery_mode=2))
        self.assertEqual(header.marshal(), self.CONTENT_HEADER)
    def body_marshal_test(self):
        body = frame.Body(1, b'I like it that sound')
        self.assertEqual(body.marshal(), self.BODY_FRAME)
    def heartbeat_marshal_test(self):
        heartbeat = frame.Heartbeat()
        self.assertEqual(heartbeat.marshal(), self.HEARTBEAT)
    def protocol_header_marshal_test(self):
        protocol_header = frame.ProtocolHeader()
        self.assertEqual(protocol_header.marshal(), self.PROTOCOL_HEADER)
    def decode_protocol_header_instance_test(self):
        # decode_frame returns (bytes_consumed, frame_instance).
        self.assertIsInstance(frame.decode_frame(self.PROTOCOL_HEADER)[1],
                              frame.ProtocolHeader)
    def decode_protocol_header_bytes_test(self):
        self.assertEqual(frame.decode_frame(self.PROTOCOL_HEADER)[0], 8)
    def decode_method_frame_instance_test(self):
        self.assertIsInstance(frame.decode_frame(self.BASIC_ACK)[1],
                              frame.Method)
    def decode_protocol_header_failure_test(self):
        # A truncated/garbled header should consume nothing and yield None.
        self.assertEqual(frame.decode_frame(b'AMQPa'), (0, None))
    def decode_method_frame_bytes_test(self):
        self.assertEqual(frame.decode_frame(self.BASIC_ACK)[0], 21)
    def decode_method_frame_method_test(self):
        self.assertIsInstance(frame.decode_frame(self.BASIC_ACK)[1].method,
                              spec.Basic.Ack)
    def decode_header_frame_instance_test(self):
        self.assertIsInstance(frame.decode_frame(self.CONTENT_HEADER)[1],
                              frame.Header)
    def decode_header_frame_bytes_test(self):
        self.assertEqual(frame.decode_frame(self.CONTENT_HEADER)[0], 23)
    def decode_header_frame_properties_test(self):
        frame_value = frame.decode_frame(self.CONTENT_HEADER)[1]
        self.assertIsInstance(frame_value.properties, spec.BasicProperties)
    def decode_frame_decoding_failure_test(self):
        # Payload shorter than the declared size -> nothing consumed.
        self.assertEqual(frame.decode_frame(b'\x01\x00\x01\x00\x00\xce'),
                         (0, None))
    def decode_frame_decoding_no_end_byte_test(self):
        self.assertEqual(frame.decode_frame(self.BASIC_ACK[:-1]), (0, None))
    def decode_frame_decoding_wrong_end_byte_test(self):
        # A present-but-wrong terminator is an error, unlike a missing one.
        self.assertRaises(exceptions.InvalidFrameError, frame.decode_frame,
                          self.BASIC_ACK[:-1] + b'A')
    def decode_body_frame_instance_test(self):
        self.assertIsInstance(frame.decode_frame(self.BODY_FRAME)[1],
                              frame.Body)
    def decode_body_frame_fragment_test(self):
        self.assertEqual(frame.decode_frame(self.BODY_FRAME)[1].fragment,
                         self.BODY_FRAME_VALUE)
    def decode_body_frame_fragment_consumed_bytes_test(self):
        self.assertEqual(frame.decode_frame(self.BODY_FRAME)[0], 28)
    def decode_heartbeat_frame_test(self):
        self.assertIsInstance(frame.decode_frame(self.HEARTBEAT)[1],
                              frame.Heartbeat)
    def decode_heartbeat_frame_bytes_consumed_test(self):
        self.assertEqual(frame.decode_frame(self.HEARTBEAT)[0], 8)
    def decode_frame_invalid_frame_type_test(self):
        # 0x09 is not a valid AMQP frame type.
        self.assertRaises(exceptions.InvalidFrameError, frame.decode_frame,
                          b'\x09\x00\x00\x00\x00\x00\x00\xce')
| bsd-3-clause |
thedrow/django | django/utils/timesince.py | 409 | 2671 | from __future__ import unicode_literals
import calendar
import datetime
from django.utils.html import avoid_wrapping
from django.utils.timezone import is_aware, utc
from django.utils.translation import ugettext, ungettext_lazy
# (seconds-per-unit, lazily-translated plural format) pairs, ordered from the
# largest unit to the smallest; timesince() below relies on this ordering to
# pick the first unit with a non-zero count.  Months are approximated as 30
# days and years as 365 days (leap days are corrected separately).
TIMESINCE_CHUNKS = (
    (60 * 60 * 24 * 365, ungettext_lazy('%d year', '%d years')),
    (60 * 60 * 24 * 30, ungettext_lazy('%d month', '%d months')),
    (60 * 60 * 24 * 7, ungettext_lazy('%d week', '%d weeks')),
    (60 * 60 * 24, ungettext_lazy('%d day', '%d days')),
    (60 * 60, ungettext_lazy('%d hour', '%d hours')),
    (60, ungettext_lazy('%d minute', '%d minutes'))
)
def timesince(d, now=None, reversed=False):
    """
    Return a human-readable string describing the time between *d* and *now*,
    e.g. "10 minutes".  If *d* is in the future relative to *now*, "0 minutes"
    is returned.

    Units are years, months, weeks, days, hours and minutes; seconds and
    microseconds are ignored.  At most two adjacent units are shown, so
    "2 weeks, 3 days" is possible but "2 weeks, 3 hours" is not.

    Adapted from
    http://web.archive.org/web/20060617175230/http://blog.natbat.co.uk/archive/2003/Jun/14/time_since
    """
    # Promote plain dates to datetimes so subtraction is well-defined.
    if not isinstance(d, datetime.datetime):
        d = datetime.datetime(d.year, d.month, d.day)
    if now and not isinstance(now, datetime.datetime):
        now = datetime.datetime(now.year, now.month, now.day)
    if not now:
        # Match the awareness of d so the subtraction does not raise.
        now = datetime.datetime.now(utc if is_aware(d) else None)
    if reversed:
        delta = d - now
    else:
        delta = now - d
    # Compensate for leap days so "1 year" lines up across Feb 29.
    delta -= datetime.timedelta(calendar.leapdays(d.year, now.year))
    # Whole seconds only; microseconds are deliberately dropped.
    since = delta.days * 24 * 60 * 60 + delta.seconds
    if since <= 0:
        # d is in the future compared to now; nothing more to compute.
        return avoid_wrapping(ugettext('0 minutes'))
    # Find the largest unit with a non-zero count.
    for index, (chunk_seconds, chunk_name) in enumerate(TIMESINCE_CHUNKS):
        primary_count = since // chunk_seconds
        if primary_count != 0:
            break
    result = avoid_wrapping(chunk_name % primary_count)
    if index + 1 < len(TIMESINCE_CHUNKS):
        # Append the immediately smaller unit, if its count is non-zero.
        next_seconds, next_name = TIMESINCE_CHUNKS[index + 1]
        secondary_count = (since - chunk_seconds * primary_count) // next_seconds
        if secondary_count != 0:
            result += ugettext(', ') + avoid_wrapping(next_name % secondary_count)
    return result
def timeuntil(d, now=None):
    """
    Mirror of timesince(): return a string measuring the time remaining
    until the given moment *d*, relative to *now*.
    """
    return timesince(d, now=now, reversed=True)
| bsd-3-clause |
mszewczy/odoo | addons/website_customer/__openerp__.py | 313 | 1571 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013-Today OpenERP S.A. (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
    # Odoo/OpenERP addon manifest for the Customer References website module.
    'name': 'Customer References',
    'category': 'Website',
    'website': 'https://www.odoo.com/page/website-builder',
    'summary': 'Publish Your Customer References',
    'version': '1.0',
    'description': """
OpenERP Customer References
===========================
""",
    'author': 'OpenERP SA',
    # Modules that must be installed before this one.
    'depends': [
        'crm_partner_assign',
        'website_partner',
        'website_google_map',
    ],
    # Demonstration records, loaded only on demo databases.
    'demo': [
        'website_customer_demo.xml',
    ],
    # Data files loaded on installation/upgrade.
    'data': [
        'views/website_customer.xml',
    ],
    'qweb': [],
    'installable': True,
}
| agpl-3.0 |
DedMemez/ODS-August-2017 | battle/DistributedBattleBldg.py | 1 | 8958 | # Fuck you Disyer. Stealing my fucking paypal. GET FUCKED: toontown.battle.DistributedBattleBldg
from panda3d.core import Lens, Point3, Vec3
from direct.actor import Actor
from direct.directnotify import DirectNotifyGlobal
from direct.fsm import State
from direct.interval.IntervalGlobal import *
import random
from BattleBase import *
import DistributedBattleBase
import MovieUtil
import SuitBattleGlobals
from otp.avatar import Emote
from otp.nametag.NametagConstants import *
from otp.nametag import NametagGlobals
from toontown.suit import Suit
from toontown.suit import SuitDNA
from toontown.toon import TTEmote
from toontown.toonbase import TTLocalizer
from toontown.toonbase import ToontownGlobals
class DistributedBattleBldg(DistributedBattleBase.DistributedBattleBase):
    """Client-side battle object for fights inside cog buildings.

    Extends the base battle with building-specific camera face-off
    choreography, indoor battle music, and a 'BuildingReward' FSM state
    played when the building is defeated.
    """
    notify = DirectNotifyGlobal.directNotify.newCategory('DistributedBattleBldg')
    # Narrow FOV used while the camera frames the suit leader's taunt.
    camFOFov = 30.0
    camFOPos = Point3(0, -10, 4)
    def __init__(self, cr):
        townBattle = cr.playGame.getPlace().townBattle
        DistributedBattleBase.DistributedBattleBase.__init__(self, cr, townBattle)
        self.streetBattle = 0
        # Extra FSM state for the end-of-building reward movie; reachable
        # from 'Off' and 'PlayMovie', and exits to 'Resume'.
        self.fsm.addState(State.State('BuildingReward', self.enterBuildingReward, self.exitBuildingReward, ['Resume']))
        offState = self.fsm.getStateNamed('Off')
        offState.addTransition('BuildingReward')
        playMovieState = self.fsm.getStateNamed('PlayMovie')
        playMovieState.addTransition('BuildingReward')
    def generate(self):
        DistributedBattleBase.DistributedBattleBase.generate(self)
    def setBossBattle(self, value):
        # Choose and start looping indoor music; boss floors get their own track.
        self.bossBattle = value
        if self.bossBattle:
            self.battleMusic = loader.loadMusic('phase_7/audio/bgm/encntr_suit_winning_indoor.ogg')
        else:
            self.battleMusic = loader.loadMusic('phase_7/audio/bgm/encntr_general_bg_indoor.ogg')
        base.playMusic(self.battleMusic, looping=1, volume=0.9)
        base.cr.playGame.place.loader.battleMusic = self.battleMusic
    def getBossBattleTaunt(self):
        return TTLocalizer.BattleBldgBossTaunt
    def disable(self):
        DistributedBattleBase.DistributedBattleBase.disable(self)
        self.battleMusic.stop()
    def delete(self):
        DistributedBattleBase.DistributedBattleBase.delete(self)
        del self.battleMusic
    def buildJoinPointList(self, avPos, destPos, toon = 0):
        # Indoors there are no street join points; everyone is already placed.
        return []
    def __faceOff(self, ts, name, callback):
        # Build and start the face-off cutscene: the suit leader taunts while
        # suits and toons adjust into battle positions and the camera frames
        # the leader, then invoke callback.  NOTE(review): leaderIndex=1 for
        # boss battles appears to assume at least two suits — confirm callers.
        if len(self.suits) == 0:
            self.notify.warning('__faceOff(): no suits.')
            return
        elif len(self.toons) == 0:
            self.notify.warning('__faceOff(): no toons.')
            return
        else:
            elevatorPos = self.toons[0].getPos()
            if len(self.suits) == 1:
                leaderIndex = 0
            elif self.bossBattle == 1:
                leaderIndex = 1
            else:
                # Highest suit type taunts.
                maxTypeNum = -1
                for suit in self.suits:
                    suitTypeNum = SuitDNA.getSuitType(suit.dna.name)
                    if maxTypeNum < suitTypeNum:
                        maxTypeNum = suitTypeNum
                        leaderIndex = self.suits.index(suit)
            delay = FACEOFF_TAUNT_T
            suitTrack = Parallel()
            suitLeader = None
            for suit in self.suits:
                suit.setState('Battle')
                suitIsLeader = 0
                oneSuitTrack = Sequence()
                oneSuitTrack.append(Func(suit.loop, 'neutral'))
                oneSuitTrack.append(Func(suit.headsUp, elevatorPos))
                if self.suits.index(suit) == leaderIndex:
                    suitLeader = suit
                    suitIsLeader = 1
                    if self.bossBattle == 1:
                        taunt = self.getBossBattleTaunt()
                    else:
                        taunt = SuitBattleGlobals.getFaceoffTaunt(suit.getStyleName(), suit.doId)
                    oneSuitTrack.append(Func(suit.setChatAbsolute, taunt, CFSpeech | CFTimeout))
                destPos, destHpr = self.getActorPosHpr(suit, self.suits)
                oneSuitTrack.append(Wait(delay))
                if suitIsLeader == 1:
                    oneSuitTrack.append(Func(suit.clearChat))
                oneSuitTrack.append(self.createAdjustInterval(suit, destPos, destHpr))
                suitTrack.append(oneSuitTrack)
            toonTrack = Parallel()
            for toon in self.toons:
                oneToonTrack = Sequence()
                destPos, destHpr = self.getActorPosHpr(toon, self.toons)
                oneToonTrack.append(Wait(delay))
                oneToonTrack.append(self.createAdjustInterval(toon, destPos, destHpr, toon=1, run=1))
                toonTrack.append(oneToonTrack)
            camTrack = Sequence()
            def setCamFov(fov):
                # Lens takes a horizontal min-FOV; convert from 4:3 diagonal.
                base.camLens.setMinFov(fov / (4.0 / 3.0))
            camTrack.append(Func(camera.wrtReparentTo, suitLeader))
            camTrack.append(Func(setCamFov, self.camFOFov))
            suitHeight = suitLeader.getHeight()
            suitOffsetPnt = Point3(0, 0, suitHeight)
            # Pick one of three camera heights for variety; the mid height is
            # clamped so it never dips below (suit height - 1.8).
            MidTauntCamHeight = suitHeight * 0.66
            MidTauntCamHeightLim = suitHeight - 1.8
            if MidTauntCamHeight < MidTauntCamHeightLim:
                MidTauntCamHeight = MidTauntCamHeightLim
            TauntCamY = 18
            TauntCamX = 0
            TauntCamHeight = random.choice((MidTauntCamHeight, 1, 11))
            camTrack.append(Func(camera.setPos, TauntCamX, TauntCamY, TauntCamHeight))
            camTrack.append(Func(camera.lookAt, suitLeader, suitOffsetPnt))
            camTrack.append(Wait(delay))
            # Restore the camera to the local avatar and the user's FOV.
            camPos = Point3(0, -6, 4)
            camHpr = Vec3(0, 0, 0)
            camTrack.append(Func(camera.reparentTo, base.localAvatar))
            camTrack.append(Func(setCamFov, settings['fov']))
            camTrack.append(Func(camera.setPosHpr, camPos, camHpr))
            mtrack = Parallel(suitTrack, toonTrack, camTrack)
            done = Func(callback)
            track = Sequence(mtrack, done, name=name)
            track.start(ts)
            self.storeInterval(track, name)
            return
    def enterFaceOff(self, ts):
        # Disable emotes for the local toon while the cutscene plays.
        if len(self.toons) > 0 and base.localAvatar == self.toons[0]:
            Emote.globalEmote.disableAll(self.toons[0], 'dbattlebldg, enterFaceOff')
        self.delayDeleteMembers()
        self.__faceOff(ts, self.faceOffName, self.__handleFaceOffDone)
        return None
    def __handleFaceOffDone(self):
        self.notify.debug('FaceOff done')
        self.d_faceOffDone()
    def exitFaceOff(self):
        self.notify.debug('exitFaceOff()')
        if len(self.toons) > 0 and base.localAvatar == self.toons[0]:
            Emote.globalEmote.releaseAll(self.toons[0], 'dbattlebldg exitFaceOff')
        self.clearInterval(self.faceOffName)
        self._removeMembersKeep()
        camera.wrtReparentTo(self)
        base.camLens.setMinFov(self.camFov / (4.0 / 3.0))
        return None
    def __playReward(self, ts, callback):
        # Per-floor victory dance for every toon, then callback.
        toonTracks = Parallel()
        for toon in self.toons:
            toonTracks.append(Sequence(Func(toon.loop, 'victory'), Wait(FLOOR_REWARD_TIMEOUT), Func(toon.loop, 'neutral')))
        name = self.uniqueName('floorReward')
        track = Sequence(toonTracks, Func(callback), name=name)
        camera.setPos(0, 0, 1)
        camera.setHpr(180, 10, 0)
        self.storeInterval(track, name)
        track.start(ts)
    def enterReward(self, ts):
        self.notify.debug('enterReward()')
        self.delayDeleteMembers()
        self.__playReward(ts, self.__handleFloorRewardDone)
        return None
    def __handleFloorRewardDone(self):
        return None
    def exitReward(self):
        self.notify.debug('exitReward()')
        self.clearInterval(self.uniqueName('floorReward'))
        self._removeMembersKeep()
        NametagGlobals.setMasterArrowsOn(1)
        for toon in self.toons:
            toon.startSmooth()
    def enterBuildingReward(self, ts):
        # Final (building defeated) reward movie; cannot be skipped.
        self.delayDeleteMembers()
        if self.hasLocalToon():
            NametagGlobals.setMasterArrowsOn(0)
        self.movie.playReward(ts, self.uniqueName('building-reward'), self.__handleBuildingRewardDone, noSkip=True)
    def __handleBuildingRewardDone(self):
        if self.hasLocalToon():
            self.d_rewardDone()
        self.movie.resetReward()
        self.fsm.request('Resume')
    def exitBuildingReward(self):
        self.movie.resetReward(finish=1)
        self._removeMembersKeep()
        NametagGlobals.setMasterArrowsOn(1)
    def enterResume(self, ts = 0):
        if self.hasLocalToon():
            self.removeLocalToon()
def exitResume(self):
return None | apache-2.0 |
skirsdeda/django | django/core/mail/backends/filebased.py | 35 | 2765 | """Email backend that writes messages to a file."""
import datetime
import os
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.mail.backends.console import EmailBackend as ConsoleEmailBackend
from django.utils import six
class EmailBackend(ConsoleEmailBackend):
    """Email backend that appends each sent batch to a log file on disk.

    The target directory comes from the ``file_path`` keyword argument or
    the ``EMAIL_FILE_PATH`` setting; it is validated (and created if
    missing) at construction time.  One uniquely-named file is used per
    backend instance.
    """
    def __init__(self, *args, **kwargs):
        self._fname = None
        if 'file_path' in kwargs:
            self.file_path = kwargs.pop('file_path')
        else:
            self.file_path = getattr(settings, 'EMAIL_FILE_PATH', None)
        # Make sure self.file_path is a string.
        if not isinstance(self.file_path, six.string_types):
            raise ImproperlyConfigured('Path for saving emails is invalid: %r' % self.file_path)
        self.file_path = os.path.abspath(self.file_path)
        # Make sure that self.file_path is a directory if it already exists.
        if os.path.exists(self.file_path) and not os.path.isdir(self.file_path):
            raise ImproperlyConfigured(
                'Path for saving email messages exists, but is not a directory: %s' % self.file_path
            )
        # Otherwise, try to create the directory.
        elif not os.path.exists(self.file_path):
            try:
                os.makedirs(self.file_path)
            except OSError as err:
                raise ImproperlyConfigured(
                    'Could not create directory for saving email messages: %s (%s)' % (self.file_path, err)
                )
        # Make sure that self.file_path is writable.
        if not os.access(self.file_path, os.W_OK):
            raise ImproperlyConfigured('Could not write to directory: %s' % self.file_path)
        # Finally, call super().
        # Since we're using the console-based backend as a base,
        # force the stream to be None, so we don't default to stdout
        kwargs['stream'] = None
        super(EmailBackend, self).__init__(*args, **kwargs)
    def write_message(self, message):
        # Append the raw message bytes followed by a 79-dash separator line.
        self.stream.write(message.message().as_bytes() + b'\n')
        self.stream.write(b'-' * 79)
        self.stream.write(b'\n')
    def _get_filename(self):
        """Return a unique file name."""
        if self._fname is None:
            timestamp = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
            # id(self) keeps two backends created in the same second distinct.
            fname = "%s-%s.log" % (timestamp, abs(id(self)))
            self._fname = os.path.join(self.file_path, fname)
        return self._fname
    def open(self):
        # Return True only when a new stream was actually opened, matching
        # the backend API contract used by send_messages().
        if self.stream is None:
            self.stream = open(self._get_filename(), 'ab')
            return True
        return False
    def close(self):
        # Always drop the stream reference, even if close() raises.
        try:
            if self.stream is not None:
                self.stream.close()
        finally:
            self.stream = None
Impavidity/SearchEngine | Data Cleaning/booleanQuery.py | 1 | 3845 | #!/usr/bin/python
#import time
import json
import os
import myTokenize
from porter import PorterStemmer
import utils
def inverseList(termDict):
    """Boolean NOT of a single posting list --> return a posting dict.

    The complement is taken against the module-level wholeList/wholeLen,
    which initBooleanQuery() populates with every document ID.
    """
    excluded = termDict['docIDs']
    complement = [docID for docID in wholeList if docID not in excluded]
    return {
        'docFreq': wholeLen - termDict['docFreq'],
        'docIDs': complement
    }
def disjunct2Lists(l1, l2):
    """Boolean OR of two sorted posting lists --> return a merged sorted list."""
    merged = []
    i = j = 0
    n1, n2 = len(l1), len(l2)
    while i < n1 and j < n2:
        a, b = l1[i], l2[j]
        if a == b:
            # Present in both lists: emit once, advance both cursors.
            merged.append(a)
            i += 1
            j += 1
        elif a < b:
            merged.append(a)
            i += 1
        else:
            merged.append(b)
            j += 1
    # At most one of these slices is non-empty.
    merged.extend(l1[i:])
    merged.extend(l2[j:])
    return merged
def intersect2Lists(l1, l2):
    """Boolean AND of two sorted posting lists --> return a sorted list."""
    common = []
    i = j = 0
    while i < len(l1) and j < len(l2):
        a, b = l1[i], l2[j]
        if a == b:
            common.append(a)
            i += 1
            j += 1
        elif a < b:
            # l1's current ID cannot appear later in l2; skip it.
            i += 1
        else:
            j += 1
    return common
def intersect(postingLists):
    """Boolean AND over several posting dicts --> return a list of docIDs.

    Lists are processed in ascending docFreq order so the running result
    shrinks as quickly as possible.
    """
    byRarity = sorted(postingLists, key=lambda pl: pl['docFreq'])
    docs = byRarity[0]['docIDs']
    for plist in byRarity[1:]:
        docs = intersect2Lists(docs, plist['docIDs'])
    return docs
def disjunct(postingLists):
    """Boolean OR over several posting dicts --> return a posting dict.

    The returned docFreq is the sum of the inputs' docFreqs (an upper
    bound on the union's size, used only for ordering by intersect()).
    """
    docs = postingLists[0]['docIDs']
    for plist in postingLists[1:]:
        docs = disjunct2Lists(docs, plist['docIDs'])
    totalFreq = sum(plist['docFreq'] for plist in postingLists)
    return {
        'docFreq': totalFreq,
        'docIDs': docs
    }
def parseOR(ORpart):
    """Resolve one OR-operand (optionally 'NOT '-prefixed) to a posting dict.

    Stop words and out-of-dictionary terms are treated as matching every
    document (or no document when negated), so they never constrain the
    query.  Relies on module globals populated by initBooleanQuery():
    p, term2id, invertedIndex, wholeList, wholeLen.
    """
    ORpart = ORpart.strip(' ')
    isNOTPart = False
    isSTOPWORD = False
    # Bug fix: only treat NOT as an operator when it is a standalone prefix.
    # The old check ("NOT" in ORpart) mangled any term merely containing the
    # substring NOT (e.g. "CANNOT" lost its first four characters).
    if ORpart.startswith("NOT "):
        ORpart = ORpart[4:]
        isNOTPart = True
    ORpartStemed = ORpart.strip("".join(myTokenize.strip_punctuations)).lower()
    if ORpartStemed in myTokenize.stop_words:
        print("Term: " + ORpart + " is a stop word. We assume this word exists in all documents.")
        isSTOPWORD = True
    else:
        ORpartStemed = p.stem(ORpartStemed, 0, len(ORpartStemed)-1)
    if isSTOPWORD or (ORpartStemed not in term2id):
        print("Term: " + ORpart + " is not in the dictionary, we treat this term as a stop word!")
        if isNOTPart:
            # NOT <stop word> matches nothing.
            return {
                'docFreq': 0,
                'docIDs': []}
        else:
            # A stop word matches every document.
            return {
                'docFreq': wholeLen,
                'docIDs': wholeList}
    else:
        if isNOTPart:
            return inverseList(invertedIndex[term2id[ORpartStemed]])
        else:
            return invertedIndex[term2id[ORpartStemed]]
def parseAND(ANDpart):
    """Evaluate one AND-operand: OR together all of its OR-parts --> dict."""
    orOperands = [piece for piece in ANDpart.strip(' ').split("OR") if piece != '']
    return disjunct([parseOR(piece) for piece in orOperands])
def initBooleanQuery():
    """Load the term dictionary and inverted index from disk into module
    globals, and prepare the stemmer and the full document-ID universe.

    Must be called once before booleanQuery().
    """
    global term2id
    global invertedIndex
    global wholeList
    global wholeLen
    global p
    with open("term2tid.json", "r") as term2tidFile:
        term2id = json.load(term2tidFile)
    with open("invertedIndex.json", "r") as indexFile:
        invertedIndex = json.load(indexFile)
    wholeList = range(utils.docCount)
    wholeLen = utils.docCount
    p = PorterStemmer()
def booleanQuery(query):
    """Evaluate a boolean query string (AND of OR-groups, with optional
    'NOT' term prefixes) --> return the sorted list of matching docIDs."""
    andOperands = [piece for piece in query.strip(' ').split("AND") if piece != '']
    return intersect([parseAND(piece) for piece in andOperands])
| mit |
NexusIS/tempest | tempest/api/compute/security_groups/test_security_group_rules.py | 1 | 8053 | # Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.compute.security_groups import base
from tempest import config
from tempest import test
CONF = config.CONF
class SecurityGroupRulesTestJSON(base.BaseSecurityGroupsTest):
    """Compute-API tests for security group rule CRUD behavior.

    Each test creates throwaway security groups, adds rules via the
    security_group_rules client, and compares the returned rule against
    the ``expected`` template built in setUp().
    """
    @classmethod
    def setup_clients(cls):
        super(SecurityGroupRulesTestJSON, cls).setup_clients()
        cls.client = cls.security_group_rules_client
    @classmethod
    def resource_setup(cls):
        super(SecurityGroupRulesTestJSON, cls).resource_setup()
        cls.neutron_available = CONF.service_available.neutron
        # Default rule shape shared by all tests: TCP port 22 (SSH).
        cls.ip_protocol = 'tcp'
        cls.from_port = 22
        cls.to_port = 22
    # NOTE(review): the first parameter is named ``cls`` but this is an
    # instance method (self); it works only because the name is arbitrary.
    def setUp(cls):
        super(SecurityGroupRulesTestJSON, cls).setUp()
        from_port = cls.from_port
        to_port = cls.to_port
        group = {}
        ip_range = {}
        # Template of the rule each test expects the API to return;
        # tests fill in parent_group_id / ip_range / group per case.
        cls.expected = {
            'id': None,
            'parent_group_id': None,
            'ip_protocol': cls.ip_protocol,
            'from_port': from_port,
            'to_port': to_port,
            'ip_range': ip_range,
            'group': group
        }
    def _check_expected_response(self, actual_rule):
        # Compare every expected field except the server-generated id.
        for key in self.expected:
            if key == 'id':
                continue
            self.assertEqual(self.expected[key], actual_rule[key],
                             "Miss-matched key is %s" % key)
    @test.attr(type='smoke')
    @test.idempotent_id('850795d7-d4d3-4e55-b527-a774c0123d3a')
    @test.services('network')
    def test_security_group_rules_create(self):
        # Positive test: Creation of Security Group rule
        # should be successful
        # Creating a Security Group to add rules to it
        security_group = self.create_security_group()
        securitygroup_id = security_group['id']
        # Adding rules to the created Security Group
        rule = \
            self.client.create_security_group_rule(securitygroup_id,
                                                   self.ip_protocol,
                                                   self.from_port,
                                                   self.to_port)
        self.expected['parent_group_id'] = securitygroup_id
        # Omitting cidr defaults the rule to the catch-all 0.0.0.0/0 range.
        self.expected['ip_range'] = {'cidr': '0.0.0.0/0'}
        self._check_expected_response(rule)
    @test.idempotent_id('7a01873e-3c38-4f30-80be-31a043cfe2fd')
    @test.services('network')
    def test_security_group_rules_create_with_optional_cidr(self):
        # Positive test: Creation of Security Group rule
        # with optional argument cidr
        # should be successful
        # Creating a Security Group to add rules to it
        security_group = self.create_security_group()
        parent_group_id = security_group['id']
        # Adding rules to the created Security Group with optional cidr
        cidr = '10.2.3.124/24'
        rule = \
            self.client.create_security_group_rule(parent_group_id,
                                                   self.ip_protocol,
                                                   self.from_port,
                                                   self.to_port,
                                                   cidr=cidr)
        self.expected['parent_group_id'] = parent_group_id
        self.expected['ip_range'] = {'cidr': cidr}
        self._check_expected_response(rule)
    @test.idempotent_id('7f5d2899-7705-4d4b-8458-4505188ffab6')
    @test.services('network')
    def test_security_group_rules_create_with_optional_group_id(self):
        # Positive test: Creation of Security Group rule
        # with optional argument group_id
        # should be successful
        # Creating a Security Group to add rules to it
        security_group = self.create_security_group()
        parent_group_id = security_group['id']
        # Creating a Security Group so as to assign group_id to the rule
        security_group = self.create_security_group()
        group_id = security_group['id']
        group_name = security_group['name']
        # Adding rules to the created Security Group with optional group_id
        rule = \
            self.client.create_security_group_rule(parent_group_id,
                                                   self.ip_protocol,
                                                   self.from_port,
                                                   self.to_port,
                                                   group_id=group_id)
        self.expected['parent_group_id'] = parent_group_id
        # A group-sourced rule echoes the source group instead of an ip_range.
        self.expected['group'] = {'tenant_id': self.client.tenant_id,
                                  'name': group_name}
        self._check_expected_response(rule)
    @test.attr(type='smoke')
    @test.idempotent_id('a6154130-5a55-4850-8be4-5e9e796dbf17')
    @test.services('network')
    def test_security_group_rules_list(self):
        # Positive test: Created Security Group rules should be
        # in the list of all rules
        # Creating a Security Group to add rules to it
        security_group = self.create_security_group()
        securitygroup_id = security_group['id']
        # Add a first rule to the created Security Group
        rule = \
            self.client.create_security_group_rule(securitygroup_id,
                                                   self.ip_protocol,
                                                   self.from_port,
                                                   self.to_port)
        rule1_id = rule['id']
        # Add a second rule to the created Security Group
        ip_protocol2 = 'icmp'
        from_port2 = -1
        to_port2 = -1
        rule = \
            self.client.create_security_group_rule(securitygroup_id,
                                                   ip_protocol2,
                                                   from_port2, to_port2)
        rule2_id = rule['id']
        # Delete the Security Group rule2 at the end of this method
        self.addCleanup(self.client.delete_security_group_rule, rule2_id)
        # Get rules of the created Security Group
        rules = \
            self.client.list_security_group_rules(securitygroup_id)
        self.assertTrue(any([i for i in rules if i['id'] == rule1_id]))
        self.assertTrue(any([i for i in rules if i['id'] == rule2_id]))
    @test.idempotent_id('fc5c5acf-2091-43a6-a6ae-e42760e9ffaf')
    @test.services('network')
    def test_security_group_rules_delete_when_peer_group_deleted(self):
        # Positive test:rule will delete when peer group deleting
        # Creating a Security Group to add rules to it
        security_group = self.create_security_group()
        sg1_id = security_group['id']
        # Creating other Security Group to access to group1
        security_group = self.create_security_group()
        sg2_id = security_group['id']
        # Adding rules to the Group1
        self.client.create_security_group_rule(sg1_id,
                                               self.ip_protocol,
                                               self.from_port,
                                               self.to_port,
                                               group_id=sg2_id)
        # Delete group2
        self.security_groups_client.delete_security_group(sg2_id)
        # Get rules of the Group1
        rules = \
            self.client.list_security_group_rules(sg1_id)
        # The group1 has no rules because group2 has deleted
        self.assertEqual(0, len(rules))
| apache-2.0 |
khara914/cf-phpbuildpack | lib/yaml/composer.py | 534 | 4921 |
__all__ = ['Composer', 'ComposerError']
from error import MarkedYAMLError
from events import *
from nodes import *
class ComposerError(MarkedYAMLError):
    """Raised when the event stream cannot be composed into a node tree
    (undefined alias, duplicate anchor, or extra documents in the stream)."""
    pass
class Composer(object):
    """Mixin that composes the parser's event stream into a node tree.

    Expects the host class to also provide ``check_event``/``peek_event``/
    ``get_event`` (from Parser) and ``descend_resolver``/``ascend_resolver``/
    ``resolve`` (from Resolver) — presumably mixed in by the Loader; confirm
    against the loader classes that combine these mixins.
    """

    def __init__(self):
        # Maps anchor name -> already-composed node, used to resolve aliases.
        # Reset after each document (anchors are document-scoped).
        self.anchors = {}

    def check_node(self):
        """Return True if there is another document to compose."""
        # Drop the STREAM-START event.
        if self.check_event(StreamStartEvent):
            self.get_event()
        # If there are more documents available?
        return not self.check_event(StreamEndEvent)

    def get_node(self):
        """Compose and return the root node of the next document, if any."""
        # Get the root node of the next document.
        if not self.check_event(StreamEndEvent):
            return self.compose_document()

    def get_single_node(self):
        """Compose exactly one document; raise ComposerError if the stream
        contains more than one."""
        # Drop the STREAM-START event.
        self.get_event()
        # Compose a document if the stream is not empty.
        document = None
        if not self.check_event(StreamEndEvent):
            document = self.compose_document()
        # Ensure that the stream contains no more documents.
        if not self.check_event(StreamEndEvent):
            event = self.get_event()
            raise ComposerError("expected a single document in the stream",
                    document.start_mark, "but found another document",
                    event.start_mark)
        # Drop the STREAM-END event.
        self.get_event()
        return document

    def compose_document(self):
        """Compose one document delimited by DOCUMENT-START/END events."""
        # Drop the DOCUMENT-START event.
        self.get_event()
        # Compose the root node.
        node = self.compose_node(None, None)
        # Drop the DOCUMENT-END event.
        self.get_event()
        # Anchors are document-scoped, so forget them between documents.
        self.anchors = {}
        return node

    def compose_node(self, parent, index):
        """Compose the next node.

        ``parent`` and ``index`` only provide context for tag resolution
        (see descend_resolver); they are not stored on the node.
        """
        if self.check_event(AliasEvent):
            # An alias reuses the node previously composed for its anchor.
            event = self.get_event()
            anchor = event.anchor
            if anchor not in self.anchors:
                raise ComposerError(None, None, "found undefined alias %r"
                        % anchor.encode('utf-8'), event.start_mark)
            return self.anchors[anchor]
        event = self.peek_event()
        anchor = event.anchor
        if anchor is not None:
            if anchor in self.anchors:
                raise ComposerError("found duplicate anchor %r; first occurence"
                        % anchor.encode('utf-8'), self.anchors[anchor].start_mark,
                        "second occurence", event.start_mark)
        self.descend_resolver(parent, index)
        # Dispatch on the event kind; the scalar/sequence/mapping helpers
        # record the anchor themselves (before composing children, so that
        # self-referencing aliases inside collections work).
        if self.check_event(ScalarEvent):
            node = self.compose_scalar_node(anchor)
        elif self.check_event(SequenceStartEvent):
            node = self.compose_sequence_node(anchor)
        elif self.check_event(MappingStartEvent):
            node = self.compose_mapping_node(anchor)
        self.ascend_resolver()
        return node

    def compose_scalar_node(self, anchor):
        """Compose a ScalarNode from the next ScalarEvent."""
        event = self.get_event()
        tag = event.tag
        # '!' or no tag means "resolve the tag from the value".
        if tag is None or tag == u'!':
            tag = self.resolve(ScalarNode, event.value, event.implicit)
        node = ScalarNode(tag, event.value,
                event.start_mark, event.end_mark, style=event.style)
        if anchor is not None:
            self.anchors[anchor] = node
        return node

    def compose_sequence_node(self, anchor):
        """Compose a SequenceNode and all of its children."""
        start_event = self.get_event()
        tag = start_event.tag
        if tag is None or tag == u'!':
            tag = self.resolve(SequenceNode, None, start_event.implicit)
        node = SequenceNode(tag, [],
                start_event.start_mark, None,
                flow_style=start_event.flow_style)
        # Register the anchor before composing children so aliases inside
        # the sequence can refer back to it.
        if anchor is not None:
            self.anchors[anchor] = node
        index = 0
        while not self.check_event(SequenceEndEvent):
            node.value.append(self.compose_node(node, index))
            index += 1
        end_event = self.get_event()
        node.end_mark = end_event.end_mark
        return node

    def compose_mapping_node(self, anchor):
        """Compose a MappingNode and all of its key/value pairs."""
        start_event = self.get_event()
        tag = start_event.tag
        if tag is None or tag == u'!':
            tag = self.resolve(MappingNode, None, start_event.implicit)
        node = MappingNode(tag, [],
                start_event.start_mark, None,
                flow_style=start_event.flow_style)
        if anchor is not None:
            self.anchors[anchor] = node
        while not self.check_event(MappingEndEvent):
            #key_event = self.peek_event()
            item_key = self.compose_node(node, None)
            #if item_key in node.value:
            #    raise ComposerError("while composing a mapping", start_event.start_mark,
            #            "found duplicate key", key_event.start_mark)
            item_value = self.compose_node(node, item_key)
            # Value is stored as a (key, value) pair list, not a dict, so
            # duplicate keys survive until construction.
            #node.value[item_key] = item_value
            node.value.append((item_key, item_value))
        end_event = self.get_event()
        node.end_mark = end_event.end_mark
        return node
| apache-2.0 |
Finntack/pootle | pootle/core/initdb.py | 2 | 10505 | # -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
import logging
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
from django.utils.translation import ugettext_noop as _
from pootle.core.models import Revision
from pootle_app.models import Directory
from pootle_app.models.permissions import PermissionSet, get_pootle_permission
from pootle_format.models import Format
from pootle_language.models import Language
from pootle_project.models import Project
from staticpages.models import StaticPage as Announcement
logger = logging.getLogger(__name__)
class InitDB(object):
    """Populates an empty database with the objects every Pootle install
    needs: default users, permissions, root directories, languages and
    (optionally) the bundled projects."""

    def init_db(self, create_projects=True):
        """Populate the database with default initial data.

        This creates the default database to get a working Pootle installation.
        """
        self.create_formats()
        self.create_revision()
        self.create_essential_users()
        self.create_root_directories()
        self.create_template_languages()
        if create_projects:
            self.create_terminology_project()
        self.create_pootle_permissions()
        self.create_pootle_permission_sets()
        if create_projects:
            self.create_default_projects()
        self.create_default_languages()

    def create_formats(self):
        """Register the default file formats via the formats delegate."""
        from pootle.core.delegate import formats

        formats.get().initialize()

    def _create_object(self, model_klass, **criteria):
        """``get_or_create`` wrapper that logs whether the object was created.

        Returns the same ``(instance, created)`` tuple as ``get_or_create``.
        """
        instance, created = model_klass.objects.get_or_create(**criteria)
        if created:
            logger.debug(
                "Created %s: '%s'",
                instance.__class__.__name__, instance)
        else:
            logger.debug(
                "%s already exists - skipping: '%s'",
                instance.__class__.__name__, instance)
        return instance, created

    def _create_pootle_user(self, **criteria):
        """Create a user account with an unusable password (cannot log in)."""
        user, created = self._create_object(get_user_model(), **criteria)
        if created:
            user.set_unusable_password()
            user.save()
        return user

    def _create_pootle_permission_set(self, permissions, **criteria):
        """Create a PermissionSet and attach ``permissions`` as its
        positive permissions."""
        permission_set, created = self._create_object(PermissionSet,
                                                      **criteria)
        if created:
            permission_set.positive_permissions = permissions
            permission_set.save()
        return permission_set

    def create_revision(self):
        """Initialize the global revision counter."""
        Revision.initialize()

    def create_essential_users(self):
        """Create the 'default' and 'nobody' User instances.

        These users are required for Pootle's permission system.
        """
        # The nobody user is used to represent an anonymous user in cases
        # where we need to associate model information with such a user. An
        # example is in the permission system: we need a way to store rights
        # for anonymous users; thus we use the nobody user.
        criteria = {
            'username': u"nobody",
            'full_name': u"any anonymous user",
            'is_active': True,
        }
        self._create_pootle_user(**criteria)

        # The 'default' user represents any valid, non-anonymous user and is
        # used to associate information any such user. An example is in the
        # permission system: we need a way to store default rights for users.
        # We use the 'default' user for this.
        #
        # In a future version of Pootle we should think about using Django's
        # groups to do better permissions handling.
        criteria = {
            'username': u"default",
            'full_name': u"any authenticated user",
            'is_active': True,
        }
        self._create_pootle_user(**criteria)

        # The system user represents a system, and is used to
        # associate updates done by bulk commands as update_stores.
        criteria = {
            'username': u"system",
            'full_name': u"system user",
            'is_active': True,
        }
        self._create_pootle_user(**criteria)

    def create_pootle_permissions(self):
        """Create Pootle's directory level permissions."""

        args = {
            'app_label': "pootle_app",
            'model': "directory",
        }

        pootle_content_type = self._create_object(ContentType, **args)[0]
        pootle_content_type.save()

        # Create the permissions.
        permissions = [
            {
                'name': _("Can access a project"),
                'codename': "view",
            },
            {
                'name': _("Cannot access a project"),
                'codename': "hide",
            },
            {
                'name': _("Can make a suggestion for a translation"),
                'codename': "suggest",
            },
            {
                'name': _("Can submit a translation"),
                'codename': "translate",
            },
            {
                'name': _("Can review suggestions"),
                'codename': "review",
            },
            {
                'name': _("Can perform administrative tasks"),
                'codename': "administrate",
            },
        ]

        criteria = {
            'content_type': pootle_content_type,
        }

        for permission in permissions:
            criteria.update(permission)
            self._create_object(Permission, **criteria)

    def create_pootle_permission_sets(self):
        """Create the default permission set for the 'nobody' and 'default' users.

        'nobody' is the anonymous (non-logged in) user, and 'default' is the
        logged in user.
        """
        User = get_user_model()

        nobody = User.objects.get(username='nobody')
        default = User.objects.get(username='default')

        view = get_pootle_permission('view')
        suggest = get_pootle_permission('suggest')
        translate = get_pootle_permission('translate')

        # Default permissions for tree root.
        criteria = {
            'user': nobody,
            'directory': Directory.objects.root,
        }
        self._create_pootle_permission_set([view, suggest], **criteria)

        criteria['user'] = default
        self._create_pootle_permission_set(
            [view, suggest, translate], **criteria)

        # Default permissions for templates language.
        # Override with no permissions for templates language.
        criteria = {
            'user': nobody,
            'directory': Directory.objects.get(pootle_path="/templates/"),
        }
        self._create_pootle_permission_set([], **criteria)

        criteria['user'] = default
        self._create_pootle_permission_set([], **criteria)

    def require_english(self):
        """Create the English Language item."""
        criteria = {
            'code': "en",
            'fullname': u"English",
            'nplurals': 2,
            'pluralequation': "(n != 1)",
        }
        en = self._create_object(Language, **criteria)[0]
        return en

    def create_root_directories(self):
        """Create the root Directory items."""
        root = self._create_object(Directory, **dict(name=""))[0]
        self._create_object(Directory, **dict(name="projects", parent=root))

    def create_template_languages(self):
        """Create the 'templates' and English languages.

        The 'templates' language is used to give users access to the
        untranslated template files.
        """
        self._create_object(
            Language, **dict(code="templates", fullname="Templates"))
        self.require_english()

    def create_terminology_project(self):
        """Create the terminology project.

        The terminology project is used to display terminology suggestions
        while translating.
        """
        criteria = {
            'code': "terminology",
            'fullname': u"Terminology",
            'source_language': self.require_english(),
            'checkstyle': "terminology",
        }
        po = Format.objects.get(name="po")
        terminology = self._create_object(Project, **criteria)[0]
        terminology.filetypes.add(po)

    def create_default_projects(self):
        """Create the default projects that we host.

        You might want to add your projects here, although you can also add
        things through the web interface later.
        """
        en = self.require_english()
        po = Format.objects.get(name="po")

        criteria = {
            'code': u"tutorial",
            'source_language': en,
            'fullname': u"Tutorial",
            'checkstyle': "standard",
            'treestyle': "auto",
        }
        tutorial = self._create_object(Project, **criteria)[0]
        tutorial.filetypes.add(po)

        criteria = {
            'active': True,
            'title': "Project instructions",
            'body': (
                '<div dir="ltr" lang="en">Tutorial project where users can '
                'play with Pootle and learn more about translation and '
                'localisation.<br />For more help on localisation, visit the '
                '<a href="http://docs.translatehouse.org/projects/'
                'localization-guide/en/latest/guide/start.html">localisation '
                'guide</a>.</div>'),
            'virtual_path': "announcements/projects/"+tutorial.code,
        }
        self._create_object(Announcement, **criteria)

    def create_default_languages(self):
        """Create the default languages."""
        from translate.lang import data, factory

        # import languages from toolkit
        for code in data.languages.keys():
            try:
                tk_lang = factory.getlanguage(code)
                criteria = {
                    'code': code,
                    'fullname': tk_lang.fullname,
                    'nplurals': tk_lang.nplurals,
                    'pluralequation': tk_lang.pluralequation,
                }
                try:
                    criteria['specialchars'] = tk_lang.specialchars
                except AttributeError:
                    pass
                self._create_object(Language, **criteria)
            except Exception:
                # Best-effort: some toolkit language codes lack the data we
                # need. Previously a bare ``except: pass`` that also swallowed
                # KeyboardInterrupt/SystemExit and hid all errors; keep the
                # skip-on-failure behaviour but leave a trace.
                logger.debug("Skipping language %r: could not be created", code)
| gpl-3.0 |
Arcanemagus/SickRage | lib/rtorrent/lib/bencode.py | 43 | 9232 | # Copyright (C) 2011 by clueless <clueless.nospam ! mail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# Version: 20111107
#
# Changelog
# ---------
# 2011-11-07 - Added support for Python2 (tested on 2.6)
# 2011-10-03 - Fixed: moved check for end of list at the top of the while loop
# in _decode_list (in case the list is empty) (Chris Lucas)
# - Converted dictionary keys to str
# 2011-04-24 - Changed date format to YYYY-MM-DD for versioning, bigger
# integer denotes a newer version
# - Fixed a bug that would treat False as an integral type but
# encode it using the 'False' string, attempting to encode a
# boolean now results in an error
# - Fixed a bug where an integer value of 0 in a list or
# dictionary resulted in a parse error while decoding
#
# 2011-04-03 - Original release
import sys

# True when running under Python 3: string/integer handling differs between
# the major versions (bytes indexing yields ints on py3, and py2 has the
# extra long/unicode types).
_py3 = sys.version_info[0] == 3

if _py3:
    _VALID_STRING_TYPES = (str,)
    _VALID_INT_TYPES = (int,)
else:
    _VALID_STRING_TYPES = (str, unicode)  # @UndefinedVariable
    _VALID_INT_TYPES = (int, long)  # @UndefinedVariable

# Token-type tags returned by _gettype() for the next bencoded value.
_TYPE_INT = 1
_TYPE_STRING = 2
_TYPE_LIST = 3
_TYPE_DICTIONARY = 4
_TYPE_END = 5
_TYPE_INVALID = 6
def _gettype(char):
    """Classify the next bencoded value from its first character.

    ``char`` may be an int (py3 bytes indexing) or a 1-char string (py2);
    returns one of the ``_TYPE_*`` tags.
    """
    code = char if isinstance(char, int) else ord(char)
    if code == 0x6C:            # 'l'
        return _TYPE_LIST
    if code == 0x64:            # 'd'
        return _TYPE_DICTIONARY
    if code == 0x69:            # 'i'
        return _TYPE_INT
    if code == 0x65:            # 'e'
        return _TYPE_END
    if 0x30 <= code <= 0x39:    # digits '0'-'9' start a string length
        return _TYPE_STRING
    return _TYPE_INVALID
def _decode_string(data):
    """Parse one bencoded string (``<len>:<payload>``).

    Returns ``(value, remainder)`` where ``remainder`` is the still-unparsed
    tail of ``data``.
    """
    # Indexing bytes yields ints on py3 but 1-char strings on py2, so the
    # ':' separator has to be expressed accordingly.
    colon = 0x3A if _py3 else chr(0x3A)
    pos = 1
    while data[pos] != colon:
        pos = pos + 1
    length = int(data[:pos])
    start = pos + 1
    return (data[start:start + length], data[start + length:])
def _decode_int(data):
    """Parse one bencoded integer (``i<value>e``).

    Returns ``(value, remainder)`` where ``remainder`` is the still-unparsed
    tail of ``data``.
    """
    # Indexing bytes yields ints on py3 but 1-char strings on py2, so the
    # 'e' terminator has to be expressed accordingly.
    terminator = 0x65 if _py3 else chr(0x65)
    pos = 1
    while data[pos] != terminator:
        pos = pos + 1
    return (int(data[1:pos]), data[pos + 1:])
def _decode_list(data):
    """Parse a bencoded list (``l<items>e``).

    Returns ``(list, remainder)`` on success or ``(False, False)`` on a
    parse error / truncated input.
    """
    result = []
    overflow = data[1:]
    while True:
        # End-of-list check first, so an empty list parses correctly.
        if _gettype(overflow[0]) == _TYPE_END:
            return (result, overflow[1:])
        value, overflow = _decode(overflow)
        # Bugfix: the historical ``overflow == ''`` test could never match
        # bytes on Python 3, so truncated input crashed with IndexError on
        # the next iteration instead of returning the error tuple.
        if isinstance(value, bool) or not overflow:
            return (False, False)
        result.append(value)
def _decode_dict(data):
    """Parse a bencoded dictionary (``d<key><value>...e``).

    Keys must be bencoded strings and are decoded to ``str``.
    Returns ``(dict, remainder)`` on success or ``(False, False)`` on a
    parse error / truncated input.
    """
    result = {}
    overflow = data[1:]
    while True:
        # Keys must be strings per the bencoding spec.
        if _gettype(overflow[0]) != _TYPE_STRING:
            return (False, False)
        key, overflow = _decode(overflow)
        # Bugfix: ``overflow == ''`` never matched bytes on Python 3; use
        # truthiness, which works for both str (py2) and bytes (py3).
        if key is False or not overflow:
            return (False, False)
        value, overflow = _decode(overflow)
        if isinstance(value, bool) or not overflow:
            # Parse error. (Leftover debug print statements removed.)
            return (False, False)
        # don't use bytes for the key
        key = key.decode()
        result[key] = value
        if _gettype(overflow[0]) == _TYPE_END:
            return (result, overflow[1:])
def _decode(data):
    """Dispatch on the leading token of ``data``.

    Returns ``(value, remainder)``; ``(False, False)`` when the next token
    is invalid or a bare end marker.
    """
    handlers = {
        _TYPE_INT: _decode_int,
        _TYPE_STRING: _decode_string,
        _TYPE_LIST: _decode_list,
        _TYPE_DICTIONARY: _decode_dict,
    }
    handler = handlers.get(_gettype(data[0]))
    if handler is None:
        return (False, False)
    return handler(data)
def decode(data):
    """Decode a bencoded byte string.

    Returns the parsed value (bytes, int, list, dict or a combination of
    those) on success, or False on a parse error.
    """
    parsed, _remainder = _decode(data)
    return parsed
# Args: data as integer
# return: encoded byte string
def _encode_int(data):
return b'i' + str(data).encode() + b'e'
# Args: data as string or bytes
# Return: encoded byte string
def _encode_string(data):
return str(len(data)).encode() + b':' + data
def _encode_list(data):
    """Encode a list as ``l<items>e``.

    Returns the encoded bytes, or False if any element fails to encode.
    """
    chunks = [b'l']
    for element in data:
        piece = encode(element)
        if piece is False:
            return False
        chunks.append(piece)
    chunks.append(b'e')
    return b''.join(chunks)
def _encode_dict(data):
    """Encode a dict as ``d<key><value>...e`` with keys in sorted order.

    Returns the encoded bytes, or False when a key is not a string/bytes
    or any item fails to encode.
    """
    # Validate every key up front, as the original implementation did.
    for key in data:
        if not (isinstance(key, _VALID_STRING_TYPES) or isinstance(key, bytes)):
            return False
    chunks = [b'd']
    for key in sorted(data):
        encoded_key = encode(key)
        encoded_value = encode(data[key])
        if encoded_key is False or encoded_value is False:
            return False
        chunks.append(encoded_key)
        chunks.append(encoded_value)
    chunks.append(b'e')
    return b''.join(chunks)
def encode(data):
    """Encode ``data`` (list, dict, str, bytes or int, nested arbitrarily)
    to a bencoded byte string.

    Returns False for unsupported types. Booleans are rejected explicitly
    because ``bool`` is a subclass of ``int``.
    """
    if isinstance(data, bool):
        return False
    if isinstance(data, _VALID_INT_TYPES):
        return _encode_int(data)
    if isinstance(data, bytes):
        return _encode_string(data)
    if isinstance(data, _VALID_STRING_TYPES):
        return _encode_string(data.encode())
    if isinstance(data, list):
        return _encode_list(data)
    if isinstance(data, dict):
        return _encode_dict(data)
    return False
| gpl-3.0 |
Cojacfar/Maker | comm/lib/python2.7/site-packages/django/core/management/sql.py | 107 | 8922 | from __future__ import unicode_literals
import codecs
import os
import re
from django.conf import settings
from django.core.management.base import CommandError
from django.db import models
from django.db.models import get_models
from django.utils._os import upath
def sql_create(app, style, connection):
    "Returns a list of the CREATE TABLE SQL statements for the given app."

    if connection.settings_dict['ENGINE'] == 'django.db.backends.dummy':
        # This must be the "dummy" database backend, which means the user
        # hasn't set ENGINE for the database.
        raise CommandError("Django doesn't know which syntax to use for your SQL statements,\n" +
            "because you haven't properly specified the ENGINE setting for the database.\n" +
            "see: https://docs.djangoproject.com/en/dev/ref/settings/#databases")

    # Get installed models, so we generate REFERENCES right.
    # We trim models from the current app so that the sqlreset command does not
    # generate invalid SQL (leaving models out of known_models is harmless, so
    # we can be conservative).
    app_models = models.get_models(app, include_auto_created=True)
    final_output = []
    tables = connection.introspection.table_names()
    known_models = set([model for model in connection.introspection.installed_models(tables) if model not in app_models])
    # Maps referenced model -> list of (model, field) pairs whose FK SQL has
    # to wait until the referenced table exists.
    pending_references = {}

    for model in app_models:
        output, references = connection.creation.sql_create_model(model, style, known_models)
        final_output.extend(output)
        for refto, refs in references.items():
            pending_references.setdefault(refto, []).extend(refs)
            if refto in known_models:
                # Target table already exists: emit the FK statements now.
                final_output.extend(connection.creation.sql_for_pending_references(refto, style, pending_references))
        # Flush any references that point at the model we just created
        # (covers self-references and back-references).
        final_output.extend(connection.creation.sql_for_pending_references(model, style, pending_references))
        # Keep track of the fact that we've created the table for this model.
        known_models.add(model)

    # Handle references to tables that are from other apps
    # but don't exist physically.
    not_installed_models = set(pending_references.keys())
    if not_installed_models:
        alter_sql = []
        for model in not_installed_models:
            # Emit them commented out so the output stays runnable.
            alter_sql.extend(['-- ' + sql for sql in
                connection.creation.sql_for_pending_references(model, style, pending_references)])
        if alter_sql:
            final_output.append('-- The following references should be added but depend on non-existent tables:')
            final_output.extend(alter_sql)

    return final_output
def sql_delete(app, style, connection):
    "Returns a list of the DROP TABLE SQL statements for the given app."

    # This should work even if a connection isn't available
    try:
        cursor = connection.cursor()
    except Exception:
        # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # still propagate; any database error simply means "no connection",
        # in which case we fall back to emitting nothing table-specific.
        cursor = None

    # Figure out which tables already exist
    if cursor:
        table_names = connection.introspection.table_names(cursor)
    else:
        table_names = []

    output = []

    # Output DROP TABLE statements for standard application tables.
    to_delete = set()

    # Maps referenced model -> list of (model, field) pairs, so the backend
    # can break FK dependencies before dropping tables.
    references_to_delete = {}
    app_models = models.get_models(app, include_auto_created=True)
    for model in app_models:
        if cursor and connection.introspection.table_name_converter(model._meta.db_table) in table_names:
            # The table exists, so it needs to be dropped
            opts = model._meta
            for f in opts.local_fields:
                if f.rel and f.rel.to not in to_delete:
                    references_to_delete.setdefault(f.rel.to, []).append((model, f))
            to_delete.add(model)

    for model in app_models:
        if connection.introspection.table_name_converter(model._meta.db_table) in table_names:
            output.extend(connection.creation.sql_destroy_model(model, references_to_delete, style))

    # Close database connection explicitly, in case this output is being piped
    # directly into a database client, to avoid locking issues.
    if cursor:
        cursor.close()
        connection.close()

    return output[::-1]  # Reverse it, to deal with table dependencies.
def sql_flush(style, connection, only_django=False, reset_sequences=True, allow_cascade=False):
    """
    Returns a list of the SQL statements used to flush the database.

    If only_django is True, then only table names that have associated Django
    models and are in INSTALLED_APPS will be included.
    """
    if only_django:
        table_list = connection.introspection.django_table_names(only_existing=True)
    else:
        table_list = connection.introspection.table_names()
    sequence_list = connection.introspection.sequence_list() if reset_sequences else ()
    return connection.ops.sql_flush(style, table_list, sequence_list, allow_cascade)
def sql_custom(app, style, connection):
    "Returns a list of the custom table modifying SQL statements for the given app."
    statements = []
    for model in get_models(app):
        statements.extend(custom_sql_for_model(model, style, connection))
    return statements
def sql_indexes(app, style, connection):
    "Returns a list of the CREATE INDEX SQL statements for all models in the given app."
    creation = connection.creation
    statements = []
    for model in models.get_models(app, include_auto_created=True):
        statements.extend(creation.sql_indexes_for_model(model, style))
    return statements
def sql_destroy_indexes(app, style, connection):
    "Returns a list of the DROP INDEX SQL statements for all models in the given app."
    return [
        statement
        for model in models.get_models(app, include_auto_created=True)
        for statement in connection.creation.sql_destroy_indexes_for_model(model, style)
    ]
def sql_all(app, style, connection):
    "Returns a list of CREATE TABLE SQL, initial-data inserts, and CREATE INDEX SQL for the given module."
    combined = []
    # Order matters: tables first, then custom SQL, then indexes.
    for producer in (sql_create, sql_custom, sql_indexes):
        combined.extend(producer(app, style, connection))
    return combined
def _split_statements(content):
comment_re = re.compile(r"^((?:'[^']*'|[^'])*?)--.*$")
statements = []
statement = []
for line in content.split("\n"):
cleaned_line = comment_re.sub(r"\1", line).strip()
if not cleaned_line:
continue
statement.append(cleaned_line)
if cleaned_line.endswith(";"):
statements.append(" ".join(statement))
statement = []
return statements
def custom_sql_for_model(model, style, connection):
    """Return the custom SQL statements shipped for *model*.

    Collects the fields' ``post_create_sql`` output (for managed models)
    plus the contents of ``sql/<model>.<backend>.sql`` and
    ``sql/<model>.sql`` located next to the app's models module.
    """
    opts = model._meta
    app_dir = os.path.normpath(os.path.join(os.path.dirname(upath(models.get_app(model._meta.app_label).__file__)), 'sql'))
    output = []

    # Post-creation SQL should come before any initial SQL data is loaded.
    # However, this should not be done for models that are unmanaged or
    # for fields that are part of a parent model (via model inheritance).
    if opts.managed:
        post_sql_fields = [f for f in opts.local_fields if hasattr(f, 'post_create_sql')]
        for f in post_sql_fields:
            output.extend(f.post_create_sql(style, model._meta.db_table))

    # Find custom SQL, if it's available.
    # Backend-specific file first, then the generic one.
    backend_name = connection.settings_dict['ENGINE'].split('.')[-1]
    sql_files = [os.path.join(app_dir, "%s.%s.sql" % (opts.model_name, backend_name)),
                 os.path.join(app_dir, "%s.sql" % opts.model_name)]
    for sql_file in sql_files:
        if os.path.exists(sql_file):
            with codecs.open(sql_file, 'U', encoding=settings.FILE_CHARSET) as fp:
                # Some backends can't execute more than one SQL statement at a time,
                # so split into separate statements.
                output.extend(_split_statements(fp.read()))
    return output
def emit_pre_sync_signal(create_models, verbosity, interactive, db):
    """Send the ``pre_syncdb`` signal once for every installed application."""
    for app in models.get_apps():
        app_name = app.__name__.split('.')[-2]
        if verbosity >= 2:
            print("Running pre-sync handlers for application %s" % app_name)
        models.signals.pre_syncdb.send(
            sender=app, app=app, create_models=create_models,
            verbosity=verbosity, interactive=interactive, db=db)
def emit_post_sync_signal(created_models, verbosity, interactive, db):
    """Send the ``post_syncdb`` signal once for every installed application."""
    for app in models.get_apps():
        app_name = app.__name__.split('.')[-2]
        if verbosity >= 2:
            print("Running post-sync handlers for application %s" % app_name)
        models.signals.post_syncdb.send(
            sender=app, app=app, created_models=created_models,
            verbosity=verbosity, interactive=interactive, db=db)
| gpl-2.0 |
voxmedia/thumbor | integration_tests/__init__.py | 3 | 1877 | import os.path
from tornado.testing import AsyncHTTPTestCase
from thumbor.app import ThumborServiceApp
from thumbor.importer import Importer
from thumbor.config import Config
from thumbor.context import Context, ServerParameters
from .urls_helpers import single_dataset # , combined_dataset
from thumbor.utils import which
class EngineCase(AsyncHTTPTestCase):
    """Integration-test harness that boots a Thumbor app configured for a
    specific imaging engine (set via the ``engine`` class attribute on
    subclasses; without it, no app is created)."""

    def get_app(self):
        """Build the Thumbor application under test, or None when no
        engine is configured."""
        config = Config(SECURITY_KEY='ACME-SEC')
        config.DETECTORS = [
            'thumbor.detectors.face_detector',
            'thumbor.detectors.profile_detector',
            'thumbor.detectors.glasses_detector',
            'thumbor.detectors.feature_detector',
        ]
        config.STORAGE = 'thumbor.storages.no_storage'
        config.LOADER = 'thumbor.loaders.file_loader'
        config.FILE_LOADER_ROOT_PATH = os.path.join(
            os.path.dirname(__file__), 'imgs')
        config.ENGINE = getattr(self, 'engine', None)
        config.USE_GIFSICLE_ENGINE = True
        config.FFMPEG_PATH = which('ffmpeg')
        config.ENGINE_THREADPOOL_SIZE = 10
        config.OPTIMIZERS = [
            'thumbor.optimizers.gifv',
        ]

        if not config.ENGINE:
            return None

        importer = Importer(config)
        importer.import_modules()

        params = ServerParameters(None, None, None, None, None, None)
        params.gifsicle_path = which('gifsicle')
        context = Context(params, config, importer)
        return ThumborServiceApp(context)

    def retrieve(self, url):
        """Fetch *url* through the app under test and block for the result."""
        self.http_client.fetch(self.get_url(url), self.stop)
        return self.wait(timeout=30)

    def exec_single_params(self):
        """Run the single-parameter URL dataset against the app (no-op when
        no app was built)."""
        if not self._app:
            return True
        single_dataset(self.retrieve)
# def test_combined_params__with_pil(self):
# if not self._app:
# return True
# combined_dataset(self.retrieve)
| mit |
remb0/CouchPotatoServer | libs/argparse.py | 490 | 87791 | # Author: Steven J. Bethard <steven.bethard@gmail.com>.
"""Command-line parsing library
This module is an optparse-inspired command-line parsing library that:
- handles both optional and positional arguments
- produces highly informative usage messages
- supports parsers that dispatch to sub-parsers
The following is a simple usage example that sums integers from the
command-line and writes the result to a file::
parser = argparse.ArgumentParser(
description='sum the integers at the command line')
parser.add_argument(
'integers', metavar='int', nargs='+', type=int,
help='an integer to be summed')
parser.add_argument(
'--log', default=sys.stdout, type=argparse.FileType('w'),
help='the file where the sum should be written')
args = parser.parse_args()
args.log.write('%s' % sum(args.integers))
args.log.close()
The module contains the following public classes:
- ArgumentParser -- The main entry point for command-line parsing. As the
example above shows, the add_argument() method is used to populate
the parser with actions for optional and positional arguments. Then
the parse_args() method is invoked to convert the args at the
command-line into an object with attributes.
- ArgumentError -- The exception raised by ArgumentParser objects when
there are errors with the parser's actions. Errors raised while
parsing the command-line are caught by ArgumentParser and emitted
as command-line messages.
- FileType -- A factory for defining types of files to be created. As the
example above shows, instances of FileType are typically passed as
the type= argument of add_argument() calls.
- Action -- The base class for parser actions. Typically actions are
selected by passing strings like 'store_true' or 'append_const' to
the action= argument of add_argument(). However, for greater
customization of ArgumentParser actions, subclasses of Action may
be defined and passed as the action= argument.
- HelpFormatter, RawDescriptionHelpFormatter, RawTextHelpFormatter,
ArgumentDefaultsHelpFormatter -- Formatter classes which
may be passed as the formatter_class= argument to the
ArgumentParser constructor. HelpFormatter is the default,
RawDescriptionHelpFormatter and RawTextHelpFormatter tell the parser
not to change the formatting for help text, and
ArgumentDefaultsHelpFormatter adds information about argument defaults
to the help.
All other classes in this module are considered implementation details.
(Also note that HelpFormatter and RawDescriptionHelpFormatter are only
considered public as object names -- the API of the formatter objects is
still considered an implementation detail.)
"""
__version__ = '1.2.1'  # standalone argparse distribution version
# Names re-exported as the public API; everything else in this module is an
# implementation detail.
__all__ = [
    'ArgumentParser',
    'ArgumentError',
    'ArgumentTypeError',
    'FileType',
    'HelpFormatter',
    'ArgumentDefaultsHelpFormatter',
    'RawDescriptionHelpFormatter',
    'RawTextHelpFormatter',
    'Namespace',
    'Action',
    'ONE_OR_MORE',
    'OPTIONAL',
    'PARSER',
    'REMAINDER',
    'SUPPRESS',
    'ZERO_OR_MORE',
]
import copy as _copy
import os as _os
import re as _re
import sys as _sys
import textwrap as _textwrap
from gettext import gettext as _
# Compatibility shims so this vendored module runs on very old (pre-2.4)
# and very new (3.x) Pythons alike.
try:
    set
except NameError:
    # for python < 2.4 compatibility (sets module is there since 2.3):
    from sets import Set as set
try:
    basestring
except NameError:
    # Python 3 removed basestring; str is the only string base type there.
    basestring = str
try:
    sorted
except NameError:
    # for python < 2.4 compatibility:
    def sorted(iterable, reverse=False):
        # Minimal stand-in for the builtin: sort a copy, optionally reversed.
        result = list(iterable)
        result.sort()
        if reverse:
            result.reverse()
        return result
def _callable(obj):
return hasattr(obj, '__call__') or hasattr(obj, '__bases__')
# Sentinel value: suppresses an attribute default or hides a help entry.
SUPPRESS = '==SUPPRESS=='
# nargs markers understood by add_argument().
OPTIONAL = '?'
ZERO_OR_MORE = '*'
ONE_OR_MORE = '+'
PARSER = 'A...'
REMAINDER = '...'
# Namespace attribute used to stash args a sub-parser did not recognize.
_UNRECOGNIZED_ARGS_ATTR = '_unrecognized_args'
# =============================
# Utility functions and classes
# =============================
class _AttributeHolder(object):
"""Abstract base class that provides __repr__.
The __repr__ method returns a string in the format::
ClassName(attr=name, attr=name, ...)
The attributes are determined either by a class-level attribute,
'_kwarg_names', or by inspecting the instance __dict__.
"""
def __repr__(self):
type_name = type(self).__name__
arg_strings = []
for arg in self._get_args():
arg_strings.append(repr(arg))
for name, value in self._get_kwargs():
arg_strings.append('%s=%r' % (name, value))
return '%s(%s)' % (type_name, ', '.join(arg_strings))
def _get_kwargs(self):
return sorted(self.__dict__.items())
def _get_args(self):
return []
def _ensure_value(namespace, name, value):
if getattr(namespace, name, None) is None:
setattr(namespace, name, value)
return getattr(namespace, name)
# ===============
# Formatting Help
# ===============
class HelpFormatter(object):
    """Formatter for generating usage messages and argument help strings.
    Only the name of this class is considered a public API. All the methods
    provided by the class are considered an implementation detail.
    """
    def __init__(self,
                 prog,
                 indent_increment=2,
                 max_help_position=24,
                 width=None):
        # default setting for width
        if width is None:
            try:
                width = int(_os.environ['COLUMNS'])
            except (KeyError, ValueError):
                width = 80
            # leave a small right margin
            width -= 2
        self._prog = prog
        self._indent_increment = indent_increment
        self._max_help_position = max_help_position
        self._width = width
        self._current_indent = 0
        self._level = 0
        # widest invocation seen so far; used to align help columns
        self._action_max_length = 0
        self._root_section = self._Section(self, None)
        self._current_section = self._root_section
        self._whitespace_matcher = _re.compile(r'\s+')
        self._long_break_matcher = _re.compile(r'\n\n\n+')
    # ===============================
    # Section and indentation methods
    # ===============================
    def _indent(self):
        self._current_indent += self._indent_increment
        self._level += 1
    def _dedent(self):
        self._current_indent -= self._indent_increment
        assert self._current_indent >= 0, 'Indent decreased below 0.'
        self._level -= 1
    class _Section(object):
        # A nestable group of (callable, args) items that renders lazily.
        def __init__(self, formatter, parent, heading=None):
            self.formatter = formatter
            self.parent = parent
            self.heading = heading
            self.items = []
        def format_help(self):
            # format the indented section
            if self.parent is not None:
                self.formatter._indent()
            join = self.formatter._join_parts
            # NOTE(review): this loop evaluates every item and discards the
            # result; the join below evaluates them all again.  Looks
            # redundant -- confirm it has no needed side effects before
            # removing.
            for func, args in self.items:
                func(*args)
            item_help = join([func(*args) for func, args in self.items])
            if self.parent is not None:
                self.formatter._dedent()
            # return nothing if the section was empty
            if not item_help:
                return ''
            # add the heading if the section was non-empty
            if self.heading is not SUPPRESS and self.heading is not None:
                current_indent = self.formatter._current_indent
                heading = '%*s%s:\n' % (current_indent, '', self.heading)
            else:
                heading = ''
            # join the section-initial newline, the heading and the help
            return join(['\n', heading, item_help, '\n'])
    def _add_item(self, func, args):
        self._current_section.items.append((func, args))
    # ========================
    # Message building methods
    # ========================
    def start_section(self, heading):
        self._indent()
        section = self._Section(self, self._current_section, heading)
        self._add_item(section.format_help, [])
        self._current_section = section
    def end_section(self):
        self._current_section = self._current_section.parent
        self._dedent()
    def add_text(self, text):
        if text is not SUPPRESS and text is not None:
            self._add_item(self._format_text, [text])
    def add_usage(self, usage, actions, groups, prefix=None):
        if usage is not SUPPRESS:
            args = usage, actions, groups, prefix
            self._add_item(self._format_usage, args)
    def add_argument(self, action):
        if action.help is not SUPPRESS:
            # find all invocations
            get_invocation = self._format_action_invocation
            invocations = [get_invocation(action)]
            for subaction in self._iter_indented_subactions(action):
                invocations.append(get_invocation(subaction))
            # update the maximum item length
            invocation_length = max([len(s) for s in invocations])
            action_length = invocation_length + self._current_indent
            self._action_max_length = max(self._action_max_length,
                                          action_length)
            # add the item to the list
            self._add_item(self._format_action, [action])
    def add_arguments(self, actions):
        for action in actions:
            self.add_argument(action)
    # =======================
    # Help-formatting methods
    # =======================
    def format_help(self):
        """Render all accumulated sections into the final help string."""
        help = self._root_section.format_help()
        if help:
            # collapse runs of blank lines and normalize the trailing newline
            help = self._long_break_matcher.sub('\n\n', help)
            help = help.strip('\n') + '\n'
        return help
    def _join_parts(self, part_strings):
        return ''.join([part
                        for part in part_strings
                        if part and part is not SUPPRESS])
    def _format_usage(self, usage, actions, groups, prefix):
        if prefix is None:
            prefix = _('usage: ')
        # if usage is specified, use that
        if usage is not None:
            usage = usage % dict(prog=self._prog)
        # if no optionals or positionals are available, usage is just prog
        elif usage is None and not actions:
            usage = '%(prog)s' % dict(prog=self._prog)
        # if optionals and positionals are available, calculate usage
        elif usage is None:
            prog = '%(prog)s' % dict(prog=self._prog)
            # split optionals from positionals
            optionals = []
            positionals = []
            for action in actions:
                if action.option_strings:
                    optionals.append(action)
                else:
                    positionals.append(action)
            # build full usage string
            format = self._format_actions_usage
            action_usage = format(optionals + positionals, groups)
            usage = ' '.join([s for s in [prog, action_usage] if s])
            # wrap the usage parts if it's too long
            text_width = self._width - self._current_indent
            if len(prefix) + len(usage) > text_width:
                # break usage into wrappable parts
                part_regexp = r'\(.*?\)+|\[.*?\]+|\S+'
                opt_usage = format(optionals, groups)
                pos_usage = format(positionals, groups)
                opt_parts = _re.findall(part_regexp, opt_usage)
                pos_parts = _re.findall(part_regexp, pos_usage)
                assert ' '.join(opt_parts) == opt_usage
                assert ' '.join(pos_parts) == pos_usage
                # helper for wrapping lines
                def get_lines(parts, indent, prefix=None):
                    lines = []
                    line = []
                    if prefix is not None:
                        line_len = len(prefix) - 1
                    else:
                        line_len = len(indent) - 1
                    for part in parts:
                        if line_len + 1 + len(part) > text_width:
                            lines.append(indent + ' '.join(line))
                            line = []
                            line_len = len(indent) - 1
                        line.append(part)
                        line_len += len(part) + 1
                    if line:
                        lines.append(indent + ' '.join(line))
                    if prefix is not None:
                        lines[0] = lines[0][len(indent):]
                    return lines
                # if prog is short, follow it with optionals or positionals
                if len(prefix) + len(prog) <= 0.75 * text_width:
                    indent = ' ' * (len(prefix) + len(prog) + 1)
                    if opt_parts:
                        lines = get_lines([prog] + opt_parts, indent, prefix)
                        lines.extend(get_lines(pos_parts, indent))
                    elif pos_parts:
                        lines = get_lines([prog] + pos_parts, indent, prefix)
                    else:
                        lines = [prog]
                # if prog is long, put it on its own line
                else:
                    indent = ' ' * len(prefix)
                    parts = opt_parts + pos_parts
                    lines = get_lines(parts, indent)
                    if len(lines) > 1:
                        lines = []
                        lines.extend(get_lines(opt_parts, indent))
                        lines.extend(get_lines(pos_parts, indent))
                    lines = [prog] + lines
                # join lines into usage
                usage = '\n'.join(lines)
        # prefix with 'usage:'
        return '%s%s\n\n' % (prefix, usage)
    def _format_actions_usage(self, actions, groups):
        # find group indices and identify actions in groups
        group_actions = set()
        inserts = {}
        for group in groups:
            try:
                start = actions.index(group._group_actions[0])
            except ValueError:
                continue
            else:
                end = start + len(group._group_actions)
                if actions[start:end] == group._group_actions:
                    for action in group._group_actions:
                        group_actions.add(action)
                    if not group.required:
                        if start in inserts:
                            inserts[start] += ' ['
                        else:
                            inserts[start] = '['
                        inserts[end] = ']'
                    else:
                        if start in inserts:
                            inserts[start] += ' ('
                        else:
                            inserts[start] = '('
                        inserts[end] = ')'
                    for i in range(start + 1, end):
                        inserts[i] = '|'
        # collect all actions format strings
        parts = []
        for i, action in enumerate(actions):
            # suppressed arguments are marked with None
            # remove | separators for suppressed arguments
            if action.help is SUPPRESS:
                parts.append(None)
                if inserts.get(i) == '|':
                    inserts.pop(i)
                elif inserts.get(i + 1) == '|':
                    inserts.pop(i + 1)
            # produce all arg strings
            elif not action.option_strings:
                part = self._format_args(action, action.dest)
                # if it's in a group, strip the outer []
                if action in group_actions:
                    if part[0] == '[' and part[-1] == ']':
                        part = part[1:-1]
                # add the action string to the list
                parts.append(part)
            # produce the first way to invoke the option in brackets
            else:
                option_string = action.option_strings[0]
                # if the Optional doesn't take a value, format is:
                #    -s or --long
                if action.nargs == 0:
                    part = '%s' % option_string
                # if the Optional takes a value, format is:
                #    -s ARGS or --long ARGS
                else:
                    default = action.dest.upper()
                    args_string = self._format_args(action, default)
                    part = '%s %s' % (option_string, args_string)
                # make it look optional if it's not required or in a group
                if not action.required and action not in group_actions:
                    part = '[%s]' % part
                # add the action string to the list
                parts.append(part)
        # insert things at the necessary indices
        for i in sorted(inserts, reverse=True):
            parts[i:i] = [inserts[i]]
        # join all the action items with spaces
        text = ' '.join([item for item in parts if item is not None])
        # clean up separators for mutually exclusive groups
        open = r'[\[(]'
        close = r'[\])]'
        text = _re.sub(r'(%s) ' % open, r'\1', text)
        text = _re.sub(r' (%s)' % close, r'\1', text)
        text = _re.sub(r'%s *%s' % (open, close), r'', text)
        text = _re.sub(r'\(([^|]*)\)', r'\1', text)
        text = text.strip()
        # return the text
        return text
    def _format_text(self, text):
        if '%(prog)' in text:
            text = text % dict(prog=self._prog)
        text_width = self._width - self._current_indent
        indent = ' ' * self._current_indent
        return self._fill_text(text, text_width, indent) + '\n\n'
    def _format_action(self, action):
        # determine the required width and the entry label
        help_position = min(self._action_max_length + 2,
                            self._max_help_position)
        help_width = self._width - help_position
        action_width = help_position - self._current_indent - 2
        action_header = self._format_action_invocation(action)
        # no help; start on same line and add a final newline
        if not action.help:
            tup = self._current_indent, '', action_header
            action_header = '%*s%s\n' % tup
        # short action name; start on the same line and pad two spaces
        elif len(action_header) <= action_width:
            tup = self._current_indent, '', action_width, action_header
            action_header = '%*s%-*s  ' % tup
            indent_first = 0
        # long action name; start on the next line
        else:
            tup = self._current_indent, '', action_header
            action_header = '%*s%s\n' % tup
            indent_first = help_position
        # collect the pieces of the action help
        parts = [action_header]
        # if there was help for the action, add lines of help text
        if action.help:
            help_text = self._expand_help(action)
            help_lines = self._split_lines(help_text, help_width)
            parts.append('%*s%s\n' % (indent_first, '', help_lines[0]))
            for line in help_lines[1:]:
                parts.append('%*s%s\n' % (help_position, '', line))
        # or add a newline if the description doesn't end with one
        elif not action_header.endswith('\n'):
            parts.append('\n')
        # if there are any sub-actions, add their help as well
        for subaction in self._iter_indented_subactions(action):
            parts.append(self._format_action(subaction))
        # return a single string
        return self._join_parts(parts)
    def _format_action_invocation(self, action):
        if not action.option_strings:
            metavar, = self._metavar_formatter(action, action.dest)(1)
            return metavar
        else:
            parts = []
            # if the Optional doesn't take a value, format is:
            #    -s, --long
            if action.nargs == 0:
                parts.extend(action.option_strings)
            # if the Optional takes a value, format is:
            #    -s ARGS, --long ARGS
            else:
                default = action.dest.upper()
                args_string = self._format_args(action, default)
                for option_string in action.option_strings:
                    parts.append('%s %s' % (option_string, args_string))
            return ', '.join(parts)
    def _metavar_formatter(self, action, default_metavar):
        if action.metavar is not None:
            result = action.metavar
        elif action.choices is not None:
            choice_strs = [str(choice) for choice in action.choices]
            result = '{%s}' % ','.join(choice_strs)
        else:
            result = default_metavar
        def format(tuple_size):
            if isinstance(result, tuple):
                return result
            else:
                return (result, ) * tuple_size
        return format
    def _format_args(self, action, default_metavar):
        """Build the metavar part of an invocation according to nargs."""
        get_metavar = self._metavar_formatter(action, default_metavar)
        if action.nargs is None:
            result = '%s' % get_metavar(1)
        elif action.nargs == OPTIONAL:
            result = '[%s]' % get_metavar(1)
        elif action.nargs == ZERO_OR_MORE:
            result = '[%s [%s ...]]' % get_metavar(2)
        elif action.nargs == ONE_OR_MORE:
            result = '%s [%s ...]' % get_metavar(2)
        elif action.nargs == REMAINDER:
            result = '...'
        elif action.nargs == PARSER:
            result = '%s ...' % get_metavar(1)
        else:
            formats = ['%s' for _ in range(action.nargs)]
            result = ' '.join(formats) % get_metavar(action.nargs)
        return result
    def _expand_help(self, action):
        """Interpolate %(...)s placeholders in an action's help text."""
        params = dict(vars(action), prog=self._prog)
        for name in list(params):
            if params[name] is SUPPRESS:
                del params[name]
        for name in list(params):
            if hasattr(params[name], '__name__'):
                params[name] = params[name].__name__
        if params.get('choices') is not None:
            choices_str = ', '.join([str(c) for c in params['choices']])
            params['choices'] = choices_str
        return self._get_help_string(action) % params
    def _iter_indented_subactions(self, action):
        try:
            get_subactions = action._get_subactions
        except AttributeError:
            pass
        else:
            self._indent()
            for subaction in get_subactions():
                yield subaction
            self._dedent()
    def _split_lines(self, text, width):
        text = self._whitespace_matcher.sub(' ', text).strip()
        return _textwrap.wrap(text, width)
    def _fill_text(self, text, width, indent):
        text = self._whitespace_matcher.sub(' ', text).strip()
        return _textwrap.fill(text, width, initial_indent=indent,
                              subsequent_indent=indent)
    def _get_help_string(self, action):
        return action.help
class RawDescriptionHelpFormatter(HelpFormatter):
    """Help message formatter which retains any formatting in descriptions.
    Only the name of this class is considered a public API. All the methods
    provided by the class are considered an implementation detail.
    """

    def _fill_text(self, text, width, indent):
        # Keep the author's own line breaks; only prepend the indent.
        indented = [indent + line for line in text.splitlines(True)]
        return ''.join(indented)
class RawTextHelpFormatter(RawDescriptionHelpFormatter):
    """Help message formatter which retains formatting of all help text.
    Only the name of this class is considered a public API. All the methods
    provided by the class are considered an implementation detail.
    """

    def _split_lines(self, text, width):
        # Width is ignored on purpose: help text is emitted exactly as written.
        lines = text.splitlines()
        return lines
class ArgumentDefaultsHelpFormatter(HelpFormatter):
    """Help message formatter which adds default values to argument help.
    Only the name of this class is considered a public API. All the methods
    provided by the class are considered an implementation detail.
    """

    def _get_help_string(self, action):
        help = action.help
        # Author already mentioned the default explicitly.
        if '%(default)' in help:
            return help
        # A suppressed default should never be advertised.
        if action.default is SUPPRESS:
            return help
        # Only optionals, and positionals that may legitimately be omitted,
        # get the automatic "(default: ...)" suffix.
        if action.option_strings or action.nargs in (OPTIONAL, ZERO_OR_MORE):
            help += ' (default: %(default)s)'
        return help
# =====================
# Options and Arguments
# =====================
def _get_action_name(argument):
    """Best human-readable name for *argument*: options, metavar, or dest."""
    if argument is None:
        return None
    if argument.option_strings:
        return '/'.join(argument.option_strings)
    if argument.metavar not in (None, SUPPRESS):
        return argument.metavar
    if argument.dest not in (None, SUPPRESS):
        return argument.dest
    return None
class ArgumentError(Exception):
    """An error from creating or using an argument (optional or positional).

    str() of the exception prefixes the message with the name of the
    offending argument, when one is known.
    """

    def __init__(self, argument, message):
        self.argument_name = _get_action_name(argument)
        self.message = message

    def __str__(self):
        fields = dict(message=self.message,
                      argument_name=self.argument_name)
        if self.argument_name is None:
            return '%(message)s' % fields
        return 'argument %(argument_name)s: %(message)s' % fields
# Raised by type= conversion callables; the parser turns it into a clean
# command-line error message instead of a traceback.
class ArgumentTypeError(Exception):
    """An error from trying to convert a command line string to a type."""
    pass
# ==============
# Action classes
# ==============
class Action(_AttributeHolder):
    """Information about how to convert command line strings to Python objects.

    ArgumentParser uses Action objects to describe how a single argument
    should be parsed.  Every keyword argument accepted by the constructor
    is also exposed as an attribute of the instance:

    - option_strings -- option strings (e.g. ['-f', '--foo']) tied to this
      action; empty for positionals
    - dest -- attribute name on the namespace that receives the value(s)
    - nargs -- how many command-line arguments to consume: None (one
      argument, single value), an integer N (list of N values), '?'
      (zero or one), '*' (zero or more), '+' (one or more), or the
      PARSER/REMAINDER markers
    - const -- value produced when the option appears but takes no value
    - default -- value produced when the option is absent
    - type -- callable used to convert each argument string
    - choices -- container restricting the converted values
    - required -- whether the option must appear on the command line
      (meaningful for optionals only)
    - help -- help text shown in usage messages
    - metavar -- display name for the argument in help output; dest is
      used when None
    """

    def __init__(self,
                 option_strings,
                 dest,
                 nargs=None,
                 const=None,
                 default=None,
                 type=None,
                 choices=None,
                 required=False,
                 help=None,
                 metavar=None):
        self.option_strings = option_strings
        self.dest = dest
        self.nargs = nargs
        self.const = const
        self.default = default
        self.type = type
        self.choices = choices
        self.required = required
        self.help = help
        self.metavar = metavar

    def _get_kwargs(self):
        # Fixed ordering (instead of the base class's sorted __dict__) so
        # reprs read in signature order; 'required' is deliberately omitted.
        names = ('option_strings', 'dest', 'nargs', 'const', 'default',
                 'type', 'choices', 'help', 'metavar')
        return [(name, getattr(self, name)) for name in names]

    def __call__(self, parser, namespace, values, option_string=None):
        # Subclasses must implement the actual parsing behavior.
        raise NotImplementedError(_('.__call__() not defined'))
class _StoreAction(Action):
    """Default action: store the converted value(s) on the namespace."""

    def __init__(self,
                 option_strings,
                 dest,
                 nargs=None,
                 const=None,
                 default=None,
                 type=None,
                 choices=None,
                 required=False,
                 help=None,
                 metavar=None):
        # Storing with nargs=0 would never consume anything to store.
        if nargs == 0:
            raise ValueError('nargs for store actions must be > 0; if you '
                             'have nothing to store, actions such as store '
                             'true or store const may be more appropriate')
        # const is only meaningful together with nargs='?'.
        if const is not None and nargs != OPTIONAL:
            raise ValueError('nargs must be %r to supply const' % OPTIONAL)
        super(_StoreAction, self).__init__(
            option_strings=option_strings, dest=dest, nargs=nargs,
            const=const, default=default, type=type, choices=choices,
            required=required, help=help, metavar=metavar)

    def __call__(self, parser, namespace, values, option_string=None):
        setattr(namespace, self.dest, values)
class _StoreConstAction(Action):
    """Store a fixed constant when the option is seen (consumes no value)."""

    def __init__(self,
                 option_strings,
                 dest,
                 const,
                 default=None,
                 required=False,
                 help=None,
                 metavar=None):
        # nargs is always 0: the option itself carries the value.
        super(_StoreConstAction, self).__init__(
            option_strings=option_strings, dest=dest, nargs=0, const=const,
            default=default, required=required, help=help)

    def __call__(self, parser, namespace, values, option_string=None):
        setattr(namespace, self.dest, self.const)
class _StoreTrueAction(_StoreConstAction):
    """Boolean flag: store True when present (default False)."""

    def __init__(self,
                 option_strings,
                 dest,
                 default=False,
                 required=False,
                 help=None):
        super(_StoreTrueAction, self).__init__(
            option_strings=option_strings, dest=dest, const=True,
            default=default, required=required, help=help)
class _StoreFalseAction(_StoreConstAction):
    """Boolean flag: store False when present (default True)."""

    def __init__(self,
                 option_strings,
                 dest,
                 default=True,
                 required=False,
                 help=None):
        super(_StoreFalseAction, self).__init__(
            option_strings=option_strings, dest=dest, const=False,
            default=default, required=required, help=help)
class _AppendAction(Action):
    """Append each occurrence's value to a list on the namespace."""

    def __init__(self,
                 option_strings,
                 dest,
                 nargs=None,
                 const=None,
                 default=None,
                 type=None,
                 choices=None,
                 required=False,
                 help=None,
                 metavar=None):
        # Appending with nargs=0 would never consume a value to append.
        if nargs == 0:
            raise ValueError('nargs for append actions must be > 0; if arg '
                             'strings are not supplying the value to append, '
                             'the append const action may be more appropriate')
        # const is only meaningful together with nargs='?'.
        if const is not None and nargs != OPTIONAL:
            raise ValueError('nargs must be %r to supply const' % OPTIONAL)
        super(_AppendAction, self).__init__(
            option_strings=option_strings, dest=dest, nargs=nargs,
            const=const, default=default, type=type, choices=choices,
            required=required, help=help, metavar=metavar)

    def __call__(self, parser, namespace, values, option_string=None):
        # Copy first so a shared default list is never mutated in place.
        items = _copy.copy(_ensure_value(namespace, self.dest, []))
        items.append(values)
        setattr(namespace, self.dest, items)
class _AppendConstAction(Action):
    """Append a fixed constant to a list each time the option is seen."""

    def __init__(self,
                 option_strings,
                 dest,
                 const,
                 default=None,
                 required=False,
                 help=None,
                 metavar=None):
        super(_AppendConstAction, self).__init__(
            option_strings=option_strings, dest=dest, nargs=0, const=const,
            default=default, required=required, help=help, metavar=metavar)

    def __call__(self, parser, namespace, values, option_string=None):
        # Copy first so a shared default list is never mutated in place.
        items = _copy.copy(_ensure_value(namespace, self.dest, []))
        items.append(self.const)
        setattr(namespace, self.dest, items)
class _CountAction(Action):
    """Count how many times an option occurs (e.g. ``-vvv`` gives 3)."""

    def __init__(self,
                 option_strings,
                 dest,
                 default=None,
                 required=False,
                 help=None):
        super(_CountAction, self).__init__(
            option_strings=option_strings, dest=dest, nargs=0,
            default=default, required=required, help=help)

    def __call__(self, parser, namespace, values, option_string=None):
        previous = _ensure_value(namespace, self.dest, 0)
        setattr(namespace, self.dest, previous + 1)
class _HelpAction(Action):
    """Print the parser's help text and exit the program."""

    def __init__(self,
                 option_strings,
                 dest=SUPPRESS,
                 default=SUPPRESS,
                 help=None):
        super(_HelpAction, self).__init__(
            option_strings=option_strings, dest=dest, default=default,
            nargs=0, help=help)

    def __call__(self, parser, namespace, values, option_string=None):
        parser.print_help()
        parser.exit()
class _VersionAction(Action):
    """Print version information and exit the program."""

    def __init__(self,
                 option_strings,
                 version=None,
                 dest=SUPPRESS,
                 default=SUPPRESS,
                 help="show program's version number and exit"):
        super(_VersionAction, self).__init__(
            option_strings=option_strings, dest=dest, default=default,
            nargs=0, help=help)
        self.version = version

    def __call__(self, parser, namespace, values, option_string=None):
        # Fall back to the parser-level version attribute when none was
        # given to this action.
        version = self.version if self.version is not None else parser.version
        formatter = parser._get_formatter()
        formatter.add_text(version)
        parser.exit(message=formatter.format_help())
class _SubParsersAction(Action):
    """Dispatch the remaining command-line arguments to a named sub-parser."""
    class _ChoicesPseudoAction(Action):
        # Placeholder action used only so sub-command names and their help
        # text appear in the parent parser's help output.
        def __init__(self, name, help):
            sup = super(_SubParsersAction._ChoicesPseudoAction, self)
            sup.__init__(option_strings=[], dest=name, help=help)
    def __init__(self,
                 option_strings,
                 prog,
                 parser_class,
                 dest=SUPPRESS,
                 help=None,
                 metavar=None):
        self._prog_prefix = prog
        self._parser_class = parser_class
        # Maps sub-command name -> its parser; also serves as `choices`.
        self._name_parser_map = {}
        self._choices_actions = []
        super(_SubParsersAction, self).__init__(
            option_strings=option_strings,
            dest=dest,
            nargs=PARSER,
            choices=self._name_parser_map,
            help=help,
            metavar=metavar)
    def add_parser(self, name, **kwargs):
        """Create, register and return a sub-parser for command *name*."""
        # set prog from the existing prefix
        if kwargs.get('prog') is None:
            kwargs['prog'] = '%s %s' % (self._prog_prefix, name)
        # create a pseudo-action to hold the choice help
        if 'help' in kwargs:
            help = kwargs.pop('help')
            choice_action = self._ChoicesPseudoAction(name, help)
            self._choices_actions.append(choice_action)
        # create the parser and add it to the map
        parser = self._parser_class(**kwargs)
        self._name_parser_map[name] = parser
        return parser
    def _get_subactions(self):
        return self._choices_actions
    def __call__(self, parser, namespace, values, option_string=None):
        # First value names the sub-command; the rest belong to it.
        parser_name = values[0]
        arg_strings = values[1:]
        # set the parser name if requested
        if self.dest is not SUPPRESS:
            setattr(namespace, self.dest, parser_name)
        # select the parser
        try:
            parser = self._name_parser_map[parser_name]
        except KeyError:
            tup = parser_name, ', '.join(self._name_parser_map)
            msg = _('unknown parser %r (choices: %s)' % tup)
            raise ArgumentError(self, msg)
        # parse all the remaining options into the namespace
        # store any unrecognized options on the object, so that the top
        # level parser can decide what to do with them
        namespace, arg_strings = parser.parse_known_args(arg_strings, namespace)
        if arg_strings:
            vars(namespace).setdefault(_UNRECOGNIZED_ARGS_ATTR, [])
            getattr(namespace, _UNRECOGNIZED_ARGS_ATTR).extend(arg_strings)
# ==============
# Type classes
# ==============
class FileType(object):
    """Factory for creating file object types

    Instances of FileType are typically passed as type= arguments to the
    ArgumentParser add_argument() method.

    Keyword Arguments:
        - mode -- A string indicating how the file is to be opened. Accepts the
            same values as the builtin open() function.
        - bufsize -- The file's desired buffer size. Accepts the same values as
            the builtin open() function.
    """

    def __init__(self, mode='r', bufsize=None):
        self._mode = mode
        self._bufsize = bufsize

    def __call__(self, string):
        # "-" conventionally means stdin for reading, stdout for writing.
        if string == '-':
            if 'r' in self._mode:
                return _sys.stdin
            if 'w' in self._mode:
                return _sys.stdout
            raise ValueError(_('argument "-" with mode %r' % self._mode))
        # Every other argument is treated as a file name.
        if self._bufsize:
            return open(string, self._mode, self._bufsize)
        return open(string, self._mode)

    def __repr__(self):
        shown = [repr(arg)
                 for arg in (self._mode, self._bufsize)
                 if arg is not None]
        return '%s(%s)' % (type(self).__name__, ', '.join(shown))
# ===========================
# Optional and Positional Parsing
# ===========================
class Namespace(_AttributeHolder):
    """Simple attribute bag with name/value equality and a readable repr."""

    def __init__(self, **kwargs):
        for key, val in kwargs.items():
            setattr(self, key, val)

    # Mutable, so explicitly unhashable.
    __hash__ = None

    def __eq__(self, other):
        return vars(self) == vars(other)

    def __ne__(self, other):
        return not (self == other)

    def __contains__(self, key):
        return key in self.__dict__
class _ActionsContainer(object):
    """Shared implementation for objects that contain argparse actions.

    ArgumentParser and the argument-group classes derive from this; it
    provides the add_argument() machinery, the 'action'/'type' registries,
    container-level defaults, and conflict handling between option strings.
    """

    def __init__(self,
                 description,
                 prefix_chars,
                 argument_default,
                 conflict_handler):
        super(_ActionsContainer, self).__init__()

        self.description = description
        self.argument_default = argument_default
        self.prefix_chars = prefix_chars
        self.conflict_handler = conflict_handler

        # set up registries
        self._registries = {}

        # register actions
        self.register('action', None, _StoreAction)
        self.register('action', 'store', _StoreAction)
        self.register('action', 'store_const', _StoreConstAction)
        self.register('action', 'store_true', _StoreTrueAction)
        self.register('action', 'store_false', _StoreFalseAction)
        self.register('action', 'append', _AppendAction)
        self.register('action', 'append_const', _AppendConstAction)
        self.register('action', 'count', _CountAction)
        self.register('action', 'help', _HelpAction)
        self.register('action', 'version', _VersionAction)
        self.register('action', 'parsers', _SubParsersAction)

        # raise an exception if the conflict handler is invalid
        self._get_handler()

        # action storage
        self._actions = []
        self._option_string_actions = {}

        # groups
        self._action_groups = []
        self._mutually_exclusive_groups = []

        # defaults storage
        self._defaults = {}

        # determines whether an "option" looks like a negative number
        self._negative_number_matcher = _re.compile(r'^-\d+$|^-\d*\.\d+$')

        # whether or not there are any optionals that look like negative
        # numbers -- uses a list so it can be shared and edited
        self._has_negative_number_optionals = []

    # ====================
    # Registration methods
    # ====================
    def register(self, registry_name, value, object):
        """Map value to object in the named registry (e.g. 'action', 'type')."""
        registry = self._registries.setdefault(registry_name, {})
        registry[value] = object

    def _registry_get(self, registry_name, value, default=None):
        """Look up value in the named registry, falling back to default."""
        return self._registries[registry_name].get(value, default)

    # ==================================
    # Namespace default accessor methods
    # ==================================
    def set_defaults(self, **kwargs):
        """Set container-level defaults, overriding existing action defaults."""
        self._defaults.update(kwargs)

        # if these defaults match any existing arguments, replace
        # the previous default on the object with the new one
        for action in self._actions:
            if action.dest in kwargs:
                action.default = kwargs[action.dest]

    def get_default(self, dest):
        """Return the default for dest: action default first, then _defaults."""
        for action in self._actions:
            if action.dest == dest and action.default is not None:
                return action.default
        return self._defaults.get(dest, None)

    # =======================
    # Adding argument actions
    # =======================
    def add_argument(self, *args, **kwargs):
        """
        add_argument(dest, ..., name=value, ...)
        add_argument(option_string, option_string, ..., name=value, ...)
        """

        # if no positional args are supplied or only one is supplied and
        # it doesn't look like an option string, parse a positional
        # argument
        chars = self.prefix_chars
        if not args or len(args) == 1 and args[0][0] not in chars:
            if args and 'dest' in kwargs:
                raise ValueError('dest supplied twice for positional argument')
            kwargs = self._get_positional_kwargs(*args, **kwargs)

        # otherwise, we're adding an optional argument
        else:
            kwargs = self._get_optional_kwargs(*args, **kwargs)

        # if no default was supplied, use the parser-level default
        if 'default' not in kwargs:
            dest = kwargs['dest']
            if dest in self._defaults:
                kwargs['default'] = self._defaults[dest]
            elif self.argument_default is not None:
                kwargs['default'] = self.argument_default

        # create the action object, and add it to the parser
        action_class = self._pop_action_class(kwargs)
        if not _callable(action_class):
            raise ValueError('unknown action "%s"' % action_class)
        action = action_class(**kwargs)

        # raise an error if the action type is not callable
        type_func = self._registry_get('type', action.type, action.type)
        if not _callable(type_func):
            raise ValueError('%r is not callable' % type_func)

        return self._add_action(action)

    def add_argument_group(self, *args, **kwargs):
        """Create an _ArgumentGroup, track it, and return it."""
        group = _ArgumentGroup(self, *args, **kwargs)
        self._action_groups.append(group)
        return group

    def add_mutually_exclusive_group(self, **kwargs):
        """Create a _MutuallyExclusiveGroup, track it, and return it."""
        group = _MutuallyExclusiveGroup(self, **kwargs)
        self._mutually_exclusive_groups.append(group)
        return group

    def _add_action(self, action):
        """Register the action with this container and index its options."""
        # resolve any conflicts
        self._check_conflict(action)

        # add to actions list
        self._actions.append(action)
        action.container = self

        # index the action by any option strings it has
        for option_string in action.option_strings:
            self._option_string_actions[option_string] = action

        # set the flag if any option strings look like negative numbers
        for option_string in action.option_strings:
            if self._negative_number_matcher.match(option_string):
                if not self._has_negative_number_optionals:
                    self._has_negative_number_optionals.append(True)

        # return the created action
        return action

    def _remove_action(self, action):
        """Drop the action from this container's action list."""
        self._actions.remove(action)

    def _add_container_actions(self, container):
        """Copy another container's actions into this one (for parents=)."""
        # collect groups by titles
        title_group_map = {}
        for group in self._action_groups:
            if group.title in title_group_map:
                msg = _('cannot merge actions - two groups are named %r')
                raise ValueError(msg % (group.title))
            title_group_map[group.title] = group

        # map each action to its group
        group_map = {}
        for group in container._action_groups:

            # if a group with the title exists, use that, otherwise
            # create a new group matching the container's group
            if group.title not in title_group_map:
                title_group_map[group.title] = self.add_argument_group(
                    title=group.title,
                    description=group.description,
                    conflict_handler=group.conflict_handler)

            # map the actions to their new group
            for action in group._group_actions:
                group_map[action] = title_group_map[group.title]

        # add container's mutually exclusive groups
        # NOTE: if add_mutually_exclusive_group ever gains title= and
        # description= then this code will need to be expanded as above
        for group in container._mutually_exclusive_groups:
            mutex_group = self.add_mutually_exclusive_group(
                required=group.required)

            # map the actions to their new mutex group
            for action in group._group_actions:
                group_map[action] = mutex_group

        # add all actions to this container or their group
        for action in container._actions:
            group_map.get(action, self)._add_action(action)

    def _get_positional_kwargs(self, dest, **kwargs):
        """Normalize kwargs for a positional argument (no option strings)."""
        # make sure required is not specified
        if 'required' in kwargs:
            msg = _("'required' is an invalid argument for positionals")
            raise TypeError(msg)

        # mark positional arguments as required if at least one is
        # always required
        if kwargs.get('nargs') not in [OPTIONAL, ZERO_OR_MORE]:
            kwargs['required'] = True
        if kwargs.get('nargs') == ZERO_OR_MORE and 'default' not in kwargs:
            kwargs['required'] = True

        # return the keyword arguments with no option strings
        return dict(kwargs, dest=dest, option_strings=[])

    def _get_optional_kwargs(self, *args, **kwargs):
        """Normalize kwargs for an optional argument, inferring dest."""
        # determine short and long option strings
        option_strings = []
        long_option_strings = []
        for option_string in args:
            # error on strings that don't start with an appropriate prefix
            if not option_string[0] in self.prefix_chars:
                msg = _('invalid option string %r: '
                        'must start with a character %r')
                tup = option_string, self.prefix_chars
                raise ValueError(msg % tup)

            # strings starting with two prefix characters are long options
            option_strings.append(option_string)
            if option_string[0] in self.prefix_chars:
                if len(option_string) > 1:
                    if option_string[1] in self.prefix_chars:
                        long_option_strings.append(option_string)

        # infer destination, '--foo-bar' -> 'foo_bar' and '-x' -> 'x'
        dest = kwargs.pop('dest', None)
        if dest is None:
            if long_option_strings:
                dest_option_string = long_option_strings[0]
            else:
                dest_option_string = option_strings[0]
            dest = dest_option_string.lstrip(self.prefix_chars)
            if not dest:
                msg = _('dest= is required for options like %r')
                raise ValueError(msg % option_string)
            dest = dest.replace('-', '_')

        # return the updated keyword arguments
        return dict(kwargs, dest=dest, option_strings=option_strings)

    def _pop_action_class(self, kwargs, default=None):
        """Pop 'action' from kwargs and resolve it through the registry."""
        action = kwargs.pop('action', default)
        return self._registry_get('action', action, action)

    def _get_handler(self):
        """Resolve self.conflict_handler to a bound _handle_conflict_* method."""
        # determine function from conflict handler string
        handler_func_name = '_handle_conflict_%s' % self.conflict_handler
        try:
            return getattr(self, handler_func_name)
        except AttributeError:
            msg = _('invalid conflict_resolution value: %r')
            raise ValueError(msg % self.conflict_handler)

    def _check_conflict(self, action):
        """Find option-string collisions and dispatch to the conflict handler."""
        # find all options that conflict with this option
        confl_optionals = []
        for option_string in action.option_strings:
            if option_string in self._option_string_actions:
                confl_optional = self._option_string_actions[option_string]
                confl_optionals.append((option_string, confl_optional))

        # resolve any conflicts
        if confl_optionals:
            conflict_handler = self._get_handler()
            conflict_handler(action, confl_optionals)

    def _handle_conflict_error(self, action, conflicting_actions):
        """conflict_handler='error': raise ArgumentError on any collision."""
        message = _('conflicting option string(s): %s')
        conflict_string = ', '.join([option_string
                                     for option_string, action
                                     in conflicting_actions])
        raise ArgumentError(action, message % conflict_string)

    def _handle_conflict_resolve(self, action, conflicting_actions):
        """conflict_handler='resolve': strip the option from the old action."""
        # remove all conflicting options
        for option_string, action in conflicting_actions:

            # remove the conflicting option
            action.option_strings.remove(option_string)
            self._option_string_actions.pop(option_string, None)

            # if the option now has no option string, remove it from the
            # container holding it
            if not action.option_strings:
                action.container._remove_action(action)
class _ArgumentGroup(_ActionsContainer):
    """A named group of actions that shares state with its parent container.

    The group keeps its own _group_actions list (used for help formatting)
    but aliases the parent's registries, action lists and defaults, so an
    argument added to the group behaves exactly as if added to the parent.
    """

    def __init__(self, container, title=None, description=None, **kwargs):
        # add any missing keyword arguments by checking the container
        update = kwargs.setdefault
        update('conflict_handler', container.conflict_handler)
        update('prefix_chars', container.prefix_chars)
        update('argument_default', container.argument_default)
        super_init = super(_ArgumentGroup, self).__init__
        super_init(description=description, **kwargs)

        # group attributes
        self.title = title
        self._group_actions = []

        # share most attributes with the container
        # (these rebind the attributes set by _ActionsContainer.__init__
        # to the container's own objects, so mutations are shared)
        self._registries = container._registries
        self._actions = container._actions
        self._option_string_actions = container._option_string_actions
        self._defaults = container._defaults
        self._has_negative_number_optionals = \
            container._has_negative_number_optionals

    def _add_action(self, action):
        """Add via the shared container state, then track in this group."""
        action = super(_ArgumentGroup, self)._add_action(action)
        self._group_actions.append(action)
        return action

    def _remove_action(self, action):
        """Remove from the shared container state and from this group."""
        super(_ArgumentGroup, self)._remove_action(action)
        self._group_actions.remove(action)
class _MutuallyExclusiveGroup(_ArgumentGroup):
    """Group whose member options may not appear together on a command line.

    Actions are stored on the parent container; this group only tracks
    membership.  required=True means exactly one member must be used.
    """

    def __init__(self, container, required=False):
        super(_MutuallyExclusiveGroup, self).__init__(container)
        self.required = required
        self._container = container

    def _add_action(self, action):
        """Add the optional to the parent container and this group's list."""
        if action.required:
            # a required action can never satisfy mutual exclusion with
            # the other members, so it is rejected up front
            msg = _('mutually exclusive arguments must be optional')
            raise ValueError(msg)
        action = self._container._add_action(action)
        self._group_actions.append(action)
        return action

    def _remove_action(self, action):
        """Remove from the parent container and this group's list."""
        self._container._remove_action(action)
        self._group_actions.remove(action)
class ArgumentParser(_AttributeHolder, _ActionsContainer):
"""Object for parsing command line strings into Python objects.
Keyword Arguments:
- prog -- The name of the program (default: sys.argv[0])
- usage -- A usage message (default: auto-generated from arguments)
- description -- A description of what the program does
- epilog -- Text following the argument descriptions
- parents -- Parsers whose arguments should be copied into this one
- formatter_class -- HelpFormatter class for printing help messages
- prefix_chars -- Characters that prefix optional arguments
- fromfile_prefix_chars -- Characters that prefix files containing
additional arguments
- argument_default -- The default value for all arguments
- conflict_handler -- String indicating how to handle conflicts
- add_help -- Add a -h/-help option
"""
    def __init__(self,
                 prog=None,
                 usage=None,
                 description=None,
                 epilog=None,
                 version=None,
                 parents=[],
                 formatter_class=HelpFormatter,
                 prefix_chars='-',
                 fromfile_prefix_chars=None,
                 argument_default=None,
                 conflict_handler='error',
                 add_help=True):
        """Set up the parser, its default argument groups, and -h/--help.

        NOTE: parents=[] is a mutable default; it is safe here only
        because the list is iterated, never mutated.
        """

        if version is not None:
            import warnings
            warnings.warn(
                """The "version" argument to ArgumentParser is deprecated. """
                """Please use """
                """"add_argument(..., action='version', version="N", ...)" """
                """instead""", DeprecationWarning)

        superinit = super(ArgumentParser, self).__init__
        superinit(description=description,
                  prefix_chars=prefix_chars,
                  argument_default=argument_default,
                  conflict_handler=conflict_handler)

        # default setting for prog
        if prog is None:
            prog = _os.path.basename(_sys.argv[0])

        self.prog = prog
        self.usage = usage
        self.epilog = epilog
        self.version = version
        self.formatter_class = formatter_class
        self.fromfile_prefix_chars = fromfile_prefix_chars
        self.add_help = add_help

        # the two standard groups that all arguments fall into by default
        add_group = self.add_argument_group
        self._positionals = add_group(_('positional arguments'))
        self._optionals = add_group(_('optional arguments'))
        self._subparsers = None

        # register types
        def identity(string):
            return string
        self.register('type', None, identity)

        # add help and version arguments if necessary
        # (using explicit default to override global argument_default)
        if '-' in prefix_chars:
            default_prefix = '-'
        else:
            default_prefix = prefix_chars[0]
        if self.add_help:
            self.add_argument(
                default_prefix+'h', default_prefix*2+'help',
                action='help', default=SUPPRESS,
                help=_('show this help message and exit'))
        if self.version:
            self.add_argument(
                default_prefix+'v', default_prefix*2+'version',
                action='version', default=SUPPRESS,
                version=self.version,
                help=_("show program's version number and exit"))

        # add parent arguments and defaults
        for parent in parents:
            self._add_container_actions(parent)
            try:
                defaults = parent._defaults
            except AttributeError:
                pass
            else:
                self._defaults.update(defaults)
# =======================
# Pretty __repr__ methods
# =======================
def _get_kwargs(self):
names = [
'prog',
'usage',
'description',
'version',
'formatter_class',
'conflict_handler',
'add_help',
]
return [(name, getattr(self, name)) for name in names]
# ==================================
# Optional/Positional adding methods
# ==================================
    def add_subparsers(self, **kwargs):
        """Create and return the single _SubParsersAction for this parser.

        Only one subparsers action is allowed per parser; a second call
        exits via self.error().  With title=/description= the action is
        placed in a new argument group, otherwise in the positionals
        group.
        """
        if self._subparsers is not None:
            self.error(_('cannot have multiple subparser arguments'))

        # add the parser class to the arguments if it's not present
        kwargs.setdefault('parser_class', type(self))

        if 'title' in kwargs or 'description' in kwargs:
            title = _(kwargs.pop('title', 'subcommands'))
            description = _(kwargs.pop('description', None))
            self._subparsers = self.add_argument_group(title, description)
        else:
            self._subparsers = self._positionals

        # prog defaults to the usage message of this parser, skipping
        # optional arguments and with no "usage:" prefix
        if kwargs.get('prog') is None:
            formatter = self._get_formatter()
            positionals = self._get_positional_actions()
            groups = self._mutually_exclusive_groups
            formatter.add_usage(self.usage, positionals, groups, '')
            kwargs['prog'] = formatter.format_help().strip()

        # create the parsers action and add it to the positionals list
        parsers_class = self._pop_action_class(kwargs, 'parsers')
        action = parsers_class(option_strings=[], **kwargs)
        self._subparsers._add_action(action)

        # return the created parsers action
        return action
def _add_action(self, action):
if action.option_strings:
self._optionals._add_action(action)
else:
self._positionals._add_action(action)
return action
def _get_optional_actions(self):
return [action
for action in self._actions
if action.option_strings]
def _get_positional_actions(self):
return [action
for action in self._actions
if not action.option_strings]
# =====================================
# Command line argument parsing methods
# =====================================
def parse_args(self, args=None, namespace=None):
args, argv = self.parse_known_args(args, namespace)
if argv:
msg = _('unrecognized arguments: %s')
self.error(msg % ' '.join(argv))
return args
    def parse_known_args(self, args=None, namespace=None):
        """Parse known arguments, returning (namespace, remaining_args).

        Unlike parse_args(), unrecognized argument strings are returned
        to the caller rather than treated as an error.
        """
        # args default to the system args
        if args is None:
            args = _sys.argv[1:]

        # default Namespace built from parser defaults
        if namespace is None:
            namespace = Namespace()

        # add any action defaults that aren't present
        for action in self._actions:
            if action.dest is not SUPPRESS:
                if not hasattr(namespace, action.dest):
                    if action.default is not SUPPRESS:
                        default = action.default
                        # string defaults go through the action's type
                        # function, just like command-line strings
                        if isinstance(action.default, basestring):
                            default = self._get_value(action, default)
                        setattr(namespace, action.dest, default)

        # add any parser defaults that aren't present
        for dest in self._defaults:
            if not hasattr(namespace, dest):
                setattr(namespace, dest, self._defaults[dest])

        # parse the arguments and exit if there are any errors
        # (NOTE: _sys.exc_info() is used instead of 'except ... as',
        # presumably for compatibility with very old Pythons -- the same
        # pattern appears elsewhere in this file)
        try:
            namespace, args = self._parse_known_args(args, namespace)
            if hasattr(namespace, _UNRECOGNIZED_ARGS_ATTR):
                args.extend(getattr(namespace, _UNRECOGNIZED_ARGS_ATTR))
                delattr(namespace, _UNRECOGNIZED_ARGS_ATTR)
            return namespace, args
        except ArgumentError:
            err = _sys.exc_info()[1]
            self.error(str(err))
    def _parse_known_args(self, arg_strings, namespace):
        """Core parsing loop: alternate between optionals and positionals.

        Returns (namespace, extras), where extras is the list of argument
        strings no action consumed.  Raises ArgumentError (or exits via
        self.error) on malformed input.
        """
        # replace arg strings that are file references
        if self.fromfile_prefix_chars is not None:
            arg_strings = self._read_args_from_files(arg_strings)

        # map all mutually exclusive arguments to the other arguments
        # they can't occur with
        action_conflicts = {}
        for mutex_group in self._mutually_exclusive_groups:
            group_actions = mutex_group._group_actions
            for i, mutex_action in enumerate(mutex_group._group_actions):
                conflicts = action_conflicts.setdefault(mutex_action, [])
                conflicts.extend(group_actions[:i])
                conflicts.extend(group_actions[i + 1:])

        # find all option indices, and determine the arg_string_pattern
        # which has an 'O' if there is an option at an index,
        # an 'A' if there is an argument, or a '-' if there is a '--'
        option_string_indices = {}
        arg_string_pattern_parts = []
        arg_strings_iter = iter(arg_strings)
        for i, arg_string in enumerate(arg_strings_iter):

            # all args after -- are non-options
            if arg_string == '--':
                arg_string_pattern_parts.append('-')
                for arg_string in arg_strings_iter:
                    arg_string_pattern_parts.append('A')

            # otherwise, add the arg to the arg strings
            # and note the index if it was an option
            else:
                option_tuple = self._parse_optional(arg_string)
                if option_tuple is None:
                    pattern = 'A'
                else:
                    option_string_indices[i] = option_tuple
                    pattern = 'O'
                arg_string_pattern_parts.append(pattern)

        # join the pieces together to form the pattern
        arg_strings_pattern = ''.join(arg_string_pattern_parts)

        # converts arg strings to the appropriate and then takes the action
        seen_actions = set()
        seen_non_default_actions = set()

        def take_action(action, argument_strings, option_string=None):
            # convert the strings and invoke the action, enforcing
            # mutual-exclusion conflicts as we go
            seen_actions.add(action)
            argument_values = self._get_values(action, argument_strings)

            # error if this argument is not allowed with other previously
            # seen arguments, assuming that actions that use the default
            # value don't really count as "present"
            if argument_values is not action.default:
                seen_non_default_actions.add(action)
                for conflict_action in action_conflicts.get(action, []):
                    if conflict_action in seen_non_default_actions:
                        msg = _('not allowed with argument %s')
                        action_name = _get_action_name(conflict_action)
                        raise ArgumentError(action, msg % action_name)

            # take the action if we didn't receive a SUPPRESS value
            # (e.g. from a default)
            if argument_values is not SUPPRESS:
                action(self, namespace, argument_values, option_string)

        # function to convert arg_strings into an optional action
        def consume_optional(start_index):

            # get the optional identified at this index
            option_tuple = option_string_indices[start_index]
            action, option_string, explicit_arg = option_tuple

            # identify additional optionals in the same arg string
            # (e.g. -xyz is the same as -x -y -z if no args are required)
            match_argument = self._match_argument
            action_tuples = []
            while True:

                # if we found no optional action, skip it
                if action is None:
                    extras.append(arg_strings[start_index])
                    return start_index + 1

                # if there is an explicit argument, try to match the
                # optional's string arguments to only this
                if explicit_arg is not None:
                    arg_count = match_argument(action, 'A')

                    # if the action is a single-dash option and takes no
                    # arguments, try to parse more single-dash options out
                    # of the tail of the option string
                    chars = self.prefix_chars
                    if arg_count == 0 and option_string[1] not in chars:
                        action_tuples.append((action, [], option_string))
                        char = option_string[0]
                        option_string = char + explicit_arg[0]
                        new_explicit_arg = explicit_arg[1:] or None
                        optionals_map = self._option_string_actions
                        if option_string in optionals_map:
                            action = optionals_map[option_string]
                            explicit_arg = new_explicit_arg
                        else:
                            msg = _('ignored explicit argument %r')
                            raise ArgumentError(action, msg % explicit_arg)

                    # if the action expect exactly one argument, we've
                    # successfully matched the option; exit the loop
                    elif arg_count == 1:
                        stop = start_index + 1
                        args = [explicit_arg]
                        action_tuples.append((action, args, option_string))
                        break

                    # error if a double-dash option did not use the
                    # explicit argument
                    else:
                        msg = _('ignored explicit argument %r')
                        raise ArgumentError(action, msg % explicit_arg)

                # if there is no explicit argument, try to match the
                # optional's string arguments with the following strings
                # if successful, exit the loop
                else:
                    start = start_index + 1
                    selected_patterns = arg_strings_pattern[start:]
                    arg_count = match_argument(action, selected_patterns)
                    stop = start + arg_count
                    args = arg_strings[start:stop]
                    action_tuples.append((action, args, option_string))
                    break

            # add the Optional to the list and return the index at which
            # the Optional's string args stopped
            assert action_tuples
            for action, args, option_string in action_tuples:
                take_action(action, args, option_string)
            return stop

        # the list of Positionals left to be parsed; this is modified
        # by consume_positionals()
        positionals = self._get_positional_actions()

        # function to convert arg_strings into positional actions
        def consume_positionals(start_index):
            # match as many Positionals as possible
            match_partial = self._match_arguments_partial
            selected_pattern = arg_strings_pattern[start_index:]
            arg_counts = match_partial(positionals, selected_pattern)

            # slice off the appropriate arg strings for each Positional
            # and add the Positional and its args to the list
            for action, arg_count in zip(positionals, arg_counts):
                args = arg_strings[start_index: start_index + arg_count]
                start_index += arg_count
                take_action(action, args)

            # slice off the Positionals that we just parsed and return the
            # index at which the Positionals' string args stopped
            positionals[:] = positionals[len(arg_counts):]
            return start_index

        # consume Positionals and Optionals alternately, until we have
        # passed the last option string
        extras = []
        start_index = 0
        if option_string_indices:
            max_option_string_index = max(option_string_indices)
        else:
            max_option_string_index = -1
        while start_index <= max_option_string_index:

            # consume any Positionals preceding the next option
            next_option_string_index = min([
                index
                for index in option_string_indices
                if index >= start_index])
            if start_index != next_option_string_index:
                positionals_end_index = consume_positionals(start_index)

                # only try to parse the next optional if we didn't consume
                # the option string during the positionals parsing
                if positionals_end_index > start_index:
                    start_index = positionals_end_index
                    continue
                else:
                    start_index = positionals_end_index

            # if we consumed all the positionals we could and we're not
            # at the index of an option string, there were extra arguments
            if start_index not in option_string_indices:
                strings = arg_strings[start_index:next_option_string_index]
                extras.extend(strings)
                start_index = next_option_string_index

            # consume the next optional and any arguments for it
            start_index = consume_optional(start_index)

        # consume any positionals following the last Optional
        stop_index = consume_positionals(start_index)

        # if we didn't consume all the argument strings, there were extras
        extras.extend(arg_strings[stop_index:])

        # if we didn't use all the Positional objects, there were too few
        # arg strings supplied.
        if positionals:
            self.error(_('too few arguments'))

        # make sure all required actions were present
        for action in self._actions:
            if action.required:
                if action not in seen_actions:
                    name = _get_action_name(action)
                    self.error(_('argument %s is required') % name)

        # make sure all required groups had one option present
        for group in self._mutually_exclusive_groups:
            if group.required:
                for action in group._group_actions:
                    if action in seen_non_default_actions:
                        break

                # if no actions were used, report the error
                else:
                    names = [_get_action_name(action)
                             for action in group._group_actions
                             if action.help is not SUPPRESS]
                    msg = _('one of the arguments %s is required')
                    self.error(msg % ' '.join(names))

        # return the updated namespace and the extra arguments
        return namespace, extras
def _read_args_from_files(self, arg_strings):
# expand arguments referencing files
new_arg_strings = []
for arg_string in arg_strings:
# for regular arguments, just add them back into the list
if arg_string[0] not in self.fromfile_prefix_chars:
new_arg_strings.append(arg_string)
# replace arguments referencing files with the file content
else:
try:
args_file = open(arg_string[1:])
try:
arg_strings = []
for arg_line in args_file.read().splitlines():
for arg in self.convert_arg_line_to_args(arg_line):
arg_strings.append(arg)
arg_strings = self._read_args_from_files(arg_strings)
new_arg_strings.extend(arg_strings)
finally:
args_file.close()
except IOError:
err = _sys.exc_info()[1]
self.error(str(err))
# return the modified argument list
return new_arg_strings
    def convert_arg_line_to_args(self, arg_line):
        """Hook for subclasses: split one line of an arguments file.

        The default treats each line as a single argument; override to
        support e.g. shell-style splitting.
        """
        return [arg_line]
    def _match_argument(self, action, arg_strings_pattern):
        """Return how many arg strings the action consumes, or raise.

        Matches the action's nargs regex (from _get_nargs_pattern) against
        the encoded pattern ('A'=argument, 'O'=option, '-'=the '--'
        separator); raises ArgumentError with a count-specific message on
        failure.
        """
        # match the pattern for this action to the arg strings
        nargs_pattern = self._get_nargs_pattern(action)
        match = _re.match(nargs_pattern, arg_strings_pattern)

        # raise an exception if we weren't able to find a match
        if match is None:
            nargs_errors = {
                None: _('expected one argument'),
                OPTIONAL: _('expected at most one argument'),
                ONE_OR_MORE: _('expected at least one argument'),
            }
            default = _('expected %s argument(s)') % action.nargs
            msg = nargs_errors.get(action.nargs, default)
            raise ArgumentError(action, msg)

        # return the number of arguments matched
        return len(match.group(1))
def _match_arguments_partial(self, actions, arg_strings_pattern):
# progressively shorten the actions list by slicing off the
# final actions until we find a match
result = []
for i in range(len(actions), 0, -1):
actions_slice = actions[:i]
pattern = ''.join([self._get_nargs_pattern(action)
for action in actions_slice])
match = _re.match(pattern, arg_strings_pattern)
if match is not None:
result.extend([len(string) for string in match.groups()])
break
# return the list of arg string counts
return result
    def _parse_optional(self, arg_string):
        """Classify arg_string, returning an option tuple or None.

        Returns None when the string should be treated as a positional,
        otherwise (action, option_string, explicit_arg), where action may
        be None for an unknown option (it might still be valid for a
        subparser).
        """
        # if it's an empty string, it was meant to be a positional
        if not arg_string:
            return None

        # if it doesn't start with a prefix, it was meant to be positional
        if not arg_string[0] in self.prefix_chars:
            return None

        # if the option string is present in the parser, return the action
        if arg_string in self._option_string_actions:
            action = self._option_string_actions[arg_string]
            return action, arg_string, None

        # if it's just a single character, it was meant to be positional
        if len(arg_string) == 1:
            return None

        # if the option string before the "=" is present, return the action
        if '=' in arg_string:
            option_string, explicit_arg = arg_string.split('=', 1)
            if option_string in self._option_string_actions:
                action = self._option_string_actions[option_string]
                return action, option_string, explicit_arg

        # search through all possible prefixes of the option string
        # and all actions in the parser for possible interpretations
        option_tuples = self._get_option_tuples(arg_string)

        # if multiple actions match, the option string was ambiguous
        if len(option_tuples) > 1:
            options = ', '.join([option_string
                for action, option_string, explicit_arg in option_tuples])
            tup = arg_string, options
            self.error(_('ambiguous option: %s could match %s') % tup)

        # if exactly one action matched, this segmentation is good,
        # so return the parsed action
        elif len(option_tuples) == 1:
            option_tuple, = option_tuples
            return option_tuple

        # if it was not found as an option, but it looks like a negative
        # number, it was meant to be positional
        # unless there are negative-number-like options
        if self._negative_number_matcher.match(arg_string):
            if not self._has_negative_number_optionals:
                return None

        # if it contains a space, it was meant to be a positional
        if ' ' in arg_string:
            return None

        # it was meant to be an optional but there is no such option
        # in this parser (though it might be a valid option in a subparser)
        return None, arg_string, None
    def _get_option_tuples(self, option_string):
        """Return all (action, option_string, explicit_arg) interpretations.

        Used to resolve prefix abbreviations such as '--verb' for
        '--verbose'; more than one result means the input was ambiguous.
        """
        result = []

        # option strings starting with two prefix characters are only
        # split at the '='
        chars = self.prefix_chars
        if option_string[0] in chars and option_string[1] in chars:
            if '=' in option_string:
                option_prefix, explicit_arg = option_string.split('=', 1)
            else:
                option_prefix = option_string
                explicit_arg = None
            for option_string in self._option_string_actions:
                if option_string.startswith(option_prefix):
                    action = self._option_string_actions[option_string]
                    tup = action, option_string, explicit_arg
                    result.append(tup)

        # single character options can be concatenated with their arguments
        # but multiple character options always have to have their argument
        # separate
        elif option_string[0] in chars and option_string[1] not in chars:
            option_prefix = option_string
            explicit_arg = None
            short_option_prefix = option_string[:2]
            short_explicit_arg = option_string[2:]

            for option_string in self._option_string_actions:
                if option_string == short_option_prefix:
                    action = self._option_string_actions[option_string]
                    tup = action, option_string, short_explicit_arg
                    result.append(tup)
                elif option_string.startswith(option_prefix):
                    action = self._option_string_actions[option_string]
                    tup = action, option_string, explicit_arg
                    result.append(tup)

        # shouldn't ever get here
        else:
            self.error(_('unexpected option string: %s') % option_string)

        # return the collected option tuples
        return result
def _get_nargs_pattern(self, action):
# in all examples below, we have to allow for '--' args
# which are represented as '-' in the pattern
nargs = action.nargs
# the default (None) is assumed to be a single argument
if nargs is None:
nargs_pattern = '(-*A-*)'
# allow zero or one arguments
elif nargs == OPTIONAL:
nargs_pattern = '(-*A?-*)'
# allow zero or more arguments
elif nargs == ZERO_OR_MORE:
nargs_pattern = '(-*[A-]*)'
# allow one or more arguments
elif nargs == ONE_OR_MORE:
nargs_pattern = '(-*A[A-]*)'
# allow any number of options or arguments
elif nargs == REMAINDER:
nargs_pattern = '([-AO]*)'
# allow one argument followed by any number of options or arguments
elif nargs == PARSER:
nargs_pattern = '(-*A[-AO]*)'
# all others should be integers
else:
nargs_pattern = '(-*%s-*)' % '-*'.join('A' * nargs)
# if this is an optional action, -- is not allowed
if action.option_strings:
nargs_pattern = nargs_pattern.replace('-*', '')
nargs_pattern = nargs_pattern.replace('-', '')
# return the pattern
return nargs_pattern
# ========================
# Value conversion methods
# ========================
    def _get_values(self, action, arg_strings):
        """Convert raw arg strings into the value handed to the action.

        The shape of the result (single value vs. list) depends on
        action.nargs; choices are validated via _check_value along the
        way.
        """
        # for everything but PARSER args, strip out '--'
        if action.nargs not in [PARSER, REMAINDER]:
            arg_strings = [s for s in arg_strings if s != '--']

        # optional argument produces a default when not present
        if not arg_strings and action.nargs == OPTIONAL:
            if action.option_strings:
                value = action.const
            else:
                value = action.default
            if isinstance(value, basestring):
                value = self._get_value(action, value)
            self._check_value(action, value)

        # when nargs='*' on a positional, if there were no command-line
        # args, use the default if it is anything other than None
        elif (not arg_strings and action.nargs == ZERO_OR_MORE and
              not action.option_strings):
            if action.default is not None:
                value = action.default
            else:
                value = arg_strings
            self._check_value(action, value)

        # single argument or optional argument produces a single value
        elif len(arg_strings) == 1 and action.nargs in [None, OPTIONAL]:
            arg_string, = arg_strings
            value = self._get_value(action, arg_string)
            self._check_value(action, value)

        # REMAINDER arguments convert all values, checking none
        elif action.nargs == REMAINDER:
            value = [self._get_value(action, v) for v in arg_strings]

        # PARSER arguments convert all values, but check only the first
        elif action.nargs == PARSER:
            value = [self._get_value(action, v) for v in arg_strings]
            self._check_value(action, value[0])

        # all other types of nargs produce a list
        else:
            value = [self._get_value(action, v) for v in arg_strings]
            for v in value:
                self._check_value(action, v)

        # return the converted value
        return value
def _get_value(self, action, arg_string):
type_func = self._registry_get('type', action.type, action.type)
if not _callable(type_func):
msg = _('%r is not callable')
raise ArgumentError(action, msg % type_func)
# convert the value to the appropriate type
try:
result = type_func(arg_string)
# ArgumentTypeErrors indicate errors
except ArgumentTypeError:
name = getattr(action.type, '__name__', repr(action.type))
msg = str(_sys.exc_info()[1])
raise ArgumentError(action, msg)
# TypeErrors or ValueErrors also indicate errors
except (TypeError, ValueError):
name = getattr(action.type, '__name__', repr(action.type))
msg = _('invalid %s value: %r')
raise ArgumentError(action, msg % (name, arg_string))
# return the converted value
return result
def _check_value(self, action, value):
    """Raise ArgumentError unless *value* is among action.choices (if any)."""
    choices = action.choices
    if choices is None or value in choices:
        return
    formatted = ', '.join(map(repr, choices))
    raise ArgumentError(action,
                        _('invalid choice: %r (choose from %s)') % (value, formatted))
# =======================
# Help-formatting methods
# =======================
def format_usage(self):
    """Return just the usage line for this parser as a string."""
    fmt = self._get_formatter()
    fmt.add_usage(self.usage, self._actions, self._mutually_exclusive_groups)
    return fmt.format_help()
def format_help(self):
    """Render the full help text: usage, description, argument groups, epilog."""
    fmt = self._get_formatter()
    # usage line first
    fmt.add_usage(self.usage, self._actions, self._mutually_exclusive_groups)
    # free-text description
    fmt.add_text(self.description)
    # one section per action group (positionals, optionals, user groups)
    for group in self._action_groups:
        fmt.start_section(group.title)
        fmt.add_text(group.description)
        fmt.add_arguments(group._group_actions)
        fmt.end_section()
    # trailing epilog text
    fmt.add_text(self.epilog)
    return fmt.format_help()
def format_version(self):
    """Deprecated: render self.version through the help formatter."""
    import warnings
    warnings.warn(
        'The format_version method is deprecated -- the "version" '
        'argument to ArgumentParser is no longer supported.',
        DeprecationWarning)
    fmt = self._get_formatter()
    fmt.add_text(self.version)
    return fmt.format_help()
def _get_formatter(self):
    """Instantiate this parser's help formatter, bound to its prog name."""
    return self.formatter_class(prog=self.prog)
# =====================
# Help-printing methods
# =====================
def print_usage(self, file=None):
    """Write the usage message to *file*, defaulting to stdout."""
    target = file if file is not None else _sys.stdout
    self._print_message(self.format_usage(), target)
def print_help(self, file=None):
    """Write the full help text to *file*, defaulting to stdout."""
    target = file if file is not None else _sys.stdout
    self._print_message(self.format_help(), target)
def print_version(self, file=None):
    """Deprecated: write the version message to *file*."""
    import warnings
    warnings.warn(
        'The print_version method is deprecated -- the "version" '
        'argument to ArgumentParser is no longer supported.',
        DeprecationWarning)
    message = self.format_version()
    self._print_message(message, file)
def _print_message(self, message, file=None):
    """Write *message* to *file* (stderr by default); no-op when empty."""
    if not message:
        return
    target = file if file is not None else _sys.stderr
    target.write(message)
# ===============
# Exiting methods
# ===============
def exit(self, status=0, message=None):
    """Write *message* (if any) to stderr, then terminate with *status*."""
    if message:
        self._print_message(message, _sys.stderr)
    _sys.exit(status)
def error(self, message):
    """Print usage plus ``prog: error: message`` on stderr and exit(2).

    Subclass overrides must not return normally -- they should either
    exit the process or raise an exception.
    """
    self.print_usage(_sys.stderr)
    details = (self.prog, message)
    self.exit(2, _('%s: error: %s\n') % details)
| gpl-3.0 |
paboldin/rally | rally/plugins/common/sla/iteraion_time.py | 14 | 1664 | # Copyright 2014: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
SLA (Service-level agreement) is set of details for determining compliance
with contracted values such as maximum error rate or minimum response time.
"""
from rally.common.i18n import _
from rally.task import sla
@sla.configure(name="max_seconds_per_iteration")
class IterationTime(sla.SLA):
    """Maximum time for one iteration in seconds."""

    CONFIG_SCHEMA = {"type": "number", "minimum": 0.0,
                     "exclusiveMinimum": True}

    def __init__(self, criterion_value):
        super(IterationTime, self).__init__(criterion_value)
        # Longest iteration duration seen so far.
        self.max_iteration_time = 0.0

    def add_iteration(self, iteration):
        """Track the running maximum and re-evaluate the criterion."""
        duration = iteration["duration"]
        if duration > self.max_iteration_time:
            self.max_iteration_time = duration
        self.success = self.max_iteration_time <= self.criterion_value
        return self.success

    def details(self):
        """Human-readable summary of the criterion evaluation."""
        info = (self.max_iteration_time, self.criterion_value, self.status())
        return _("Maximum seconds per iteration %.2fs <= %.2fs - %s") % info
| apache-2.0 |
louietsai/python-for-android | python-build/python-libs/gdata/build/lib/gdata/analytics/__init__.py | 261 | 6995 | #!/usr/bin/python
#
# Original Copyright (C) 2006 Google Inc.
# Refactored in 2009 to work for Google Analytics by Sal Uryasev at Juice Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Note that this module will not function without specifically adding
# 'analytics': [ #Google Analytics
# 'https://www.google.com/analytics/feeds/'],
# to CLIENT_LOGIN_SCOPES in the gdata/service.py file
"""Contains extensions to Atom objects used with Google Analytics."""
__author__ = 'api.suryasev (Sal Uryasev)'
import atom
import gdata
GAN_NAMESPACE = 'http://schemas.google.com/analytics/2009'
class TableId(gdata.GDataEntry):
    """tableId element.

    Wraps the <tableId> child element (Analytics 'gan' namespace) of an
    account-list entry.
    """
    _tag = 'tableId'
    _namespace = GAN_NAMESPACE
class Property(gdata.GDataEntry):
    """A <property> name/value pair (Analytics 'gan' namespace)."""
    _tag = 'property'
    _namespace = GAN_NAMESPACE
    _children = gdata.GDataEntry._children.copy()
    _attributes = gdata.GDataEntry._attributes.copy()
    # Map the XML attributes 'name'/'value' onto same-named Python attributes.
    _attributes['name'] = 'name'
    _attributes['value'] = 'value'

    def __init__(self, name=None, value=None, *args, **kwargs):
        self.name = name
        self.value = value
        super(Property, self).__init__(*args, **kwargs)

    def __str__(self):
        # Both str() and repr() yield the raw property value.
        return self.value

    def __repr__(self):
        return self.value
class AccountListEntry(gdata.GDataEntry):
    """An Atom entry in the Analytics account-list feed."""
    _tag = 'entry'
    _namespace = atom.ATOM_NAMESPACE
    _children = gdata.GDataEntry._children.copy()
    _attributes = gdata.GDataEntry._attributes.copy()
    # Parse <gan:tableId> and <gan:property> children into lists.
    _children['{%s}tableId' % GAN_NAMESPACE] = ('tableId',
                                                [TableId])
    _children['{%s}property' % GAN_NAMESPACE] = ('property',
                                                 [Property])

    def __init__(self, tableId=None, property=None,
                 *args, **kwargs):
        self.tableId = tableId
        self.property = property
        super(AccountListEntry, self).__init__(*args, **kwargs)
def AccountListEntryFromString(xml_string):
    """Build an AccountListEntry from its XML representation.

    Args:
      xml_string: string The XML describing a single account-list entry.

    Returns:
      An AccountListEntry object corresponding to the given XML.
    """
    entry = atom.CreateClassFromXMLString(AccountListEntry, xml_string)
    return entry
class AccountListFeed(gdata.GDataFeed):
    """An Atom feed whose entries are AccountListEntry objects."""
    _tag = 'feed'
    _namespace = atom.ATOM_NAMESPACE
    _children = gdata.GDataFeed._children.copy()
    _attributes = gdata.GDataFeed._attributes.copy()
    # Entries parse as AccountListEntry rather than plain GDataEntry.
    _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry',
                                                    [AccountListEntry])
def AccountListFeedFromString(xml_string):
    """Converts an XML string into an AccountListFeed object.

    Args:
      xml_string: string The XML describing an AccountList feed.

    Returns:
      An AccountListFeed object corresponding to the given XML.
      All properties are also linked to with a direct reference
      from each entry object for convenience. (e.g. entry.AccountName)
    """
    feed = atom.CreateClassFromXMLString(AccountListFeed, xml_string)
    # Guard against absent children, mirroring AnalyticsDataFeedFromString:
    # entries with no <property>/<tableId> children leave those attrs None.
    if feed.entry:
        for entry in feed.entry:
            if entry.property is not None:
                for pro in entry.property:
                    # e.g. property 'ga:AccountName' becomes entry.AccountName
                    entry.__dict__[pro.name.replace('ga:','')] = pro
            if entry.tableId is not None:
                for td in entry.tableId:
                    td.__dict__['value'] = td.text
    return feed
class Dimension(gdata.GDataEntry):
    """A <dimension> element (name/value/type/confidenceInterval)."""
    _tag = 'dimension'
    _namespace = GAN_NAMESPACE
    _children = gdata.GDataEntry._children.copy()
    _attributes = gdata.GDataEntry._attributes.copy()
    _attributes['name'] = 'name'
    _attributes['value'] = 'value'
    _attributes['type'] = 'type'
    # camelCase XML attribute maps to a snake_case Python attribute.
    _attributes['confidenceInterval'] = 'confidence_interval'

    def __init__(self, name=None, value=None, type=None,
                 confidence_interval = None, *args, **kwargs):
        self.name = name
        self.value = value
        self.type = type
        self.confidence_interval = confidence_interval
        super(Dimension, self).__init__(*args, **kwargs)

    def __str__(self):
        # Both str() and repr() yield the raw value.
        return self.value

    def __repr__(self):
        return self.value
class Metric(gdata.GDataEntry):
    """A <metric> element; structurally identical to Dimension except tag."""
    _tag = 'metric'
    _namespace = GAN_NAMESPACE
    _children = gdata.GDataEntry._children.copy()
    _attributes = gdata.GDataEntry._attributes.copy()
    _attributes['name'] = 'name'
    _attributes['value'] = 'value'
    _attributes['type'] = 'type'
    # camelCase XML attribute maps to a snake_case Python attribute.
    _attributes['confidenceInterval'] = 'confidence_interval'

    def __init__(self, name=None, value=None, type=None,
                 confidence_interval = None, *args, **kwargs):
        self.name = name
        self.value = value
        self.type = type
        self.confidence_interval = confidence_interval
        super(Metric, self).__init__(*args, **kwargs)

    def __str__(self):
        return self.value

    def __repr__(self):
        return self.value
class AnalyticsDataEntry(gdata.GDataEntry):
    """The Google Analytics version of an Atom Entry.

    Carries lists of Dimension and Metric children.
    """
    _tag = 'entry'
    _namespace = atom.ATOM_NAMESPACE
    _children = gdata.GDataEntry._children.copy()
    _attributes = gdata.GDataEntry._attributes.copy()
    _children['{%s}dimension' % GAN_NAMESPACE] = ('dimension',
                                                  [Dimension])
    _children['{%s}metric' % GAN_NAMESPACE] = ('metric',
                                               [Metric])

    def __init__(self, dimension=None, metric=None, *args, **kwargs):
        self.dimension = dimension
        self.metric = metric
        super(AnalyticsDataEntry, self).__init__(*args, **kwargs)
class AnalyticsDataFeed(gdata.GDataFeed):
    """An Atom feed whose entries are AnalyticsDataEntry objects."""
    _tag = 'feed'
    _namespace = atom.ATOM_NAMESPACE
    _children = gdata.GDataFeed._children.copy()
    _attributes = gdata.GDataFeed._attributes.copy()
    _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry',
                                                    [AnalyticsDataEntry])
"""
Data Feed
"""
def AnalyticsDataFeedFromString(xml_string):
    """Converts an XML string into an AnalyticsDataFeed object.

    Args:
      xml_string: string The XML describing an Analytics data feed.

    Returns:
      An AnalyticsDataFeed object corresponding to the given XML.
      Each metric and dimension is also referenced directly from
      the entry for easier access. (e.g. entry.keyword.value)
    """
    feed = atom.CreateClassFromXMLString(AnalyticsDataFeed, xml_string)
    if not feed.entry:
        return feed
    for entry in feed.entry:
        # e.g. metric 'ga:visits' becomes entry.visits
        for met in entry.metric:
            entry.__dict__[met.name.replace('ga:','')] = met
        if entry.dimension is not None:
            for dim in entry.dimension:
                entry.__dict__[dim.name.replace('ga:','')] = dim
    return feed
| apache-2.0 |
perwin/imfit | distribution_manifest.py | 1 | 6640 | # Lists of files for use by make_distributions.py
# Executables that only appear in binary distributions.
binary_only_files = """
imfit
imfit-mcmc
makeimage
"""

# User documentation (PDF plus its LaTeX source).
documentation_files = """
imfit_howto.pdf
imfit_howto.tex
"""

# License/readme/changelog files required in every distribution.
misc_required_files = """
COPYING.txt
DISCLAIMER
README.txt
CHANGELOG.md
cdream/LICENSE_cdream.txt
"""

# Optional extras shipped alongside the main programs.
extras_files = """
imfit_completions.bash
README.md
"""

# Python helper modules bundled with binary distributions.
python_files_for_binary_dist = """
python/imfit.py
python/imfit_funcs.py
"""
# header files in core/
source_header_files_core = """
add_functions
bootstrap_errors
commandline_parser
config_file_parser
convolver
definitions
downsample
estimate_memory
getimages
image_io
mersenne_twister
model_object
mp_enorm
options_base
options_imfit
options_makeimage
options_mcmc
oversampled_region
param_struct
print_results
psf_oversampling_info
sample_configs
setup_model_object
statistics
utilities_pub
"""
# header files in cdream/
source_header_files_mcmc = """
array
dream
dream_params
include/rng/GSLRng
include/rng/GSLStream
include/rng/MKLRng
include/rng/MKLStream
include/rng/Rng
include/rng/RngStream
"""
# the following are C++ files
source_files_core = """
add_functions
bootstrap_errors
commandline_parser
config_file_parser
convolver
downsample
estimate_memory
getimages
image_io
imfit_main
makeimage_main
mcmc_main
mersenne_twister
model_object
mp_enorm
oversampled_region
print_results
psf_oversampling_info
setup_model_object
statistics
utilities
"""
source_files_solvers ="""
levmar_fit
mpfit
diff_evoln_fit
DESolver
nmsimplex_fit
nlopt_fit
dispatch_solver
solver_results
"""
source_files_mcmc ="""
check_outliers
dream
dream_initialize
dream_pars
gelman_rubin
gen_CR
restore_state
"""
source_files_funcobj = """
function_object
func_gaussian
func_exp
func_gen-exp
func_sersic
func_gen-sersic
func_core-sersic
func_broken-exp
func_broken-exp2d
func_king
func_king2
func_moffat
func_flatsky
func_tilted-sky-plane
func_gaussian-ring
func_gaussian-ring2side
func_gaussian-ring-az
func_edge-on-disk_n4762
func_edge-on-disk_n4762v2
func_n4608disk
func_edge-on-ring
func_edge-on-ring2side
func_edge-on-disk
func_brokenexpdisk3d
func_expdisk3d
func_gaussianring3d
func_ferrersbar2d
func_ferrersbar3d
func_flatbar
func_pointsource
helper_funcs
helper_funcs_3d
integrator
psf_interpolators
"""
example_files = """
config_exponential_ic3478_256.dat
config_sersic_ic3478_256.dat
ic3478rss_256.fits
ic3478rss_256_mask.fits
config_makeimage_moffat_psf.dat
psf_moffat_51.fits
README_examples.txt
"""
testing_scripts = """
do_imfit_tests
do_mcmc_tests
do_makeimage_tests
"""
python_files = """
py_startup_test.py
check_speedup.py
compare_fits_files.py
compare_imfit_printouts.py
diff_printouts.py
imfit.py
imfit_funcs.py
"""
# for tests/imfit_reference/
test_files_imfit = """
config_imfit_expdisk32.dat
imfit_config_ic3478_64x64.dat
imfit_config_ic3478_64x64b.dat
imfit_config_ic3478_64x64c.dat
imfit_config_n3073.dat
config_imfit_pgc35772.dat
config_imfit_gauss-oversample-test2.dat
config_imfit_2gauss_small.dat
config_imfit_poisson.dat
config_imfit_flatsky.dat
config_imfit_small-gaussian.dat
config_3x3_flatsky.dat
config_imfit_ptsource.dat
config_imfit_sersictest512_badlimits1.dat
config_imfit_sersictest512_badlimits2.dat
config_imfit_sersictest512_badlimits3.dat
config_imfit_badparamline.dat
imfit_textout1
imfit_textout2
imfit_textout3
imfit_textout3b
imfit_textout3c_tail
imfit_textout3d
imfit_textout3d2
imfit_textout3e
imfit_textout3e2
imfit_textout4
imfit_textout4b
imfit_textout4c
imfit_textout4d
imfit_textout4e
imfit_textout4e2
imfit_textout4e3
imfit_textout5a_tail
imfit_textout5b_tail
imfit_textout5c_tail
imfit_textout5d_tail
imfit_textout5e_tail
imfit_textout6
imfit_textout6b
imfit_textout6c
imfit_textout6d
imfit_textout6e
imfit_textout6f
imfit_textout6g
imfit_textout6h
imfit_textout7a
imfit_textout7b
imfit_textout7c
imfit_textout7d
imfit_textout7e
imfit_textout7f
imfit_textout8a
imfit_textout_bad0
imfit_textout_bad1
imfit_textout_bad2
imfit_textout_bad3
imfit_textout_bad4
imfit_textout_bad5
imfit_textout_bad6
imfit_textout_bad7
imfit_textout_bad8
imfit_textout_badnloptname
"""
# for tests/makeimage_reference/
test_files_makeimage = """
config_biggertest_4c.dat
config_makeimage_gensersic512.dat
config_makeimage_sersic+exp512b.dat
config_makeimage_sersic+exp512.dat
config_makeimage_sersic+exp512_nosize.dat
config_makeimage_gauss-oversample.dat
config_makeimage_gauss-oversample2.dat
config_makeimage_pointsource-oversample.dat
config_makeimage_sersictest512_bad1.dat
config_makeimage_sersictest512_bad2.dat
config_makeimage_sersictest512_bad3.dat
config_makeimage_sersictest512_bad4.dat
config_makeimage_sersictest512_bad5.dat
makeimage_textout0
makeimage_textout1
makeimage_textout2
makeimage_textout3
makeimage_textout4
makeimage_textout5
makeimage_textout5b
makeimage_textout6
makeimage_textout7
makeimage_textout8
makeimage_textout9
makeimage_textout10
makeimage_textout11
makeimage_textout12
makeimage_textout13
makeimage_textout13b
makeimage_textout13c
makeimage_textout14
makeimage_textout15
savefluxes_out.dat
"""
# for tests/imfit-mcmc_reference/
test_files_mcmc = """
config_imfit_faintstar.dat
config_imfit_faintstar_nolims.dat
mcmc_textout1
mcmc_ref1.1.txt_skip3
mcmc_ref2.1.txt_skip3
mcmc_test_short_ref.1.txt_last100
mcmc_ref4.1.txt_skip3
"""
# for tests/
test_files = """
bestfit_params_2gauss_small_tail.dat
bootstrap_output_seed10_tail.dat
bootstrap_output_seed10_2_tail.dat
uniform_image32.fits
testimage_expdisk32.fits
testimage_poisson_lowsn20.fits
testimage_3x3_ones.fits
testimage_3x3_allzeros.fits
testimage_3x3_mask-with-nan.fits
ic3478rss_64x64.fits
ic3478rss_64x64_sigma.fits
ic3478rss_64x64_variance.fits
n3073rss_small.fits
n3073rss_small_cps.fits
n3073rss_small_mask.fits
pgc35772_continuum.fits
pgc35772_mask.fits
faintstar.fits
totalmask_64x64.fits
biggertest_orig.fits
gensersictest_orig.fits
sersic+exp_orig.fits
gensersictest612_conv_cutout512.fits
testimage_2gauss_psf.fits
twogaussian_psf+2osamp_noisy.fits
flatsky_128x128.fits
testimage_3x3_nan.fits
testimage_3x3_onezero.fits
testimage_3x3_ones.fits
mask_for_onezero.fits
oversamp_test4.fits
test_emptyhdu.fits
test_multiextension_hdu0empty.fits
test_table.fits
psf_standard.fits
psf_oversamp.fits
psf_moffat_35.fits
psf_moffat_35_n4699z.fits
psf_moffat_35_oversamp3.fits
psf_moffat_fwhm2.fits
psf_moffat_fwhm2_35x35.fits
test_dump_mcmc2a
test_dump_mcmc2b
oversampled_orig.fits
oversampled2both_orig.fits
oversampled_pointsource.fits
mcmc_data/mcmc_test_short.1.txt
mcmc_data/mcmc_test_short.2.txt
mcmc_data/mcmc_test_short.3.txt
mcmc_data/mcmc_test_short.4.txt
mcmc_data/mcmc_test_short.5.txt
mcmc_data/mcmc_test_short.6.txt
"""
| gpl-3.0 |
jjmleiro/hue | desktop/core/ext-py/boto-2.38.0/boto/vpc/customergateway.py | 170 | 1968 | # Copyright (c) 2009-2010 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Represents a Customer Gateway
"""
from boto.ec2.ec2object import TaggedEC2Object
class CustomerGateway(TaggedEC2Object):
    """A VPC customer gateway parsed from an EC2 API response."""

    def __init__(self, connection=None):
        super(CustomerGateway, self).__init__(connection)
        # Fields populated from the XML response by endElement().
        self.id = None
        self.type = None
        self.state = None
        self.ip_address = None
        self.bgp_asn = None

    def __repr__(self):
        return 'CustomerGateway:%s' % self.id

    def endElement(self, name, value, connection):
        """Store the text of a closing XML element on this object."""
        # Elements copied verbatim onto renamed attributes.
        verbatim = {'customerGatewayId': 'id',
                    'ipAddress': 'ip_address',
                    'type': 'type',
                    'state': 'state'}
        if name in verbatim:
            setattr(self, verbatim[name], value)
        elif name == 'bgpAsn':
            # The only field with a type conversion.
            self.bgp_asn = int(value)
        else:
            # Unknown elements fall through as plain attributes.
            setattr(self, name, value)
| apache-2.0 |
sahlinet/fastapp | fastapp/api_views.py | 1 | 13365 | # -*- coding: utf-8 -*-
import zipfile
import requests
import json
from threading import Thread
from rest_framework.renderers import JSONRenderer, JSONPRenderer
from rest_framework import permissions, viewsets, views
from rest_framework import status
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt
from django.shortcuts import get_object_or_404
from django.db import transaction
from django.core.management import call_command
from rest_framework import renderers
from rest_framework.authentication import SessionAuthentication, TokenAuthentication, BasicAuthentication
from rest_framework.response import Response
from rest_framework.decorators import link
from rest_framework.exceptions import APIException
from fastapp.importer import import_base
from fastapp.models import Base, Apy, Setting, TransportEndpoint, Transaction
from fastapp.api_serializers import PublicApySerializer, ApySerializer, BaseSerializer, SettingSerializer, TransportEndpointSerializer, TransactionSerializer
from fastapp.utils import check_code
from fastapp.api_auth import EveryoneAuthentication
from django.contrib.auth import get_user_model
User = get_user_model()
import logging
logger = logging.getLogger(__name__)
class ServerConfigViewSet(views.APIView):
    """Read-only endpoint exposing queue/worker settings as JSON."""
    renderer_classes = (JSONRenderer, )

    def get(self, *args, **kwargs):
        # settings imported locally inside the handler (module-level Django
        # settings access is thereby avoided at import time).
        from django.conf import settings
        data = {'QUEUE_HOST_ADDR': settings.WORKER_RABBITMQ_HOST,
                'QUEUE_HOST_PORT': settings.WORKER_RABBITMQ_PORT,
                'FASTAPP_WORKER_THREADCOUNT': settings.FASTAPP_WORKER_THREADCOUNT,
                'FASTAPP_PUBLISH_INTERVAL': settings.FASTAPP_PUBLISH_INTERVAL
                }
        return Response(data)
class SettingViewSet(viewsets.ModelViewSet):
    """CRUD for Setting objects scoped to one Base (by 'name' URL kwarg)."""
    model = Setting
    serializer_class = SettingSerializer
    authentication_classes = (TokenAuthentication, SessionAuthentication,)
    renderer_classes = [JSONRenderer, JSONPRenderer]

    def get_queryset(self):
        # Restricted to bases owned by the requesting user.
        name = self.kwargs['name']
        return Setting.objects.filter(base__user=self.request.user, base__name=name)

    def pre_save(self, obj):
        # Attach the parent Base before saving.
        # NOTE(review): unlike get_queryset, this lookup does not filter by
        # request.user -- confirm base names are globally unique or add the
        # user filter.
        obj.base = Base.objects.get(name=self.kwargs['name'])
class TransportEndpointViewSet(viewsets.ModelViewSet):
    """CRUD for the requesting user's TransportEndpoint records."""
    model = TransportEndpoint
    serializer_class = TransportEndpointSerializer
    authentication_classes = (SessionAuthentication,)

    def get_queryset(self):
        return TransportEndpoint.objects.filter(user=self.request.user)

    def pre_save(self, obj):
        # Stamp ownership before saving.
        obj.user = self.request.user
class TransactionViewSet(viewsets.ModelViewSet):
    """List Transactions for one of the user's bases, optionally by rid."""
    model = Transaction
    serializer_class = TransactionSerializer
    renderer_classes = [JSONRenderer, JSONPRenderer]
    authentication_classes = (TokenAuthentication, SessionAuthentication,)
    permission_classes = (permissions.IsAuthenticated,)

    def get_queryset(self):
        name = self.kwargs['name']
        # 404 unless the base exists and belongs to the requesting user.
        get_object_or_404(Base, user=self.request.user, name=name)
        queryset = Transaction.objects.filter(apy__base__name=name)
        rid = self.request.GET.get('rid', None)
        if rid is not None:
            # Filtered by request id: return all matches, unsliced.
            return queryset.filter(rid=rid)
        # Default: only the 10 most recently modified transactions.
        return queryset.order_by("-modified")[:10]
class ApyViewSet(viewsets.ModelViewSet):
    """CRUD plus clone for Apy objects on one of the user's bases."""
    model = Apy
    serializer_class = ApySerializer
    renderer_classes = [JSONRenderer, JSONPRenderer]
    authentication_classes = (TokenAuthentication, SessionAuthentication,)
    permission_classes = (permissions.IsAuthenticated,)

    def get_queryset(self):
        name = self.kwargs['name']
        # 404 when the base does not exist or is not owned by the user.
        get_object_or_404(Base, user=self.request.user, name=name)
        return Apy.objects.filter(base__user=self.request.user, base__name=name)

    def pre_save(self, obj):
        obj.base = Base.objects.get(name=self.kwargs['name'], user=self.request.user)
        # Static-check the Apy's module source; reject the save on findings.
        result, warnings, errors = check_code(obj.module, obj.name)
        warnings_prep = []
        errors_prep = []
        # Flatten pyflakes-style warning/error objects into plain dicts.
        for warning in warnings:
            warnings_prep.append(
                {
                    'filename': warning.filename,
                    'lineno': warning.lineno,
                    'col': warning.col,
                    'msg': warning.message % warning.message_args,
                })
        for error in errors:
            errors_prep.append(
                {
                    'filename': error.filename,
                    'lineno': error.lineno,
                    'col': error.col,
                    'msg': error.message,
                })
        if not result:
            response_data = {
                'warnings': warnings_prep,
                'errors': errors_prep
            }
            raise APIException(response_data)

    def clone(self, request, name, pk):
        """Copy Apy *pk* to a fresh '<pk>_clone_N' sibling and return it."""
        base = get_object_or_404(Base, name=name,
                                 user=User.objects.get(username=request.user.username))
        # NOTE(review): clone names derive from the numeric pk, not the Apy's
        # name -- confirm that is intended.
        clone_count = base.apys.filter(name__startswith="%s_clone" % pk).count()
        created = False
        # Keep incrementing the suffix until get_or_create makes a new row.
        while not created:
            cloned_exec, created = Apy.objects.get_or_create(base=base,
                                                             name="%s_clone_%s" % (pk, str(clone_count+1)))
            clone_count += 1
        cloned_exec.module = base.apys.get(id=pk).module
        cloned_exec.save()
        self.object = cloned_exec
        self.kwargs['pk'] = self.object.id
        return self.retrieve(request, new_pk=cloned_exec.id)
class ApyPublicExecutionViewSet(viewsets.ModelViewSet):
    """Execute an Apy addressed by owner username, without authentication."""
    model = Apy
    serializer_class = ApySerializer
    renderer_classes = [JSONRenderer, JSONPRenderer]
    # EveryoneAuthentication: requests need no credentials.
    authentication_classes = (EveryoneAuthentication,)

    def execute(self, request, username, name, apy_name):
        # NOTE(review): despite the class name, the lookup does not filter on
        # Apy.public -- confirm non-public apys should not be reachable here.
        apy_obj = get_object_or_404(Apy, base__user__username=username, name=apy_name, base__name=name)
        # Delegate the actual execution to the regular exec view.
        from fastapp.views import DjendExecView
        kwargs = {
            'base': name,
            'id': apy_obj.id
        }
        return DjendExecView.as_view()(self.request, **kwargs)
class ApyExecutionViewSet(viewsets.ModelViewSet):
    """Execute one of the authenticated user's own Apys."""
    model = Apy
    serializer_class = ApySerializer
    renderer_classes = [JSONRenderer, JSONPRenderer]
    authentication_classes = (TokenAuthentication, SessionAuthentication,)

    def execute(self, request, name, apy_name):
        apy_obj = get_object_or_404(Apy, base__user=self.request.user, name=apy_name, base__name=name)
        # Delegate the actual execution to the regular exec view.
        from fastapp.views import DjendExecView
        kwargs = {
            'base': name,
            'id': apy_obj.id
        }
        return DjendExecView.as_view()(self.request, **kwargs)
        #return reverse('exec', args=[name, apy_name])
class PublicApyViewSet(ApyViewSet):
    """View over every Apy flagged public, with the reduced serializer."""
    serializer_class = PublicApySerializer

    def get_queryset(self):
        # Unlike the parent class, not restricted to the requesting user.
        return Apy.objects.filter(public=True)
class BaseAdminViewSet(viewsets.ModelViewSet):
    """Admin-only view over every Base, plus bulk worker maintenance."""
    model = Base
    serializer_class = BaseSerializer
    renderer_classes = [JSONRenderer, JSONPRenderer]
    authentication_classes = (
        TokenAuthentication,
        SessionAuthentication,
        BasicAuthentication
    )
    permission_classes = (permissions.IsAuthenticated, permissions.IsAdminUser)

    def get_queryset(self):
        return Base.objects.all()._clone().all()

    def destroy_all(self, request):
        """Kick off the destroy_workers management command in the background."""
        logger.info("Destroy all workers")
        thread = Thread(target=call_command, args=('destroy_workers', ))
        thread.start()
        # 202: work continues after the response is sent.
        return Response("ok", status=status.HTTP_202_ACCEPTED)

    def recreate_all(self, request):
        """Kick off the recreate_workers management command in the background."""
        logger.info("Recreate all workers")
        thread = Thread(target=call_command, args=('recreate_workers', ))
        thread.start()
        return Response("ok", status=status.HTTP_202_ACCEPTED)
class BaseLogViewSet(viewsets.ViewSet):
    """Admin-only access to a base executor's log output."""
    model = Base
    renderer_classes = [JSONRenderer, JSONPRenderer]
    authentication_classes = (
        TokenAuthentication,
        SessionAuthentication,
        BasicAuthentication
    )
    permission_classes = (permissions.IsAuthenticated, permissions.IsAdminUser)

    def log(self, request, pk):
        # No per-user filter here; the admin permission is the only guard.
        base = Base.objects.get(pk=pk)
        logs = base.executor.implementation.log(base.executor.pid)
        return Response(logs)
class BaseViewSet(viewsets.ModelViewSet):
    """Lifecycle management (start/stop/restart/destroy/transport) for the
    authenticated user's Base objects, addressed by name."""
    model = Base
    serializer_class = BaseSerializer
    renderer_classes = [JSONRenderer, JSONPRenderer]
    authentication_classes = (TokenAuthentication, SessionAuthentication,)
    permission_classes = (permissions.IsAuthenticated,)
    lookup_field = 'name'

    def get_queryset(self):
        return Base.objects.all()._clone().filter(user=self.request.user)

    def start(self, request, name):
        # Manual transaction control: lock the row (select_for_update) while
        # the worker is started, then commit.
        # NOTE(review): autocommit is disabled but never re-enabled in these
        # methods -- confirm middleware restores it per request.
        transaction.set_autocommit(False)
        logger.info("starting %s" % name)
        base = self.get_queryset().select_for_update(nowait=True).get(name=name)
        base.start()
        transaction.commit()
        return self.retrieve(request, name=name)

    def stop(self, request, name):
        transaction.set_autocommit(False)
        base = self.get_queryset().select_for_update(nowait=True).get(name=name)
        logger.info("stopping %s" % base.name)
        base.stop()
        transaction.commit()
        return self.retrieve(request, name=name)

    def restart(self, request, name):
        # NOTE(review): unlike start/stop, no select_for_update lock here.
        transaction.set_autocommit(False)
        logger.info("restarting %s" % name)
        base = self.get_queryset().get(name=name)
        base.stop()
        base.start()
        transaction.commit()
        return self.retrieve(request, name=name)

    def destroy(self, request, name):
        transaction.set_autocommit(False)
        logger.info("destroying %s: " % name)
        base = self.get_queryset().get(name=name)
        base.stop()
        base.destroy()
        transaction.commit()
        return self.retrieve(request, name=name)

    def transport(self, request, name):
        """Export the base as a ZIP and POST it to a remote transport endpoint."""
        base = self.get_queryset().get(name=name)
        transport_url = self.request.DATA['url']
        transport_token = self.request.DATA['token']
        zf = base.export()
        zf.seek(0)
        logger.info("Calling "+transport_url)
        # NOTE(review): the auth token is written to the log here.
        logger.info("Token "+transport_token)
        # Per-user endpoint record supplies the override flags.
        transport = TransportEndpoint.objects.get(
            user=request.user,
            url=transport_url
        )
        # NOTE(review): verify=False disables TLS certificate checking.
        r = requests.post(transport_url, headers={
            'Authorization': 'Token '+transport_token
        }, data={
            'name': base.name,
            'override_private': transport.override_settings_priv,
            'override_public': transport.override_settings_pub,
        }, files={
            'file': zf
        }, verify=False)
        logger.info(r.request.headers)
        logger.info((r.status_code))
        logger.info((r.text))
        s = "transport %s" % transport_url
        if r.status_code == 201:
            logger.info("%s success" % s)
            return self.retrieve(request, name=name)
        else:
            logger.error("%s failed with returncode %s" % (s, r.status_code))
            raise Exception("%s failed" % s)

    @link()
    def apy(self, request, name):
        # NOTE(review): filters by base name only, not by owner -- confirm.
        queryset = Apy.objects.filter(base__name=name)
        serializer = ApySerializer(queryset,
                                   context={'request': request}, many=True)
        return Response(serializer.data)
class ZipFileRenderer(renderers.BaseRenderer):
    """DRF renderer that passes through ready-made ZIP bytes unchanged."""
    media_type = 'application/zip'
    format = 'zip'

    def render(self, data, media_type=None, renderer_context=None):
        # Data is already a binary ZIP payload; emit as-is.
        return data
class BaseExportViewSet(viewsets.ModelViewSet):
    """Download one of the user's bases as a ZIP archive."""
    model = Base
    permission_classes = (permissions.IsAuthenticated,)
    renderer_classes = [ZipFileRenderer]

    def get_queryset(self):
        return Base.objects.all()._clone().filter(user=self.request.user)

    def export(self, request, name):
        base = self.get_queryset().get(name=name)
        f = base.export()
        logger.info(f)
        # Serve the in-memory ZIP as a file download.
        response = Response(f.getvalue(), headers={
            'Content-Disposition': 'attachment; filename=%s.zip' % base.name
        }, content_type='application/zip')
        return response
class BaseImportViewSet(viewsets.ModelViewSet):
    """Upload a ZIP archive and import it as a Base for the request user."""
    model = Base
    authentication_classes = (TokenAuthentication, SessionAuthentication, )
    permission_classes = (permissions.IsAuthenticated,)

    @method_decorator(csrf_exempt)
    def dispatch(self, *args, **kwargs):
        # CSRF exempt so token-authenticated uploads work without a cookie.
        return super(BaseImportViewSet, self).dispatch(*args, **kwargs)

    def get_queryset(self):
        return Base.objects.all()._clone().filter(user=self.request.user)

    def imp(self, request):
        """Import the uploaded ZIP under the POSTed name; respond 201."""
        logger.info("start import")
        # Base
        name = request.POST['name']
        # NOTE(review): bool() of any non-empty query string is True, so
        # e.g. ?override_public=false still enables the override -- confirm.
        override_public = bool(request.GET.get('override_public', False))
        override_private = bool(request.GET.get('override_private', False))
        f = request.FILES['file']
        zf = zipfile.ZipFile(f)
        base = import_base(zf,
                           request.user,
                           name,
                           override_public,
                           override_private)
        base_queryset = base
        base.save()
        serializer = BaseSerializer(base_queryset,
                                    context={'request': request}, many=False)
        response = Response(serializer.data, status=201)
        return response
| mit |
Neamar/django | django/core/serializers/pyyaml.py | 439 | 2843 | """
YAML serializer.
Requires PyYaml (http://pyyaml.org/), but that's checked for in __init__.
"""
import collections
import decimal
import sys
from io import StringIO
import yaml
from django.core.serializers.base import DeserializationError
from django.core.serializers.python import (
Deserializer as PythonDeserializer, Serializer as PythonSerializer,
)
from django.db import models
from django.utils import six
# Use the C (faster) implementation if possible
try:
from yaml import CSafeLoader as SafeLoader
from yaml import CSafeDumper as SafeDumper
except ImportError:
from yaml import SafeLoader, SafeDumper
class DjangoSafeDumper(SafeDumper):
    """SafeDumper that also knows how to serialize Decimal and OrderedDict."""

    def represent_decimal(self, data):
        # Dump decimals as plain YAML strings to avoid float precision loss.
        return self.represent_scalar('tag:yaml.org,2002:str', str(data))

    def represent_ordered_dict(self, data):
        # Dump OrderedDicts as ordinary YAML maps, preserving item order.
        return self.represent_mapping('tag:yaml.org,2002:map', data.items())

# Register the custom representers on the dumper class.
DjangoSafeDumper.add_representer(decimal.Decimal, DjangoSafeDumper.represent_decimal)
DjangoSafeDumper.add_representer(collections.OrderedDict, DjangoSafeDumper.represent_ordered_dict)
class Serializer(PythonSerializer):
    """
    Convert a queryset to YAML.
    """
    internal_use_only = False

    def handle_field(self, obj, field):
        # A nasty special case: base YAML doesn't support serialization of time
        # types (as opposed to dates or datetimes, which it does support). Since
        # we want to use the "safe" serializer for better interoperability, we
        # need to do something with those pesky times. Converting 'em to strings
        # isn't perfect, but it's better than a "!!python/time" type which would
        # halt deserialization under any other language.
        if isinstance(field, models.TimeField) and getattr(obj, field.name) is not None:
            self._current[field.name] = str(getattr(obj, field.name))
        else:
            super(Serializer, self).handle_field(obj, field)

    def end_serialization(self):
        # Emit all collected objects with the Decimal/OrderedDict-aware dumper.
        yaml.dump(self.objects, self.stream, Dumper=DjangoSafeDumper, **self.options)

    def getvalue(self):
        # Grand-parent super: deliberately skip PythonSerializer.getvalue.
        return super(PythonSerializer, self).getvalue()
def Deserializer(stream_or_string, **options):
    """
    Deserialize a stream or string of YAML data.
    """
    # Accept raw bytes, text, or a file-like object; normalize everything to
    # a stream for yaml.load().
    if isinstance(stream_or_string, bytes):
        stream_or_string = stream_or_string.decode('utf-8')
    if isinstance(stream_or_string, six.string_types):
        stream = StringIO(stream_or_string)
    else:
        stream = stream_or_string
    try:
        # SafeLoader refuses arbitrary Python object construction, which would
        # otherwise be a code-execution vector on untrusted input.
        for obj in PythonDeserializer(yaml.load(stream, Loader=SafeLoader), **options):
            yield obj
    except GeneratorExit:
        # Let generator close requests propagate untouched.
        raise
    except Exception as e:
        # Map to deserializer error
        six.reraise(DeserializationError, DeserializationError(e), sys.exc_info()[2])
| bsd-3-clause |
alianmohammad/linux-kernel-4.2.6-hacks | tools/perf/scripts/python/syscall-counts.py | 1996 | 1700 | # system call counts
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide system call totals, broken down by syscall.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import syscall_name
# Command-line handling: one optional [comm] argument restricts accounting
# to syscalls issued by that command name; anything more is a usage error.
usage = "perf script -s syscall-counts.py [comm]\n";
for_comm = None
if len(sys.argv) > 2:
    sys.exit(usage)
if len(sys.argv) > 1:
    for_comm = sys.argv[1]
# Per-syscall-id hit counters (autodict comes from the perf Core helpers;
# presumably an auto-vivifying dict -- hence the TypeError dance below).
syscalls = autodict()
def trace_begin():
    # Called once by perf before event processing starts.
    print "Press control+C to stop and show the summary"
def trace_end():
    # Called once by perf after the last event; emit the accumulated totals.
    print_syscall_totals()
def raw_syscalls__sys_enter(event_name, context, common_cpu,
    common_secs, common_nsecs, common_pid, common_comm,
    common_callchain, id, args):
    # Handler for the raw_syscalls:sys_enter tracepoint: count one hit per
    # syscall id, optionally filtered by command name.
    if for_comm is not None:
        if common_comm != for_comm:
            return
    try:
        syscalls[id] += 1
    except TypeError:
        # First hit for this id: the autodict slot is not an int yet.
        syscalls[id] = 1
def syscalls__sys_enter(event_name, context, common_cpu,
    common_secs, common_nsecs, common_pid, common_comm,
    id, args):
    # Compatibility shim for perf versions that deliver sys_enter without a
    # callchain argument.  Bug fix: raw_syscalls__sys_enter() requires a
    # `common_callchain` parameter which is absent from locals() here, so the
    # bare **locals() forward always raised TypeError; supply None explicitly.
    raw_syscalls__sys_enter(common_callchain=None, **locals())
def print_syscall_totals():
    # Print a table of syscall name -> hit count, most frequent first.
    # (Python 2 print statements with trailing commas suppress the extra
    # newline that perf's print already provides.)
    if for_comm is not None:
        print "\nsyscall events for %s:\n\n" % (for_comm),
    else:
        print "\nsyscall events:\n\n",
    print "%-40s  %10s\n" % ("event", "count"),
    print "%-40s  %10s\n" % ("----------------------------------------", \
                                 "-----------"),
    for id, val in sorted(syscalls.iteritems(), key = lambda(k, v): (v, k), \
                                  reverse = True):
        print "%-40s  %10d\n" % (syscall_name(id), val),
| gpl-2.0 |
Rajeshkumar90/ansible-modules-extras | commands/expect.py | 11 | 7702 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Matt Martz <matt@sivel.net>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import datetime
try:
import pexpect
HAS_PEXPECT = True
except ImportError:
HAS_PEXPECT = False
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: expect
version_added: 2.0
short_description: Executes a command and responds to prompts
description:
- The M(expect) module executes a command and responds to prompts
- The given command will be executed on all selected nodes. It will not be
processed through the shell, so variables like C($HOME) and operations
like C("<"), C(">"), C("|"), and C("&") will not work
options:
command:
description:
- the command module takes command to run.
required: true
creates:
description:
- a filename, when it already exists, this step will B(not) be run.
required: false
removes:
description:
- a filename, when it does not exist, this step will B(not) be run.
required: false
chdir:
description:
- cd into this directory before running the command
required: false
responses:
description:
- Mapping of expected string/regex and string to respond with. If the
response is a list, successive matches return successive
responses. List functionality is new in 2.1.
required: true
timeout:
description:
- Amount of time in seconds to wait for the expected strings
default: 30
echo:
description:
- Whether or not to echo out your response strings
default: false
requirements:
- python >= 2.6
- pexpect >= 3.3
notes:
- If you want to run a command through the shell (say you are using C(<),
C(>), C(|), etc), you must specify a shell in the command such as
C(/bin/bash -c "/path/to/something | grep else")
- The question, or key, under I(responses) is a python regex match. Case
insensitive searches are indicated with a prefix of C(?i)
- By default, if a question is encountered multiple times, it's string
response will be repeated. If you need different responses for successive
question matches, instead of a string response, use a list of strings as
the response. The list functionality is new in 2.1
author: "Matt Martz (@sivel)"
'''
EXAMPLES = '''
# Case insensitve password string match
- expect:
command: passwd username
responses:
(?i)password: "MySekretPa$$word"
# Generic question with multiple different responses
- expect:
command: /path/to/custom/command
responses:
Question:
- response1
- response2
- response3
'''
def response_closure(module, question, responses):
    # Build the callable pexpect invokes each time `question` matches;
    # successive matches consume successive entries of `responses`.
    # (Python 2 idioms: generator .next(), str.decode().)
    resp_gen = (u'%s\n' % r.rstrip('\n').decode() for r in responses)
    def wrapped(info):
        try:
            return resp_gen.next()
        except StopIteration:
            # More matches than configured responses: fail the module with
            # the child's last output for context.
            module.fail_json(msg="No remaining responses for '%s', "
                                 "output was '%s'" %
                                 (question,
                                  info['child_result_list'][-1]))
    return wrapped
def main():
    """Ansible module entry point.

    Parses module parameters, enforces the creates/removes idempotence
    guards, runs the command under pexpect with the configured prompt
    responses, and exits with the command's output and return code.
    """
    module = AnsibleModule(
        argument_spec=dict(
            command=dict(required=True),
            chdir=dict(),
            creates=dict(),
            removes=dict(),
            responses=dict(type='dict', required=True),
            timeout=dict(type='int', default=30),
            echo=dict(type='bool', default=False),
        )
    )
    if not HAS_PEXPECT:
        module.fail_json(msg='The pexpect python module is required')
    chdir = module.params['chdir']
    args = module.params['command']
    creates = module.params['creates']
    removes = module.params['removes']
    responses = module.params['responses']
    timeout = module.params['timeout']
    echo = module.params['echo']
    # Build the pexpect `events` mapping: prompt pattern -> response string,
    # or a closure for list responses (successive matches, successive replies).
    events = dict()
    for key, value in responses.iteritems():
        if isinstance(value, list):
            response = response_closure(module, key, value)
        else:
            response = u'%s\n' % value.rstrip('\n').decode()
        events[key.decode()] = response
    if args.strip() == '':
        module.fail_json(rc=256, msg="no command given")
    if chdir:
        chdir = os.path.abspath(os.path.expanduser(chdir))
        os.chdir(chdir)
    if creates:
        # do not run the command if the line contains creates=filename
        # and the filename already exists. This allows idempotence
        # of command executions.
        v = os.path.expanduser(creates)
        if os.path.exists(v):
            module.exit_json(
                cmd=args,
                stdout="skipped, since %s exists" % v,
                changed=False,
                rc=0
            )
    if removes:
        # do not run the command if the line contains removes=filename
        # and the filename does not exist. This allows idempotence
        # of command executions.
        v = os.path.expanduser(removes)
        if not os.path.exists(v):
            module.exit_json(
                cmd=args,
                stdout="skipped, since %s does not exist" % v,
                changed=False,
                rc=0
            )
    startd = datetime.datetime.now()
    try:
        try:
            # Prefer pexpect.run from pexpect>=4
            out, rc = pexpect.run(args, timeout=timeout, withexitstatus=True,
                                  events=events, cwd=chdir, echo=echo,
                                  encoding='utf-8')
        except TypeError:
            # Use pexpect.runu in pexpect>=3.3,<4
            out, rc = pexpect.runu(args, timeout=timeout, withexitstatus=True,
                                   events=events, cwd=chdir, echo=echo)
    except (TypeError, AttributeError):
        e = get_exception()
        # This should catch all insufficient versions of pexpect
        # We deem them insufficient for their lack of ability to specify
        # to not echo responses via the run/runu functions, which would
        # potentially leak sensitive information
        module.fail_json(msg='Insufficient version of pexpect installed '
                             '(%s), this module requires pexpect>=3.3. '
                             'Error was %s' % (pexpect.__version__, e))
    except pexpect.ExceptionPexpect:
        e = get_exception()
        module.fail_json(msg='%s' % e)
    endd = datetime.datetime.now()
    delta = endd - startd
    if out is None:
        out = ''
    ret = dict(
        cmd=args,
        stdout=out.rstrip('\r\n'),
        rc=rc,
        start=str(startd),
        end=str(endd),
        delta=str(delta),
        changed=True,
    )
    # pexpect returns rc=None when the command exceeded the timeout.
    if rc is not None:
        module.exit_json(**ret)
    else:
        ret['msg'] = 'command exceeded timeout'
        module.fail_json(**ret)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.pycompat24 import get_exception
# Run the module entry point when executed directly by Ansible.
if __name__ == '__main__':
    main()
| gpl-3.0 |
coldfix/udiskie | udiskie/notify.py | 1 | 9790 | """
Notification utility.
"""
import logging
from gi.repository import GLib
from .async_ import run_bg
from .common import exc_message, DaemonBase, format_exc
from .mount import DeviceActions
from .locale import _
__all__ = ['Notify']
class Notify(DaemonBase):
    """
    Notification tool.
    Can be connected to udisks daemon in order to automatically issue
    notifications when system status has changed.
    NOTE: the action buttons in the notifications don't work with all
    notification services.
    """
    # Event names this daemon can subscribe to; one handler method of the
    # same name exists for each.
    EVENTS = ['device_mounted', 'device_unmounted',
              'device_locked', 'device_unlocked',
              'device_added', 'device_removed',
              'job_failed']
    def __init__(self, notify, mounter, timeout=None, aconfig=None):
        """
        Initialize notifier and connect to service.
        :param notify: notification service module (gi.repository.Notify)
        :param mounter: Mounter object
        :param dict timeout: dictionary with timeouts for notifications
        :param dict aconfig: per-event action configuration; interpreted by
            _action_enabled/_has_actions below
        """
        self._notify = notify
        self._mounter = mounter
        self._actions = DeviceActions(mounter)
        self._timeout = timeout or {}
        self._aconfig = aconfig or {}
        # Global fallback timeout; -1 leaves the server's default in place.
        self._default = self._timeout.get('timeout', -1)
        self._log = logging.getLogger(__name__)
        self._notifications = []
        # Only expose handlers for events that are enabled in the config.
        self.events = {
            event: getattr(self, event)
            for event in self.EVENTS
            if self._enabled(event)
        }
    # event handlers:
    def device_mounted(self, device):
        """Show mount notification for specified device object."""
        if not self._mounter.is_handleable(device):
            return
        browse_action = (device, 'browse', _('Browse directory'),
                         self._mounter.browse, device)
        terminal_action = (device, 'terminal', _('Open terminal'),
                           self._mounter.terminal, device)
        # Browse/terminal buttons are only offered when the mounter has the
        # corresponding external program configured.
        self._show_notification(
            'device_mounted',
            _('Device mounted'),
            _('{0.ui_label} mounted on {0.mount_paths[0]}', device),
            device.icon_name,
            self._mounter._browser and browse_action,
            self._mounter._terminal and terminal_action)
    def device_unmounted(self, device):
        """Show unmount notification for specified device object."""
        if not self._mounter.is_handleable(device):
            return
        self._show_notification(
            'device_unmounted',
            _('Device unmounted'),
            _('{0.ui_label} unmounted', device),
            device.icon_name)
    def device_locked(self, device):
        """Show lock notification for specified device object."""
        if not self._mounter.is_handleable(device):
            return
        self._show_notification(
            'device_locked',
            _('Device locked'),
            _('{0.device_presentation} locked', device),
            device.icon_name)
    def device_unlocked(self, device):
        """Show unlock notification for specified device object."""
        if not self._mounter.is_handleable(device):
            return
        self._show_notification(
            'device_unlocked',
            _('Device unlocked'),
            _('{0.device_presentation} unlocked', device),
            device.icon_name)
    def device_added(self, device):
        """Show discovery notification for specified device object."""
        if not self._mounter.is_handleable(device):
            return
        if self._has_actions('device_added'):
            # wait for partitions etc to be reported to udiskie, otherwise we
            # can't discover the actions
            GLib.timeout_add(500, self._device_added, device)
        else:
            self._device_added(device)
    def _device_added(self, device):
        # Actual 'device added' notification; only drives and top-level
        # devices with a device file are announced.  Implicitly returns
        # None, which also stops the one-shot GLib timeout used above.
        device_file = device.device_presentation
        if (device.is_drive or device.is_toplevel) and device_file:
            # On UDisks1: cannot invoke self._actions.detect() for newly added
            # LUKS devices. It should be okay if we had waited for the actions
            # to be added, though.
            if self._has_actions('device_added'):
                node_tree = self._actions.detect(device.object_path)
                flat_actions = self._flatten_node(node_tree)
                actions = [
                    (action.device,
                     action.method,
                     action.label.format(action.device.ui_label),
                     action.action)
                    for action in flat_actions
                ]
            else:
                actions = ()
            self._show_notification(
                'device_added',
                _('Device added'),
                _('device appeared on {0.device_presentation}', device),
                device.icon_name,
                *actions)
    def _flatten_node(self, node):
        # Recursively collect the methods of a device-action tree into a
        # flat list (depth-first, children before the node's own methods).
        actions = [action
                   for branch in node.branches
                   for action in self._flatten_node(branch)]
        actions += node.methods
        return actions
    def device_removed(self, device):
        """Show removal notification for specified device object."""
        if not self._mounter.is_handleable(device):
            return
        device_file = device.device_presentation
        if (device.is_drive or device.is_toplevel) and device_file:
            self._show_notification(
                'device_removed',
                _('Device removed'),
                _('device disappeared on {0.device_presentation}', device),
                device.icon_name)
    def job_failed(self, device, action, message):
        """Show 'Job failed' notification with 'Retry' button."""
        if not self._mounter.is_handleable(device):
            return
        device_file = device.device_presentation or device.object_path
        if message:
            text = _('failed to {0} {1}:\n{2}', action, device_file, message)
        else:
            text = _('failed to {0} device {1}.', action, device_file)
        # Offer a retry button only if the mounter actually has a method
        # named after the failed action.
        try:
            retry = getattr(self._mounter, action)
        except AttributeError:
            retry_action = None
        else:
            retry_action = (device, 'retry', _('Retry'), retry, device)
        self._show_notification(
            'job_failed',
            _('Job failed'), text,
            device.icon_name,
            retry_action)
    def _show_notification(self,
                           event, summary, message, icon,
                           *actions):
        """
        Show a notification.
        :param str event: event name
        :param str summary: notification title
        :param str message: notification body
        :param str icon: icon name
        :param actions: each item is a tuple with parameters for _add_action;
            falsy items are skipped
        """
        notification = self._notify(summary, message, icon)
        timeout = self._get_timeout(event)
        if timeout != -1:
            # set_timeout expects milliseconds; config values are seconds.
            notification.set_timeout(int(timeout * 1000))
        for action in actions:
            if action and self._action_enabled(event, action[1]):
                self._add_action(notification, *action)
        try:
            notification.show()
        except GLib.GError as exc:
            # Catch and log the exception. Starting udiskie with notifications
            # enabled while not having a notification service installed is a
            # mistake too easy to be made, but it should not render the rest of
            # udiskie's logic useless by raising an exception before the
            # automount handler gets invoked.
            self._log.error(_("Failed to show notification: {0}", exc_message(exc)))
            self._log.debug(format_exc())
    def _add_action(self, notification, device, action, label, callback, *args):
        """
        Show an action button in mount notifications.
        Note, this only works with some libnotify services.
        """
        # Make the action id unique per device so multiple notifications
        # can coexist.
        action = action + ':' + device.device_file
        on_action_click = run_bg(lambda *_: callback(*args))
        try:
            # this is the correct signature for Notify-0.7, the last argument
            # being 'user_data':
            notification.add_action(action, label, on_action_click, None)
        except TypeError:
            # this is the signature for some older version, I don't know what
            # the last argument is for.
            notification.add_action(action, label, on_action_click, None, None)
        # gi.Notify does not store hard references to the notification
        # objects. When a signal is received and the notification does not
        # exist anymore, no handler will be called. Therefore, we need to
        # prevent these notifications from being destroyed by storing
        # references:
        notification.connect('closed', self._notifications.remove)
        self._notifications.append(notification)
    def _enabled(self, event):
        """Check if the notification for an event is enabled."""
        # A timeout of None or False in the config disables the event.
        return self._get_timeout(event) not in (None, False)
    def _get_timeout(self, event):
        """Get the timeout for an event from the config or None."""
        return self._timeout.get(event, self._default)
    def _action_enabled(self, event, action):
        """Check if an action for a notification is enabled."""
        # Missing entry means "all actions enabled"; False disables all;
        # otherwise the entry is a collection of enabled action names.
        event_actions = self._aconfig.get(event)
        if event_actions is None:
            return True
        if event_actions is False:
            return False
        return action in event_actions
    def _has_actions(self, event):
        """Check if a notification type has any enabled actions."""
        event_actions = self._aconfig.get(event)
        return event_actions is None or bool(event_actions)
| mit |
guangtunbenzhu/BGT-Cosmology | Spectroscopy/nmf.py | 2 | 4022 | """
Code for Non-negative Matrix Factorization (NMF)
Why class: taking advantage of encapsulation and inheritance in this case
n_instance: Number of instances (input data points)
n_dimention: Number of dimension of each instance
To-do:
- Sparese matrix
"""
from __future__ import division

import numpy as np
import scipy.linalg as LA
__all__ = [
'NMF']
class _BaseNMF(object):
    """Base class for NMF.

    Meant to be subclassed.  Likely needs revisiting if we want a more
    general matrix factorization class.
    """
    # No behavior of its own yet.  (The previous bodyless ``def __new__(cls):``
    # stub was a syntax error and, had it compiled, would also have rejected
    # subclass constructor arguments; it is removed.)
    pass


class NMF(_BaseNMF):
    """NMF with multiplicative update rules.

    Parameters
    ----------
    n_component : int
        Number of components.
    ranseed : int | None
        Seed for the random initialization of W and H (None = nondeterministic).
    tol : double, default: 1E-5
        Relative change in chi-squared below which iteration stops.
    maxiter : int, default: 1000
        Maximum number of update iterations.

    Attributes (set by `construct`)
    -------------------------------
    basisvector : array, [n_component, n_dimension]
        The learned basis H.
    coefficient : array, [n_instance, n_component]
        The learned coefficients W.
    chi_squared : double
        Weighted Frobenius norm ``||(X - WH) * Weight||`` at termination.
    niter : int
        Number of iterations performed.

    Examples
    --------
    >>> import numpy as np
    ... X.shape == (100, 3000)
    >>> from nmf import NMF
    >>> nmfbasis = NMF(12, ranseed=13)
    >>> nmfbasis.construct(X, np.ones_like(X))

    References
    ----------
    - Lee & Seung (2001) "Algorithms for Non-negative Matrix Factorization"
    - Blanton & Roweis, Kcorrect, ApJ, 133, 734 (2007)
    - jhusdss_nmf_engine.pro in jhusdss, Guangtun Ben Zhu
    - nmf.py in scikit-learn
    """
    def __init__(self, n_component, ranseed=None, tol=1E-5, maxiter=1000):
        """Store the factorization hyper-parameters."""
        self._n_component = n_component
        self._ranseed = ranseed
        self._tol = tol
        self._maxiter = maxiter

    def _initialize(self, X, Winit, Hinit):
        """Return initial (W, H): user-supplied if given, random otherwise."""
        # Honor the user-provided seed (previously accepted but ignored).
        if self._ranseed is not None:
            rng = np.random.RandomState(self._ranseed)
        else:
            rng = np.random
        if Winit is None:
            W = rng.rand(X.shape[0], self._n_component)
        else:
            if Winit.shape != (X.shape[0], self._n_component):
                raise ValueError("Initial values have wrong shape.")
            W = np.copy(Winit)
        if Hinit is None:
            H = rng.rand(self._n_component, X.shape[1])
        else:
            if Hinit.shape != (self._n_component, X.shape[1]):
                raise ValueError("Initial values have wrong shape.")
            H = np.copy(Hinit)
        return (W, H)

    # Main method
    def construct(self, X, Weight, Winit=None, Hinit=None):
        """Construct (learn) an NMF model for input data matrix X.

        Parameters
        ----------
        X : array, [n_instance, n_dimension]
            Nonnegative data matrix.
        Weight : array, same shape as X
            Nonnegative per-element weights (use ones for unweighted NMF).
        Winit, Hinit : arrays | None
            Optional initial values; if given, should be given together.

        Returns
        -------
        str : 'Success' on completion.
        """
        XWeight = X * Weight
        W, H = self._initialize(X, Winit, Hinit)
        # scipy.linalg.norm of a 2-D array is the Frobenius norm.
        chi_squared = LA.norm((X - np.dot(W, H)) * Weight)
        chi_squared_old = 1.e+100
        niter = 0
        eps = 1.e-12  # guard against division by zero in the updates
        while (niter < self._maxiter and
               np.fabs(chi_squared - chi_squared_old) / chi_squared_old > self._tol):
            # Multiplicative update rules (Lee & Seung 2001): update H, then W.
            H_up = np.dot(W.T, XWeight)
            H_down = np.dot(W.T, np.dot(W, H) * Weight)
            H = H * H_up / (H_down + eps)
            W_up = np.dot(XWeight, H.T)
            W_down = np.dot(np.dot(W, H) * Weight, H.T)
            W = W * W_up / (W_down + eps)
            chi_squared_old = chi_squared
            chi_squared = LA.norm((X - np.dot(W, H)) * Weight)
            # Some quick check. May need its own error class ...
            if not np.isfinite(chi_squared):
                raise ValueError("NMF construction failed, likely due to missing data")
            niter += 1
        self.basisvector = H
        self.coefficient = W
        self.niter = niter
        self.chi_squared = chi_squared
        return 'Success'
| mit |
deepmind/acme | acme/agents/tf/dmpo/agent.py | 1 | 7916 | # python3
# Copyright 2018 DeepMind Technologies Limited. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Distributional MPO agent implementation."""
import copy
from typing import Optional
from acme import datasets
from acme import specs
from acme import types
from acme.adders import reverb as adders
from acme.agents import agent
from acme.agents.tf import actors
from acme.agents.tf.dmpo import learning
from acme.tf import networks
from acme.tf import utils as tf2_utils
from acme.utils import counting
from acme.utils import loggers
import reverb
import sonnet as snt
import tensorflow as tf
class DistributionalMPO(agent.Agent):
  """Distributional MPO Agent.
  This implements a single-process distributional MPO agent. This is an
  actor-critic algorithm that generates data via a behavior policy, inserts
  N-step transitions into a replay buffer, and periodically updates the policy
  (and as a result the behavior) by sampling uniformly from this buffer.
  This agent distinguishes itself from the MPO agent by using a distributional
  critic (state-action value approximator).
  """

  def __init__(self,
               environment_spec: specs.EnvironmentSpec,
               policy_network: snt.Module,
               critic_network: snt.Module,
               observation_network: types.TensorTransformation = tf.identity,
               discount: float = 0.99,
               batch_size: int = 256,
               prefetch_size: int = 4,
               target_policy_update_period: int = 100,
               target_critic_update_period: int = 100,
               min_replay_size: int = 1000,
               max_replay_size: int = 1000000,
               samples_per_insert: float = 32.0,
               policy_loss_module: Optional[snt.Module] = None,
               policy_optimizer: Optional[snt.Optimizer] = None,
               critic_optimizer: Optional[snt.Optimizer] = None,
               n_step: int = 5,
               num_samples: int = 20,
               clipping: bool = True,
               logger: Optional[loggers.Logger] = None,
               counter: Optional[counting.Counter] = None,
               checkpoint: bool = True,
               replay_table_name: str = adders.DEFAULT_PRIORITY_TABLE):
    """Initialize the agent.
    Args:
      environment_spec: description of the actions, observations, etc.
      policy_network: the online (optimized) policy.
      critic_network: the online critic.
      observation_network: optional network to transform the observations before
        they are fed into any network.
      discount: discount to use for TD updates.
      batch_size: batch size for updates.
      prefetch_size: size to prefetch from replay.
      target_policy_update_period: number of updates to perform before updating
        the target policy network.
      target_critic_update_period: number of updates to perform before updating
        the target critic network.
      min_replay_size: minimum replay size before updating.
      max_replay_size: maximum replay size.
      samples_per_insert: number of samples to take from replay for every insert
        that is made.
      policy_loss_module: configured MPO loss function for the policy
        optimization; defaults to sensible values on the control suite.
        See `acme/tf/losses/mpo.py` for more details.
      policy_optimizer: optimizer to be used on the policy.
      critic_optimizer: optimizer to be used on the critic.
      n_step: number of steps to squash into a single transition.
      num_samples: number of actions to sample when doing a Monte Carlo
        integration with respect to the policy.
      clipping: whether to clip gradients by global norm.
      logger: logging object used to write to logs.
      counter: counter object used to keep track of steps.
      checkpoint: boolean indicating whether to checkpoint the learner.
      replay_table_name: string indicating what name to give the replay table.
    """
    # Create a replay server to add data to.
    # Bug fix: register the table under `replay_table_name` (previously
    # hard-coded to adders.DEFAULT_PRIORITY_TABLE, so passing a custom name
    # created a table that the dataset below could never read from).
    replay_table = reverb.Table(
        name=replay_table_name,
        sampler=reverb.selectors.Uniform(),
        remover=reverb.selectors.Fifo(),
        max_size=max_replay_size,
        rate_limiter=reverb.rate_limiters.MinSize(min_size_to_sample=1),
        signature=adders.NStepTransitionAdder.signature(environment_spec))
    self._server = reverb.Server([replay_table], port=None)
    # The adder is used to insert observations into replay.
    # NOTE(review): NStepTransitionAdder writes to its default table; if a
    # non-default `replay_table_name` is used, confirm the adder is
    # configured (e.g. via priority_fns) to target that same table.
    address = f'localhost:{self._server.port}'
    adder = adders.NStepTransitionAdder(
        client=reverb.Client(address),
        n_step=n_step,
        discount=discount)
    # The dataset object to learn from.
    dataset = datasets.make_reverb_dataset(
        table=replay_table_name,
        server_address=address,
        batch_size=batch_size,
        prefetch_size=prefetch_size)
    # Make sure observation network is a Sonnet Module.
    observation_network = tf2_utils.to_sonnet_module(observation_network)
    # Create target networks before creating online/target network variables.
    target_policy_network = copy.deepcopy(policy_network)
    target_critic_network = copy.deepcopy(critic_network)
    target_observation_network = copy.deepcopy(observation_network)
    # Get observation and action specs.
    act_spec = environment_spec.actions
    obs_spec = environment_spec.observations
    emb_spec = tf2_utils.create_variables(observation_network, [obs_spec])
    # Create the behavior policy.
    behavior_network = snt.Sequential([
        observation_network,
        policy_network,
        networks.StochasticSamplingHead(),
    ])
    # Create variables.
    tf2_utils.create_variables(policy_network, [emb_spec])
    tf2_utils.create_variables(critic_network, [emb_spec, act_spec])
    tf2_utils.create_variables(target_policy_network, [emb_spec])
    tf2_utils.create_variables(target_critic_network, [emb_spec, act_spec])
    tf2_utils.create_variables(target_observation_network, [obs_spec])
    # Create the actor which defines how we take actions.
    actor = actors.FeedForwardActor(
        policy_network=behavior_network, adder=adder)
    # Create optimizers.
    policy_optimizer = policy_optimizer or snt.optimizers.Adam(1e-4)
    critic_optimizer = critic_optimizer or snt.optimizers.Adam(1e-4)
    # The learner updates the parameters (and initializes them).
    learner = learning.DistributionalMPOLearner(
        policy_network=policy_network,
        critic_network=critic_network,
        observation_network=observation_network,
        target_policy_network=target_policy_network,
        target_critic_network=target_critic_network,
        target_observation_network=target_observation_network,
        policy_loss_module=policy_loss_module,
        policy_optimizer=policy_optimizer,
        critic_optimizer=critic_optimizer,
        clipping=clipping,
        discount=discount,
        num_samples=num_samples,
        target_policy_update_period=target_policy_update_period,
        target_critic_update_period=target_critic_update_period,
        dataset=dataset,
        logger=logger,
        counter=counter,
        checkpoint=checkpoint)
    super().__init__(
        actor=actor,
        learner=learner,
        min_observations=max(batch_size, min_replay_size),
        observations_per_step=float(batch_size) / samples_per_insert)
| apache-2.0 |
harshita-gupta/Harvard-FRSEM-Catalog-2016-17 | flask/lib/python2.7/site-packages/pip/commands/list.py | 168 | 7412 | from __future__ import absolute_import
import logging
import warnings
from pip.basecommand import Command
from pip.exceptions import CommandError
from pip.index import PackageFinder
from pip.utils import (
get_installed_distributions, dist_is_editable)
from pip.utils.deprecation import RemovedInPip10Warning
from pip.cmdoptions import make_option_group, index_group
logger = logging.getLogger(__name__)
class ListCommand(Command):
    """
    List installed packages, including editables.
    Packages are listed in a case-insensitive sorted order.
    """
    name = 'list'
    usage = """
      %prog [options]"""
    summary = 'List installed packages.'
    def __init__(self, *args, **kw):
        """Register the command-line options accepted by `pip list`."""
        super(ListCommand, self).__init__(*args, **kw)
        cmd_opts = self.cmd_opts
        cmd_opts.add_option(
            '-o', '--outdated',
            action='store_true',
            default=False,
            help='List outdated packages')
        cmd_opts.add_option(
            '-u', '--uptodate',
            action='store_true',
            default=False,
            help='List uptodate packages')
        cmd_opts.add_option(
            '-e', '--editable',
            action='store_true',
            default=False,
            help='List editable projects.')
        cmd_opts.add_option(
            '-l', '--local',
            action='store_true',
            default=False,
            help=('If in a virtualenv that has global access, do not list '
                  'globally-installed packages.'),
        )
        self.cmd_opts.add_option(
            '--user',
            dest='user',
            action='store_true',
            default=False,
            help='Only output packages installed in user-site.')
        cmd_opts.add_option(
            '--pre',
            action='store_true',
            default=False,
            help=("Include pre-release and development versions. By default, "
                  "pip only finds stable versions."),
        )
        # Index-related options (--index-url etc.) are shared with other
        # commands and inserted as a separate option group.
        index_opts = make_option_group(index_group, self.parser)
        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)
    def _build_package_finder(self, options, index_urls, session):
        """
        Create a package finder appropriate to this list command.
        """
        return PackageFinder(
            find_links=options.find_links,
            index_urls=index_urls,
            allow_all_prereleases=options.pre,
            trusted_hosts=options.trusted_hosts,
            process_dependency_links=options.process_dependency_links,
            session=session,
        )
    def run(self, options, args):
        """Validate options, warn about deprecated flags, and dispatch to
        the outdated / uptodate / plain listing mode."""
        # The --allow-* flags are kept only to emit deprecation warnings;
        # they no longer affect behavior.
        if options.allow_external:
            warnings.warn(
                "--allow-external has been deprecated and will be removed in "
                "the future. Due to changes in the repository protocol, it no "
                "longer has any effect.",
                RemovedInPip10Warning,
            )
        if options.allow_all_external:
            warnings.warn(
                "--allow-all-external has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )
        if options.allow_unverified:
            warnings.warn(
                "--allow-unverified has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )
        if options.outdated and options.uptodate:
            raise CommandError(
                "Options --outdated and --uptodate cannot be combined.")
        if options.outdated:
            self.run_outdated(options)
        elif options.uptodate:
            self.run_uptodate(options)
        else:
            self.run_listing(options)
    def run_outdated(self, options):
        """Print each installed package for which a newer version exists."""
        for dist, latest_version, typ in sorted(
                self.find_packages_latest_versions(options),
                key=lambda p: p[0].project_name.lower()):
            if latest_version > dist.parsed_version:
                logger.info(
                    '%s - Latest: %s [%s]',
                    self.output_package(dist), latest_version, typ,
                )
    def find_packages_latest_versions(self, options):
        """Yield (distribution, latest_version, typ) for every installed
        package, where typ is 'wheel', 'sdist', or 'unknown'."""
        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            logger.info('Ignoring indexes: %s', ','.join(index_urls))
            index_urls = []
        # Collect dependency links declared by installed distributions so the
        # finder can follow them.
        dependency_links = []
        for dist in get_installed_distributions(
                local_only=options.local,
                user_only=options.user,
                editables_only=options.editable):
            if dist.has_metadata('dependency_links.txt'):
                dependency_links.extend(
                    dist.get_metadata_lines('dependency_links.txt'),
                )
        with self._build_session(options) as session:
            finder = self._build_package_finder(options, index_urls, session)
            finder.add_dependency_links(dependency_links)
            installed_packages = get_installed_distributions(
                local_only=options.local,
                user_only=options.user,
                editables_only=options.editable,
            )
            for dist in installed_packages:
                typ = 'unknown'
                all_candidates = finder.find_all_candidates(dist.key)
                if not options.pre:
                    # Remove prereleases
                    all_candidates = [candidate for candidate in all_candidates
                                      if not candidate.version.is_prerelease]
                if not all_candidates:
                    continue
                best_candidate = max(all_candidates,
                                     key=finder._candidate_sort_key)
                remote_version = best_candidate.version
                if best_candidate.location.is_wheel:
                    typ = 'wheel'
                else:
                    typ = 'sdist'
                yield dist, remote_version, typ
    def run_listing(self, options):
        """Print all installed packages matching the filter options."""
        installed_packages = get_installed_distributions(
            local_only=options.local,
            user_only=options.user,
            editables_only=options.editable,
        )
        self.output_package_listing(installed_packages)
    def output_package(self, dist):
        """Format one distribution for display (editables include path)."""
        if dist_is_editable(dist):
            return '%s (%s, %s)' % (
                dist.project_name,
                dist.version,
                dist.location,
            )
        else:
            return '%s (%s)' % (dist.project_name, dist.version)
    def output_package_listing(self, installed_packages):
        """Print the given distributions, case-insensitively sorted by name."""
        installed_packages = sorted(
            installed_packages,
            key=lambda dist: dist.project_name.lower(),
        )
        for dist in installed_packages:
            logger.info(self.output_package(dist))
    def run_uptodate(self, options):
        """Print installed packages that are already at the latest version."""
        uptodate = []
        for dist, version, typ in self.find_packages_latest_versions(options):
            if dist.parsed_version == version:
                uptodate.append(dist)
        self.output_package_listing(uptodate)
| mit |
coufon/neon-distributed | tests/test_initializers.py | 3 | 2974 | # ----------------------------------------------------------------------------
# Copyright 2015 Nervana Systems Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
'''
Tests for the initializer classes.
'''
import itertools as itt
import numpy as np
from neon import NervanaObject
from neon.initializers.initializer import Array, Constant, Uniform, Gaussian, GlorotUniform
def pytest_generate_tests(metafunc):
    """Parametrize any test using an ``args`` fixture with (dim1, dim2) pairs."""
    if 'args' in metafunc.fixturenames:
        shapes = itt.product([1, 5], [2, 10])
        metafunc.parametrize('args', shapes)
def test_constant(backend_default, args):
    """Constant initializer must fill every element with the given value."""
    be = NervanaObject.be
    dim1, dim2 = args
    const_val = 3
    Wdev = be.empty((dim1, dim2))
    Constant(const_val).fill(Wdev)
    host = Wdev.get().flatten()
    assert np.all(host == const_val)
def test_array(backend_default, args):
    # NOTE(review): Array is constructed from the *empty* device buffer Wdev
    # and then fills Wloc, after which the two are compared.  This looks
    # inverted (one would expect Array(Wloc).fill(Wdev)); confirm against the
    # Array initializer's contract.
    be = NervanaObject.be
    dim1, dim2 = args
    shape = (dim1, dim2)
    Wloc = be.array(np.arange(shape[0]*shape[1]).reshape(shape))
    Wdev = be.empty(shape)
    init = Array(Wdev)
    init.fill(Wloc)
    assert np.all(np.equal(Wdev.get(), Wloc.get()))
    return
def test_uniform(backend_default, args):
    """Uniform initializer values must all fall inside [low, high]."""
    be = NervanaObject.be
    dim1, dim2 = args
    Wdev = be.empty((dim1, dim2))
    Uniform(low=-5, high=15).fill(Wdev)
    host = Wdev.get().flatten()
    assert np.all((host >= -5) & (host <= 15))
def test_gaussian(backend_default, args):
    # Sanity check only: with loc=10000 and scale=1, samples should sit far
    # above zero, so the nonnegativity assertion below essentially always
    # passes when the initializer honors its parameters at all.
    be = NervanaObject.be
    dim1, dim2 = args
    shape = (dim1, dim2)
    Wdev = be.empty(shape)
    gaussian_init = Gaussian(loc=10000, scale=1)
    gaussian_init.fill(Wdev)
    Whost = Wdev.get()
    flat = Whost.flatten()
    for elt in flat:
        # Not a very robust test...
        assert elt >= 0
    return
def test_glorot(backend_default, args):
    """Glorot-uniform mean magnitude should shrink as fan-in/fan-out grow."""
    be = NervanaObject.be
    small = be.empty((1, 2))
    large = be.empty((1000, 10000))
    glorot_init = GlorotUniform()
    glorot_init.fill(small)
    glorot_init.fill(large)
    assert np.abs(np.mean(small.get())) > np.abs(np.mean(large.get()))
| apache-2.0 |
flother/agate | tests/test_type_tester.py | 4 | 4667 | #!/usr/bin/env python
# -*- coding: utf8 -*-
try:
import unittest2 as unittest
except ImportError:
import unittest
from agate.data_types import *
from agate.type_tester import TypeTester
class TestTypeTester(unittest.TestCase):
    """Exercise TypeTester's column-type inference over small row samples.

    Each test feeds single-column rows through ``TypeTester.run`` and checks
    which agate data type is inferred for the column.  An empty string in a
    sample stands in for a null value.
    """
    def setUp(self):
        self.tester = TypeTester()
    def test_text_type(self):
        rows = [
            ('a',),
            ('b',),
            ('',)
        ]
        inferred = self.tester.run(rows, ['one'])
        self.assertIsInstance(inferred[0], Text)
    def test_number_type(self):
        rows = [
            ('1.7',),
            ('200000000',),
            ('',)
        ]
        inferred = self.tester.run(rows, ['one'])
        self.assertIsInstance(inferred[0], Number)
    def test_number_percent(self):
        rows = [
            ('1.7%',),
            ('200000000%',),
            ('',)
        ]
        inferred = self.tester.run(rows, ['one'])
        self.assertIsInstance(inferred[0], Number)
    def test_number_currency(self):
        rows = [
            ('$1.7',),
            ('$200000000',),
            ('',)
        ]
        inferred = self.tester.run(rows, ['one'])
        self.assertIsInstance(inferred[0], Number)
    def test_number_currency_locale(self):
        # Non-ASCII currency symbols must also be recognized as numbers.
        rows = [
            (u'£1.7',),
            (u'£200000000',),
            ('',)
        ]
        inferred = self.tester.run(rows, ['one'])
        self.assertIsInstance(inferred[0], Number)
    def test_boolean_type(self):
        rows = [
            ('True',),
            ('FALSE',),
            ('',)
        ]
        inferred = self.tester.run(rows, ['one'])
        self.assertIsInstance(inferred[0], Boolean)
    def test_date_type(self):
        rows = [
            ('5/7/1984',),
            ('2/28/1997',),
            ('3/19/2020',),
            ('',)
        ]
        inferred = self.tester.run(rows, ['one'])
        self.assertIsInstance(inferred[0], Date)
    def test_date_type_iso_format(self):
        rows = [
            ('1984-05-07',),
            ('1997-02-28',),
            ('2020-03-19',),
            ('',)
        ]
        inferred = self.tester.run(rows, ['one'])
        self.assertIsInstance(inferred[0], Date)
    def test_date_time_type(self):
        rows = [
            ('5/7/84 3:44:12',),
            ('2/28/1997 3:12 AM',),
            ('3/19/20 4:40 PM',),
            ('',)
        ]
        inferred = self.tester.run(rows, ['one'])
        self.assertIsInstance(inferred[0], DateTime)
    def test_date_time_type_isoformat(self):
        rows = [
            ('1984-07-05T03:44:12',),
            ('1997-02-28T03:12:00',),
            ('2020-03-19T04:40:00',),
            ('',)
        ]
        inferred = self.tester.run(rows, ['one'])
        self.assertIsInstance(inferred[0], DateTime)
    def test_time_delta_type(self):
        rows = [
            ('1:42',),
            ('1w 27h',),
            ('',)
        ]
        inferred = self.tester.run(rows, ['one'])
        self.assertIsInstance(inferred[0], TimeDelta)
    def test_force_type(self):
        # A forced column type must win over whatever would be inferred.
        rows = [
            ('1.7',),
            ('200000000',),
            ('',)
        ]
        tester = TypeTester(force={
            'one': Text()
        })
        inferred = tester.run(rows, ['one'])
        self.assertIsInstance(inferred[0], Text)
    def test_limit(self):
        # With limit=1 only the first row ('1.7') is sampled, so Number is
        # inferred; limit=2 also samples 'foo' and falls back to Text.
        rows = [
            ('1.7',),
            ('foo',),
            ('',)
        ]
        tester = TypeTester(limit=1)
        inferred = tester.run(rows, ['one'])
        self.assertIsInstance(inferred[0], Number)
        tester = TypeTester(limit=2)
        inferred = tester.run(rows, ['one'])
        self.assertIsInstance(inferred[0], Text)
    def test_types_force_text(self):
        # Restricting the candidate types to Text only always yields Text.
        rows = [
            ('1.7',),
            ('200000000',),
            ('',)
        ]
        tester = TypeTester(types=[Text()])
        inferred = tester.run(rows, ['one'])
        self.assertIsInstance(inferred[0], Text)
    def test_types_no_boolean(self):
        # Without Boolean among the candidates, boolean-looking strings
        # fall through to Text.
        rows = [
            ('True',),
            ('False',),
            ('False',)
        ]
        tester = TypeTester(types=[Number(), Text()])
        inferred = tester.run(rows, ['one'])
        self.assertIsInstance(inferred[0], Text)
    def test_types_number_locale(self):
        # German locale: ',' is the decimal separator, '.' groups thousands.
        rows = [
            ('1,7',),
            ('200.000.000',),
            ('',)
        ]
        tester = TypeTester(types=[Number(locale='de_DE'), Text()])
        inferred = tester.run(rows, ['one'])
        self.assertIsInstance(inferred[0], Number)
        self.assertEqual(str(inferred[0].locale), 'de_DE')
| mit |
tboyce021/home-assistant | homeassistant/components/wirelesstag/sensor.py | 16 | 3592 | """Sensor support for Wireless Sensor Tags platform."""
import logging
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_MONITORED_CONDITIONS
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from . import DOMAIN as WIRELESSTAG_DOMAIN, SIGNAL_TAG_UPDATE, WirelessTagBaseSensor
_LOGGER = logging.getLogger(__name__)
SENSOR_TEMPERATURE = "temperature"
SENSOR_HUMIDITY = "humidity"
SENSOR_MOISTURE = "moisture"
SENSOR_LIGHT = "light"
SENSOR_TYPES = [SENSOR_TEMPERATURE, SENSOR_HUMIDITY, SENSOR_MOISTURE, SENSOR_LIGHT]
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_MONITORED_CONDITIONS, default=[]): vol.All(
cv.ensure_list, [vol.In(SENSOR_TYPES)]
)
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the Wireless Sensor Tags sensor platform."""
    platform = hass.data.get(WIRELESSTAG_DOMAIN)
    monitored = config.get(CONF_MONITORED_CONDITIONS)
    entities = [
        WirelessTagSensor(platform, tag, sensor_type, hass.config)
        for tag in platform.tags.values()
        for sensor_type in monitored
        if sensor_type in tag.allowed_sensor_types
    ]
    add_entities(entities, True)
class WirelessTagSensor(WirelessTagBaseSensor):
    """Representation of a Sensor."""
    def __init__(self, api, tag, sensor_type, config):
        """Initialize a WirelessTag sensor."""
        super().__init__(api, tag)
        self._sensor_type = sensor_type
        self._name = self._tag.name
        # I want to see entity_id as:
        # sensor.wirelesstag_bedroom_temperature
        # and not as sensor.bedroom for temperature and
        # sensor.bedroom_2 for humidity
        self._entity_id = (
            f"sensor.{WIRELESSTAG_DOMAIN}_{self.underscored_name}_{self._sensor_type}"
        )
    async def async_added_to_hass(self):
        """Register callbacks."""
        # Listen for push updates addressed to this specific tag/manager pair;
        # async_on_remove tears the subscription down with the entity.
        self.async_on_remove(
            async_dispatcher_connect(
                self.hass,
                SIGNAL_TAG_UPDATE.format(self.tag_id, self.tag_manager_mac),
                self._update_tag_info_callback,
            )
        )
    @property
    def entity_id(self):
        """Overridden version."""
        return self._entity_id
    @property
    def underscored_name(self):
        """Provide name savvy to be used in entity_id name of self."""
        return self.name.lower().replace(" ", "_")
    @property
    def state(self):
        """Return the state of the sensor."""
        return self._state
    @property
    def device_class(self):
        """Return the class of the sensor (one of SENSOR_TYPES)."""
        return self._sensor_type
    @property
    def unit_of_measurement(self):
        """Return the unit of measurement."""
        return self._sensor.unit
    @property
    def principal_value(self):
        """Return sensor current value."""
        return self._sensor.value
    @property
    def _sensor(self):
        """Return tag sensor entity."""
        return self._tag.sensor[self._sensor_type]
    @callback
    def _update_tag_info_callback(self, event):
        """Handle push notification sent by tag manager."""
        _LOGGER.debug("Entity to update state: %s event data: %s", self, event.data)
        new_value = self._sensor.value_from_update_event(event.data)
        self._state = self.decorate_value(new_value)
        self.async_write_ha_state()
| apache-2.0 |
krismcfarlin/gae_endpoints_aloha | endpoints_proto_datastore/ndb/utils.py | 14 | 8427 | # Copyright 2012 Google Inc. All Rights Reserved.
"""Utility module for converting NDB properties to ProtoRPC messages/fields.
In the dictionary NDB_PROPERTY_TO_PROTO, each property defined by NDB is
registered. The registry values can either be a ProtoRPC field for simple
types/properties or a custom method for converting a property into a
ProtoRPC field.
Some properties have no corresponding implementation. These fields are
registered with a method that will raise a NotImplementedError. As of right now,
these are:
Property -- this is the base property class and shouldn't be used
GenericProperty -- this does not play nicely with strongly typed messages
ModelKey -- this is only intended for the key of the instance, and doesn't
make sense to send in messages
ComputedProperty -- a variant of this class is needed to determine the type
desired of the output. Such a variant is provided in
properties
"""
from .. import utils
from protorpc import messages
from google.appengine.ext import ndb
from google.appengine.ext.ndb import msgprop
__all__ = []
GeoPtMessage = utils.GeoPtMessage
RaiseNotImplementedMethod = utils.RaiseNotImplementedMethod
UserMessage = utils.UserMessage
MODEL_KEY_EXPLANATION = (
'A model key property can\'t be used to define an EndpointsModel. These '
'are intended to be used as the lone key of an entity and all ModelKey '
'properties on an entity will have the same value.')
COMPUTED_PROPERTY_EXPLANATION = (
'A computed property can\'t be used to define an EndpointsModel. The type '
'of the message field must be explicitly named; this can be done by using '
'the property EndpointsComputedProperty.')
NDB_PROPERTY_TO_PROTO = {
ndb.BlobKeyProperty: messages.StringField,
ndb.BlobProperty: messages.BytesField, # No concept of compressed here
ndb.BooleanProperty: messages.BooleanField,
ndb.ComputedProperty: RaiseNotImplementedMethod(
ndb.ComputedProperty,
explanation=COMPUTED_PROPERTY_EXPLANATION),
ndb.DateProperty: messages.StringField,
ndb.DateTimeProperty: messages.StringField,
ndb.FloatProperty: messages.FloatField,
ndb.GenericProperty: RaiseNotImplementedMethod(ndb.GenericProperty),
ndb.IntegerProperty: messages.IntegerField,
ndb.JsonProperty: messages.BytesField,
ndb.KeyProperty: messages.StringField,
ndb.ModelKey: RaiseNotImplementedMethod(
ndb.ModelKey,
explanation=MODEL_KEY_EXPLANATION),
ndb.PickleProperty: messages.BytesField,
ndb.Property: RaiseNotImplementedMethod(ndb.Property),
ndb.StringProperty: messages.StringField,
ndb.TextProperty: messages.StringField, # No concept of compressed here
ndb.TimeProperty: messages.StringField,
}
def GetKeywordArgs(prop, include_default=True):
  """Capture ProtoRPC-field keyword arguments from an NDB property.

  Args:
    prop: The NDB property whose attributes are captured.
    include_default: Optional boolean; whether to capture the property's
        default value.  Turn off for ProtoRPC fields that take no default.

  Returns:
    A dictionary of keyword arguments for a ProtoRPC field constructor.
  """
  kwargs = {'required': prop._required, 'repeated': prop._repeated}
  if include_default and hasattr(prop, '_default'):
    kwargs['default'] = prop._default
  if hasattr(prop, '_variant'):
    kwargs['variant'] = prop._variant
  return kwargs
def MessageFromSimpleField(field, prop, index):
  """Instantiate a simple ProtoRPC field type from an NDB property.

  Only works for field classes whose sole positional argument is the index;
  EnumField and MessageField need extra positional arguments and will fail
  here.

  Args:
    field: A ProtoRPC field type.
    prop: The NDB property to be converted.
    index: The index of the field within the message.

  Returns:
    An instance of field mirroring prop's attributes at the given index.
  """
  return field(index, **GetKeywordArgs(prop))
def StructuredPropertyToProto(prop, index):
  """Convert a structured NDB property into a ProtoRPC message field.

  Args:
    prop: The NDB (local) structured property to be converted.
    index: The index of the field within the message.

  Returns:
    A message field mirroring prop's attributes whose underlying message
    class is the ProtoRPC model produced by the property's model class
    (expected to be an EndpointsModel subclass).

  Raises:
    TypeError: If the property's model class does not provide a callable
        ProtoModel attribute.
  """
  modelclass = prop._modelclass
  try:
    # AttributeError: no ProtoModel attribute; TypeError: not callable.
    property_proto = modelclass.ProtoModel()
  except (AttributeError, TypeError):
    raise TypeError('Structured properties must receive a model class with a '
                    'callable ProtoModel attribute. The class %s has no such '
                    'attribute.' % (modelclass.__name__,))
  # No default for {MessageField}s
  kwargs = GetKeywordArgs(prop, include_default=False)
  return messages.MessageField(property_proto, index, **kwargs)
# Register the structured-property converters in the dispatch table.
NDB_PROPERTY_TO_PROTO[ndb.StructuredProperty] = StructuredPropertyToProto
# Ignore fact that LocalStructuredProperty is just a blob in the datastore
NDB_PROPERTY_TO_PROTO[ndb.LocalStructuredProperty] = StructuredPropertyToProto
def EnumPropertyToProto(prop, index):
  """Build a ProtoRPC enum field from an NDB EnumProperty.

  Args:
    prop: The NDB enum property to be converted.
    index: The index of the field within the message.

  Returns:
    An enum field mirroring prop's attributes, typed with the enum type
    configured on the property.
  """
  return messages.EnumField(prop._enum_type, index, **GetKeywordArgs(prop))


NDB_PROPERTY_TO_PROTO[msgprop.EnumProperty] = EnumPropertyToProto
def MessagePropertyToProto(prop, index):
  """Build a ProtoRPC message field from an NDB MessageProperty.

  Args:
    prop: The NDB message property to be converted.
    index: The index of the field within the message.

  Returns:
    A message field mirroring prop's attributes, typed with the message
    class configured on the property.
  """
  # Message fields accept no default, so leave it out of the kwargs.
  kwargs = GetKeywordArgs(prop, include_default=False)
  return messages.MessageField(prop._message_type, index, **kwargs)


NDB_PROPERTY_TO_PROTO[msgprop.MessageProperty] = MessagePropertyToProto
def GeoPtPropertyToProto(prop, index):
  """Build a GeoPtMessage field from an NDB GeoPtProperty.

  Args:
    prop: The NDB property to be converted.
    index: The index of the field within the message.

  Returns:
    A message field mirroring prop's attributes, typed with GeoPtMessage.
  """
  # Message fields accept no default, so leave it out of the kwargs.
  kwargs = GetKeywordArgs(prop, include_default=False)
  return messages.MessageField(GeoPtMessage, index, **kwargs)


NDB_PROPERTY_TO_PROTO[ndb.GeoPtProperty] = GeoPtPropertyToProto
def UserPropertyToProto(prop, index):
  """Build a UserMessage field from an NDB UserProperty.

  Args:
    prop: The NDB property to be converted.
    index: The index of the field within the message.

  Returns:
    A message field mirroring prop's attributes, typed with UserMessage.
  """
  # Message fields accept no default, so leave it out of the kwargs.
  kwargs = GetKeywordArgs(prop, include_default=False)
  return messages.MessageField(UserMessage, index, **kwargs)


NDB_PROPERTY_TO_PROTO[ndb.UserProperty] = UserPropertyToProto
| lgpl-3.0 |
dag/stutuz | stutuz/utils/tex.py | 1 | 1947 | #-*- coding:utf-8 -*-
from __future__ import division
from __future__ import absolute_import
from __future__ import with_statement
from __future__ import print_function
from __future__ import unicode_literals
import re
class Tex(object):
    """A TeX fragment that knows how to render itself as HTML.

    ``str()``/``unicode()`` return the raw TeX source, while ``__html__``
    (used when the instance is wrapped in :class:`~flask.Markup`) returns
    the HTML conversion.
    """

    def __init__(self, text):
        self.text = text

    def __str__(self):
        return self.text

    __unicode__ = __str__

    def __repr__(self):
        return 'Tex(%r)' % (self.text,)

    def __html__(self):
        return _tex_to_html(self.text)
def _tex_to_html(tex):
    """Convert the supported subset of TeX markup to HTML.

    Passes run in order: math spans, \\emph/\\textbf, indented lines,
    and the special 'inchoative' definition line.
    """
    passes = (
        (r'\$(.+?)\$', _sub_math),
        (r'\\(emph|textbf)\{(.+?)\}', _sub_typography),
        (r'(?![|>\-])\s\s+(.+)', _sub_lines),
        (r'inchoative\s\s+(----.+)', _sub_puho),
    )
    for pattern, handler in passes:
        tex = re.sub(pattern, handler, tex)
    return tex
def _sub_math(m):
    """Render a ``$...$`` math span: strip braces, turn '*' into the times
    sign, and emit sub/superscripts for '_' and '^'."""
    rendered = []
    for term in m.group(1).split('='):
        term = term.replace('{', '').replace('}', '').replace('*', u'×')
        if '_' in term:
            base, script = term.split('_')[0:2]
            rendered.append(u'%s<sub>%s</sub>' % (base, script))
        elif '^' in term:
            base, script = term.split('^')[0:2]
            rendered.append(u'%s<sup>%s</sup>' % (base, script))
        else:
            rendered.append(term)
    return '='.join(rendered)
def _sub_typography(m):
    """Map ``\\emph`` to <em> and ``\\textbf`` to <strong>."""
    command, body = m.group(1), m.group(2)
    if command == 'emph':
        return u'<em>%s</em>' % body
    elif command == 'textbf':
        return u'<strong>%s</strong>' % body
def _sub_lines(m):
    """Wrap indented '|'/'>' lines in a monospace span; otherwise just break
    the line.  (The two monospace branches of the original were identical,
    so they are merged here.)"""
    line = m.group(1)
    if line.startswith(('|', '>')):
        return '\n<span style="font-family: monospace"> %s</span>' % line
    return '\n%s' % line
def _sub_puho(m):
    """Wrap the 'inchoative' definition line in a monospace span."""
    return ('inchoative\n<span style="font-family: monospace">%s</span>'
            % m.group(1))
| bsd-2-clause |
druce/safewithdrawal_tensorflow | lifetable.py | 1 | 8359 | import numpy as np
import pandas as pd
from pandas import DataFrame
############################################################
# Life tables
# https://www.ssa.gov/oact/STATS/table4c6.html
############################################################
############################################################
# Male life table
############################################################
# survivors from 100000 births
MlivesArray = [100000, 99348, 99302, 99273, 99252, 99235, 99219, 99205, 99192, 99180,
99170, 99161, 99151, 99138, 99119, 99091, 99052, 99003, 98943, 98870,
98785, 98685, 98572, 98449, 98321, 98191, 98060, 97928, 97795, 97659,
97519, 97376, 97230, 97080, 96927, 96772, 96612, 96448, 96277, 96097,
95908, 95708, 95493, 95262, 95012, 94739, 94441, 94115, 93759, 93368,
92940, 92472, 91961, 91406, 90804, 90153, 89450, 88693, 87883, 87022,
86112, 85147, 84125, 83042, 81899, 80691, 79412, 78054, 76613, 75084,
73461, 71732, 69889, 67930, 65853, 63657, 61329, 58859, 56249, 53504,
50629, 47621, 44484, 41233, 37890, 34482, 31040, 27598, 24201, 20896,
17735, 14768, 12043, 9599, 7463, 5647, 4157, 2977, 2075, 1410,
935, 605, 380, 232, 137, 78, 43, 23, 11, 5,
2, 1, 0, 0, 0, 0, 0, 0, 0, 0, ]
MlivesSeries = pd.Series(MlivesArray)
# life expectancy
MLEarray = [76.28, 75.78, 74.82, 73.84, 72.85, 71.87, 70.88, 69.89, 68.9, 67.9,
66.91, 65.92, 64.92, 63.93, 62.94, 61.96, 60.99, 60.02, 59.05, 58.09,
57.14, 56.2, 55.27, 54.33, 53.4, 52.47, 51.54, 50.61, 49.68, 48.75,
47.82, 46.89, 45.96, 45.03, 44.1, 43.17, 42.24, 41.31, 40.38, 39.46,
38.53, 37.61, 36.7, 35.78, 34.88, 33.98, 33.08, 32.19, 31.32, 30.44,
29.58, 28.73, 27.89, 27.05, 26.23, 25.41, 24.61, 23.82, 23.03, 22.25,
21.48, 20.72, 19.97, 19.22, 18.48, 17.75, 17.03, 16.32, 15.61, 14.92,
14.24, 13.57, 12.92, 12.27, 11.65, 11.03, 10.43, 9.85, 9.28, 8.73,
8.2, 7.68, 7.19, 6.72, 6.27, 5.84, 5.43, 5.04, 4.68, 4.34,
4.03, 3.74, 3.47, 3.23, 3.01, 2.82, 2.64, 2.49, 2.36, 2.24,
2.12, 2.01, 1.9, 1.8, 1.7, 1.6, 1.51, 1.42, 1.34, 1.26,
1.18, 1.11, 1.04, 0.97, 0.9, 0.84, 0.78, 0.72, 0.67, 0.61,
]
MLEseries = pd.Series(MLEarray)
# death probability
MdeathrateArray = [0.006519, 0.000462, 0.000291, 0.000209, 0.000176, 0.000159, 0.000146, 0.000133, 0.000118, 0.000102,
0.000091, 0.000096, 0.000128, 0.000195, 0.000288, 0.000389, 0.000492, 0.000607, 0.000735, 0.000869,
0.001011, 0.001145, 0.001246, 0.001301, 0.001321, 0.00133, 0.001345, 0.001363, 0.001391, 0.001427,
0.001467, 0.001505, 0.001541, 0.001573, 0.001606, 0.001648, 0.001704, 0.001774, 0.001861, 0.001967,
0.002092, 0.00224, 0.002418, 0.002629, 0.002873, 0.003146, 0.003447, 0.003787, 0.004167, 0.004586,
0.005038, 0.00552, 0.006036, 0.006587, 0.00717, 0.007801, 0.008466, 0.009133, 0.009792, 0.010462,
0.011197, 0.012009, 0.012867, 0.013772, 0.014749, 0.015852, 0.017097, 0.018463, 0.019959, 0.021616,
0.023528, 0.025693, 0.028041, 0.030567, 0.033347, 0.036572, 0.040276, 0.044348, 0.048797, 0.053739,
0.059403, 0.065873, 0.073082, 0.08107, 0.089947, 0.099842, 0.110863, 0.123088, 0.136563, 0.151299,
0.167291, 0.18452, 0.202954, 0.222555, 0.243272, 0.263821, 0.283833, 0.302916, 0.320672, 0.336706,
0.353541, 0.371218, 0.389779, 0.409268, 0.429732, 0.451218, 0.473779, 0.497468, 0.522341, 0.548458,
0.575881, 0.604675, 0.634909, 0.666655, 0.699987, 0.734987, 0.771736, 0.810323, 0.850839, 0.893381,
]
MdeathrateSeries = pd.Series(MdeathrateArray)
MlivesSeries.to_csv('MLivesSeries.csv', index_label='index')
MLEseries.to_csv('MLEseries.csv', index_label='index')
MdeathrateSeries.to_csv('MdeathrateSeries.csv', index_label='index')
############################################################
# Female life table
############################################################
FlivesArray = [100000, 99462, 99425, 99403, 99387, 99373, 99361, 99351, 99341, 99331,
99322, 99312, 99303, 99291, 99278, 99262, 99243, 99220, 99194, 99165,
99132, 99095, 99054, 99010, 98963, 98915, 98864, 98811, 98755, 98697,
98635, 98569, 98500, 98426, 98348, 98265, 98176, 98081, 97979, 97870,
97753, 97627, 97491, 97343, 97182, 97004, 96810, 96597, 96364, 96109,
95829, 95524, 95193, 94834, 94449, 94038, 93598, 93126, 92623, 92090,
91526, 90927, 90287, 89600, 88858, 88054, 87177, 86223, 85187, 84069,
82864, 81561, 80147, 78616, 76961, 75177, 73244, 71148, 68888, 66467,
63880, 61114, 58159, 55016, 51694, 48205, 44565, 40796, 36933, 33017,
29104, 25257, 21542, 18027, 14775, 11839, 9267, 7083, 5285, 3852,
2745, 1909, 1292, 850, 541, 333, 197, 112, 61, 31,
15, 7, 3, 1, 0, 0, 0, 0, 0, 0,]
FlivesSeries = pd.Series(FlivesArray)
FLEarray = [81.05, 80.49, 79.52, 78.54, 77.55, 76.56, 75.57, 74.58, 73.58, 72.59,
71.6, 70.6, 69.61, 68.62, 67.63, 66.64, 65.65, 64.67, 63.68, 62.7,
61.72, 60.75, 59.77, 58.8, 57.82, 56.85, 55.88, 54.91, 53.94, 52.97,
52.01, 51.04, 50.08, 49.11, 48.15, 47.19, 46.23, 45.28, 44.33, 43.37,
42.43, 41.48, 40.54, 39.6, 38.66, 37.73, 36.81, 35.89, 34.97, 34.06,
33.16, 32.27, 31.38, 30.49, 29.62, 28.74, 27.88, 27.01, 26.16, 25.31,
24.46, 23.62, 22.78, 21.95, 21.13, 20.32, 19.52, 18.73, 17.95, 17.18,
16.43, 15.68, 14.95, 14.23, 13.53, 12.83, 12.16, 11.5, 10.86, 10.24,
9.64, 9.05, 8.48, 7.94, 7.42, 6.92, 6.44, 5.99, 5.57, 5.17,
4.8, 4.45, 4.13, 3.84, 3.57, 3.34, 3.12, 2.93, 2.76, 2.6,
2.45, 2.3, 2.17, 2.03, 1.91, 1.78, 1.67, 1.56, 1.45, 1.35,
1.26, 1.17, 1.08, 1, 0.92, 0.85, 0.78, 0.72, 0.67, 0.61,]
FLEseries = pd.Series(FLEarray)
FdeathrateArray =[0.005377, 0.000379, 0.000221, 0.000162, 0.000133, 0.000119, 0.000109, 0.000101, 0.000096, 0.000093,
0.000094, 0.0001, 0.000112, 0.000134, 0.000162, 0.000194, 0.000226, 0.000261, 0.000297, 0.000334,
0.000373, 0.000412, 0.000446, 0.000472, 0.000493, 0.000513, 0.000537, 0.000563, 0.000593, 0.000627,
0.000664, 0.000705, 0.000748, 0.000794, 0.000845, 0.000903, 0.000968, 0.001038, 0.001113, 0.001196,
0.001287, 0.001393, 0.001517, 0.001662, 0.001827, 0.002005, 0.002198, 0.002412, 0.002648, 0.002904,
0.003182, 0.003473, 0.003767, 0.004058, 0.004352, 0.004681, 0.00504, 0.0054, 0.005756, 0.006128,
0.006545, 0.007034, 0.007607, 0.008281, 0.009057, 0.009953, 0.01095, 0.01201, 0.013124, 0.01433,
0.015728, 0.017338, 0.019108, 0.021041, 0.023191, 0.025713, 0.028609, 0.03176, 0.035157, 0.03892,
0.043289, 0.048356, 0.054041, 0.060384, 0.067498, 0.075516, 0.084556, 0.094703, 0.106014, 0.118513,
0.132206, 0.147092, 0.163154, 0.180371, 0.198714, 0.217264, 0.235735, 0.25381, 0.271155, 0.287424,
0.30467, 0.32295, 0.342327, 0.362867, 0.384639, 0.407717, 0.43218, 0.458111, 0.485597, 0.514733,
0.545617, 0.578354, 0.613055, 0.649839, 0.688829, 0.730159, 0.771736, 0.810323, 0.850839, 0.893381,]
FdeathrateSeries = pd.Series(FdeathrateArray)
def genLifetable(livesSeries, leSeries, ret_age, ret_length):
    """Build a retirement-period life table as a DataFrame.

    Args:
        livesSeries: Series of survivors (per 100000 births) indexed by age
            0..N, with index equal to position.
        leSeries: Series of remaining life expectancy, indexed the same way.
        ret_age: First age of the retirement period.
        ret_length: Number of years in the retirement period.

    Returns:
        DataFrame indexed by age with columns:
            survival: probability of being alive at each age, normalized to
                1.0 at ret_age.
            life_expectancy: remaining life expectancy at each age.
            deathrate: probability of dying in each year; the last year is
                set so the column sums to exactly 1.
    """
    # TODO: validate that [ret_age, ret_age + ret_length) lies inside the
    # bounds of the supplied tables.
    end_age = ret_age + ret_length
    # Positional slicing; the input series are indexed 0..N by age.
    survival = livesSeries.iloc[ret_age:end_age]
    survival = survival / float(survival.iloc[0])
    # Yearly death probability is the drop in survival from age to age + 1.
    deathrate = survival - survival.shift(-1)
    # The last slot (NaN from the shift) absorbs the remaining probability so
    # the column sums to 1.  Uses .iloc: the .ix indexer used previously was
    # removed in pandas 1.0 and raises AttributeError on modern pandas.
    deathrate.iloc[-1] = 1 - deathrate.sum()
    lifetable = DataFrame(survival, columns=['survival'])
    lifetable['life_expectancy'] = leSeries.iloc[ret_age:end_age]
    lifetable['deathrate'] = deathrate
    return lifetable
| mit |
dpiers/coderang-meteor | public/jsrepl/extern/python/unclosured/lib/python2.7/encodings/euc_jis_2004.py | 816 | 1051 | #
# euc_jis_2004.py: Python Unicode Codec for EUC_JIS_2004
#
# Written by Hye-Shik Chang <perky@FreeBSD.org>
#
# The conversion tables live in the C extension module _codecs_jp; this file
# only wires them into the standard codecs machinery.
import _codecs_jp, codecs
import _multibytecodec as mbc
# Shared stateless codec object used by every wrapper class below.
codec = _codecs_jp.getcodec('euc_jis_2004')
class Codec(codecs.Codec):
    encode = codec.encode
    decode = codec.decode
class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
                         codecs.IncrementalEncoder):
    codec = codec
class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
                         codecs.IncrementalDecoder):
    codec = codec
class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
    codec = codec
class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
    codec = codec
def getregentry():
    """Return the CodecInfo entry used to register this codec."""
    return codecs.CodecInfo(
        name='euc_jis_2004',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
chelsea1620/google-belay | station/openid/store/filestore.py | 153 | 13542 | """
This module contains an C{L{OpenIDStore}} implementation backed by
flat files.
"""
import string
import os
import os.path
import time
from errno import EEXIST, ENOENT
try:
    from tempfile import mkstemp
except ImportError:
    # Python < 2.3: tempfile.mkstemp does not exist, so emulate it with
    # os.tempnam plus an O_CREAT|O_EXCL open (which closes the race the
    # tempnam warning is about, hence the filterwarnings below).
    import warnings
    warnings.filterwarnings("ignore",
                            "tempnam is a potential security risk",
                            RuntimeWarning,
                            "openid.store.filestore")
    def mkstemp(dir):
        """Return (fd, path) for a newly, exclusively created file in dir."""
        for _ in range(5):
            name = os.tempnam(dir)
            try:
                fd = os.open(name, os.O_CREAT | os.O_EXCL | os.O_RDWR, 0600)
            except OSError, why:
                # EEXIST means another process won the race for this name;
                # retry with a fresh candidate name.
                if why.errno != EEXIST:
                    raise
            else:
                return fd, name
        raise RuntimeError('Failed to get temp file after 5 attempts')
from openid.association import Association
from openid.store.interface import OpenIDStore
from openid.store import nonce
from openid import cryptutil, oidutil
_filename_allowed = string.ascii_letters + string.digits + '.'
# Build _isFilenameSafe, a single-character membership test, using the best
# set implementation the running interpreter provides.
try:
    # 2.4: builtin set
    set
except NameError:
    try:
        # 2.3: sets module
        import sets
    except ImportError:
        # Python < 2.2: use a dict as a poor man's set
        d = {}
        for c in _filename_allowed:
            d[c] = None
        _isFilenameSafe = d.has_key
        del d
    else:
        _isFilenameSafe = sets.Set(_filename_allowed).__contains__
else:
    _isFilenameSafe = set(_filename_allowed).__contains__
def _safe64(s):
    """Hash s with SHA1 and encode the digest in a filename-safe base64
    alphabet ('+' -> '_', '/' -> '.', padding '=' stripped)."""
    encoded = oidutil.toBase64(cryptutil.sha1(s))
    return encoded.replace('+', '_').replace('/', '.').replace('=', '')
def _filenameEscape(s):
    """Escape every character outside the filename-safe alphabet as a
    '_XX' hex sequence and return the escaped string."""
    escaped = []
    for ch in s:
        if not _isFilenameSafe(ch):
            ch = '_%02X' % ord(ch)
        escaped.append(ch)
    return ''.join(escaped)
def _removeIfPresent(filename):
"""Attempt to remove a file, returning whether the file existed at
the time of the call.
str -> bool
"""
try:
os.unlink(filename)
except OSError, why:
if why.errno == ENOENT:
# Someone beat us to it, but it's gone, so that's OK
return 0
else:
raise
else:
# File was present
return 1
def _ensureDir(dir_name):
"""Create dir_name as a directory if it does not exist. If it
exists, make sure that it is, in fact, a directory.
Can raise OSError
str -> NoneType
"""
try:
os.makedirs(dir_name)
except OSError, why:
if why.errno != EEXIST or not os.path.isdir(dir_name):
raise
class FileOpenIDStore(OpenIDStore):
"""
This is a filesystem-based store for OpenID associations and
nonces. This store should be safe for use in concurrent systems
on both windows and unix (excluding NFS filesystems). There are a
couple race conditions in the system, but those failure cases have
been set up in such a way that the worst-case behavior is someone
having to try to log in a second time.
Most of the methods of this class are implementation details.
People wishing to just use this store need only pay attention to
the C{L{__init__}} method.
Methods of this object can raise OSError if unexpected filesystem
conditions, such as bad permissions or missing directories, occur.
"""
    def __init__(self, directory):
        """
        Initializes a new FileOpenIDStore. This initializes the
        nonce and association directories, which are subdirectories of
        the directory passed in.
        @param directory: This is the directory to put the store
            directories in.
        @type directory: C{str}
        """
        # Make absolute
        directory = os.path.normpath(os.path.abspath(directory))
        self.nonce_dir = os.path.join(directory, 'nonces')
        self.association_dir = os.path.join(directory, 'associations')
        # Temp dir must be on the same filesystem as the associations
        # directory (os.rename is used to move files between them).
        self.temp_dir = os.path.join(directory, 'temp')
        self.max_nonce_age = 6 * 60 * 60 # Six hours, in seconds
        self._setup()
def _setup(self):
"""Make sure that the directories in which we store our data
exist.
() -> NoneType
"""
_ensureDir(self.nonce_dir)
_ensureDir(self.association_dir)
_ensureDir(self.temp_dir)
    def _mktemp(self):
        """Create a temporary file on the same filesystem as
        self.association_dir.
        The temporary directory should not be cleaned if there are any
        processes using the store. If there is no active process using
        the store, it is safe to remove all of the files in the
        temporary directory.
        () -> (file, str)
        """
        fd, name = mkstemp(dir=self.temp_dir)
        try:
            file_obj = os.fdopen(fd, 'wb')
            return file_obj, name
        except:
            # Wrapping the fd failed; don't leak the just-created temp file.
            # The bare except re-raises, so nothing is swallowed.
            _removeIfPresent(name)
            raise
def getAssociationFilename(self, server_url, handle):
"""Create a unique filename for a given server url and
handle. This implementation does not assume anything about the
format of the handle. The filename that is returned will
contain the domain name from the server URL for ease of human
inspection of the data directory.
(str, str) -> str
"""
if server_url.find('://') == -1:
raise ValueError('Bad server URL: %r' % server_url)
proto, rest = server_url.split('://', 1)
domain = _filenameEscape(rest.split('/', 1)[0])
url_hash = _safe64(server_url)
if handle:
handle_hash = _safe64(handle)
else:
handle_hash = ''
filename = '%s-%s-%s-%s' % (proto, domain, url_hash, handle_hash)
return os.path.join(self.association_dir, filename)
    def storeAssociation(self, server_url, association):
        """Store an association in the association directory.

        The serialized association is written to a temporary file and
        then renamed into place, so readers never observe a partially
        written record.

        (str, Association) -> NoneType
        """
        association_s = association.serialize()
        filename = self.getAssociationFilename(server_url, association.handle)
        tmp_file, tmp = self._mktemp()

        try:
            try:
                tmp_file.write(association_s)
                # Flush to disk before the rename publishes the file
                # under its final name.
                os.fsync(tmp_file.fileno())
            finally:
                tmp_file.close()

            try:
                os.rename(tmp, filename)
            except OSError, why:
                if why.errno != EEXIST:
                    raise

                # We only expect EEXIST to happen only on Windows. It's
                # possible that we will succeed in unlinking the existing
                # file, but not in putting the temporary file in place.
                try:
                    os.unlink(filename)
                except OSError, why:
                    if why.errno == ENOENT:
                        pass
                    else:
                        raise

                # Now the target should not exist. Try renaming again,
                # giving up if it fails.
                os.rename(tmp, filename)
        except:
            # If there was an error, don't leave the temporary file
            # around.
            _removeIfPresent(tmp)
            raise
def getAssociation(self, server_url, handle=None):
"""Retrieve an association. If no handle is specified, return
the association with the latest expiration.
(str, str or NoneType) -> Association or NoneType
"""
if handle is None:
handle = ''
# The filename with the empty handle is a prefix of all other
# associations for the given server URL.
filename = self.getAssociationFilename(server_url, handle)
if handle:
return self._getAssociation(filename)
else:
association_files = os.listdir(self.association_dir)
matching_files = []
# strip off the path to do the comparison
name = os.path.basename(filename)
for association_file in association_files:
if association_file.startswith(name):
matching_files.append(association_file)
matching_associations = []
# read the matching files and sort by time issued
for name in matching_files:
full_name = os.path.join(self.association_dir, name)
association = self._getAssociation(full_name)
if association is not None:
matching_associations.append(
(association.issued, association))
matching_associations.sort()
# return the most recently issued one.
if matching_associations:
(_, assoc) = matching_associations[-1]
return assoc
else:
return None
    def _getAssociation(self, filename):
        """Load and deserialize the association stored in filename.

        Returns None if the file does not exist, is corrupt (the file
        is then removed), or holds an expired association (also
        removed).  Raises on any other I/O error.
        """
        try:
            assoc_file = file(filename, 'rb')
        except IOError, why:
            if why.errno == ENOENT:
                # No association exists for that URL and handle
                return None
            else:
                raise
        else:
            try:
                assoc_s = assoc_file.read()
            finally:
                assoc_file.close()

            try:
                association = Association.deserialize(assoc_s)
            except ValueError:
                # Corrupt record: discard it rather than failing on
                # every future lookup.
                _removeIfPresent(filename)
                return None

        # Clean up expired associations
        if association.getExpiresIn() == 0:
            _removeIfPresent(filename)
            return None
        else:
            return association
def removeAssociation(self, server_url, handle):
"""Remove an association if it exists. Do nothing if it does not.
(str, str) -> bool
"""
assoc = self.getAssociation(server_url, handle)
if assoc is None:
return 0
else:
filename = self.getAssociationFilename(server_url, handle)
return _removeIfPresent(filename)
    def useNonce(self, server_url, timestamp, salt):
        """Return whether this nonce is valid.

        str -> bool
        """
        # Reject nonces whose timestamp is too far from the current
        # clock in either direction.
        if abs(timestamp - time.time()) > nonce.SKEW:
            return False

        if server_url:
            proto, rest = server_url.split('://', 1)
        else:
            # Create empty proto / rest values for empty server_url,
            # which is part of a consumer-generated nonce.
            proto, rest = '', ''

        domain = _filenameEscape(rest.split('/', 1)[0])
        url_hash = _safe64(server_url)
        salt_hash = _safe64(salt)

        # The filename begins with the zero-padded hex timestamp so
        # cleanupNonces can recover the time without opening the file.
        filename = '%08x-%s-%s-%s-%s' % (timestamp, proto, domain,
                                         url_hash, salt_hash)

        filename = os.path.join(self.nonce_dir, filename)
        try:
            # O_CREAT|O_EXCL makes creation atomic: reusing the same
            # nonce fails with EEXIST.  (0200 = owner write-only.)
            fd = os.open(filename, os.O_CREAT | os.O_EXCL | os.O_WRONLY, 0200)
        except OSError, why:
            if why.errno == EEXIST:
                return False
            else:
                raise
        else:
            os.close(fd)
            return True
    def _allAssocs(self):
        """Return a list of (filename, Association) for every readable,
        non-corrupt association on disk.

        Corrupt records are removed as a side effect; files that vanish
        mid-scan (another process cleaning up) are logged and skipped.
        """
        all_associations = []

        association_filenames = map(
            lambda filename: os.path.join(self.association_dir, filename),
            os.listdir(self.association_dir))
        for association_filename in association_filenames:
            try:
                association_file = file(association_filename, 'rb')
            except IOError, why:
                if why.errno == ENOENT:
                    # Raced with another process removing the file.
                    oidutil.log("%s disappeared during %s._allAssocs" % (
                        association_filename, self.__class__.__name__))
                else:
                    raise
            else:
                try:
                    assoc_s = association_file.read()
                finally:
                    association_file.close()

                # Remove expired or corrupted associations
                try:
                    association = Association.deserialize(assoc_s)
                except ValueError:
                    _removeIfPresent(association_filename)
                else:
                    all_associations.append(
                        (association_filename, association))

        return all_associations
def cleanup(self):
"""Remove expired entries from the database. This is
potentially expensive, so only run when it is acceptable to
take time.
() -> NoneType
"""
self.cleanupAssociations()
self.cleanupNonces()
def cleanupAssociations(self):
removed = 0
for assoc_filename, assoc in self._allAssocs():
if assoc.getExpiresIn() == 0:
_removeIfPresent(assoc_filename)
removed += 1
return removed
def cleanupNonces(self):
nonces = os.listdir(self.nonce_dir)
now = time.time()
removed = 0
# Check all nonces for expiry
for nonce_fname in nonces:
timestamp = nonce_fname.split('-', 1)[0]
timestamp = int(timestamp, 16)
if abs(timestamp - now) > nonce.SKEW:
filename = os.path.join(self.nonce_dir, nonce_fname)
_removeIfPresent(filename)
removed += 1
return removed
| apache-2.0 |
jwlin/web-crawler-tutorial | ch8/twse.py | 1 | 1294 | # source: https://github.com/yotsuba1022/web-crawler-practice/blob/master/ch4/tw_stock_exchange.py
import requests
import time
TWSE_URL = 'http://www.twse.com.tw/exchangeReport/STOCK_DAY?response=json'
def get_web_content(stock_id, current_date):
    """Fetch one month of daily trading data for a stock from TWSE.

    Returns the decoded JSON payload, or None when the request does
    not succeed with HTTP 200.
    """
    # Let requests build and escape the query string instead of
    # concatenating it by hand; TWSE_URL already carries the fixed
    # response=json pair and requests appends to the existing query.
    resp = requests.get(TWSE_URL, params={'date': current_date,
                                          'stockNo': stock_id})
    if resp.status_code != 200:
        return None
    return resp.json()
def get_data(stock_id, current_date):
    """Build a list of per-day records from the TWSE JSON payload.

    Returns None when the HTTP fetch failed, otherwise a (possibly
    empty) list of dicts keyed by date / open / close / transactions.
    """
    resp = get_web_content(stock_id, current_date)
    if resp is None:
        return None

    info = []
    if resp['data']:
        # Column layout of each row: 0=date, 3=open, 6=close,
        # 8=number of transactions.
        for row in resp['data']:
            info.append({
                '日期': row[0],
                '開盤價': row[3],
                '收盤價': row[6],
                '成交筆數': row[8],
            })
    return info
def main():
    """Print this month's daily trading records for stock 2330 (TSMC)."""
    stock_id = '2330'
    today = time.strftime('%Y%m%d')
    year = time.strftime('%Y')
    month = time.strftime('%m')
    print('Processing data for %s %s...' % (year, month))
    for record in get_data(stock_id, today):
        print(record)


if __name__ == '__main__':
    main()
tomekwojcik/osxnotify-python-cffi | setup.py | 1 | 2466 | # -*- coding: utf-8 -*-
# Copyright (c) 2015 Tomek Wójcik <tomek@bthlabs.pl>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import codecs
from setuptools import setup
import sys
# The PyPI long description is taken verbatim from the README.
desc_file = codecs.open('README.rst', 'r', 'utf-8')
long_description = desc_file.read()
desc_file.close()

# Package metadata (__title__, __version__, __author__, __license__)
# lives in osxnotify_cffi/meta.py; exec it into a dict so setup.py
# does not have to import the package (which would need cffi installed
# at sdist-build time).
meta = {}
with open('osxnotify_cffi/meta.py', 'r') as f:
    exec(compile(f.read(), 'osxnotify_cffi/meta.py', 'exec'), meta)

setup(
    # NOTE(review): the .encode('utf-8') calls below yield bytes under
    # Python 3, where setuptools expects str -- confirm they are only
    # needed for the Python 2 build.
    name=meta['__title__'].encode('utf-8'),
    version=meta['__version__'],
    description=(
        'No nonsense OS X notifications for Python scripts (CFFI wrapper)'
    ),
    long_description=long_description,
    author=meta['__author__'].encode('utf-8'),
    author_email='tomek@bthlabs.pl',
    packages=['osxnotify_cffi'],
    install_requires=['cffi>=1.0.0'],
    license=meta['__license__'].encode('utf-8'),
    url='https://github.com/tomekwojcik/osxnotify-python-cffi',
    download_url=(
        'http://github.com/tomekwojcik/osxnotify-python-cffi/tarball/v%s' %
        meta['__version__']
    ),
    classifiers=[
        "Development Status :: 4 - Beta",
        "Environment :: MacOS X :: Cocoa",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: MIT License",
        "Operating System :: MacOS :: MacOS X",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3.4",
        "Topic :: Software Development :: Libraries :: Python Modules"
    ]
)
| mit |
shi2wei3/virt-test | virttest/libvirt_xml/nwfilter_protocols/ipv6.py | 26 | 5232 | """
ipv6 protocl support class(es)
http://libvirt.org/formatnwfilter.html#nwfelemsRulesProtoIPv6
"""
from virttest.libvirt_xml import accessors, xcepts
from virttest.libvirt_xml.nwfilter_protocols import base
class Ipv6(base.TypedDeviceBase):

    """
    Create new Ipv6 xml instances.

    Wraps the <ipv6> rule element of a libvirt network filter
    (nwfilter) XML document.

    Properties:
        attrs: libvirt_xml.nwfilter_protocols.Ipv6.Attr instance
    """

    __slots__ = ('attrs',)

    def __init__(self, type_name='file', virsh_instance=base.base.virsh):
        # Nested Attr instances are (de)serialized from the <ipv6>
        # element via an XMLElementNest accessor.
        accessors.XMLElementNest('attrs', self, parent_xpath='/',
                                 tag_name='ipv6', subclass=self.Attr,
                                 subclass_dargs={
                                     'virsh_instance': virsh_instance})
        super(Ipv6, self).__init__(protocol_tag='ipv6', type_name=type_name,
                                   virsh_instance=virsh_instance)

    def new_attr(self, **dargs):
        """
        Return a new Attr instance and set properties from dargs

        :param dargs: dict of attributes
        :return: new Attr instance
        """
        new_one = self.Attr(virsh_instance=self.virsh)
        for key, value in dargs.items():
            setattr(new_one, key, value)
        return new_one

    def get_attr(self):
        """
        Return ipv6 attribute dict

        :return: None if no ipv6 in xml, dict of ipv6's attributes.
        """
        try:
            # reroot raises KeyError when the document has no <ipv6>.
            ipv6_node = self.xmltreefile.reroot('/ipv6')
        except KeyError, detail:
            raise xcepts.LibvirtXMLError(detail)
        node = ipv6_node.getroot()
        ipv6_attr = dict(node.items())

        return ipv6_attr

    class Attr(base.base.LibvirtXMLBase):

        """
        Ipv6 attribute XML class

        Properties:

        srcmacaddr: string, MAC address of sender
        srcmacmask: string, Mask applied to MAC address of sender
        dstmacaddr: string, MAC address of destination
        dstmacmask: string, Mask applied to MAC address of destination
        srcipaddr: string, Source IP address
        srcipmask: string, Mask applied to source IP address
        dstipaddr: string, Destination IP address
        dstipmask: string, Mask applied to destination IP address
        ip_protocol: string, Layer 4 protocol identifier
        srcportstart: string, Start of range of valid source ports; requires protocol
        srcportend: string, End of range of valid source ports; requires protocol
        dstportstart: string, Start of range of valid destination ports; requires protocol
        dstportend: string, End of range of valid destination ports; requires protocol
        comment: string, text with max. 256 characters
        """

        # NOTE(review): 'dscp' is declared in __slots__ but has no
        # XMLAttribute accessor below -- confirm whether that is
        # intentional or a missing accessor.
        __slots__ = ('srcmacaddr', 'srcmacmask', 'dstmacaddr', 'dstmacmask',
                     'srcipaddr', 'srcipmask', 'dstipaddr', 'dstipmask',
                     'ip_protocol', 'srcportstart', 'srcportend',
                     'dstportstart', 'dstportend', 'dscp', 'comment')

        def __init__(self, virsh_instance=base.base.virsh):
            # Each property maps 1:1 to an attribute of the <ipv6>
            # element; note 'ip_protocol' is stored as XML attribute
            # 'protocol'.
            accessors.XMLAttribute('srcmacaddr', self, parent_xpath='/',
                                   tag_name='ipv6', attribute='srcmacaddr')
            accessors.XMLAttribute('srcmacmask', self, parent_xpath='/',
                                   tag_name='ipv6', attribute='srcmacmask')
            accessors.XMLAttribute('dstmacaddr', self, parent_xpath='/',
                                   tag_name='ipv6', attribute='dstmacaddr')
            accessors.XMLAttribute('dstmacmask', self, parent_xpath='/',
                                   tag_name='ipv6', attribute='dstmacmask')
            accessors.XMLAttribute('srcipaddr', self, parent_xpath='/',
                                   tag_name='ipv6', attribute='srcipaddr')
            accessors.XMLAttribute('srcipmask', self, parent_xpath='/',
                                   tag_name='ipv6', attribute='srcipmask')
            accessors.XMLAttribute('dstipaddr', self, parent_xpath='/',
                                   tag_name='ipv6', attribute='dstipaddr')
            accessors.XMLAttribute('dstipmask', self, parent_xpath='/',
                                   tag_name='ipv6', attribute='dstipmask')
            accessors.XMLAttribute('ip_protocol', self, parent_xpath='/',
                                   tag_name='ipv6', attribute='protocol')
            accessors.XMLAttribute('srcportstart', self, parent_xpath='/',
                                   tag_name='ipv6', attribute='srcportstart')
            accessors.XMLAttribute('srcportend', self, parent_xpath='/',
                                   tag_name='ipv6', attribute='srcportend')
            accessors.XMLAttribute('dstportstart', self, parent_xpath='/',
                                   tag_name='ipv6', attribute='dstportstart')
            accessors.XMLAttribute('dstportend', self, parent_xpath='/',
                                   tag_name='ipv6', attribute='dstportend')
            accessors.XMLAttribute('comment', self, parent_xpath='/',
                                   tag_name='ipv6', attribute='comment')

            super(self.__class__, self).__init__(virsh_instance=virsh_instance)
            self.xml = '<ipv6/>'
| gpl-2.0 |
GeraudBourdin/Ti.infrared_demo_app | plugins/ti.alloy/plugin.py | 1729 | 5251 | import os, sys, subprocess, hashlib
import subprocess
def check_output(*popenargs, **kwargs):
    r"""Run command with arguments and return its output as a byte string.

    Backported from Python 2.7 as it's implemented as pure python on stdlib.

    >>> check_output(['/usr/bin/python', '--version'])
    Python 2.6.2
    """
    proc = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs)
    stdout_data, _ = proc.communicate()
    returncode = proc.poll()
    if not returncode:
        return stdout_data
    # Non-zero exit: raise CalledProcessError, attaching the captured
    # output the way the real 2.7 implementation does.
    command = kwargs.get("args")
    if command is None:
        command = popenargs[0]
    failure = subprocess.CalledProcessError(returncode, command)
    failure.output = stdout_data
    raise failure
def compile(config):
    """Titanium build-plugin hook: run the Alloy compiler for this project.

    config is the dict the Titanium builder hands to plugins; the keys
    read here are project_dir, platform and the per-platform
    version/deploy-type entries.
    """
    paths = {}
    binaries = ["alloy","node"]

    # The newer Alloy CLI drops build/.alloynewcli to signal it has
    # already compiled the project, in which case this plugin skips.
    dotAlloy = os.path.abspath(os.path.join(config['project_dir'], 'build', '.alloynewcli'))
    if os.path.exists(dotAlloy):
        print "[DEBUG] build/.alloynewcli file found, skipping plugin..."
        os.remove(dotAlloy)
    else:
        # Locate the alloy and node binaries: env var, then PATH
        # (via `which`), then a list of conventional install locations.
        for binary in binaries:
            try:
                # see if the environment variable is defined
                paths[binary] = os.environ["ALLOY_" + ("NODE_" if binary == "node" else "") + "PATH"]
            except KeyError as ex:
                # next try PATH, and then our guess paths
                if sys.platform == "darwin" or sys.platform.startswith('linux'):
                    userPath = os.environ["HOME"]
                    guessPaths = [
                        "/usr/local/bin/"+binary,
                        "/opt/local/bin/"+binary,
                        userPath+"/local/bin/"+binary,
                        "/opt/bin/"+binary,
                        "/usr/bin/"+binary,
                        "/usr/local/share/npm/bin/"+binary
                    ]

                    try:
                        binaryPath = check_output(["which",binary], stderr=subprocess.STDOUT).strip()
                        print "[DEBUG] %s installed at '%s'" % (binary,binaryPath)
                    except:
                        print "[WARN] Couldn't find %s on your PATH:" % binary
                        print "[WARN] %s" % os.environ["PATH"]
                        print "[WARN]"
                        print "[WARN] Checking for %s in a few default locations:" % binary
                        for p in guessPaths:
                            sys.stdout.write("[WARN] %s -> " % p)
                            if os.path.exists(p):
                                binaryPath = p
                                print "FOUND"
                                break
                            else:
                                print "not found"
                                binaryPath = None

                    if binaryPath is None:
                        print "[ERROR] Couldn't find %s" % binary
                        sys.exit(1)
                    else:
                        paths[binary] = binaryPath

                # no guesses on windows, just use the PATH
                elif sys.platform == "win32":
                    paths["alloy"] = "alloy.cmd"

        f = os.path.abspath(os.path.join(config['project_dir'], 'app'))
        if os.path.exists(f):
            print "[INFO] alloy app found at %s" % f

            rd = os.path.abspath(os.path.join(config['project_dir'], 'Resources'))

            # Per-platform compile configuration passed to Alloy.
            devicefamily = 'none'
            simtype = 'none'
            version = '0'
            deploytype = 'development'
            if config['platform']==u'ios':
                version = config['iphone_version']
                devicefamily = config['devicefamily']
                deploytype = config['deploytype']
            if config['platform']==u'android':
                builder = config['android_builder']
                version = builder.tool_api_level
                deploytype = config['deploy_type']
            if config['platform']==u'mobileweb':
                builder = config['mobileweb_builder']
                deploytype = config['deploytype']

            cfg = "platform=%s,version=%s,simtype=%s,devicefamily=%s,deploytype=%s," % (config['platform'],version,simtype,devicefamily,deploytype)

            # On Windows alloy.cmd is directly executable; elsewhere it
            # is a node script and must be run through node.
            if sys.platform == "win32":
                cmd = [paths["alloy"], "compile", f, "--no-colors", "--config", cfg]
            else:
                cmd = [paths["node"], paths["alloy"], "compile", f, "--no-colors", "--config", cfg]

            print "[INFO] Executing Alloy compile:"
            print "[INFO] %s" % " ".join(cmd)

            try:
                print check_output(cmd, stderr=subprocess.STDOUT)
            except subprocess.CalledProcessError as ex:
                if hasattr(ex, 'output'):
                    print ex.output
                print "[ERROR] Alloy compile failed"
                retcode = 1
                if hasattr(ex, 'returncode'):
                    retcode = ex.returncode
                # Propagate Alloy's exit code to the Titanium build.
                sys.exit(retcode)
            except EnvironmentError as ex:
                print "[ERROR] Unexpected error with Alloy compiler plugin: %s" % ex.strerror
                sys.exit(2)
| apache-2.0 |
minor-jason/notepad-plus-plus | scintilla/scripts/Face.py | 56 | 3553 | # Face.py - module for reading and parsing Scintilla.iface file
# Implemented 2000 by Neil Hodgson neilh@scintilla.org
# Released to the public domain.
# Requires Python 2.5 or later
def sanitiseLine(line):
    """Strip the trailing newline, any '##' comment, and surrounding
    whitespace from one line of the interface file."""
    if line.endswith('\n'):
        line = line[:-1]
    comment_at = line.find("##")
    if comment_at != -1:
        line = line[:comment_at]
    return line.strip()
def decodeFunction(featureVal):
    """Split a 'ret name=value(param1,param2)' declaration into its
    five components (return type, name, value, param1, param2)."""
    retType, signature = featureVal.split(" ", 1)
    nameIdent, params = signature.split("(")
    name, value = nameIdent.split("=")
    params, _ = params.split(")")
    param1, param2 = params.split(",")
    return retType, name, value, param1, param2
def decodeEvent(featureVal):
    """Split a 'ret name=value(...)' event declaration into its
    (return type, name, value) components."""
    retType, signature = featureVal.split(" ", 1)
    nameIdent, params = signature.split("(")
    name, value = nameIdent.split("=")
    return retType, name, value
def decodeParam(p):
    """Decode one parameter declaration of the form '[type [name[=value]]]'.

    Returns a (type, name, value) triple; missing pieces come back as
    empty strings.
    """
    param = p.strip()
    # 'ptype' rather than 'type' so the builtin is not shadowed.
    ptype = ""
    name = ""
    value = ""
    if " " in param:
        ptype, nv = param.split(" ")
        if "=" in nv:
            name, value = nv.split("=")
        else:
            name = nv
    return ptype, name, value
class Face:
    """Parsed representation of a Scintilla.iface interface definition.

    After ReadFromFile: `features` maps each name to a dict describing
    it, `order` preserves declaration order, and `values` / `events`
    record used message and event numbers so duplicates are rejected.
    """

    def __init__(self):
        self.order = []      # feature names in declaration order
        self.features = {}   # name -> attribute dict
        self.values = {}     # message value -> 1, for duplicate detection
        self.events = {}     # event value -> 1, for duplicate detection

    def ReadFromFile(self, name):
        """Parse the .iface file `name`, populating this Face.

        Raises on malformed declarations and on duplicate message or
        event values.
        """
        currentCategory = ""
        currentComment = []
        currentCommentFinished = 0
        # Read everything up front and close the handle promptly; the
        # previous version left the file open and shadowed the 'file'
        # builtin.
        fh = open(name)
        try:
            lines = fh.readlines()
        finally:
            fh.close()
        for line in lines:
            line = sanitiseLine(line)
            if line:
                if line[0] == "#":
                    # '# ' lines accumulate into the comment attached
                    # to the next feature.
                    if line[1] == " ":
                        if currentCommentFinished:
                            currentComment = []
                            currentCommentFinished = 0
                        currentComment.append(line[2:])
                else:
                    currentCommentFinished = 1
                    featureType, featureVal = line.split(" ", 1)
                    if featureType in ["fun", "get", "set"]:
                        try:
                            retType, name, value, param1, param2 = decodeFunction(featureVal)
                        except ValueError:
                            print("Failed to decode %s" % line)
                            raise
                        p1 = decodeParam(param1)
                        p2 = decodeParam(param2)
                        self.features[name] = {
                            "FeatureType": featureType,
                            "ReturnType": retType,
                            "Value": value,
                            "Param1Type": p1[0], "Param1Name": p1[1], "Param1Value": p1[2],
                            "Param2Type": p2[0], "Param2Name": p2[1], "Param2Value": p2[2],
                            "Category": currentCategory, "Comment": currentComment
                        }
                        if value in self.values:
                            raise Exception("Duplicate value " + value + " " + name)
                        self.values[value] = 1
                        self.order.append(name)
                    elif featureType == "evt":
                        retType, name, value = decodeEvent(featureVal)
                        self.features[name] = {
                            "FeatureType": featureType,
                            "ReturnType": retType,
                            "Value": value,
                            "Category": currentCategory, "Comment": currentComment
                        }
                        if value in self.events:
                            raise Exception("Duplicate event " + value + " " + name)
                        self.events[value] = 1
                        self.order.append(name)
                    elif featureType == "cat":
                        currentCategory = featureVal
                    elif featureType == "val":
                        try:
                            name, value = featureVal.split("=", 1)
                        except ValueError:
                            print("Failure %s" % featureVal)
                            raise Exception()
                        self.features[name] = {
                            "FeatureType": featureType,
                            "Category": currentCategory,
                            "Value": value }
                        self.order.append(name)
                    elif featureType == "enu" or featureType == "lex":
                        name, value = featureVal.split("=", 1)
                        self.features[name] = {
                            "FeatureType": featureType,
                            "Category": currentCategory,
                            "Value": value }
                        self.order.append(name)
| gpl-2.0 |
rodrigc/buildbot | master/buildbot/test/unit/db_migrate/test_versions_043_add_changes_parent.py | 6 | 3886 | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import sqlalchemy as sa
from twisted.trial import unittest
from buildbot.test.util import migration
from buildbot.util import sautils
class Migration(migration.MigrateTestMixin, unittest.TestCase):
    """Checks schema migration 042 -> 043, which adds the
    changes.parent_changeids column."""

    def setUp(self):
        return self.setUpMigrateTest()

    def tearDown(self):
        return self.tearDownMigrateTest()

    def create_tables_thd(self, conn):
        """Create the pre-migration (version 042) schema the upgrade
        script will operate on."""
        metadata = sa.MetaData()
        metadata.bind = conn

        patches = sautils.Table(
            'patches', metadata,
            sa.Column('id', sa.Integer, primary_key=True),
            sa.Column('patchlevel', sa.Integer, nullable=False),
            sa.Column('patch_base64', sa.Text, nullable=False),
            sa.Column('patch_author', sa.Text, nullable=False),
            sa.Column('patch_comment', sa.Text, nullable=False),
            sa.Column('subdir', sa.Text),
        )

        sourcestamps = sautils.Table(
            'sourcestamps', metadata,
            sa.Column('id', sa.Integer, primary_key=True),
            sa.Column('ss_hash', sa.String(40), nullable=False),
            sa.Column('branch', sa.String(256)),
            sa.Column('revision', sa.String(256)),
            sa.Column('patchid', sa.Integer, sa.ForeignKey('patches.id')),
            sa.Column('repository', sa.String(length=512), nullable=False,
                      server_default=''),
            sa.Column('codebase', sa.String(256), nullable=False,
                      server_default=sa.DefaultClause("")),
            sa.Column('project', sa.String(length=512), nullable=False,
                      server_default=''),
            sa.Column('created_at', sa.Integer, nullable=False),
        )

        changes = sautils.Table(
            'changes', metadata,
            sa.Column('changeid', sa.Integer, primary_key=True),
            sa.Column('author', sa.String(256), nullable=False),
            sa.Column('comments', sa.Text, nullable=False),
            sa.Column('branch', sa.String(256)),
            sa.Column('revision', sa.String(256)),
            sa.Column('revlink', sa.String(256)),
            sa.Column('when_timestamp', sa.Integer, nullable=False),
            sa.Column('category', sa.String(256)),
            sa.Column('repository', sa.String(length=512), nullable=False,
                      server_default=''),
            sa.Column('codebase', sa.String(256), nullable=False,
                      server_default=sa.DefaultClause("")),
            sa.Column('project', sa.String(length=512), nullable=False,
                      server_default=''),
            sa.Column('sourcestampid', sa.Integer,
                      sa.ForeignKey('sourcestamps.id')),
        )

        # Creation order matters: changes references sourcestamps,
        # which references patches.
        patches.create()
        sourcestamps.create()
        changes.create()

    def test_update(self):
        """Migrating 42 -> 43 must add an Integer parent_changeids
        column to the changes table."""
        def setup_thd(conn):
            self.create_tables_thd(conn)

        def verify_thd(conn):
            metadata = sa.MetaData()
            metadata.bind = conn

            changes = sautils.Table('changes', metadata, autoload=True)
            self.assertIsInstance(changes.c.parent_changeids.type, sa.Integer)

        return self.do_test_migration(42, 43, setup_thd, verify_thd)
yd0str/infernal-twin | build/reportlab/src/reportlab/graphics/widgets/eventcal.py | 34 | 13239 | #see license.txt for license details
#history http://www.reportlab.co.uk/cgi-bin/viewcvs.cgi/public/reportlab/trunk/reportlab/graphics/widgets/eventcal.py
# Event Calendar widget
# author: Andy Robinson
__version__=''' $Id$ '''
__doc__="""This file is a
"""
from reportlab.lib import colors
from reportlab.lib.validators import *
from reportlab.lib.attrmap import *
from reportlab.graphics.shapes import Line, Rect, Polygon, Drawing, Group, String, Circle, Wedge
from reportlab.graphics.charts.textlabels import Label
from reportlab.graphics.widgetbase import Widget
from reportlab.graphics import renderPDF
class EventCalendar(Widget):
    """Draws a one-day conference schedule: a time column on the left
    and one column per track, with each talk rendered as a labelled
    rectangle positioned and sized by its start time and duration.

    Talks are supplied in ``data`` as tuples of
    (title, speaker, trackId, day, start-hours, duration-hours);
    trackId is 1-based or None for all-track events.
    """
    def __init__(self):
        self.x = 0
        self.y = 0
        self.width = 300
        self.height = 150
        self.timeColWidth = None  # if declared, use it; otherwise auto-size.
        self.trackRowHeight = 20
        self.data = []  # list of Event objects
        self.trackNames = None
        self.startTime = None  # displays ALL data on day if not set
        self.endTime = None  # displays ALL data on day if not set
        self.day = 0

        # we will keep any internal geometry variables
        # here.  These are computed by computeSize(),
        # which is the first thing done when drawing.
        self._talksVisible = []  # subset of data which will get plotted, cache
        self._startTime = None
        self._endTime = None
        self._trackCount = 0
        self._colWidths = []
        self._colLeftEdges = []  # left edge of each column

    def computeSize(self):
        "Called at start of draw.  Sets various column widths"
        self._talksVisible = self.getRelevantTalks(self.data)
        self._trackCount = len(self.getAllTracks())
        self.computeStartAndEndTimes()
        self._colLeftEdges = [self.x]
        if self.timeColWidth is None:
            # Share the width evenly between the time column and tracks.
            w = self.width / (1 + self._trackCount)
            self._colWidths = [w] * (1+ self._trackCount)
            for i in range(self._trackCount):
                self._colLeftEdges.append(self._colLeftEdges[-1] + w)
        else:
            # Fixed time column; tracks split the remainder.
            self._colWidths = [self.timeColWidth]
            w = (self.width - self.timeColWidth) / self._trackCount
            for i in range(self._trackCount):
                self._colWidths.append(w)
                self._colLeftEdges.append(self._colLeftEdges[-1] + w)

    def computeStartAndEndTimes(self):
        "Work out first and last times to display"
        if self.startTime:
            self._startTime = self.startTime
        else:
            # No explicit start: use the earliest visible talk.
            for (title, speaker, trackId, day, start, duration) in self._talksVisible:
                if self._startTime is None:  # first one
                    self._startTime = start
                else:
                    if start < self._startTime:
                        self._startTime = start

        if self.endTime:
            self._endTime = self.endTime
        else:
            # No explicit end: use the latest talk finish time.
            for (title, speaker, trackId, day, start, duration) in self._talksVisible:
                if self._endTime is None:  # first one
                    self._endTime = start + duration
                else:
                    if start + duration > self._endTime:
                        self._endTime = start + duration

    def getAllTracks(self):
        """Return the sorted list of distinct track ids used in data
        (None entries, i.e. all-track events, are excluded)."""
        tracks = []
        for (title, speaker, trackId, day, hours, duration) in self.data:
            if trackId is not None:
                if trackId not in tracks:
                    tracks.append(trackId)
        tracks.sort()
        return tracks

    def getRelevantTalks(self, talkList):
        "Scans for tracks actually used"
        used = []
        for talk in talkList:
            (title, speaker, trackId, day, hours, duration) = talk
            assert trackId != 0, "trackId must be None or 1,2,3... zero not allowed!"
            if day == self.day:
                # Keep talks overlapping the configured time window.
                if (((self.startTime is None) or ((hours + duration) >= self.startTime))
                    and ((self.endTime is None) or (hours <= self.endTime))):
                    used.append(talk)
        return used

    def scaleTime(self, theTime):
        "Return y-value corresponding to times given"
        axisHeight = self.height - self.trackRowHeight
        # compute fraction between 0 and 1, 0 is at start of period
        proportionUp = ((theTime - self._startTime) / (self._endTime - self._startTime))
        # Later times map to lower y (the axis runs top-down).
        y = self.y + axisHeight - (axisHeight * proportionUp)
        return y

    def getTalkRect(self, startTime, duration, trackId, text):
        "Return shapes for a specific talk"
        g = Group()
        y_bottom = self.scaleTime(startTime + duration)
        y_top = self.scaleTime(startTime)
        y_height = y_top - y_bottom
        if trackId is None:
            #spans all columns
            x = self._colLeftEdges[1]
            width = self.width - self._colWidths[0]
        else:
            #trackId is 1-based and these arrays have the margin info in column
            #zero, so no need to add 1
            x = self._colLeftEdges[trackId]
            width = self._colWidths[trackId]

        lab = Label()
        lab.setText(text)
        lab.setOrigin(x + 0.5*width, y_bottom+0.5*y_height)
        lab.boxAnchor = 'c'
        lab.width = width
        lab.height = y_height
        lab.fontSize = 6

        r = Rect(x, y_bottom, width, y_height, fillColor=colors.cyan)
        g.add(r)
        g.add(lab)

        #now for a label
        # would expect to color-code and add text
        return g

    def draw(self):
        """Assemble and return the full Group: time column, track
        header row, then one rect per visible talk."""
        self.computeSize()
        g = Group()
        # time column
        g.add(Rect(self.x, self.y, self._colWidths[0], self.height - self.trackRowHeight, fillColor=colors.cornsilk))
        # track headers
        x = self.x + self._colWidths[0]
        y = self.y + self.height - self.trackRowHeight
        for trk in range(self._trackCount):
            wid = self._colWidths[trk+1]
            r = Rect(x, y, wid, self.trackRowHeight, fillColor=colors.yellow)
            s = String(x + 0.5*wid, y, 'Track %d' % trk, align='middle')
            g.add(r)
            g.add(s)
            x = x + wid
        for talk in self._talksVisible:
            (title, speaker, trackId, day, start, duration) = talk
            r = self.getTalkRect(start, duration, trackId, title + '\n' + speaker)
            g.add(r)
        return g
def test():
    "Make a conference event for day 1 of UP Python 2003"
    d = Drawing(400,200)

    cal = EventCalendar()
    cal.x = 50
    cal.y = 25
    # NOTE(review): several entries below carry day values (e.g. 2)
    # that do not match the keynote day of their section -- confirm
    # against the original conference programme.
    cal.data = [
        # these might be better as objects instead of tuples, since I
        # predict a large number of "optionsl" variables to affect
        # formatting in future.

        #title, speaker, track id, day, start time (hrs), duration (hrs)
        # track ID is 1-based not zero-based!
        ('Keynote: Why design another programming language?', 'Guido van Rossum', None, 1, 9.0, 1.0),

        ('Siena Web Service Architecture', 'Marc-Andre Lemburg', 1, 1, 10.5, 1.5),
        ('Extreme Programming in Python', 'Chris Withers', 2, 1, 10.5, 1.5),
        ('Pattern Experiences in C++', 'Mark Radford', 3, 1, 10.5, 1.5),
        ('What is the Type of std::toupper()', 'Gabriel Dos Reis', 4, 1, 10.5, 1.5),
        ('Linguistic Variables: Clear Thinking with Fuzzy Logic ', 'Walter Banks', 5, 1, 10.5, 1.5),

        ('lunch, short presentations, vendor presentations', '', None, 1, 12.0, 2.0),

        ("CORBA? Isn't that obsolete", 'Duncan Grisby', 1, 1, 14.0, 1.5),
        ("Python Design Patterns", 'Duncan Booth', 2, 1, 14.0, 1.5),
        ("Inside Security Checks and Safe Exceptions", 'Brandon Bray', 3, 1, 14.0, 1.5),
        ("Studying at a Distance", 'Panel Discussion, Panel to include Alan Lenton & Francis Glassborow', 4, 1, 14.0, 1.5),
        ("Coding Standards - Given the ANSI C Standard why do I still need a coding Standard", 'Randy Marques', 5, 1, 14.0, 1.5),

        ("RESTful Python", 'Hamish Lawson', 1, 1, 16.0, 1.5),
        ("Parsing made easier - a radical old idea", 'Andrew Koenig', 2, 1, 16.0, 1.5),
        ("C++ & Multimethods", 'Julian Smith', 3, 1, 16.0, 1.5),
        ("C++ Threading", 'Kevlin Henney', 4, 1, 16.0, 1.5),
        ("The Organisation Strikes Back", 'Alan Griffiths & Sarah Lees', 5, 1, 16.0, 1.5),

        ('Birds of a Feather meeting', '', None, 1, 17.5, 2.0),

        ('Keynote: In the Spirit of C', 'Greg Colvin', None, 2, 9.0, 1.0),

        ('The Infinite Filing Cabinet - object storage in Python', 'Jacob Hallen', 1, 2, 10.5, 1.5),
        ('Introduction to Python and Jython for C++ and Java Programmers', 'Alex Martelli', 2, 2, 10.5, 1.5),
        ('Template metaprogramming in Haskell', 'Simon Peyton Jones', 3, 2, 10.5, 1.5),
        ('Plenty People Programming: C++ Programming in a Group, Workshop with a difference', 'Nico Josuttis', 4, 2, 10.5, 1.5),
        ('Design and Implementation of the Boost Graph Library', 'Jeremy Siek', 5, 2, 10.5, 1.5),

        ('lunch, short presentations, vendor presentations', '', None, 2, 12.0, 2.0),

        ("Building GUI Applications with PythonCard and PyCrust", 'Andy Todd', 1, 2, 14.0, 1.5),
        ("Integrating Python, C and C++", 'Duncan Booth', 2, 2, 14.0, 1.5),
        ("Secrets and Pitfalls of Templates", 'Nicolai Josuttis & David Vandevoorde', 3, 2, 14.0, 1.5),
        ("Being a Mentor", 'Panel Discussion, Panel to include Alan Lenton & Francis Glassborow', 4, 2, 14.0, 1.5),
        ("The Embedded C Extensions to C", 'Willem Wakker', 5, 2, 14.0, 1.5),

        ("Lightning Talks", 'Paul Brian', 1, 2, 16.0, 1.5),
        ("Scripting Java Applications with Jython", 'Anthony Eden', 2, 2, 16.0, 1.5),
        ("Metaprogramming and the Boost Metaprogramming Library", 'David Abrahams', 3, 2, 16.0, 1.5),
        ("A Common Vendor ABI for C++ -- GCC's why, what and not", 'Nathan Sidwell & Gabriel Dos Reis', 4, 2, 16.0, 1.5),
        ("The Timing and Cost of Choices", 'Hubert Matthews', 5, 2, 16.0, 1.5),

        ('Birds of a Feather meeting', '', None, 2, 17.5, 2.0),

        ('Keynote: The Cost of C & C++ Compatibility', 'Andy Koenig', None, 3, 9.0, 1.0),

        ('Prying Eyes: Generic Observer Implementations in C++', 'Andrei Alexandrescu', 1, 2, 10.5, 1.5),
        ('The Roadmap to Generative Programming With C++', 'Ulrich Eisenecker', 2, 2, 10.5, 1.5),
        ('Design Patterns in C++ and C# for the Common Language Runtime', 'Brandon Bray', 3, 2, 10.5, 1.5),
        ('Extreme Hour (XH): (workshop) - Jutta Eckstein and Nico Josuttis', 'Jutta Ecstein', 4, 2, 10.5, 1.5),
        ('The Lambda Library : Unnamed Functions for C++', 'Jaako Jarvi', 5, 2, 10.5, 1.5),

        ('lunch, short presentations, vendor presentations', '', None, 3, 12.0, 2.0),

        ('Reflective Metaprogramming', 'Daveed Vandevoorde', 1, 3, 14.0, 1.5),
        ('Advanced Template Issues and Solutions (double session)', 'Herb Sutter',2, 3, 14.0, 3),
        ('Concurrent Programming in Java (double session)', 'Angelika Langer', 3, 3, 14.0, 3),
        ('What can MISRA-C (2nd Edition) do for us?', 'Chris Hills', 4, 3, 14.0, 1.5),
        ('C++ Metaprogramming Concepts and Results', 'Walter E Brown', 5, 3, 14.0, 1.5),

        ('Binding C++ to Python with the Boost Python Library', 'David Abrahams', 1, 3, 16.0, 1.5),
        ('Using Aspect Oriented Programming for Enterprise Application Integration', 'Arno Schmidmeier', 4, 3, 16.0, 1.5),
        ('Defective C++', 'Marc Paterno', 5, 3, 16.0, 1.5),

        ("Speakers' Banquet & Birds of a Feather meeting", '', None, 3, 17.5, 2.0),

        ('Keynote: The Internet, Software and Computers - A Report Card', 'Alan Lenton', None, 4, 9.0, 1.0),

        ('Multi-Platform Software Development; Lessons from the Boost libraries', 'Beman Dawes', 1, 5, 10.5, 1.5),
        ('The Stability of the C++ ABI', 'Steve Clamage', 2, 5, 10.5, 1.5),
        ('Generic Build Support - A Pragmatic Approach to the Software Build Process', 'Randy Marques', 3, 5, 10.5, 1.5),
        ('How to Handle Project Managers: a survival guide', 'Barb Byro', 4, 5, 10.5, 1.5),

        ('lunch, ACCU AGM', '', None, 5, 12.0, 2.0),

        ('Sauce: An OO recursive descent parser; its design and implementation.', 'Jon Jagger', 1, 5, 14.0, 1.5),
        ('GNIRTS ESAC REWOL - Bringing the UNIX filters to the C++ iostream library.', 'JC van Winkel', 2, 5, 14.0, 1.5),
        ('Pattern Writing: Live and Direct', 'Frank Buschmann & Kevlin Henney', 3, 5, 14.0, 3.0),
        ('The Future of Programming Languages - A Goldfish Bowl', 'Francis Glassborow and friends', 3, 5, 14.0, 1.5),

        ('Honey, I Shrunk the Threads: Compile-time checked multithreaded transactions in C++', 'Andrei Alexandrescu', 1, 5, 16.0, 1.5),
        ('Fun and Functionality with Functors', 'Lois Goldthwaite', 2, 5, 16.0, 1.5),
        ('Agile Enough?', 'Alan Griffiths', 4, 5, 16.0, 1.5),

        ("Conference Closure: A brief plenary session", '', None, 5, 17.5, 0.5),
        ]

    #return cal
    cal.day = 1

    d.add(cal)

    # 'format' here shadows the builtin of the same name, but only
    # locally within this demo function.
    for format in ['pdf']:#,'gif','png']:
        out = d.asString(format)
        open('eventcal.%s' % format, 'wb').write(out)
        print('saved eventcal.%s' % format)

if __name__=='__main__':
    test()
| gpl-3.0 |
hubert667/AIR | build/kombu/kombu/entity.py | 4 | 26357 | """
kombu.entity
================
Exchange and Queue declarations.
"""
from __future__ import absolute_import
from .abstract import MaybeChannelBound
from .exceptions import ContentDisallowed
from .serialization import prepare_accept_content
# AMQP delivery mode 1: the message is held in broker memory only and is
# lost if the server dies or restarts.
TRANSIENT_DELIVERY_MODE = 1
# AMQP delivery mode 2: the message is stored both in memory and on disk,
# so it survives a broker restart.
PERSISTENT_DELIVERY_MODE = 2
# Map of string aliases to the numeric AMQP delivery modes above; used by
# Exchange.attrs to normalize user-supplied delivery_mode values.
DELIVERY_MODES = {'transient': TRANSIENT_DELIVERY_MODE,
                  'persistent': PERSISTENT_DELIVERY_MODE}
# Public API of this module.
__all__ = ['Exchange', 'Queue', 'binding']
def pretty_bindings(bindings):
    """Return a ``[a, b, ...]`` style listing of *bindings* for reprs."""
    rendered = ', '.join(str(entry) for entry in bindings)
    return '[{0}]'.format(rendered)
class Exchange(MaybeChannelBound):
    """An Exchange declaration.

    :keyword name: See :attr:`name`.
    :keyword type: See :attr:`type`.
    :keyword channel: See :attr:`channel`.
    :keyword durable: See :attr:`durable`.
    :keyword auto_delete: See :attr:`auto_delete`.
    :keyword delivery_mode: See :attr:`delivery_mode`.
    :keyword arguments: See :attr:`arguments`.

    .. attribute:: name

        Name of the exchange. Default is no name (the default exchange).

    .. attribute:: type

        *This description of AMQP exchange types was shamelessly stolen
        from the blog post `AMQP in 10 minutes: Part 4`_ by
        Rajith Attapattu. Reading this article is recommended if you're
        new to amqp.*

        "AMQP defines four default exchange types (routing algorithms) that
        covers most of the common messaging use cases. An AMQP broker can
        also define additional exchange types, so see your broker
        manual for more information about available exchange types.

        * `direct` (*default*)

            Direct match between the routing key in the message, and the
            routing criteria used when a queue is bound to this exchange.

        * `topic`

            Wildcard match between the routing key and the routing pattern
            specified in the exchange/queue binding.  The routing key is
            treated as zero or more words delimited by `"."` and
            supports special wildcard characters. `"*"` matches a
            single word and `"#"` matches zero or more words.

        * `fanout`

            Queues are bound to this exchange with no arguments. Hence any
            message sent to this exchange will be forwarded to all queues
            bound to this exchange.

        * `headers`

            Queues are bound to this exchange with a table of arguments
            containing headers and values (optional). A special argument
            named "x-match" determines the matching algorithm, where
            `"all"` implies an `AND` (all pairs must match) and
            `"any"` implies `OR` (at least one pair must match).

            :attr:`arguments` is used to specify the arguments.

        .. _`AMQP in 10 minutes: Part 4`:
            http://bit.ly/amqp-exchange-types

    .. attribute:: channel

        The channel the exchange is bound to (if bound).

    .. attribute:: durable

        Durable exchanges remain active when a server restarts. Non-durable
        exchanges (transient exchanges) are purged when a server restarts.
        Default is :const:`True`.

    .. attribute:: auto_delete

        If set, the exchange is deleted when all queues have finished
        using it. Default is :const:`False`.

    .. attribute:: delivery_mode

        The default delivery mode used for messages. The value is an integer,
        or alias string.

        * 1 or `"transient"`

            The message is transient. Which means it is stored in
            memory only, and is lost if the server dies or restarts.

        * 2 or "persistent" (*default*)

            The message is persistent. Which means the message is
            stored both in-memory, and on disk, and therefore
            preserved if the server dies or restarts.

        The default value is 2 (persistent).

    .. attribute:: arguments

        Additional arguments to specify when the exchange is declared.

    """
    TRANSIENT_DELIVERY_MODE = TRANSIENT_DELIVERY_MODE
    PERSISTENT_DELIVERY_MODE = PERSISTENT_DELIVERY_MODE

    name = ''
    type = 'direct'
    durable = True
    auto_delete = False
    passive = False
    delivery_mode = PERSISTENT_DELIVERY_MODE

    # Serializable attributes; the second tuple item is the coercion
    # applied when restoring the attribute from a transport payload.
    attrs = (
        ('name', None),
        ('type', None),
        ('arguments', None),
        ('durable', bool),
        ('passive', bool),
        ('auto_delete', bool),
        ('delivery_mode', lambda m: DELIVERY_MODES.get(m) or m),
    )

    def __init__(self, name='', type='', channel=None, **kwargs):
        super(Exchange, self).__init__(**kwargs)
        self.name = name or self.name
        self.type = type or self.type
        self.maybe_bind(channel)

    def __hash__(self):
        # Hash on the name only; equal exchanges always share a name, so
        # the __eq__/__hash__ contract is preserved.
        return hash('E|%s' % (self.name, ))

    def declare(self, nowait=False, passive=None):
        """Declare the exchange.

        Creates the exchange on the broker.

        :keyword nowait: If set the server will not respond, and a
            response will not be waited for. Default is :const:`False`.

        """
        passive = self.passive if passive is None else passive
        # The default (nameless) exchange always exists and must not be
        # declared explicitly.
        if self.name:
            return self.channel.exchange_declare(
                exchange=self.name, type=self.type, durable=self.durable,
                auto_delete=self.auto_delete, arguments=self.arguments,
                nowait=nowait, passive=passive,
            )

    def bind_to(self, exchange='', routing_key='',
                arguments=None, nowait=False, **kwargs):
        """Binds the exchange to another exchange.

        :keyword nowait: If set the server will not respond, and the call
            will not block waiting for a response. Default is :const:`False`.

        """
        if isinstance(exchange, Exchange):
            exchange = exchange.name
        return self.channel.exchange_bind(destination=self.name,
                                          source=exchange,
                                          routing_key=routing_key,
                                          nowait=nowait,
                                          arguments=arguments)

    def unbind_from(self, source='', routing_key='',
                    nowait=False, arguments=None):
        """Delete previously created exchange binding from the server."""
        if isinstance(source, Exchange):
            source = source.name
        return self.channel.exchange_unbind(destination=self.name,
                                            source=source,
                                            routing_key=routing_key,
                                            nowait=nowait,
                                            arguments=arguments)

    def Message(self, body, delivery_mode=None, priority=None,
                content_type=None, content_encoding=None,
                properties=None, headers=None):
        """Create message instance to be sent with :meth:`publish`.

        :param body: Message body.

        :keyword delivery_mode: Set custom delivery mode. Defaults
            to :attr:`delivery_mode`.

        :keyword priority: Message priority, 0 to 9. (currently not
            supported by RabbitMQ).

        :keyword content_type: The messages content_type. If content_type
            is set, no serialization occurs as it is assumed this is either
            a binary object, or you've done your own serialization.
            Leave blank if using built-in serialization as our library
            properly sets content_type.

        :keyword content_encoding: The character set in which this object
            is encoded. Use "binary" if sending in raw binary objects.
            Leave blank if using built-in serialization as our library
            properly sets content_encoding.

        :keyword properties: Message properties.

        :keyword headers: Message headers.

        """
        properties = {} if properties is None else properties
        dm = delivery_mode or self.delivery_mode
        # Normalize a string alias ('transient'/'persistent') to its
        # numeric AMQP value; numeric modes pass through unchanged.
        properties['delivery_mode'] = \
            DELIVERY_MODES[dm] if (dm != 2 and dm != 1) else dm
        return self.channel.prepare_message(body,
                                            properties=properties,
                                            priority=priority,
                                            content_type=content_type,
                                            content_encoding=content_encoding,
                                            headers=headers)

    def publish(self, message, routing_key=None, mandatory=False,
                immediate=False, exchange=None):
        """Publish message.

        :param message: :meth:`Message` instance to publish.
        :param routing_key: Routing key.
        :param mandatory: Currently not supported.
        :param immediate: Currently not supported.

        """
        exchange = exchange or self.name
        return self.channel.basic_publish(message,
                                          exchange=exchange,
                                          routing_key=routing_key,
                                          mandatory=mandatory,
                                          immediate=immediate)

    def delete(self, if_unused=False, nowait=False):
        """Delete the exchange declaration on server.

        :keyword if_unused: Delete only if the exchange has no bindings.
            Default is :const:`False`.

        :keyword nowait: If set the server will not respond, and a
            response will not be waited for. Default is :const:`False`.

        """
        return self.channel.exchange_delete(exchange=self.name,
                                            if_unused=if_unused,
                                            nowait=nowait)

    def binding(self, routing_key='', arguments=None, unbind_arguments=None):
        return binding(self, routing_key, arguments, unbind_arguments)

    def __eq__(self, other):
        if isinstance(other, Exchange):
            return (self.name == other.name and
                    self.type == other.type and
                    self.arguments == other.arguments and
                    self.durable == other.durable and
                    self.auto_delete == other.auto_delete and
                    self.delivery_mode == other.delivery_mode)
        return NotImplemented

    def __ne__(self, other):
        # BUGFIX: the previous implementation returned ``not NotImplemented``
        # (i.e. False) when comparing against a non-Exchange, instead of
        # letting Python fall back to the reflected comparison.
        ret = self.__eq__(other)
        if ret is NotImplemented:
            return ret
        return not ret

    def __repr__(self):
        return super(Exchange, self).__repr__(str(self))

    def __str__(self):
        return 'Exchange %s(%s)' % (self.name or repr(''), self.type)

    @property
    def can_cache_declaration(self):
        # Only durable, non-auto-delete exchanges are guaranteed to still
        # exist later, so only their declaration may be cached.
        return self.durable and not self.auto_delete
class binding(object):
    """Represents a queue or exchange binding.

    :keyword exchange: Exchange to bind to.
    :keyword routing_key: Routing key used as binding key.
    :keyword arguments: Arguments for bind operation.
    :keyword unbind_arguments: Arguments for unbind operation.

    """

    def __init__(self, exchange=None, routing_key='',
                 arguments=None, unbind_arguments=None):
        self.exchange = exchange
        self.routing_key = routing_key
        self.arguments = arguments
        self.unbind_arguments = unbind_arguments

    def declare(self, channel, nowait=False):
        """Declare destination exchange."""
        exchange = self.exchange
        # The default (nameless) exchange never needs declaring.
        if exchange and exchange.name:
            exchange(channel).declare(nowait=nowait)

    def bind(self, entity, nowait=False):
        """Bind entity to this binding."""
        entity.bind_to(
            exchange=self.exchange, routing_key=self.routing_key,
            arguments=self.arguments, nowait=nowait,
        )

    def unbind(self, entity, nowait=False):
        """Unbind entity from this binding."""
        entity.unbind_from(
            self.exchange, routing_key=self.routing_key,
            arguments=self.unbind_arguments, nowait=nowait,
        )

    def __repr__(self):
        return '<binding: {0}>'.format(self)

    def __str__(self):
        return '{0}->{1}'.format(self.exchange.name, self.routing_key)
class Queue(MaybeChannelBound):
    """A Queue declaration.

    :keyword name: See :attr:`name`.
    :keyword exchange: See :attr:`exchange`.
    :keyword routing_key: See :attr:`routing_key`.
    :keyword channel: See :attr:`channel`.
    :keyword durable: See :attr:`durable`.
    :keyword exclusive: See :attr:`exclusive`.
    :keyword auto_delete: See :attr:`auto_delete`.
    :keyword queue_arguments: See :attr:`queue_arguments`.
    :keyword binding_arguments: See :attr:`binding_arguments`.
    :keyword on_declared: See :attr:`on_declared`

    .. attribute:: name

        Name of the queue. Default is no name (default queue destination).

    .. attribute:: exchange

        The :class:`Exchange` the queue binds to.

    .. attribute:: routing_key

        The routing key (if any), also called *binding key*.

        The interpretation of the routing key depends on
        the :attr:`Exchange.type`.

        * direct exchange

            Matches if the routing key property of the message and
            the :attr:`routing_key` attribute are identical.

        * fanout exchange

            Always matches, even if the binding does not have a key.

        * topic exchange

            Matches the routing key property of the message by a primitive
            pattern matching scheme. The message routing key then consists
            of words separated by dots (`"."`, like domain names), and
            two special characters are available; star (`"*"`) and hash
            (`"#"`). The star matches any word, and the hash matches
            zero or more words. For example `"*.stock.#"` matches the
            routing keys `"usd.stock"` and `"eur.stock.db"` but not
            `"stock.nasdaq"`.

    .. attribute:: channel

        The channel the Queue is bound to (if bound).

    .. attribute:: durable

        Durable queues remain active when a server restarts.
        Non-durable queues (transient queues) are purged if/when
        a server restarts.
        Note that durable queues do not necessarily hold persistent
        messages, although it does not make sense to send
        persistent messages to a transient queue.

        Default is :const:`True`.

    .. attribute:: exclusive

        Exclusive queues may only be consumed from by the
        current connection. Setting the 'exclusive' flag
        always implies 'auto-delete'.

        Default is :const:`False`.

    .. attribute:: auto_delete

        If set, the queue is deleted when all consumers have
        finished using it. Last consumer can be cancelled
        either explicitly or because its channel is closed. If
        there was no consumer ever on the queue, it won't be
        deleted.

    .. attribute:: queue_arguments

        Additional arguments used when declaring the queue.

    .. attribute:: binding_arguments

        Additional arguments used when binding the queue.

    .. attribute:: alias

        Unused in Kombu, but applications can take advantage of this.
        For example to give alternate names to queues with automatically
        generated queue names.

    .. attribute:: on_declared

        Optional callback to be applied when the queue has been
        declared (the ``queue_declare`` operation is complete).
        This must be a function with a signature that accepts at least 3
        positional arguments: ``(name, messages, consumers)``.

    """
    ContentDisallowed = ContentDisallowed

    name = ''
    exchange = Exchange('')
    routing_key = ''

    durable = True
    exclusive = False
    auto_delete = False
    no_ack = False

    # Serializable attributes; the second tuple item is the coercion
    # applied when restoring the attribute from a transport payload.
    attrs = (
        ('name', None),
        ('exchange', None),
        ('routing_key', None),
        ('queue_arguments', None),
        ('binding_arguments', None),
        ('durable', bool),
        ('exclusive', bool),
        ('auto_delete', bool),
        ('no_ack', None),
        ('alias', None),
        ('bindings', list),
    )

    def __init__(self, name='', exchange=None, routing_key='',
                 channel=None, bindings=None, on_declared=None,
                 **kwargs):
        super(Queue, self).__init__(**kwargs)
        self.name = name or self.name
        self.exchange = exchange or self.exchange
        self.routing_key = routing_key or self.routing_key
        self.bindings = set(bindings or [])
        self.on_declared = on_declared

        # allows Queue('name', [binding(...), binding(...), ...])
        if isinstance(exchange, (list, tuple, set)):
            self.bindings |= set(exchange)
        if self.bindings:
            self.exchange = None

        # exclusive implies auto-delete.
        if self.exclusive:
            self.auto_delete = True
        self.maybe_bind(channel)

    def bind(self, channel):
        # Preserve the callback across the copy made by the base class.
        on_declared = self.on_declared
        bound = super(Queue, self).bind(channel)
        bound.on_declared = on_declared
        return bound

    def __hash__(self):
        # Hash on the name only; equal queues always share a name, so the
        # __eq__/__hash__ contract is preserved.
        return hash('Q|%s' % (self.name, ))

    def when_bound(self):
        if self.exchange:
            self.exchange = self.exchange(self.channel)

    def declare(self, nowait=False):
        """Declares the queue, the exchange and binds the queue to
        the exchange."""
        # - declare main binding.
        if self.exchange:
            self.exchange.declare(nowait)
        self.queue_declare(nowait, passive=False)

        if self.exchange and self.exchange.name:
            self.queue_bind(nowait)

        # - declare extra/multi-bindings.
        for B in self.bindings:
            B.declare(self.channel)
            B.bind(self, nowait=nowait)
        return self.name

    def queue_declare(self, nowait=False, passive=False):
        """Declare queue on the server.

        :keyword nowait: Do not wait for a reply.
        :keyword passive: If set, the server will not create the queue.
            The client can use this to check whether a queue exists
            without modifying the server state.

        """
        ret = self.channel.queue_declare(queue=self.name,
                                         passive=passive,
                                         durable=self.durable,
                                         exclusive=self.exclusive,
                                         auto_delete=self.auto_delete,
                                         arguments=self.queue_arguments,
                                         nowait=nowait)
        # Anonymous queues get their server-generated name from the reply.
        if not self.name:
            self.name = ret[0]
        if self.on_declared:
            self.on_declared(*ret)
        return ret

    def queue_bind(self, nowait=False):
        """Create the queue binding on the server."""
        return self.bind_to(self.exchange, self.routing_key,
                            self.binding_arguments, nowait=nowait)

    def bind_to(self, exchange='', routing_key='',
                arguments=None, nowait=False):
        if isinstance(exchange, Exchange):
            exchange = exchange.name
        return self.channel.queue_bind(queue=self.name,
                                       exchange=exchange,
                                       routing_key=routing_key,
                                       arguments=arguments,
                                       nowait=nowait)

    def get(self, no_ack=None, accept=None):
        """Poll the server for a new message.

        Must return the message if a message was available,
        or :const:`None` otherwise.

        :keyword no_ack: If enabled the broker will automatically
            ack messages.
        :keyword accept: Custom list of accepted content types.

        This method provides direct access to the messages in a
        queue using a synchronous dialogue, designed for
        specific types of applications where synchronous functionality
        is more important than performance.

        """
        no_ack = self.no_ack if no_ack is None else no_ack
        message = self.channel.basic_get(queue=self.name, no_ack=no_ack)
        if message is not None:
            m2p = getattr(self.channel, 'message_to_python', None)
            if m2p:
                message = m2p(message)
            if message.errors:
                message._reraise_error()
            message.accept = prepare_accept_content(accept)
        return message

    def purge(self, nowait=False):
        """Remove all ready messages from the queue."""
        return self.channel.queue_purge(queue=self.name,
                                        nowait=nowait) or 0

    def consume(self, consumer_tag='', callback=None,
                no_ack=None, nowait=False):
        """Start a queue consumer.

        Consumers last as long as the channel they were created on, or
        until the client cancels them.

        :keyword consumer_tag: Unique identifier for the consumer. The
          consumer tag is local to a connection, so two clients
          can use the same consumer tags. If this field is empty
          the server will generate a unique tag.

        :keyword no_ack: If enabled the broker will automatically ack
            messages.

        :keyword nowait: Do not wait for a reply.

        :keyword callback: callback called for each delivered message

        """
        if no_ack is None:
            no_ack = self.no_ack
        return self.channel.basic_consume(queue=self.name,
                                          no_ack=no_ack,
                                          consumer_tag=consumer_tag or '',
                                          callback=callback,
                                          nowait=nowait)

    def cancel(self, consumer_tag):
        """Cancel a consumer by consumer tag."""
        return self.channel.basic_cancel(consumer_tag)

    def delete(self, if_unused=False, if_empty=False, nowait=False):
        """Delete the queue.

        :keyword if_unused: If set, the server will only delete the queue
            if it has no consumers. A channel error will be raised
            if the queue has consumers.

        :keyword if_empty: If set, the server will only delete the queue
            if it is empty. If it is not empty a channel error will be raised.

        :keyword nowait: Do not wait for a reply.

        """
        return self.channel.queue_delete(queue=self.name,
                                         if_unused=if_unused,
                                         if_empty=if_empty,
                                         nowait=nowait)

    def queue_unbind(self, arguments=None, nowait=False):
        return self.unbind_from(self.exchange, self.routing_key,
                                arguments, nowait)

    def unbind_from(self, exchange='', routing_key='',
                    arguments=None, nowait=False):
        """Unbind queue by deleting the binding from the server."""
        # BUGFIX: accept exchange given by name (a plain string), matching
        # the behavior of :meth:`bind_to`; previously this unconditionally
        # read ``exchange.name`` and crashed on strings.
        if isinstance(exchange, Exchange):
            exchange = exchange.name
        return self.channel.queue_unbind(queue=self.name,
                                         exchange=exchange,
                                         routing_key=routing_key,
                                         arguments=arguments,
                                         nowait=nowait)

    def __eq__(self, other):
        if isinstance(other, Queue):
            return (self.name == other.name and
                    self.exchange == other.exchange and
                    self.routing_key == other.routing_key and
                    self.queue_arguments == other.queue_arguments and
                    self.binding_arguments == other.binding_arguments and
                    self.durable == other.durable and
                    self.exclusive == other.exclusive and
                    self.auto_delete == other.auto_delete)
        return NotImplemented

    def __ne__(self, other):
        # BUGFIX: the previous implementation returned ``not NotImplemented``
        # (i.e. False) when comparing against a non-Queue, instead of
        # letting Python fall back to the reflected comparison.
        ret = self.__eq__(other)
        if ret is NotImplemented:
            return ret
        return not ret

    def __repr__(self):
        s = super(Queue, self).__repr__
        if self.bindings:
            return s('Queue {0.name} -> {bindings}'.format(
                self, bindings=pretty_bindings(self.bindings),
            ))
        return s(
            'Queue {0.name} -> {0.exchange!r} -> {0.routing_key}'.format(
                self))

    @property
    def can_cache_declaration(self):
        # Only durable, non-auto-delete queues are guaranteed to still
        # exist later, so only their declaration may be cached.
        return self.durable and not self.auto_delete

    @classmethod
    def from_dict(cls, queue, **options):
        """Construct a bound pair of :class:`Exchange` and queue from a
        flat options dict, with ``exchange_*``/``queue_*`` prefixed keys
        overriding the shared ``durable``/``auto_delete`` defaults."""
        binding_key = options.get('binding_key') or options.get('routing_key')

        e_durable = options.get('exchange_durable')
        if e_durable is None:
            e_durable = options.get('durable')

        e_auto_delete = options.get('exchange_auto_delete')
        if e_auto_delete is None:
            e_auto_delete = options.get('auto_delete')

        q_durable = options.get('queue_durable')
        if q_durable is None:
            q_durable = options.get('durable')

        q_auto_delete = options.get('queue_auto_delete')
        if q_auto_delete is None:
            q_auto_delete = options.get('auto_delete')

        e_arguments = options.get('exchange_arguments')
        q_arguments = options.get('queue_arguments')
        b_arguments = options.get('binding_arguments')
        bindings = options.get('bindings')

        exchange = Exchange(options.get('exchange'),
                            type=options.get('exchange_type'),
                            delivery_mode=options.get('delivery_mode'),
                            routing_key=options.get('routing_key'),
                            durable=e_durable,
                            auto_delete=e_auto_delete,
                            arguments=e_arguments)
        return cls(queue,
                   exchange=exchange,
                   routing_key=binding_key,
                   durable=q_durable,
                   exclusive=options.get('exclusive'),
                   auto_delete=q_auto_delete,
                   no_ack=options.get('no_ack'),
                   queue_arguments=q_arguments,
                   binding_arguments=b_arguments,
                   bindings=bindings)
| gpl-3.0 |
antsmc2/mics | survey/tests/forms/test_location_details_form.py | 2 | 1673 | from django.test import TestCase
from survey.forms.location_details import LocationDetailsForm
class LocationDetailsFormTest(TestCase):
    """Validation tests for :class:`LocationDetailsForm`."""

    def test_should_know_the_fields(self):
        details_form = LocationDetailsForm()
        fields = ['required', 'has_code', 'length_of_code', 'levels']
        [self.assertIn(field, details_form.fields) for field in fields]

    def test_should_be_valid_if_all_fields_given(self):
        data = {
            'levels': 'Region',
            'required': True,
            'has_code': True,
            'length_of_code': 3
        }
        details_form = LocationDetailsForm(data=data)
        self.assertTrue(details_form.is_valid())

    def test_should_be_invalid_if_levels_is_blank(self):
        data = {
            'levels': '',
            'required': True,
            'has_code': True,
            'length_of_code': 3
        }
        details_form = LocationDetailsForm(data=data)
        self.assertFalse(details_form.is_valid())

    def test_should_not_be_valid_if_has_code_is_true_and_code_length_is_blank(self):
        # BUGFIX: 'levels' was previously blank here, so the form was
        # invalid for the wrong reason and the blank length_of_code was
        # never actually what was being tested.
        data = {
            'levels': 'Region',
            'required': True,
            'has_code': True,
            'length_of_code': ''
        }
        details_form = LocationDetailsForm(data=data)
        self.assertFalse(details_form.is_valid())

    def test_should_not_be_valid_if_has_code_is_true_and_code_length_is_bigger_than_ten(self):
        # BUGFIX: 'levels' was previously blank here as well; give it a
        # valid value so the over-long length_of_code is what fails.
        data = {
            'levels': 'Region',
            'required': True,
            'has_code': True,
            'length_of_code': 200
        }
        details_form = LocationDetailsForm(data=data)
        self.assertFalse(details_form.is_valid())
drvinceknight/Axelrod | docs/strategies.py | 1 | 1047 | """
A script to generate the file needed for the strategy documentation.
Run:
python strategies.py > strategies.rst
"""
import os
import sys
sys.path.insert(0, os.path.abspath("../"))
from axelrod import basic_strategies
from axelrod import ordinary_strategies
from axelrod import cheating_strategies
def print_header(string, character):
    """Print *string* underlined with *character*, then a blank line.

    Uses function-call ``print(...)`` syntax so the script runs under
    both Python 2 and Python 3 (the original ``print`` statements were
    Python 2 only).
    """
    print(string)
    print(character * len(string))
    print("")
if __name__ == "__main__":
print ".. currentmodule:: axelrod.strategies"
print_header("Here is a list of strategies", '=')
print_header("Here are some of the basic strategies", '-')
for strategy in basic_strategies:
print ".. autoclass:: %s" % strategy.__name__
print ""
print_header("A list of all further (honest) strategies", '-')
for strategy in ordinary_strategies:
print ".. autoclass:: %s" % strategy.__name__
print ""
print_header("A list of the cheating strategies", '-')
for strategy in cheating_strategies:
print ".. autoclass:: %s" % strategy.__name__
| mit |
demon012/RedwoodMinecraftPack | Configs/idchecker.py | 1 | 1404 | #!/usr/bin/env python
import re
import os
import pdb  # kept for interactive debugging (set_trace was used during development)

# Scan every file under `configs_dir` for numeric ids and report any id
# that appears in more than one file.
configs_dir = 'config'
id_files = {}
idre = re.compile(r'(?P<id>\d+)', re.I | re.DOTALL)
# item_line_re = re.compile(r'I:(?P<itemname>.+?)=(?P<itemid>\d+)')
# add hack for advancedmachine block for config\AdvancedMachines.cfg
# add hack to search for config\EnderStorage.cfg (block.id=251)

if os.path.exists(configs_dir):
    for dirpath, dirnames, filenames in os.walk(configs_dir):
        for filename in filenames:
            file_path = os.path.join(dirpath, filename)
            with open(file_path, 'r') as conf_file:
                file_contents = conf_file.read()
            if idre.search(file_contents):
                id_files[file_path] = {}
                # BUGFIX: scan the whole file contents for ids.  The old
                # code ran finditer over the *first matched id string*, so
                # only one id per file was ever recorded.
                for match in idre.finditer(file_contents):
                    id_files[file_path][match.group('id')] = match.group('id')

# Check for conflicts: collect, for every possible id, the files using it.
ids = {}
for mid in range(1, 31999):
    mid = str(mid)
    ids[mid] = []
    for id_file in id_files.keys():
        if mid in id_files[id_file].keys():
            ids[mid].append([id_file, id_files[id_file][mid]])
    if len(ids[mid]) > 1:
        # BUGFIX: report the file paths, comma-separated, instead of the
        # repr of each [path, id] pair (and without a trailing comma).
        filelist = ", ".join(entry[0] for entry in ids[mid])
        print("id %s conflict between %s" % (mid, filelist))
| unlicense |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.