repo_name stringlengths 5 100 | path stringlengths 4 375 | copies stringclasses 991 values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15 values |
|---|---|---|---|---|---|
tedi3231/openerp | openerp/addons/stock/wizard/stock_fill_inventory.py | 12 | 6502 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
class stock_fill_inventory(osv.osv_memory):
    """Wizard that pre-fills a stock.inventory record with one line per
    (product, production lot) present in the selected location(s).

    Quantities are reconstructed by summing every *done* stock move into
    and out of each location, rather than read from a stored level.
    """
    _name = "stock.fill.inventory"
    _description = "Import Inventory"

    def _default_location(self, cr, uid, ids, context=None):
        # Default to the stock module's standard 'Stock' location; False when
        # that XML-id is not present in this database.
        try:
            loc_model, location_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'stock', 'stock_location_stock')
        except ValueError, e:
            return False
        return location_id or False

    _columns = {
        # Location whose contents will be imported into the inventory.
        'location_id': fields.many2one('stock.location', 'Location', required=True),
        'recursive': fields.boolean("Include children",help="If checked, products contained in child locations of selected location will be included as well."),
        'set_stock_zero': fields.boolean("Set to zero",help="If checked, all product quantities will be set to zero to help ensure a real physical inventory is done"),
    }
    _defaults = {
        'location_id': _default_location,
    }

    def view_init(self, cr, uid, fields_list, context=None):
        """
        Creates view dynamically and adding fields at runtime.
        @param self: The object pointer.
        @param cr: A database cursor
        @param uid: ID of the user currently logged in
        @param context: A standard dictionary
        @return: New arch of view with new columns.
        """
        if context is None:
            context = {}
        super(stock_fill_inventory, self).view_init(cr, uid, fields_list, context=context)
        # The wizard only makes sense for a single inventory record.
        if len(context.get('active_ids',[])) > 1:
            raise osv.except_osv(_('Error!'), _('You cannot perform this operation on more than one Stock Inventories.'))
        if context.get('active_id', False):
            # NOTE(review): this browse result is never used -- it looks like
            # a leftover existence/access check; confirm before removing.
            stock = self.pool.get('stock.inventory').browse(cr, uid, context.get('active_id', False))
        return True

    def fill_inventory(self, cr, uid, ids, context=None):
        """ To Import stock inventory according to products available in the selected locations.
        @param self: The object pointer.
        @param cr: A database cursor
        @param uid: ID of the user currently logged in
        @param ids: the ID or list of IDs if we want more than one
        @param context: A standard dictionary
        @return: action dict closing the wizard window
        """
        if context is None:
            context = {}
        inventory_line_obj = self.pool.get('stock.inventory.line')
        location_obj = self.pool.get('stock.location')
        move_obj = self.pool.get('stock.move')
        uom_obj = self.pool.get('product.uom')
        # Only the first wizard id is processed.
        if ids and len(ids):
            ids = ids[0]
        else:
            return {'type': 'ir.actions.act_window_close'}
        fill_inventory = self.browse(cr, uid, ids, context=context)
        res = {}
        res_location = {}
        # Optionally include every child location of the selected one.
        if fill_inventory.recursive:
            location_ids = location_obj.search(cr, uid, [('location_id',
                'child_of', [fill_inventory.location_id.id])], order="id",
                context=context)
        else:
            location_ids = [fill_inventory.location_id.id]
        res = {}
        flag = False

        for location in location_ids:
            datas = {}
            res[location] = {}
            # All finished moves touching this location as source or
            # destination.
            move_ids = move_obj.search(cr, uid, ['|',('location_dest_id','=',location),('location_id','=',location),('state','=','done')], context=context)
            for move in move_obj.browse(cr, uid, move_ids, context=context):
                lot_id = move.prodlot_id.id
                prod_id = move.product_id.id
                # Skip moves inside the same location; incoming moves add to
                # the quantity, outgoing ones subtract, all converted to the
                # product's default UoM.
                if move.location_dest_id.id != move.location_id.id:
                    if move.location_dest_id.id == location:
                        qty = uom_obj._compute_qty(cr, uid, move.product_uom.id,move.product_qty, move.product_id.uom_id.id)
                    else:
                        qty = -uom_obj._compute_qty(cr, uid, move.product_uom.id,move.product_qty, move.product_id.uom_id.id)
                    # Aggregate per (product, lot) key.
                    if datas.get((prod_id, lot_id)):
                        qty += datas[(prod_id, lot_id)]['product_qty']
                    datas[(prod_id, lot_id)] = {'product_id': prod_id, 'location_id': location, 'product_qty': qty, 'product_uom': move.product_id.uom_id.id, 'prod_lot_id': lot_id}
            if datas:
                flag = True
                res[location] = datas
        if not flag:
            raise osv.except_osv(_('Warning!'), _('No product in this location. Please select a location in the product form.'))

        # Create one inventory line per aggregated entry, unless an
        # identical line already exists.
        for stock_move in res.values():
            for stock_move_details in stock_move.values():
                # NOTE(review): raises KeyError when the wizard runs without
                # 'active_ids' in context -- view_init only checks
                # 'active_id'; confirm all callers provide active_ids.
                stock_move_details.update({'inventory_id': context['active_ids'][0]})
                domain = []
                for field, value in stock_move_details.items():
                    if field == 'product_qty' and fill_inventory.set_stock_zero:
                        # With 'set to zero', match lines whose qty is either
                        # the computed value or already zero.
                        domain.append((field, 'in', [value,'0']))
                        continue
                    domain.append((field, '=', value))

                if fill_inventory.set_stock_zero:
                    stock_move_details.update({'product_qty': 0})

                line_ids = inventory_line_obj.search(cr, uid, domain, context=context)
                if not line_ids:
                    inventory_line_obj.create(cr, uid, stock_move_details, context=context)

        return {'type': 'ir.actions.act_window_close'}

stock_fill_inventory()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
aio-libs/aiomysql | examples/example_ssl.py | 1 | 1347 | import asyncio
import ssl
import aiomysql
# TLS context used for the MySQL connection below.
# NOTE(review): PROTOCOL_TLSv1 pins the connection to TLS 1.0, which is
# deprecated in modern OpenSSL/Python -- confirm whether PROTOCOL_TLS_CLIENT
# would be acceptable for this example.
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
# Hostname checking is disabled -- presumably the test certificate's CN does
# not match 'localhost'; the CA file below still authenticates the server.
ctx.check_hostname = False
ctx.load_verify_locations(cafile='../tests/ssl_resources/ssl/ca.pem')
async def main():
    """Connect to MySQL over TLS and sanity-check the connection.

    Verifies that the standard databases are visible and that the session
    actually negotiated TLS (via the Ssl_version status variable).

    Fix: the second assertion's failure message was a copy-paste of the
    first (it referred to "mysql" while checking information_schema), and
    both messages called databases "tables".
    """
    async with aiomysql.create_pool(
            host='localhost', port=3306, user='root',
            password='rootpw', ssl=ctx,
            auth_plugin='mysql_clear_password') as pool:
        async with pool.get() as conn:
            async with conn.cursor() as cur:
                # Run simple command
                await cur.execute("SHOW DATABASES;")
                value = await cur.fetchall()
                values = [item[0] for item in value]

                # Spot check the answers, we should at least have mysql
                # and information_schema
                assert 'mysql' in values, \
                    'Could not find the "mysql" database'
                assert 'information_schema' in values, \
                    'Could not find the "information_schema" database'

                # Check TLS variables
                await cur.execute("SHOW STATUS LIKE 'Ssl_version%';")
                value = await cur.fetchone()

                # The context has TLS
                assert value[1].startswith('TLS'), \
                    'Not connected to the database with TLS'
asyncio.get_event_loop().run_until_complete(main())
| mit |
gautam1858/tensorflow | tensorflow/contrib/training/python/training/sampling_ops_threading_test.py | 25 | 2886 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=unused-import
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.training.python.training import sampling_ops
from tensorflow.python.framework import dtypes as dtypes_lib
from tensorflow.python.framework import random_seed
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.training import coordinator
from tensorflow.python.training import queue_runner_impl
class SamplingOpsThreadingTest(test.TestCase):
  """Thread-safety check for sampling_ops._estimate_data_distribution."""

  def testMultiThreadedEstimateDataDistribution(self):
    # Runs 25 queue-runner threads that all fetch the (stateful) probability
    # estimate; passing means no crash/deadlock under concurrent execution.
    num_classes = 10

    # Set up graph.
    random_seed.set_random_seed(1234)
    # A random scalar label in [0, num_classes], cast to int32.
    label = math_ops.cast(
        math_ops.round(random_ops.random_uniform([1]) * num_classes),
        dtypes_lib.int32)

    prob_estimate = sampling_ops._estimate_data_distribution(  # pylint: disable=protected-access
        label, num_classes)
    # Check that prob_estimate is well-behaved in a multithreaded context.
    _, _, [prob_estimate] = sampling_ops._verify_input(  # pylint: disable=protected-access
        [], label, [prob_estimate])

    # Use queues to run multiple threads over the graph, each of which
    # fetches `prob_estimate`.
    queue = data_flow_ops.FIFOQueue(
        capacity=25,
        dtypes=[prob_estimate.dtype],
        shapes=[prob_estimate.get_shape()])
    enqueue_op = queue.enqueue([prob_estimate])
    # 25 enqueue ops -> 25 runner threads started below.
    queue_runner_impl.add_queue_runner(
        queue_runner_impl.QueueRunner(queue, [enqueue_op] * 25))
    out_tensor = queue.dequeue()

    # Run the multi-threaded session.
    with self.cached_session() as sess:
      # Need to initialize variables that keep running total of classes seen.
      variables.global_variables_initializer().run()

      coord = coordinator.Coordinator()
      threads = queue_runner_impl.start_queue_runners(coord=coord)

      # Drain one result per runner thread.
      for _ in range(25):
        sess.run([out_tensor])

      coord.request_stop()
      coord.join(threads)


if __name__ == '__main__':
  test.main()
| apache-2.0 |
fxia22/ASM_xf | PythonD/lib/python2.4/test/test_marshal.py | 13 | 7142 | #!/usr/bin/env python
# -*- coding: iso-8859-1 -*-
from test import test_support
import marshal
import sys
import unittest
import os
class IntTestCase(unittest.TestCase):
    # Round-trips ints through marshal.dumps/loads and through a real file.
    # NOTE(review): the file() handles are never closed explicitly; keeping
    # dump/load as single expressions relies on CPython's refcounting to
    # flush and close them immediately.

    def test_ints(self):
        # Test the full range of Python ints.
        n = sys.maxint
        while n:
            for expected in (-n, n):
                s = marshal.dumps(expected)
                got = marshal.loads(s)
                self.assertEqual(expected, got)
                marshal.dump(expected, file(test_support.TESTFN, "wb"))
                got = marshal.load(file(test_support.TESTFN, "rb"))
                self.assertEqual(expected, got)
            n = n >> 1
        os.unlink(test_support.TESTFN)

    def test_int64(self):
        # Simulate int marshaling on a 64-bit box. This is most interesting if
        # we're running the test on a 32-bit box, of course.

        def to_little_endian_string(value, nbytes):
            # Serialize |value| as nbytes little-endian bytes (as a str).
            bytes = []
            for i in range(nbytes):
                bytes.append(chr(value & 0xff))
                value >>= 8
            return ''.join(bytes)

        maxint64 = (1L << 63) - 1
        minint64 = -maxint64-1

        for base in maxint64, minint64, -maxint64, -(minint64 >> 1):
            while base:
                # 'I' is presumably marshal's 64-bit-int type code -- the
                # payload is the 8-byte little-endian value built above.
                s = 'I' + to_little_endian_string(base, 8)
                got = marshal.loads(s)
                self.assertEqual(base, got)

                if base == -1: # a fixed-point for shifting right 1
                    base = 0
                else:
                    base >>= 1

    def test_bool(self):
        # Booleans must round-trip with their exact type, not as ints.
        for b in (True, False):
            new = marshal.loads(marshal.dumps(b))
            self.assertEqual(b, new)
            self.assertEqual(type(b), type(new))
            marshal.dump(b, file(test_support.TESTFN, "wb"))
            new = marshal.load(file(test_support.TESTFN, "rb"))
            self.assertEqual(b, new)
            self.assertEqual(type(b), type(new))
class FloatTestCase(unittest.TestCase):
    # Round-trips floats of widely varying magnitude (huge down to tiny,
    # both signs, and exactly 0.0) through marshal and through a real file.

    def test_floats(self):
        # Test a few floats
        small = 1e-25
        # Start near the top of the double range and divide down to |small|.
        n = sys.maxint * 3.7e250
        while n > small:
            for expected in (-n, n):
                f = float(expected)
                s = marshal.dumps(f)
                got = marshal.loads(s)
                self.assertEqual(f, got)
                marshal.dump(f, file(test_support.TESTFN, "wb"))
                got = marshal.load(file(test_support.TESTFN, "rb"))
                self.assertEqual(f, got)
            n /= 123.4567

        f = 0.0
        s = marshal.dumps(f)
        got = marshal.loads(s)
        self.assertEqual(f, got)

        # Now from a tiny denormal-ish value multiply back up to |small|.
        n = sys.maxint * 3.7e-250
        while n < small:
            for expected in (-n, n):
                f = float(expected)
                s = marshal.dumps(f)
                got = marshal.loads(s)
                self.assertEqual(f, got)
                marshal.dump(f, file(test_support.TESTFN, "wb"))
                got = marshal.load(file(test_support.TESTFN, "rb"))
                self.assertEqual(f, got)
            n *= 123.4567
        os.unlink(test_support.TESTFN)
class StringTestCase(unittest.TestCase):
    """Round-trips unicode, str and buffer objects through marshal.

    Bug fix: the file-based round-trips previously discarded the result of
    marshal.load(), so the assertions that followed re-checked the
    in-memory value and the on-disk path was never actually verified.  The
    load results are now assigned to 'new' before being compared.
    """

    def test_unicode(self):
        for s in [u"", u"Andrè Previn", u"abc", u" "*10000]:
            new = marshal.loads(marshal.dumps(s))
            self.assertEqual(s, new)
            self.assertEqual(type(s), type(new))
            marshal.dump(s, file(test_support.TESTFN, "wb"))
            new = marshal.load(file(test_support.TESTFN, "rb"))
            self.assertEqual(s, new)
            self.assertEqual(type(s), type(new))
        os.unlink(test_support.TESTFN)

    def test_string(self):
        for s in ["", "Andrè Previn", "abc", " "*10000]:
            new = marshal.loads(marshal.dumps(s))
            self.assertEqual(s, new)
            self.assertEqual(type(s), type(new))
            marshal.dump(s, file(test_support.TESTFN, "wb"))
            new = marshal.load(file(test_support.TESTFN, "rb"))
            self.assertEqual(s, new)
            self.assertEqual(type(s), type(new))
        os.unlink(test_support.TESTFN)

    def test_buffer(self):
        # buffers marshal as plain strings, so only equality is checked,
        # not type preservation.
        for s in ["", "Andrè Previn", "abc", " "*10000]:
            b = buffer(s)
            new = marshal.loads(marshal.dumps(b))
            self.assertEqual(s, new)
            marshal.dump(b, file(test_support.TESTFN, "wb"))
            new = marshal.load(file(test_support.TESTFN, "rb"))
            self.assertEqual(s, new)
        os.unlink(test_support.TESTFN)
class ExceptionTestCase(unittest.TestCase):
    def test_exceptions(self):
        # StopIteration has a dedicated marshal type code and must
        # round-trip unchanged.
        dumped = marshal.dumps(StopIteration)
        new = marshal.loads(dumped)
        self.assertEqual(StopIteration, new)
class CodeTestCase(unittest.TestCase):
    # Code objects are marshalable too; round-trip a real function's
    # func_code and compare for equality.

    def test_code(self):
        co = ExceptionTestCase.test_exceptions.func_code
        new = marshal.loads(marshal.dumps(co))
        self.assertEqual(co, new)
class ContainerTestCase(unittest.TestCase):
d = {'astring': 'foo@bar.baz.spam',
'afloat': 7283.43,
'anint': 2**20,
'ashortlong': 2L,
'alist': ['.zyx.41'],
'atuple': ('.zyx.41',)*10,
'aboolean': False,
'aunicode': u"Andrè Previn"
}
def test_dict(self):
new = marshal.loads(marshal.dumps(self.d))
self.assertEqual(self.d, new)
marshal.dump(self.d, file(test_support.TESTFN, "wb"))
marshal.load(file(test_support.TESTFN, "rb"))
self.assertEqual(self.d, new)
os.unlink(test_support.TESTFN)
def test_list(self):
lst = self.d.items()
new = marshal.loads(marshal.dumps(lst))
self.assertEqual(lst, new)
marshal.dump(lst, file(test_support.TESTFN, "wb"))
marshal.load(file(test_support.TESTFN, "rb"))
self.assertEqual(lst, new)
os.unlink(test_support.TESTFN)
def test_tuple(self):
t = tuple(self.d.keys())
new = marshal.loads(marshal.dumps(t))
self.assertEqual(t, new)
marshal.dump(t, file(test_support.TESTFN, "wb"))
marshal.load(file(test_support.TESTFN, "rb"))
self.assertEqual(t, new)
os.unlink(test_support.TESTFN)
class BugsTestCase(unittest.TestCase):
    # Regression tests for specific bug-tracker reports.

    def test_bug_5888452(self):
        # Simple-minded check for SF 588452: Debug build crashes
        marshal.dumps([128] * 1000)

    def test_patch_873224(self):
        # Truncated or garbage marshal data must raise, not crash.
        self.assertRaises(Exception, marshal.loads, '0')
        self.assertRaises(Exception, marshal.loads, 'f')
        self.assertRaises(Exception, marshal.loads, marshal.dumps(5L)[:-1])

    def test_version_argument(self):
        # Python 2.4.0 crashes for any call to marshal.dumps(x, y)
        self.assertEquals(marshal.loads(marshal.dumps(5, 0)), 5)
        self.assertEquals(marshal.loads(marshal.dumps(5, 1)), 5)
def test_main():
    """Run every test case in this module via test_support."""
    test_classes = (
        IntTestCase,
        FloatTestCase,
        StringTestCase,
        CodeTestCase,
        ContainerTestCase,
        ExceptionTestCase,
        BugsTestCase,
    )
    test_support.run_unittest(*test_classes)

if __name__ == "__main__":
    test_main()
| gpl-2.0 |
martinhoaragao/hour-of-code | node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py | 1407 | 47697 | # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
This module helps emulate Visual Studio 2008 behavior on top of other
build systems, primarily ninja.
"""
import os
import re
import subprocess
import sys
from gyp.common import OrderedSet
import gyp.MSVSUtil
import gyp.MSVSVersion
windows_quoter_regex = re.compile(r'(\\*)"')

def QuoteForRspFile(arg):
  """Quote |arg| so that, after cmd.exe and CommandLineToArgvW processing,
  it is seen as a single argument by typical Windows programs.

  CommandLineToArgvW requires 2n+1 backslashes before a literal quote to
  yield n backslashes plus the quote, so each run of backslashes preceding
  a '"' is doubled and an escaped quote appended.  Percent signs are
  doubled so cmd.exe's batch-positional expansion passes them through
  literally, and the whole result is wrapped in quotes so embedded
  whitespace does not split the argument.  (These strings go into rsp
  files, so no shell '^' escaping is needed.)
  """
  quoted = windows_quoter_regex.sub(
      lambda match: 2 * match.group(1) + '\\"', arg)
  quoted = quoted.replace('%', '%%')
  return '"%s"' % quoted
def EncodeRspFileList(args):
  """Encode |args| as a single rsp-file command line using QuoteForRspFile.

  The first element is the command itself and is left unquoted (quoting
  would break built-ins like 'echo'); only the path portion of a leading
  'call prog' pair is normpath'd, since normalizing the whole string would
  mangle it.
  """
  if not args:
    return ''
  first = args[0]
  if first.startswith('call '):
    call_part, prog = first.split(' ', 1)
    program = call_part + ' ' + os.path.normpath(prog)
  else:
    program = os.path.normpath(first)
  quoted_rest = (QuoteForRspFile(item) for item in args[1:])
  return program + ' ' + ' '.join(quoted_rest)
def _GenericRetrieve(root, default, path):
"""Given a list of dictionary keys |path| and a tree of dicts |root|, find
value at path, or return |default| if any of the path doesn't exist."""
if not root:
return default
if not path:
return root
return _GenericRetrieve(root.get(path[0]), default, path[1:])
def _AddPrefix(element, prefix):
"""Add |prefix| to |element| or each subelement if element is iterable."""
if element is None:
return element
# Note, not Iterable because we don't want to handle strings like that.
if isinstance(element, list) or isinstance(element, tuple):
return [prefix + e for e in element]
else:
return prefix + element
def _DoRemapping(element, map):
"""If |element| then remap it through |map|. If |element| is iterable then
each item will be remapped. Any elements not found will be removed."""
if map is not None and element is not None:
if not callable(map):
map = map.get # Assume it's a dict, otherwise a callable to do the remap.
if isinstance(element, list) or isinstance(element, tuple):
element = filter(None, [map(elem) for elem in element])
else:
element = map(element)
return element
def _AppendOrReturn(append, element):
"""If |append| is None, simply return |element|. If |append| is not None,
then add |element| to it, adding each item in |element| if it's a list or
tuple."""
if append is not None and element is not None:
if isinstance(element, list) or isinstance(element, tuple):
append.extend(element)
else:
append.append(element)
else:
return element
def _FindDirectXInstallation():
  """Try to find an installation location for the DirectX SDK. Check for the
  standard environment variable, and if that doesn't exist, try to find
  via the registry. May return None if not found in either location."""
  # Return previously calculated value, if there is one
  # (memoized as an attribute on the function object itself).
  if hasattr(_FindDirectXInstallation, 'dxsdk_dir'):
    return _FindDirectXInstallation.dxsdk_dir

  dxsdk_dir = os.environ.get('DXSDK_DIR')
  if not dxsdk_dir:
    # Setup params to pass to and attempt to launch reg.exe.
    cmd = ['reg.exe', 'query', r'HKLM\Software\Microsoft\DirectX', '/s']
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    for line in p.communicate()[0].splitlines():
      if 'InstallPath' in line:
        # NOTE(review): takes the fourth space-separated token as the
        # install path, relying on reg.exe's column layout; a path that
        # itself contains spaces would be truncated -- confirm against
        # real reg.exe output.
        dxsdk_dir = line.split(' ')[3] + "\\"

  # Cache return value
  _FindDirectXInstallation.dxsdk_dir = dxsdk_dir
  return dxsdk_dir
def GetGlobalVSMacroEnv(vs_version):
  """Return a dict mapping internal VS macro names to their gyp equivalents
  for everything that does not depend on a particular target."""
  env = {}
  # '$(VSInstallDir)' and '$(VCInstallDir)' are available when and only when
  # Visual Studio is actually installed.
  install_path = vs_version.Path()
  if install_path:
    env['$(VSInstallDir)'] = install_path
    env['$(VCInstallDir)'] = os.path.join(install_path, 'VC') + '\\'
  # Chromium uses DXSDK_DIR in include/lib paths, but it may or may not be
  # set (e.g. when the SDK is sync'd via src-internal rather than installed);
  # strip the macro rather than leaving it unexpanded.
  dxsdk_dir = _FindDirectXInstallation()
  env['$(DXSDK_DIR)'] = dxsdk_dir or ''
  # Windows DDK location, taken from the WDK_DIR environment variable when
  # present (may be unset).
  env['$(WDK_DIR)'] = os.environ.get('WDK_DIR', '')
  return env
def ExtractSharedMSVSSystemIncludes(configs, generator_flags):
  """Finds msvs_system_include_dirs that are common to all targets, removes
  them from all targets, and returns an OrderedSet containing them.

  Note: mutates |configs| in place (rewrites each config's
  'msvs_system_include_dirs'); returns None when nothing is shared or a
  shared path still contains an unexpanded target-specific macro.
  """
  # Start from the first config's dirs and intersect with all the others.
  all_system_includes = OrderedSet(
      configs[0].get('msvs_system_include_dirs', []))
  for config in configs[1:]:
    system_includes = config.get('msvs_system_include_dirs', [])
    all_system_includes = all_system_includes & OrderedSet(system_includes)
  if not all_system_includes:
    return None
  # Expand macros in all_system_includes.
  env = GetGlobalVSMacroEnv(GetVSVersion(generator_flags))
  expanded_system_includes = OrderedSet([ExpandMacros(include, env)
                                         for include in all_system_includes])
  if any(['$' in include for include in expanded_system_includes]):
    # Some path relies on target-specific variables, bail.
    return None

  # Remove system includes shared by all targets from the targets.
  for config in configs:
    includes = config.get('msvs_system_include_dirs', [])
    if includes: # Don't insert a msvs_system_include_dirs key if not needed.
      # This must check the unexpanded includes list:
      new_includes = [i for i in includes if i not in all_system_includes]
      config['msvs_system_include_dirs'] = new_includes
  return expanded_system_includes
class MsvsSettings(object):
"""A class that understands the gyp 'msvs_...' values (especially the
msvs_settings field). They largely correpond to the VS2008 IDE DOM. This
class helps map those settings to command line options."""
def __init__(self, spec, generator_flags):
self.spec = spec
self.vs_version = GetVSVersion(generator_flags)
supported_fields = [
('msvs_configuration_attributes', dict),
('msvs_settings', dict),
('msvs_system_include_dirs', list),
('msvs_disabled_warnings', list),
('msvs_precompiled_header', str),
('msvs_precompiled_source', str),
('msvs_configuration_platform', str),
('msvs_target_platform', str),
]
configs = spec['configurations']
for field, default in supported_fields:
setattr(self, field, {})
for configname, config in configs.iteritems():
getattr(self, field)[configname] = config.get(field, default())
self.msvs_cygwin_dirs = spec.get('msvs_cygwin_dirs', ['.'])
unsupported_fields = [
'msvs_prebuild',
'msvs_postbuild',
]
unsupported = []
for field in unsupported_fields:
for config in configs.values():
if field in config:
unsupported += ["%s not supported (target %s)." %
(field, spec['target_name'])]
if unsupported:
raise Exception('\n'.join(unsupported))
def GetExtension(self):
"""Returns the extension for the target, with no leading dot.
Uses 'product_extension' if specified, otherwise uses MSVS defaults based on
the target type.
"""
ext = self.spec.get('product_extension', None)
if ext:
return ext
return gyp.MSVSUtil.TARGET_TYPE_EXT.get(self.spec['type'], '')
  def GetVSMacroEnv(self, base_to_build=None, config=None):
    """Get a dict of variables mapping internal VS macro names to their gyp
    equivalents.

    |base_to_build| is the path prefix used for $(OutDir)/$(TargetDir);
    when absent those macros expand to ''.  Keys that end in '\\' also
    consume the trailing backslash of the macro in the source string.
    The '${root}'/'${source}'/'$!INTERMEDIATE_DIR' values are presumably
    placeholders understood by the ninja generator -- confirmed elsewhere
    in gyp, not in this file.
    """
    target_platform = 'Win32' if self.GetArch(config) == 'x86' else 'x64'
    target_name = self.spec.get('product_prefix', '') + \
        self.spec.get('product_name', self.spec['target_name'])
    target_dir = base_to_build + '\\' if base_to_build else ''
    target_ext = '.' + self.GetExtension()
    target_file_name = target_name + target_ext
    replacements = {
        '$(InputName)': '${root}',
        '$(InputPath)': '${source}',
        '$(IntDir)': '$!INTERMEDIATE_DIR',
        '$(OutDir)\\': target_dir,
        '$(PlatformName)': target_platform,
        '$(ProjectDir)\\': '',
        '$(ProjectName)': self.spec['target_name'],
        '$(TargetDir)\\': target_dir,
        '$(TargetExt)': target_ext,
        '$(TargetFileName)': target_file_name,
        '$(TargetName)': target_name,
        '$(TargetPath)': os.path.join(target_dir, target_file_name),
    }
    # Target-independent macros ($(VSInstallDir), $(DXSDK_DIR), ...).
    replacements.update(GetGlobalVSMacroEnv(self.vs_version))
    return replacements
def ConvertVSMacros(self, s, base_to_build=None, config=None):
"""Convert from VS macro names to something equivalent."""
env = self.GetVSMacroEnv(base_to_build, config=config)
return ExpandMacros(s, env)
def AdjustLibraries(self, libraries):
"""Strip -l from library if it's specified with that."""
libs = [lib[2:] if lib.startswith('-l') else lib for lib in libraries]
return [lib + '.lib' if not lib.endswith('.lib') else lib for lib in libs]
def _GetAndMunge(self, field, path, default, prefix, append, map):
"""Retrieve a value from |field| at |path| or return |default|. If
|append| is specified, and the item is found, it will be appended to that
object instead of returned. If |map| is specified, results will be
remapped through |map| before being returned or appended."""
result = _GenericRetrieve(field, default, path)
result = _DoRemapping(result, map)
result = _AddPrefix(result, prefix)
return _AppendOrReturn(append, result)
  class _GetWrapper(object):
    # Small callable that curries _GetAndMunge with a fixed settings dict
    # and tool name, so per-tool values can be read as
    # wrapper('SettingName', map=..., prefix=...) -- see its use for
    # 'VCCLCompilerTool' in GetCflags.
    def __init__(self, parent, field, base_path, append=None):
      self.parent = parent          # owning MsvsSettings instance
      self.field = field            # e.g. msvs_settings for one config
      self.base_path = [base_path]  # e.g. ['VCCLCompilerTool']
      self.append = append          # optional accumulator list
    def __call__(self, name, map=None, prefix='', default=None):
      return self.parent._GetAndMunge(self.field, self.base_path + [name],
          default=default, prefix=prefix, append=self.append, map=map)
def GetArch(self, config):
"""Get architecture based on msvs_configuration_platform and
msvs_target_platform. Returns either 'x86' or 'x64'."""
configuration_platform = self.msvs_configuration_platform.get(config, '')
platform = self.msvs_target_platform.get(config, '')
if not platform: # If no specific override, use the configuration's.
platform = configuration_platform
# Map from platform to architecture.
return {'Win32': 'x86', 'x64': 'x64'}.get(platform, 'x86')
def _TargetConfig(self, config):
"""Returns the target-specific configuration."""
# There's two levels of architecture/platform specification in VS. The
# first level is globally for the configuration (this is what we consider
# "the" config at the gyp level, which will be something like 'Debug' or
# 'Release_x64'), and a second target-specific configuration, which is an
# override for the global one. |config| is remapped here to take into
# account the local target-specific overrides to the global configuration.
arch = self.GetArch(config)
if arch == 'x64' and not config.endswith('_x64'):
config += '_x64'
if arch == 'x86' and config.endswith('_x64'):
config = config.rsplit('_', 1)[0]
return config
def _Setting(self, path, config,
default=None, prefix='', append=None, map=None):
"""_GetAndMunge for msvs_settings."""
return self._GetAndMunge(
self.msvs_settings[config], path, default, prefix, append, map)
def _ConfigAttrib(self, path, config,
default=None, prefix='', append=None, map=None):
"""_GetAndMunge for msvs_configuration_attributes."""
return self._GetAndMunge(
self.msvs_configuration_attributes[config],
path, default, prefix, append, map)
def AdjustIncludeDirs(self, include_dirs, config):
"""Updates include_dirs to expand VS specific paths, and adds the system
include dirs used for platform SDK and similar."""
config = self._TargetConfig(config)
includes = include_dirs + self.msvs_system_include_dirs[config]
includes.extend(self._Setting(
('VCCLCompilerTool', 'AdditionalIncludeDirectories'), config, default=[]))
return [self.ConvertVSMacros(p, config=config) for p in includes]
def AdjustMidlIncludeDirs(self, midl_include_dirs, config):
"""Updates midl_include_dirs to expand VS specific paths, and adds the
system include dirs used for platform SDK and similar."""
config = self._TargetConfig(config)
includes = midl_include_dirs + self.msvs_system_include_dirs[config]
includes.extend(self._Setting(
('VCMIDLTool', 'AdditionalIncludeDirectories'), config, default=[]))
return [self.ConvertVSMacros(p, config=config) for p in includes]
def GetComputedDefines(self, config):
"""Returns the set of defines that are injected to the defines list based
on other VS settings."""
config = self._TargetConfig(config)
defines = []
if self._ConfigAttrib(['CharacterSet'], config) == '1':
defines.extend(('_UNICODE', 'UNICODE'))
if self._ConfigAttrib(['CharacterSet'], config) == '2':
defines.append('_MBCS')
defines.extend(self._Setting(
('VCCLCompilerTool', 'PreprocessorDefinitions'), config, default=[]))
return defines
def GetCompilerPdbName(self, config, expand_special):
"""Get the pdb file name that should be used for compiler invocations, or
None if there's no explicit name specified."""
config = self._TargetConfig(config)
pdbname = self._Setting(
('VCCLCompilerTool', 'ProgramDataBaseFileName'), config)
if pdbname:
pdbname = expand_special(self.ConvertVSMacros(pdbname))
return pdbname
def GetMapFileName(self, config, expand_special):
"""Gets the explicitly overriden map file name for a target or returns None
if it's not set."""
config = self._TargetConfig(config)
map_file = self._Setting(('VCLinkerTool', 'MapFileName'), config)
if map_file:
map_file = expand_special(self.ConvertVSMacros(map_file, config=config))
return map_file
def GetOutputName(self, config, expand_special):
"""Gets the explicitly overridden output name for a target or returns None
if it's not overridden."""
config = self._TargetConfig(config)
type = self.spec['type']
root = 'VCLibrarianTool' if type == 'static_library' else 'VCLinkerTool'
# TODO(scottmg): Handle OutputDirectory without OutputFile.
output_file = self._Setting((root, 'OutputFile'), config)
if output_file:
output_file = expand_special(self.ConvertVSMacros(
output_file, config=config))
return output_file
def GetPDBName(self, config, expand_special, default):
"""Gets the explicitly overridden pdb name for a target or returns
default if it's not overridden, or if no pdb will be generated."""
config = self._TargetConfig(config)
output_file = self._Setting(('VCLinkerTool', 'ProgramDatabaseFile'), config)
generate_debug_info = self._Setting(
('VCLinkerTool', 'GenerateDebugInformation'), config)
if generate_debug_info == 'true':
if output_file:
return expand_special(self.ConvertVSMacros(output_file, config=config))
else:
return default
else:
return None
def GetNoImportLibrary(self, config):
"""If NoImportLibrary: true, ninja will not expect the output to include
an import library."""
config = self._TargetConfig(config)
noimplib = self._Setting(('NoImportLibrary',), config)
return noimplib == 'true'
def GetAsmflags(self, config):
"""Returns the flags that need to be added to ml invocations."""
config = self._TargetConfig(config)
asmflags = []
safeseh = self._Setting(('MASM', 'UseSafeExceptionHandlers'), config)
if safeseh == 'true':
asmflags.append('/safeseh')
return asmflags
def GetCflags(self, config):
  """Returns the flags that need to be added to .c and .cc compilations.

  Translates the VCCLCompilerTool settings from the .vcxproj model into the
  equivalent cl.exe command-line switches.  The order of the cl(...) calls
  determines flag order on the command line, so it must be preserved.
  """
  config = self._TargetConfig(config)
  cflags = []
  # Warnings suppressed via the msvs_disabled_warnings list.
  cflags.extend(['/wd' + w for w in self.msvs_disabled_warnings[config]])
  cl = self._GetWrapper(self, self.msvs_settings[config],
                        'VCCLCompilerTool', append=cflags)
  cl('Optimization',
     map={'0': 'd', '1': '1', '2': '2', '3': 'x'}, prefix='/O', default='2')
  cl('InlineFunctionExpansion', prefix='/Ob')
  cl('DisableSpecificWarnings', prefix='/wd')
  cl('StringPooling', map={'true': '/GF'})
  cl('EnableFiberSafeOptimizations', map={'true': '/GT'})
  cl('OmitFramePointers', map={'false': '-', 'true': ''}, prefix='/Oy')
  cl('EnableIntrinsicFunctions', map={'false': '-', 'true': ''}, prefix='/Oi')
  cl('FavorSizeOrSpeed', map={'1': 't', '2': 's'}, prefix='/O')
  cl('FloatingPointModel',
     map={'0': 'precise', '1': 'strict', '2': 'fast'}, prefix='/fp:',
     default='0')
  cl('CompileAsManaged', map={'false': '', 'true': '/clr'})
  cl('WholeProgramOptimization', map={'true': '/GL'})
  cl('WarningLevel', prefix='/W')
  cl('WarnAsError', map={'true': '/WX'})
  cl('CallingConvention',
     map={'0': 'd', '1': 'r', '2': 'z', '3': 'v'}, prefix='/G')
  cl('DebugInformationFormat',
     map={'1': '7', '3': 'i', '4': 'I'}, prefix='/Z')
  cl('RuntimeTypeInfo', map={'true': '/GR', 'false': '/GR-'})
  cl('EnableFunctionLevelLinking', map={'true': '/Gy', 'false': '/Gy-'})
  cl('MinimalRebuild', map={'true': '/Gm'})
  cl('BufferSecurityCheck', map={'true': '/GS', 'false': '/GS-'})
  cl('BasicRuntimeChecks', map={'1': 's', '2': 'u', '3': '1'}, prefix='/RTC')
  cl('RuntimeLibrary',
     map={'0': 'T', '1': 'Td', '2': 'D', '3': 'Dd'}, prefix='/M')
  cl('ExceptionHandling', map={'1': 'sc','2': 'a'}, prefix='/EH')
  cl('DefaultCharIsUnsigned', map={'true': '/J'})
  cl('TreatWChar_tAsBuiltInType',
     map={'false': '-', 'true': ''}, prefix='/Zc:wchar_t')
  cl('EnablePREfast', map={'true': '/analyze'})
  cl('AdditionalOptions', prefix='')
  cl('EnableEnhancedInstructionSet',
     map={'1': 'SSE', '2': 'SSE2', '3': 'AVX', '4': 'IA32', '5': 'AVX2'},
     prefix='/arch:')
  cflags.extend(['/FI' + f for f in self._Setting(
      ('VCCLCompilerTool', 'ForcedIncludeFiles'), config, default=[])])
  if self.vs_version.short_name in ('2013', '2013e', '2015'):
    # New flag required in 2013 to maintain previous PDB behavior.
    cflags.append('/FS')
  # ninja handles parallelism by itself, don't have the compiler do it too.
  # NOTE: filter() returns a list on Python 2 (this codebase's target); on
  # Python 3 it would return an iterator.
  cflags = filter(lambda x: not x.startswith('/MP'), cflags)
  return cflags
def _GetPchFlags(self, config, extension):
  """Get the flags to be added to the cflags for precompiled header support.
  """
  config = self._TargetConfig(config)
  # The PCH is only built once by a particular source file. Usage of PCH must
  # only be for the same language (i.e. C vs. C++), so only include the pch
  # flags when the language matches.
  header = self.msvs_precompiled_header[config]
  if not header:
    return []
  source_ext = os.path.splitext(self.msvs_precompiled_source[config])[1]
  if not _LanguageMatchesForPch(source_ext, extension):
    return []
  pch = os.path.split(header)[1]
  return ['/Yu' + pch, '/FI' + pch, '/Fp${pchprefix}.' + pch + '.pch']
def GetCflagsC(self, config):
  """Returns the flags that need to be added to .c compilations."""
  cfg = self._TargetConfig(config)
  return self._GetPchFlags(cfg, '.c')
def GetCflagsCC(self, config):
  """Returns the flags that need to be added to .cc compilations."""
  cfg = self._TargetConfig(config)
  # /TP forces compilation as C++ regardless of extension.
  return ['/TP'] + self._GetPchFlags(cfg, '.cc')
def _GetAdditionalLibraryDirectories(self, root, config, gyp_to_build_path):
  """Get and normalize the list of paths in AdditionalLibraryDirectories
  setting, returning them as /LIBPATH: flags."""
  cfg = self._TargetConfig(config)
  raw_paths = self._Setting((root, 'AdditionalLibraryDirectories'),
                            cfg, default=[])
  flags = []
  for path in raw_paths:
    expanded = gyp_to_build_path(self.ConvertVSMacros(path, config=cfg))
    flags.append('/LIBPATH:"' + os.path.normpath(expanded) + '"')
  return flags
def GetLibFlags(self, config, gyp_to_build_path):
  """Returns the flags that need to be added to lib commands."""
  cfg = self._TargetConfig(config)
  libflags = []
  librarian = self._GetWrapper(self, self.msvs_settings[cfg],
                               'VCLibrarianTool', append=libflags)
  # Library search paths come first, then the mapped librarian settings.
  libflags.extend(self._GetAdditionalLibraryDirectories(
      'VCLibrarianTool', cfg, gyp_to_build_path))
  librarian('LinkTimeCodeGeneration', map={'true': '/LTCG'})
  librarian('TargetMachine', map={'1': 'X86', '17': 'X64', '3': 'ARM'},
            prefix='/MACHINE:')
  librarian('AdditionalOptions')
  return libflags
def GetDefFile(self, gyp_to_build_path):
  """Returns the .def file from sources, if any. Otherwise returns None.

  Raises when more than one .def file is listed, since only one module
  definition file can be passed to the linker.
  """
  spec = self.spec
  if spec['type'] not in ('shared_library', 'loadable_module', 'executable'):
    return None
  def_files = [s for s in spec.get('sources', []) if s.endswith('.def')]
  if len(def_files) > 1:
    raise Exception("Multiple .def files")
  if def_files:
    return gyp_to_build_path(def_files[0])
  return None
def _GetDefFileAsLdflags(self, ldflags, gyp_to_build_path):
  """.def files get implicitly converted to a ModuleDefinitionFile for the
  linker in the VS generator. Emulate that behaviour here.

  Mutates `ldflags` in place (appends a /DEF: flag); returns None.
  """
  def_file = self.GetDefFile(gyp_to_build_path)
  if def_file:
    ldflags.append('/DEF:"%s"' % def_file)
def GetPGDName(self, config, expand_special):
  """Gets the explicitly overridden pgd (profile guided database) name for a
  target or returns None if it's not overridden."""
  cfg = self._TargetConfig(config)
  pgd = self._Setting(('VCLinkerTool', 'ProfileGuidedDatabase'), cfg)
  if not pgd:
    return pgd
  return expand_special(self.ConvertVSMacros(pgd, config=cfg))
def GetLdflags(self, config, gyp_to_build_path, expand_special,
               manifest_base_name, output_name, is_executable, build_dir):
  """Returns the flags that need to be added to link commands, and the
  manifest files.

  Returns a 3-tuple (ldflags, intermediate_manifest, manifest_files).  Flag
  order mirrors what the VS linker would see, so the ld(...) call order must
  not be changed.
  """
  config = self._TargetConfig(config)
  ldflags = []
  ld = self._GetWrapper(self, self.msvs_settings[config],
                        'VCLinkerTool', append=ldflags)
  # A .def file in sources becomes a /DEF: flag (mutates ldflags in place).
  self._GetDefFileAsLdflags(ldflags, gyp_to_build_path)
  ld('GenerateDebugInformation', map={'true': '/DEBUG'})
  ld('TargetMachine', map={'1': 'X86', '17': 'X64', '3': 'ARM'},
     prefix='/MACHINE:')
  ldflags.extend(self._GetAdditionalLibraryDirectories(
      'VCLinkerTool', config, gyp_to_build_path))
  ld('DelayLoadDLLs', prefix='/DELAYLOAD:')
  ld('TreatLinkerWarningAsErrors', prefix='/WX',
     map={'true': '', 'false': ':NO'})
  out = self.GetOutputName(config, expand_special)
  if out:
    ldflags.append('/OUT:' + out)
  pdb = self.GetPDBName(config, expand_special, output_name + '.pdb')
  if pdb:
    ldflags.append('/PDB:' + pdb)
  pgd = self.GetPGDName(config, expand_special)
  if pgd:
    ldflags.append('/PGD:' + pgd)
  map_file = self.GetMapFileName(config, expand_special)
  ld('GenerateMapFile', map={'true': '/MAP:' + map_file if map_file
                             else '/MAP'})
  ld('MapExports', map={'true': '/MAPINFO:EXPORTS'})
  ld('AdditionalOptions', prefix='')
  # MinimumRequiredVersion, when set, is appended to the /SUBSYSTEM value as
  # "CONSOLE,<version>" / "WINDOWS,<version>".
  minimum_required_version = self._Setting(
      ('VCLinkerTool', 'MinimumRequiredVersion'), config, default='')
  if minimum_required_version:
    minimum_required_version = ',' + minimum_required_version
  ld('SubSystem',
     map={'1': 'CONSOLE%s' % minimum_required_version,
          '2': 'WINDOWS%s' % minimum_required_version},
     prefix='/SUBSYSTEM:')
  stack_reserve_size = self._Setting(
      ('VCLinkerTool', 'StackReserveSize'), config, default='')
  if stack_reserve_size:
    stack_commit_size = self._Setting(
        ('VCLinkerTool', 'StackCommitSize'), config, default='')
    if stack_commit_size:
      stack_commit_size = ',' + stack_commit_size
    ldflags.append('/STACK:%s%s' % (stack_reserve_size, stack_commit_size))
  ld('TerminalServerAware', map={'1': ':NO', '2': ''}, prefix='/TSAWARE')
  ld('LinkIncremental', map={'1': ':NO', '2': ''}, prefix='/INCREMENTAL')
  ld('BaseAddress', prefix='/BASE:')
  ld('FixedBaseAddress', map={'1': ':NO', '2': ''}, prefix='/FIXED')
  ld('RandomizedBaseAddress',
     map={'1': ':NO', '2': ''}, prefix='/DYNAMICBASE')
  ld('DataExecutionPrevention',
     map={'1': ':NO', '2': ''}, prefix='/NXCOMPAT')
  ld('OptimizeReferences', map={'1': 'NOREF', '2': 'REF'}, prefix='/OPT:')
  ld('ForceSymbolReferences', prefix='/INCLUDE:')
  ld('EnableCOMDATFolding', map={'1': 'NOICF', '2': 'ICF'}, prefix='/OPT:')
  ld('LinkTimeCodeGeneration',
     map={'1': '', '2': ':PGINSTRUMENT', '3': ':PGOPTIMIZE',
          '4': ':PGUPDATE'},
     prefix='/LTCG')
  ld('IgnoreDefaultLibraryNames', prefix='/NODEFAULTLIB:')
  ld('ResourceOnlyDLL', map={'true': '/NOENTRY'})
  ld('EntryPointSymbol', prefix='/ENTRY:')
  ld('Profile', map={'true': '/PROFILE'})
  ld('LargeAddressAware',
     map={'1': ':NO', '2': ''}, prefix='/LARGEADDRESSAWARE')
  # TODO(scottmg): This should sort of be somewhere else (not really a flag).
  ld('AdditionalDependencies', prefix='')
  # /SAFESEH only applies to x86 images; default it on there.
  if self.GetArch(config) == 'x86':
    safeseh_default = 'true'
  else:
    safeseh_default = None
  ld('ImageHasSafeExceptionHandlers',
     map={'false': ':NO', 'true': ''}, prefix='/SAFESEH',
     default=safeseh_default)
  # If the base address is not specifically controlled, DYNAMICBASE should
  # be on by default.
  # NOTE: filter() returns a list on Python 2 (this codebase's target); the
  # truthiness tests below rely on that.
  base_flags = filter(lambda x: 'DYNAMICBASE' in x or x == '/FIXED',
                      ldflags)
  if not base_flags:
    ldflags.append('/DYNAMICBASE')
  # If the NXCOMPAT flag has not been specified, default to on. Despite the
  # documentation that says this only defaults to on when the subsystem is
  # Vista or greater (which applies to the linker), the IDE defaults it on
  # unless it's explicitly off.
  if not filter(lambda x: 'NXCOMPAT' in x, ldflags):
    ldflags.append('/NXCOMPAT')
  have_def_file = filter(lambda x: x.startswith('/DEF:'), ldflags)
  manifest_flags, intermediate_manifest, manifest_files = \
      self._GetLdManifestFlags(config, manifest_base_name, gyp_to_build_path,
                               is_executable and not have_def_file, build_dir)
  ldflags.extend(manifest_flags)
  return ldflags, intermediate_manifest, manifest_files
def _GetLdManifestFlags(self, config, name, gyp_to_build_path,
                        allow_isolation, build_dir):
  """Returns a 3-tuple:
  - the set of flags that need to be added to the link to generate
    a default manifest
  - the intermediate manifest that the linker will generate that should be
    used to assert it doesn't add anything to the merged one.
  - the list of all the manifest files to be merged by the manifest tool and
    included into the link."""
  generate_manifest = self._Setting(('VCLinkerTool', 'GenerateManifest'),
                                    config,
                                    default='true')
  if generate_manifest != 'true':
    # This means not only that the linker should not generate the intermediate
    # manifest but also that the manifest tool should do nothing even when
    # additional manifests are specified.
    return ['/MANIFEST:NO'], [], []
  output_name = name + '.intermediate.manifest'
  flags = [
    '/MANIFEST',
    '/ManifestFile:' + output_name,
  ]
  # Instead of using the MANIFESTUAC flags, we generate a .manifest to
  # include into the list of manifests. This allows us to avoid the need to
  # do two passes during linking. The /MANIFEST flag and /ManifestFile are
  # still used, and the intermediate manifest is used to assert that the
  # final manifest we get from merging all the additional manifest files
  # (plus the one we generate here) isn't modified by merging the
  # intermediate into it.
  # Always NO, because we generate a manifest file that has what we want.
  flags.append('/MANIFESTUAC:NO')
  config = self._TargetConfig(config)
  enable_uac = self._Setting(('VCLinkerTool', 'EnableUAC'), config,
                             default='true')
  manifest_files = []
  # Outer XML skeleton; %s is replaced by the (possibly empty) trustInfo
  # element below.
  generated_manifest_outer = \
      "<?xml version='1.0' encoding='UTF-8' standalone='yes'?>" \
      "<assembly xmlns='urn:schemas-microsoft-com:asm.v1' manifestVersion='1.0'>%s" \
      "</assembly>"
  if enable_uac == 'true':
    execution_level = self._Setting(('VCLinkerTool', 'UACExecutionLevel'),
                                    config, default='0')
    execution_level_map = {
      '0': 'asInvoker',
      '1': 'highestAvailable',
      '2': 'requireAdministrator'
    }
    ui_access = self._Setting(('VCLinkerTool', 'UACUIAccess'), config,
                              default='false')
    inner = '''
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
  <security>
    <requestedPrivileges>
      <requestedExecutionLevel level='%s' uiAccess='%s' />
    </requestedPrivileges>
  </security>
</trustInfo>''' % (execution_level_map[execution_level], ui_access)
  else:
    inner = ''
  generated_manifest_contents = generated_manifest_outer % inner
  generated_name = name + '.generated.manifest'
  # Need to join with the build_dir here as we're writing it during
  # generation time, but we return the un-joined version because the build
  # will occur in that directory. We only write the file if the contents
  # have changed so that simply regenerating the project files doesn't
  # cause a relink.
  build_dir_generated_name = os.path.join(build_dir, generated_name)
  gyp.common.EnsureDirExists(build_dir_generated_name)
  f = gyp.common.WriteOnDiff(build_dir_generated_name)
  f.write(generated_manifest_contents)
  f.close()
  manifest_files = [generated_name]
  if allow_isolation:
    flags.append('/ALLOWISOLATION')
  manifest_files += self._GetAdditionalManifestFiles(config,
                                                     gyp_to_build_path)
  return flags, output_name, manifest_files
def _GetAdditionalManifestFiles(self, config, gyp_to_build_path):
  """Gets additional manifest files that are added to the default one
  generated by the linker."""
  files = self._Setting(('VCManifestTool', 'AdditionalManifestFiles'), config,
                        default=[])
  # The setting may arrive as a single semicolon-separated string.
  if isinstance(files, str):
    files = files.split(';')
  result = []
  for f in files:
    expanded = gyp_to_build_path(self.ConvertVSMacros(f, config=config))
    result.append(os.path.normpath(expanded))
  return result
def IsUseLibraryDependencyInputs(self, config):
  """Returns whether the target should be linked via Use Library Dependency
  Inputs (using component .objs of a given .lib)."""
  cfg = self._TargetConfig(config)
  return self._Setting(('VCLinkerTool', 'UseLibraryDependencyInputs'),
                       cfg) == 'true'
def IsEmbedManifest(self, config):
  """Returns whether manifest should be linked into binary (default: yes)."""
  cfg = self._TargetConfig(config)
  setting = self._Setting(('VCManifestTool', 'EmbedManifest'), cfg,
                          default='true')
  return setting == 'true'
def IsLinkIncremental(self, config):
  """Returns whether the target should be linked incrementally.

  Only an explicit '1' (disabled) turns incremental linking off; any other
  value, including unset, counts as incremental.
  """
  cfg = self._TargetConfig(config)
  return self._Setting(('VCLinkerTool', 'LinkIncremental'), cfg) != '1'
def GetRcflags(self, config, gyp_to_ninja_path):
  """Returns the flags that need to be added to invocations of the resource
  compiler."""
  cfg = self._TargetConfig(config)
  rcflags = []
  rc = self._GetWrapper(self, self.msvs_settings[cfg],
                        'VCResourceCompilerTool', append=rcflags)
  rc('AdditionalIncludeDirectories', map=gyp_to_ninja_path, prefix='/I')
  # The current directory is always searched for includes.
  rcflags.append('/I' + gyp_to_ninja_path('.'))
  rc('PreprocessorDefinitions', prefix='/d')
  # /l arg must be in hex without leading '0x'
  rc('Culture', prefix='/l', map=lambda culture: hex(int(culture))[2:])
  return rcflags
def BuildCygwinBashCommandLine(self, args, path_to_base):
  """Build a command line that runs args via cygwin bash. We assume that all
  incoming paths are in Windows normpath'd form, so they need to be
  converted to posix style for the part of the command line that's passed to
  bash. We also have to do some Visual Studio macro emulation here because
  various rules use magic VS names for things. Also note that rules that
  contain ninja variables cannot be fixed here (for example ${source}), so
  the outer generator needs to make sure that the paths that are written out
  are in posix style, if the command line will be used here."""
  # Locate the bundled cygwin install relative to the base directory.
  cygwin_dir = os.path.normpath(
      os.path.join(path_to_base, self.msvs_cygwin_dirs[0]))
  # cd into the base dir first, using a posix-style path for bash.
  cd = ('cd %s' % path_to_base).replace('\\', '/')
  # Posix-ify path separators and escape double quotes for the outer
  # double-quoted bash -c "..." string.
  args = [a.replace('\\', '/').replace('"', '\\"') for a in args]
  # Single-quote each argument for bash, escaping embedded single quotes
  # with the '\'' idiom.
  args = ["'%s'" % a.replace("'", "'\\''") for a in args]
  bash_cmd = ' '.join(args)
  cmd = (
      'call "%s\\setup_env.bat" && set CYGWIN=nontsec && ' % cygwin_dir +
      'bash -c "%s ; %s"' % (cd, bash_cmd))
  return cmd
def IsRuleRunUnderCygwin(self, rule):
  """Determine if an action should be run under cygwin. If the variable is
  unset, or set to 1 we use cygwin."""
  # The rule-level setting wins; otherwise fall back to the target spec,
  # defaulting to 1 (use cygwin).
  default = self.spec.get('msvs_cygwin_shell', 1)
  return int(rule.get('msvs_cygwin_shell', default)) != 0
def _HasExplicitRuleForExtension(self, spec, extension):
"""Determine if there's an explicit rule for a particular extension."""
for rule in spec.get('rules', []):
if rule['extension'] == extension:
return True
return False
def _HasExplicitIdlActions(self, spec):
"""Determine if an action should not run midl for .idl files."""
return any([action.get('explicit_idl_action', 0)
for action in spec.get('actions', [])])
def HasExplicitIdlRulesOrActions(self, spec):
  """Determine if there's an explicit rule or action for idl files. When
  there isn't we need to generate implicit rules to build MIDL .idl files."""
  has_rule = self._HasExplicitRuleForExtension(spec, 'idl')
  return has_rule or self._HasExplicitIdlActions(spec)
def HasExplicitAsmRules(self, spec):
  """Determine if there's an explicit rule for asm files. When there isn't we
  need to generate implicit rules to assemble .asm files."""
  has_rule = self._HasExplicitRuleForExtension(spec, 'asm')
  return has_rule
def GetIdlBuildData(self, source, config):
  """Determine the implicit outputs for an idl file. Returns output
  directory, outputs, and variables and flags that are required."""
  config = self._TargetConfig(config)
  midl_get = self._GetWrapper(self, self.msvs_settings[config], 'VCMIDLTool')
  def midl(name, default=None):
    # Look up a VCMIDLTool setting and expand any VS macros in it.
    return self.ConvertVSMacros(midl_get(name, default=default),
                                config=config)
  tlb = midl('TypeLibraryName', default='${root}.tlb')
  header = midl('HeaderFileName', default='${root}.h')
  dlldata = midl('DLLDataFileName', default='dlldata.c')
  iid = midl('InterfaceIdentifierFileName', default='${root}_i.c')
  proxy = midl('ProxyFileName', default='${root}_p.c')
  # Note that .tlb is not included in the outputs as it is not always
  # generated depending on the content of the input idl file.
  outdir = midl('OutputDirectory', default='')
  output = [header, dlldata, iid, proxy]
  variables = [('tlb', tlb),
               ('h', header),
               ('dlldata', dlldata),
               ('iid', iid),
               ('proxy', proxy)]
  # TODO(scottmg): Are there configuration settings to set these flags?
  target_platform = 'win32' if self.GetArch(config) == 'x86' else 'x64'
  flags = ['/char', 'signed', '/env', target_platform, '/Oicf']
  return outdir, output, variables, flags
def _LanguageMatchesForPch(source_ext, pch_source_ext):
c_exts = ('.c',)
cc_exts = ('.cc', '.cxx', '.cpp')
return ((source_ext in c_exts and pch_source_ext in c_exts) or
(source_ext in cc_exts and pch_source_ext in cc_exts))
class PrecompiledHeader(object):
  """Helper to generate dependencies and build rules to handle generation of
  precompiled headers. Interface matches the GCH handler in xcode_emulation.py.
  """
  def __init__(
      self, settings, config, gyp_to_build_path, gyp_to_unique_output, obj_ext):
    self.settings = settings
    self.config = config
    pch_source = self.settings.msvs_precompiled_source[self.config]
    self.pch_source = gyp_to_build_path(pch_source)
    filename, _ = os.path.splitext(pch_source)
    # Lowered for consistent comparison against other object paths.
    self.output_obj = gyp_to_unique_output(filename + obj_ext).lower()

  def _PchHeader(self):
    """Get the header that will appear in an #include line for all source
    files."""
    return os.path.split(self.settings.msvs_precompiled_header[self.config])[1]

  def GetObjDependencies(self, sources, objs, arch):
    """Given a list of sources files and the corresponding object files,
    returns a list of the pch files that should be depended upon. The
    additional wrapping in the return value is for interface compatibility
    with make.py on Mac, and xcode_emulation.py."""
    # Per-arch pch is a Mac concept; on Windows arch must be None.
    assert arch is None
    if not self._PchHeader():
      return []
    pch_ext = os.path.splitext(self.pch_source)[1]
    for source in sources:
      if _LanguageMatchesForPch(os.path.splitext(source)[1], pch_ext):
        return [(None, None, self.output_obj)]
    return []

  def GetPchBuildCommands(self, arch):
    """Not used on Windows as there are no additional build steps required
    (instead, existing steps are modified in GetFlagsModifications below)."""
    return []

  def GetFlagsModifications(self, input, output, implicit, command,
                            cflags_c, cflags_cc, expand_special):
    """Get the modified cflags and implicit dependencies that should be used
    for the pch compilation step."""
    if input == self.pch_source:
      # This source file creates the pch (/Yc); redirect its output to the
      # shared pch object.
      pch_output = ['/Yc' + self._PchHeader()]
      if command == 'cxx':
        return ([('cflags_cc', map(expand_special, cflags_cc + pch_output))],
                self.output_obj, [])
      elif command == 'cc':
        return ([('cflags_c', map(expand_special, cflags_c + pch_output))],
                self.output_obj, [])
    return [], output, implicit
# Module-level cache for the selected Visual Studio version; set lazily by
# GetVSVersion below.
vs_version = None
def GetVSVersion(generator_flags):
  # Memoized lookup: honour an explicit 'msvs_version' generator flag,
  # otherwise autodetect ('auto'); no fallback to other versions is allowed.
  global vs_version
  if not vs_version:
    vs_version = gyp.MSVSVersion.SelectVisualStudioVersion(
        generator_flags.get('msvs_version', 'auto'),
        allow_fallback=False)
  return vs_version
def _GetVsvarsSetupArgs(generator_flags, arch):
  """Return the environment-setup script command for the selected VS version.

  NOTE(review): `arch` is accepted but unused here — presumably kept for
  signature symmetry with vs.SetupScript(arch); confirm before removing.
  """
  version = GetVSVersion(generator_flags)
  return version.SetupScript()
def ExpandMacros(string, expansions):
  """Expand $(Variable) per expansions dict. See MsvsSettings.GetVSMacroEnv
  for the canonical way to retrieve a suitable dict."""
  if '$' in string:
    # .items() (rather than the Python-2-only .iteritems()) works on both
    # Python 2 and 3; behavior is otherwise unchanged.
    for old, new in expansions.items():
      # Replacement values must themselves be fully expanded already.
      assert '$(' not in new, new
      string = string.replace(old, new)
  return string
def _ExtractImportantEnvironment(output_of_set):
"""Extracts environment variables required for the toolchain to run from
a textual dump output by the cmd.exe 'set' command."""
envvars_to_save = (
'goma_.*', # TODO(scottmg): This is ugly, but needed for goma.
'include',
'lib',
'libpath',
'path',
'pathext',
'systemroot',
'temp',
'tmp',
)
env = {}
for line in output_of_set.splitlines():
for envvar in envvars_to_save:
if re.match(envvar + '=', line.lower()):
var, setting = line.split('=', 1)
if envvar == 'path':
# Our own rules (for running gyp-win-tool) and other actions in
# Chromium rely on python being in the path. Add the path to this
# python here so that if it's not in the path when ninja is run
# later, python will still be found.
setting = os.path.dirname(sys.executable) + os.pathsep + setting
env[var.upper()] = setting
break
for required in ('SYSTEMROOT', 'TEMP', 'TMP'):
if required not in env:
raise Exception('Environment variable "%s" '
'required to be set to valid path' % required)
return env
def _FormatAsEnvironmentBlock(envvar_dict):
"""Format as an 'environment block' directly suitable for CreateProcess.
Briefly this is a list of key=value\0, terminated by an additional \0. See
CreateProcess documentation for more details."""
block = ''
nul = '\0'
for key, value in envvar_dict.iteritems():
block += key + '=' + value + nul
block += nul
return block
def _ExtractCLPath(output_of_where):
"""Gets the path to cl.exe based on the output of calling the environment
setup batch file, followed by the equivalent of `where`."""
# Take the first line, as that's the first found in the PATH.
for line in output_of_where.strip().splitlines():
if line.startswith('LOC:'):
return line[len('LOC:'):].strip()
def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags,
                             system_includes, open_out):
  """It's not sufficient to have the absolute path to the compiler, linker,
  etc. on Windows, as those tools rely on .dlls being in the PATH. We also
  need to support both x86 and x64 compilers within the same build (to support
  msvs_target_platform hackery). Different architectures require a different
  compiler binary, and different supporting environment variables (INCLUDE,
  LIB, LIBPATH). So, we extract the environment here, wrap all invocations
  of compiler tools (cl, link, lib, rc, midl, etc.) via win_tool.py which
  sets up the environment, and then we do not prefix the compiler with
  an absolute path, instead preferring something like "cl.exe" in the rule
  which will then run whichever the environment setup has put in the path.
  When the following procedure to generate environment files does not
  meet your requirement (e.g. for custom toolchains), you can pass
  "-G ninja_use_custom_environment_files" to the gyp to suppress file
  generation and use custom environment files prepared by yourself.

  Returns a dict mapping arch name to the located cl.exe path."""
  archs = ('x86', 'x64')
  if generator_flags.get('ninja_use_custom_environment_files', 0):
    # Caller supplies environment files; just point every arch at cl.exe
    # and rely on PATH at build time.
    cl_paths = {}
    for arch in archs:
      cl_paths[arch] = 'cl.exe'
    return cl_paths
  vs = GetVSVersion(generator_flags)
  cl_paths = {}
  for arch in archs:
    # Extract environment variables for subprocesses.
    args = vs.SetupScript(arch)
    args.extend(('&&', 'set'))
    popen = subprocess.Popen(
        args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    variables, _ = popen.communicate()
    env = _ExtractImportantEnvironment(variables)
    # Inject system includes from gyp files into INCLUDE.
    if system_includes:
      system_includes = system_includes | OrderedSet(
          env.get('INCLUDE', '').split(';'))
      env['INCLUDE'] = ';'.join(system_includes)
    # Write the CreateProcess-style environment block for this arch.
    env_block = _FormatAsEnvironmentBlock(env)
    f = open_out(os.path.join(toplevel_build_dir, 'environment.' + arch), 'wb')
    f.write(env_block)
    f.close()
    # Find cl.exe location for this architecture.
    args = vs.SetupScript(arch)
    args.extend(('&&',
      'for', '%i', 'in', '(cl.exe)', 'do', '@echo', 'LOC:%~$PATH:i'))
    popen = subprocess.Popen(args, shell=True, stdout=subprocess.PIPE)
    output, _ = popen.communicate()
    cl_paths[arch] = _ExtractCLPath(output)
  return cl_paths
def VerifyMissingSources(sources, build_dir, generator_flags, gyp_to_ninja):
  """Emulate behavior of msvs_error_on_missing_sources present in the msvs
  generator: Check that all regular source files, i.e. not created at run time,
  exist on disk. Missing files cause needless recompilation when building via
  VS, and we want this check to match for people/bots that build using ninja,
  so they're not surprised when the VS build fails."""
  if not int(generator_flags.get('msvs_error_on_missing_sources', 0)):
    return
  # Sources containing '$' are generated at build time; skip them.
  regular = [s for s in sources if '$' not in s]
  on_disk = [os.path.join(build_dir, gyp_to_ninja(s)) for s in regular]
  missing = [p for p in on_disk if not os.path.exists(p)]
  if missing:
    # They'll look like out\Release\..\..\stuff\things.cc, so normalize the
    # path for a slightly less crazy looking output.
    cleaned_up = [os.path.normpath(p) for p in missing]
    raise Exception('Missing input files:\n%s' % '\n'.join(cleaned_up))
# Sets some values in default_variables, which are required for many
# generators, run on Windows.
def CalculateCommonVariables(default_variables, params):
  # Mutates `default_variables` in place; returns None.
  generator_flags = params.get('generator_flags', {})
  # Set a variable so conditions can be based on msvs_version.
  msvs_version = gyp.msvs_emulation.GetVSVersion(generator_flags)
  default_variables['MSVS_VERSION'] = msvs_version.ShortName()
  # To determine processor word size on Windows, in addition to checking
  # PROCESSOR_ARCHITECTURE (which reflects the word size of the current
  # process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which
  # contains the actual word size of the system when running thru WOW64).
  if ('64' in os.environ.get('PROCESSOR_ARCHITECTURE', '') or
      '64' in os.environ.get('PROCESSOR_ARCHITEW6432', '')):
    default_variables['MSVS_OS_BITS'] = 64
  else:
    default_variables['MSVS_OS_BITS'] = 32
| mit |
lypnol/graph-theory | problem-01/submissions/ayoub.py | 1 | 3170 | from submission import Submission
import random
import networkx as nx
class AyoubSubmission(Submission):
    """Random-walk graph exploration with a shortest-path routing table.

    Walks the graph from a start node, always preferring an unvisited
    successor; when stuck, routes back (via the shortest known path) to the
    nearest visited node that still has unvisited successors.
    """

    def author(self):
        # Author tag used by the submission framework.
        return 'ayoub'

    def run(self, input):
        graph, start = input
        V = set()  # visited nodes
        S = list(graph.keys())  # list of all nodes in the graph
        N = len(S)  # total number of nodes in the graph
        # We define the following routing table:
        # for all nodes u, v of the graph:
        #   route[u][v] = [d, p], where
        #   d is the shortest known distance between u and v (in steps)
        #   p is the predecessor of v that reaches u along the shortest
        #     known path.
        route = {v: {v: [float('inf'), None] for v in graph} for v in graph}
        for v in graph:
            route[v][v][0] = 0
        # starting node
        s = start
        if start is None:
            s = random.choice(S)
        # This variable tracks whether we are heading towards a known node
        going_to = None
        current = s
        last = None
        path = []
        while True:
            path.append(current)
            # Mark the current node as visited
            V.add(current)
            # Update the routing table
            if last is not None:
                # For every visited node, check that the routing table holds
                # the shortest known path
                for v in V:
                    if route[v][current][0] > route[v][last][0] + 1:
                        route[v][current][0] = route[v][last][0] + 1
                        route[v][current][1] = last
            # Update the `last` variable
            last = current
            # If we are heading towards a known node, use the routing table
            # built so far
            if going_to:
                if current == going_to:
                    going_to = None
                else:
                    current = route[going_to][current][1]
                    continue
            # Otherwise choose the next node with the following rule:
            # - If some successors of the current node are unvisited,
            #   move to one of them at random.
            # - Otherwise, look in the routing table for a visited node that
            #   has unvisited successors, and head there using the routing
            #   table. If several exist, choose the closest one.
            # - If no visited node has unvisited successors, stop.
            univisited = [u for u in graph[current] if u not in V]
            if univisited:
                current = random.choice(univisited)
            else:
                U = sorted([v for v in V if [u for u in graph[v] if u not in V]],
                           key=lambda x: route[x][current][0])
                if U:
                    going_to = U[0]
                    current = route[going_to][current][1]
                else:
                    break
        return path
| mit |
fraferra/PlayPaloAltoServer | play_api/urls.py | 3 | 2087 | from django.conf.urls import patterns, url
from django.core.urlresolvers import reverse_lazy
from django.views.generic import RedirectView
from play_api import views
# URL routing for the play_api application.  Endpoints are grouped by API
# version; v1 and v2 expose the same resource names (login, events, coupons,
# leaderboard, history) backed by version-specific views.
urlpatterns = patterns('',
    # Unversioned registration / authentication endpoints.
    url(r'^api/registration/$', views.api_registration ,name='api_registration'),
    url(r'^api/facebook_auth/$', views.facebook_auth ,name='facebook_auth'),
    # --- API v1 ---
    url(r'^api/v1/login/$', views.api_v1_login ,name='api_login'),
    url(r'^api/v1/home/$', views.api_v1_home ,name='api_home'),
    url(r'^api/v1/logout/$', views.api_v1_logout ,name='api_logout'),
    url(r'^api/v1/my_events/$', views.api_v1_my_events ,name='api_my_events'),
    url(r'^api/v1/events/$', views.api_v1_events ,name='api_events'),
    url(r'^api/v1/my_coupons/$', views.api_v1_my_coupons ,name='api_my_coupons'),
    url(r'^api/v1/coupons/$', views.api_v1_coupons ,name='api_coupons'),
    url(r'^api/v1/leaderboard/$', views.api_v1_leaderboard ,name='api_leaderboard'),
    url(r'^api/v1/history_events/$', views.api_v1_history_events ,name='api_history_events'),
    url(r'^api/v1/history_coupons/$', views.api_v1_history_coupons ,name='api_history_coupons'),
    # --- API v2 --- (adds add_event / add_coupon on top of the v1 surface)
    url(r'^api/v2/login/$', views.api_v2_login ,name='api_login'),
    url(r'^api/v2/add_event/$', views.api_v2_add_event ,name='api_add_event'),
    url(r'^api/v2/add_coupon/$', views.api_v2_add_coupon ,name='api_add_coupon'),
    url(r'^api/v2/home/$', views.api_v2_home ,name='api_home'),
    url(r'^api/v2/logout/$', views.api_v2_logout ,name='api_logout'),
    url(r'^api/v2/my_events/$', views.api_v2_my_events ,name='api_my_events'),
    url(r'^api/v2/events/$', views.api_v2_events ,name='api_events'),
    url(r'^api/v2/my_coupons/$', views.api_v2_my_coupons ,name='api_my_coupons'),
    url(r'^api/v2/coupons/$', views.api_v2_coupons ,name='api_coupons'),
    url(r'^api/v2/leaderboard/$', views.api_v2_leaderboard ,name='api_leaderboard'),
    url(r'^api/v2/history_events/$', views.api_v2_history_events ,name='api_history_events'),
    url(r'^api/v2/history_coupons/$', views.api_v2_history_coupons ,name='api_history_coupons'),
)
| mit |
BruceDLong/CodeDog | Scons/scons-local-4.1.0.post1/SCons/Tool/mwld.py | 5 | 3576 | """SCons.Tool.mwld
Tool-specific initialization for the Metrowerks CodeWarrior linker.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
import SCons.Tool
def generate(env):
    """Add Builders and construction variables for lib to an Environment."""
    # Register the standard static-library, shared-library and program
    # builders on this environment.
    for install_builder in (SCons.Tool.createStaticLibBuilder,
                            SCons.Tool.createSharedLibBuilder,
                            SCons.Tool.createProgBuilder):
        install_builder(env)

    # Archiver and linker configuration for the Metrowerks CodeWarrior
    # toolchain ('mwld' serves as both archiver and linker).
    settings = {
        'AR': 'mwld',
        'ARCOM': '$AR $ARFLAGS -library -o $TARGET $SOURCES',
        'LIBDIRPREFIX': '-L',
        'LIBDIRSUFFIX': '',
        'LIBLINKPREFIX': '-l',
        'LIBLINKSUFFIX': '.lib',
        'LINK': 'mwld',
        'LINKCOM': '$LINK $LINKFLAGS -o $TARGET $SOURCES $_LIBDIRFLAGS $_LIBFLAGS',
        'SHLINK': '$LINK',
        'SHLINKFLAGS': '$LINKFLAGS',
        'SHLINKCOM': shlib_action,
        'SHLIBEMITTER': shlib_emitter,
        'LDMODULEEMITTER': shlib_emitter,
    }
    for key, value in settings.items():
        env[key] = value
def exists(env):
    """mwld is usable exactly when the companion mwcc compiler tool is."""
    from SCons.Tool import mwcc
    return mwcc.set_vars(env)
def shlib_generator(target, source, env, for_signature):
    """Build the mwld command line for linking a shared library.

    Returns a list containing a single command (a list of argument
    strings), the form expected from a generator Action.
    """
    cmd = ['$SHLINK', '$SHLINKFLAGS', '-shared']

    no_import_lib = env.get('no_import_lib', 0)
    if no_import_lib:
        # BUG FIX: the original called ``cmd.extend('-noimplib')``, which
        # extends the list with a *string* and therefore appends each
        # character ('-', 'n', 'o', ...) as a separate argument.
        cmd.append('-noimplib')

    dll = env.FindIxes(target, 'SHLIBPREFIX', 'SHLIBSUFFIX')
    if dll:
        cmd.extend(['-o', dll])

    implib = env.FindIxes(target, 'LIBPREFIX', 'LIBSUFFIX')
    if implib:
        cmd.extend(['-implib', implib.get_string(for_signature)])

    cmd.extend(['$SOURCES', '$_LIBDIRFLAGS', '$_LIBFLAGS'])
    return [cmd]
def shlib_emitter(target, source, env):
    """Validate the DLL target and append the import-library target.

    Raises a UserError when no target carries the shared-library suffix;
    otherwise ensures an import library accompanies the DLL unless the
    'no_import_lib' construction variable suppresses it.
    """
    dll = env.FindIxes(target, 'SHLIBPREFIX', 'SHLIBSUFFIX')
    no_import_lib = env.get('no_import_lib', 0)

    if not dll:
        raise SCons.Errors.UserError(
            "A shared library should have exactly one target with the suffix: %s"
            % env.subst("$SHLIBSUFFIX"))

    if not no_import_lib and \
            not env.FindIxes(target, 'LIBPREFIX', 'LIBSUFFIX'):
        # Derive the import-library name from the DLL name and make it an
        # additional target of this build.
        implib = env.ReplaceIxes(dll,
                                 'SHLIBPREFIX', 'SHLIBSUFFIX',
                                 'LIBPREFIX', 'LIBSUFFIX')
        target.append(implib)

    return target, source
# Generator-based Action bound to SHLINKCOM in generate() above.
# NOTE(review): this module only does ``import SCons.Tool``; it relies on that
# import making ``SCons.Action`` available transitively — confirm, or add an
# explicit ``import SCons.Action`` for safety.
shlib_action = SCons.Action.Action(shlib_generator, generator=1)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| gpl-2.0 |
CLVsol/odoo_web2py_connector | models/menu.py | 1 | 6416 | # -*- coding: utf-8 -*-
# this file is released under public domain and you can use without limitations
#########################################################################
## Customize your APP title, subtitle and menus here
#########################################################################
# response.logo = A(B('web',SPAN(2),'py'),XML('™ '),
# _class="brand",_href="http://www.web2py.com/")
# -- Branding ---------------------------------------------------------------
# ``response.logo`` is the clickable brand element rendered by the default
# web2py layout's navigation bar (helpers: A = <a>, B = <b>).
response.logo = A(B('Odoo web2py Connector'),
                  _class="brand",_href="https://github.com/CLVsol/odoo_web2py_connector")
# Page title derived from the application folder name (underscores -> spaces).
response.title = request.application.replace('_',' ').title()
response.subtitle = ''

# -- HTML <meta> tags -------------------------------------------------------
# read more at http://dev.w3.org/html5/markup/meta.name.html
response.meta.author = 'Carlos Vercelino <carlos.vercelino@clvsol.com>'
response.meta.description = 'Odoo web2py connector'
response.meta.keywords = 'web2py, python, framework'
response.meta.generator = 'Odoo Web2py Connector'

# Google Analytics tracking id; ``None`` disables analytics entirely.
response.google_analytics_id = None

# -- Main application menu --------------------------------------------------
# Each entry is a tuple: (label, currently-active?, target URL, sub-menu list).
response.menu = [
    (T('Home'), False, URL('default', 'index'), [])
]

# When True, the development shortcuts menu built below is appended.
DEVELOPMENT_MENU = True
#########################################################################
## provide shortcuts for development. remove in production
#########################################################################
def _():
    """Append the web2py development shortcuts to ``response.menu``.

    Adds quick links to the admin interface for this application plus
    external web2py documentation and community resources.  Intended for
    development only — remove in production.
    """
    # shortcuts
    app = request.application
    ctr = request.controller
    # useful links to internal and external resources
    response.menu += [
        (SPAN('web2py', _class='highlighted'), False, 'http://web2py.com', [
            (T('My Sites'), False, URL('admin', 'default', 'site')),
            (T('This App'), False, URL('admin', 'default', 'design/%s' % app), [
                (T('Controller'), False,
                 URL(
                 'admin', 'default', 'edit/%s/controllers/%s.py' % (app, ctr))),
                (T('View'), False,
                 URL(
                 'admin', 'default', 'edit/%s/views/%s' % (app, response.view))),
                (T('Layout'), False,
                 URL(
                 'admin', 'default', 'edit/%s/views/layout.html' % app)),
                (T('Stylesheet'), False,
                 URL(
                 'admin', 'default', 'edit/%s/static/css/web2py.css' % app)),
                (T('DB Model'), False,
                 URL(
                 'admin', 'default', 'edit/%s/models/db.py' % app)),
                (T('Menu Model'), False,
                 URL(
                 'admin', 'default', 'edit/%s/models/menu.py' % app)),
                (T('Database'), False, URL(app, 'appadmin', 'index')),
                (T('Errors'), False, URL(
                    'admin', 'default', 'errors/' + app)),
                (T('About'), False, URL(
                    'admin', 'default', 'about/' + app)),
            ]),
            ('web2py.com', False, 'http://www.web2py.com', [
                (T('Download'), False,
                 'http://www.web2py.com/examples/default/download'),
                (T('Support'), False,
                 'http://www.web2py.com/examples/default/support'),
                (T('Demo'), False, 'http://web2py.com/demo_admin'),
                (T('Quick Examples'), False,
                 'http://web2py.com/examples/default/examples'),
                (T('FAQ'), False, 'http://web2py.com/AlterEgo'),
                (T('Videos'), False,
                 'http://www.web2py.com/examples/default/videos/'),
                (T('Free Applications'),
                 False, 'http://web2py.com/appliances'),
                (T('Plugins'), False, 'http://web2py.com/plugins'),
                (T('Layouts'), False, 'http://web2py.com/layouts'),
                (T('Recipes'), False, 'http://web2pyslices.com/'),
                (T('Semantic'), False, 'http://web2py.com/semantic'),
            ]),
            (T('Documentation'), False, 'http://www.web2py.com/book', [
                (T('Preface'), False,
                 'http://www.web2py.com/book/default/chapter/00'),
                (T('Introduction'), False,
                 'http://www.web2py.com/book/default/chapter/01'),
                (T('Python'), False,
                 'http://www.web2py.com/book/default/chapter/02'),
                (T('Overview'), False,
                 'http://www.web2py.com/book/default/chapter/03'),
                (T('The Core'), False,
                 'http://www.web2py.com/book/default/chapter/04'),
                (T('The Views'), False,
                 'http://www.web2py.com/book/default/chapter/05'),
                (T('Database'), False,
                 'http://www.web2py.com/book/default/chapter/06'),
                (T('Forms and Validators'), False,
                 'http://www.web2py.com/book/default/chapter/07'),
                (T('Email and SMS'), False,
                 'http://www.web2py.com/book/default/chapter/08'),
                (T('Access Control'), False,
                 'http://www.web2py.com/book/default/chapter/09'),
                (T('Services'), False,
                 'http://www.web2py.com/book/default/chapter/10'),
                (T('Ajax Recipes'), False,
                 'http://www.web2py.com/book/default/chapter/11'),
                (T('Components and Plugins'), False,
                 'http://www.web2py.com/book/default/chapter/12'),
                (T('Deployment Recipes'), False,
                 'http://www.web2py.com/book/default/chapter/13'),
                (T('Other Recipes'), False,
                 'http://www.web2py.com/book/default/chapter/14'),
                (T('Buy this book'), False,
                 'http://stores.lulu.com/web2py'),
            ]),
            (T('Community'), False, None, [
                (T('Groups'), False,
                 'http://www.web2py.com/examples/default/usergroups'),
                (T('Twitter'), False, 'http://twitter.com/web2py'),
                (T('Live Chat'), False,
                 'http://webchat.freenode.net/?channels=web2py'),
            ]),
            (T('Plugins'), False, None, [
                ('plugin_wiki', False,
                 'http://web2py.com/examples/default/download'),
                (T('Other Plugins'), False,
                 'http://web2py.com/plugins'),
                (T('Layout Plugins'),
                 False, 'http://web2py.com/layouts'),
            ])
        ]
    )]

# Install the development menu unless disabled above.
if DEVELOPMENT_MENU: _()

# If the scaffolding auth object exists, let it add its wiki menu entries.
if "auth" in locals(): auth.wikimenu()
| agpl-3.0 |
xchenum/quantum-bug | quantum/plugins/cisco/common/cisco_credentials_v2.py | 9 | 3007 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2012 Cisco Systems, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Sumit Naiksatam, Cisco Systems, Inc.
import logging as LOG
from quantum.common.utils import find_config_file
from quantum.plugins.cisco.common import cisco_configparser as confp
from quantum.plugins.cisco.common import cisco_constants as const
from quantum.plugins.cisco.common import cisco_exceptions as cexc
from quantum.plugins.cisco.db import network_db_v2 as cdb
# Module-level logging configuration.
LOG.basicConfig(level=LOG.WARN)
# NOTE(review): the logger returned here is discarded, so as written this
# line has no lasting effect — confirm whether
# ``LOG = LOG.getLogger(const.LOGGER_COMPONENT_NAME)`` was intended.
LOG.getLogger(const.LOGGER_COMPONENT_NAME)

# Locate the Cisco plugin's credentials.ini file.
CREDENTIALS_FILE = find_config_file({'plugin': 'cisco'},
                                    "credentials.ini")

# All credentials are stored under the network-admin tenant.
TENANT = const.NETWORK_ADMIN

# Parse credentials.ini into a nested dict: {credential-id: {key: value}}.
cp = confp.CiscoConfigParser(CREDENTIALS_FILE)
_creds_dictionary = cp.walk(cp.dummy)
class Store(object):
    """Credential Store.

    Thin static wrapper around the ``network_db_v2`` credential helpers.
    All credentials are kept under the network-admin TENANT defined above.
    """

    @staticmethod
    def initialize():
        """Load every credential parsed from credentials.ini into the DB."""
        # Iterate directly over the dict; renamed from ``id`` to avoid
        # shadowing the builtin.
        for cred_id in _creds_dictionary:
            try:
                cdb.add_credential(TENANT, cred_id,
                                   _creds_dictionary[cred_id][const.USERNAME],
                                   _creds_dictionary[cred_id][const.PASSWORD])
            except cexc.CredentialAlreadyExists:
                # We are quietly ignoring this, since it only happens
                # if this class module is loaded more than once, in which
                # case, the credentials are already populated
                pass

    @staticmethod
    def put_credential(cred_name, username, password):
        """Set the username and password"""
        # The return value was previously bound to an unused local; the
        # side effect on the DB is all that matters here.
        cdb.add_credential(TENANT, cred_name, username, password)

    @staticmethod
    def get_username(cred_name):
        """Get the username"""
        credential = cdb.get_credential_name(TENANT, cred_name)
        return credential[const.CREDENTIAL_USERNAME]

    @staticmethod
    def get_password(cred_name):
        """Get the password"""
        credential = cdb.get_credential_name(TENANT, cred_name)
        return credential[const.CREDENTIAL_PASSWORD]

    @staticmethod
    def get_credential(cred_name):
        """Get the username and password.

        BUG FIX: this previously returned the *constant key names*
        (CREDENTIAL_USERNAME / CREDENTIAL_PASSWORD) as the dict values,
        ignoring the credential it had just fetched from the database.
        It now returns the actual stored username and password.
        """
        credential = cdb.get_credential_name(TENANT, cred_name)
        return {const.USERNAME: credential[const.CREDENTIAL_USERNAME],
                const.PASSWORD: credential[const.CREDENTIAL_PASSWORD]}

    @staticmethod
    def delete_credential(cred_name):
        """Delete a credential"""
        cdb.remove_credential(TENANT, cred_name)
| apache-2.0 |
williballenthin/python-idb | scripts/extract_function_names.py | 1 | 1585 | #!/usr/bin/env python3
"""
Extract the names of functions within the given IDA Pro database.
author: Willi Ballenthin
email: willi.ballenthin@gmail.com
"""
import argparse
import logging
import sys
import idb
import idb.analysis
import idb.netnode
logger = logging.getLogger(__name__)
def main(argv=None):
    """CLI entry point: print ``md5:offset:name`` plus the declared type for
    every function in the given IDA Pro database.

    :param argv: argument list (defaults to ``sys.argv[1:]``)
    :return: process exit code (0 on success)
    """
    cli_args = sys.argv[1:] if argv is None else argv

    parser = argparse.ArgumentParser(
        description="Extract the names of functions within the given IDA Pro database."
    )
    parser.add_argument("idbpath", type=str, help="Path to input idb file")
    parser.add_argument(
        "-v", "--verbose", action="store_true", help="Enable debug logging"
    )
    parser.add_argument(
        "-q", "--quiet", action="store_true", help="Disable all output but errors"
    )
    args = parser.parse_args(args=cli_args)

    # Map the mutually exclusive verbosity flags onto a single logging level.
    if args.verbose:
        level = logging.DEBUG
    elif args.quiet:
        level = logging.ERROR
    else:
        level = logging.INFO
    logging.basicConfig(level=level)
    logging.getLogger().setLevel(level)

    with idb.from_file(args.idbpath) as db:
        root = idb.analysis.Root(db)
        api = idb.IDAPython(db)
        for fva in api.idautils.Functions():
            print("%s:0x%x:%s" % (root.md5, fva, api.idc.GetFunctionName(fva)))
            print(api.idc.GetType(fva))

    return 0


if __name__ == "__main__":
    sys.exit(main())
| apache-2.0 |
tszym/ansible | lib/ansible/plugins/callback/debug.py | 137 | 1190 | from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.callback.default import CallbackModule as CallbackModule_default
class CallbackModule(CallbackModule_default):  # pylint: disable=too-few-public-methods,no-init
    '''
    Override for the default callback module.

    Render std err/out outside of the rest of the result which it prints with
    indentation.
    '''
    CALLBACK_VERSION = 2.0
    CALLBACK_TYPE = 'stdout'
    CALLBACK_NAME = 'debug'

    def _dump_results(self, result):
        '''Return the text to output for a result.'''
        # Enable JSON indentation in the base renderer.
        result['_ansible_verbose_always'] = True

        # Temporarily pull the stream-related fields out so the base class
        # renders only the remainder with indentation.
        held = {
            key: result.pop(key)
            for key in ('stdout', 'stdout_lines', 'stderr', 'stderr_lines', 'msg')
            if key in result
        }

        rendered = CallbackModule_default._dump_results(self, result)

        # Append the raw streams (un-indented) after the JSON dump.
        for key in ('stdout', 'stderr', 'msg'):
            if held.get(key):
                rendered += '\n\n%s:\n\n%s\n' % (key.upper(), held[key])

        # Restore the popped fields so the result object is left intact.
        result.update(held)

        return rendered
| gpl-3.0 |
VenturaDelMonte/staticwebanalyzer | SDK/googlemaps-1.0.2/setup.py | 3 | 1741 | #!/usr/bin/env python
# Copyright 2009 John Kleint
#
# This is free software, licensed under the Lesser Affero General
# Public License, available in the accompanying LICENSE.txt file.
"""
Distutils setup script for googlemaps module.
"""
from distutils.core import setup
import sys
# Make the bundled package importable so googlemaps.VERSION and the module
# docstring can be read below without installing first.
sys.path.insert(0, 'googlemaps')
import googlemaps

setup(name='googlemaps',
      version=googlemaps.VERSION,
      author='John Kleint',
      author_email='py-googlemaps-general@lists.sourceforge.net',
      url='http://sourceforge.net/projects/py-googlemaps/',
      download_url='https://sourceforge.net/projects/py-googlemaps/files/',
      description='Easy geocoding, reverse geocoding, driving directions, and local search in Python via Google.',
      # Reuse the class docstring as the PyPI long description.
      long_description=googlemaps.GoogleMaps.__doc__,
      package_dir={'': 'googlemaps'},
      py_modules=['googlemaps'],
      provides=['googlemaps'],
      requires=['simplejson'],
      classifiers=['Development Status :: 5 - Production/Stable',
                   'Intended Audience :: Developers',
                   'Natural Language :: English',
                   'Operating System :: OS Independent',
                   'Programming Language :: Python :: 2',
                   'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)',
                   'License :: OSI Approved :: GNU Affero General Public License v3',
                   'Topic :: Internet',
                   'Topic :: Internet :: WWW/HTTP',
                   'Topic :: Scientific/Engineering :: GIS',
                   ],
      keywords='google maps local search ajax api geocode geocoding directions navigation json',
      license='Lesser Affero General Public License v3',
      )
| mit |
johankaito/fufuka | microblog/old-flask/venv/lib/python2.7/site-packages/setuptools/command/upload_docs.py | 390 | 6782 | # -*- coding: utf-8 -*-
"""upload_docs
Implements a Distutils 'upload_docs' subcommand (upload documentation to
PyPI's pythonhosted.org).
"""
from base64 import standard_b64encode
from distutils import log
from distutils.errors import DistutilsOptionError
from distutils.command.upload import upload
import os
import socket
import zipfile
import tempfile
import sys
import shutil
from setuptools.compat import httplib, urlparse, unicode, iteritems, PY3
from pkg_resources import iter_entry_points
# Codec error handler used when encoding text: on Python 3,
# 'surrogateescape' round-trips surrogates coming from OS APIs; on
# Python 2 fall back to 'strict'.
errors = 'surrogateescape' if PY3 else 'strict'


# This is not just a replacement for byte literals
# but works as a general purpose encoder
def b(s, encoding='utf-8'):
    # Text input is encoded with the module-level ``errors`` policy;
    # anything already bytes-like passes through unchanged.
    if isinstance(s, unicode):
        return s.encode(encoding, errors)
    return s
class upload_docs(upload):
    """Distutils command: zip the built documentation and POST the archive
    to PyPI's documentation hosting (pythonhosted.org)."""

    description = 'Upload documentation to PyPI'

    user_options = [
        ('repository=', 'r',
         "url of repository [default: %s]" % upload.DEFAULT_REPOSITORY),
        ('show-response', None,
         'display full response text from server'),
        ('upload-dir=', None, 'directory to upload'),
    ]
    boolean_options = upload.boolean_options

    def has_sphinx(self):
        # Sphinx is only consulted when no explicit upload dir was given;
        # returns True if a 'build_sphinx' command entry point is installed
        # (implicitly None otherwise).
        if self.upload_dir is None:
            for ep in iter_entry_points('distutils.commands', 'build_sphinx'):
                return True

    sub_commands = [('build_sphinx', has_sphinx)]

    def initialize_options(self):
        upload.initialize_options(self)
        self.upload_dir = None
        self.target_dir = None

    def finalize_options(self):
        """Resolve target_dir: explicit upload_dir, the Sphinx build output,
        or <build_base>/docs as a fallback."""
        upload.finalize_options(self)
        if self.upload_dir is None:
            if self.has_sphinx():
                build_sphinx = self.get_finalized_command('build_sphinx')
                self.target_dir = build_sphinx.builder_target_dir
            else:
                build = self.get_finalized_command('build')
                self.target_dir = os.path.join(build.build_base, 'docs')
        else:
            self.ensure_dirname('upload_dir')
            self.target_dir = self.upload_dir
        self.announce('Using upload directory %s' % self.target_dir)

    def create_zipfile(self, filename):
        """Zip the contents of target_dir into *filename*, with archive
        paths relative to target_dir.

        Raises DistutilsOptionError when the top-level directory is empty.
        """
        zip_file = zipfile.ZipFile(filename, "w")
        try:
            self.mkpath(self.target_dir)  # just in case
            for root, dirs, files in os.walk(self.target_dir):
                if root == self.target_dir and not files:
                    raise DistutilsOptionError(
                        "no files found in upload directory '%s'"
                        % self.target_dir)
                for name in files:
                    full = os.path.join(root, name)
                    relative = root[len(self.target_dir):].lstrip(os.path.sep)
                    dest = os.path.join(relative, name)
                    zip_file.write(full, dest)
        finally:
            zip_file.close()

    def run(self):
        """Build (if needed), zip, and upload the documentation."""
        # Run sub commands
        for cmd_name in self.get_sub_commands():
            self.run_command(cmd_name)

        tmp_dir = tempfile.mkdtemp()
        name = self.distribution.metadata.get_name()
        zip_file = os.path.join(tmp_dir, "%s.zip" % name)
        try:
            self.create_zipfile(zip_file)
            self.upload_file(zip_file)
        finally:
            shutil.rmtree(tmp_dir)

    def upload_file(self, filename):
        """POST *filename* to the repository as a multipart/form-data
        'doc_upload' action with HTTP Basic authentication."""
        f = open(filename, 'rb')
        content = f.read()
        f.close()
        meta = self.distribution.metadata
        data = {
            ':action': 'doc_upload',
            'name': meta.get_name(),
            'content': (os.path.basename(filename), content),
        }
        # set up the authentication
        credentials = b(self.username + ':' + self.password)
        credentials = standard_b64encode(credentials)
        if PY3:
            credentials = credentials.decode('ascii')
        auth = "Basic " + credentials

        # Build up the MIME payload for the POST data
        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = b('\n--') + b(boundary)
        end_boundary = sep_boundary + b('--')
        body = []
        for key, values in iteritems(data):
            title = '\nContent-Disposition: form-data; name="%s"' % key
            # handle multiple entries for the same name
            if not isinstance(values, list):
                values = [values]
            for value in values:
                if type(value) is tuple:
                    # (filename, content) tuple -> file upload field
                    title += '; filename="%s"' % value[0]
                    value = value[1]
                else:
                    value = b(value)
                body.append(sep_boundary)
                body.append(b(title))
                body.append(b("\n\n"))
                body.append(value)
                if value and value[-1:] == b('\r'):
                    body.append(b('\n'))  # write an extra newline (lurve Macs)
        body.append(end_boundary)
        body.append(b("\n"))
        body = b('').join(body)

        self.announce("Submitting documentation to %s" % (self.repository),
                      log.INFO)

        # build the Request
        # We can't use urllib2 since we need to send the Basic
        # auth right with the first request
        schema, netloc, url, params, query, fragments = \
            urlparse(self.repository)
        assert not params and not query and not fragments
        if schema == 'http':
            conn = httplib.HTTPConnection(netloc)
        elif schema == 'https':
            conn = httplib.HTTPSConnection(netloc)
        else:
            raise AssertionError("unsupported schema " + schema)

        data = ''
        try:
            conn.connect()
            conn.putrequest("POST", url)
            content_type = 'multipart/form-data; boundary=%s' % boundary
            conn.putheader('Content-type', content_type)
            conn.putheader('Content-length', str(len(body)))
            conn.putheader('Authorization', auth)
            conn.endheaders()
            conn.send(body)
        except socket.error as e:
            # Network-level failure: report and bail out without a response.
            self.announce(str(e), log.ERROR)
            return

        r = conn.getresponse()
        if r.status == 200:
            self.announce('Server response (%s): %s' % (r.status, r.reason),
                          log.INFO)
        elif r.status == 301:
            # Redirect is treated as success; derive the docs URL if the
            # server did not provide one.
            location = r.getheader('Location')
            if location is None:
                location = 'https://pythonhosted.org/%s/' % meta.get_name()
            self.announce('Upload successful. Visit %s' % location,
                          log.INFO)
        else:
            self.announce('Upload failed (%s): %s' % (r.status, r.reason),
                          log.ERROR)
        if self.show_response:
            print('-' * 75, r.read(), '-' * 75)
| apache-2.0 |
ralphbean/bugwarrior | tests/base.py | 1 | 3350 | from builtins import object
import shutil
import os.path
import tempfile
import unittest
import configparser
from unittest import mock
import responses
from bugwarrior import config
from bugwarrior.data import BugwarriorData
class AbstractServiceTest(object):
    """Abstract mixin that forces every service test case to implement the
    canonical test methods below."""

    def test_to_taskwarrior(self):
        """Each service must exercise its ``to_taskwarrior()`` conversion."""
        raise NotImplementedError

    def test_issues(self):
        """Each service must exercise ``issues()``.

        Mock HTTP-based services with the ``responses`` library; for services
        driven by a third-party client library, substitute a fake
        implementation class instead.
        """
        raise NotImplementedError
class ConfigTest(unittest.TestCase):
    """Creates config files, configures the environment, and cleans up
    afterwards."""

    def setUp(self):
        self.old_environ = os.environ.copy()
        self.tempdir = tempfile.mkdtemp(prefix='bugwarrior')

        # Write a minimal taskwarrior config pointing at a fresh lists dir.
        self.taskrc = os.path.join(self.tempdir, '.taskrc')
        self.lists_path = os.path.join(self.tempdir, 'lists')
        os.mkdir(self.lists_path)
        with open(self.taskrc, 'w+') as fout:
            fout.write('data.location=%s\n' % self.lists_path)

        # Point HOME at the sandbox and scrub any config-related variables
        # that could leak in from the developer's environment.
        os.environ['HOME'] = self.tempdir
        for variable in (config.BUGWARRIORRC, 'TASKRC',
                         'XDG_CONFIG_HOME', 'XDG_CONFIG_DIRS'):
            os.environ.pop(variable, None)

    def tearDown(self):
        shutil.rmtree(self.tempdir, ignore_errors=True)
        os.environ = self.old_environ
class ServiceTest(ConfigTest):
    """Base class for per-service tests: wires a service class to a mocked
    bugwarrior configuration object."""

    # Defaults merged into the mocked [general] config section.
    GENERAL_CONFIG = {
        'annotation_length': 100,
        'description_length': 100,
    }
    # Defaults for the service's own config section; subclasses override.
    SERVICE_CONFIG = {
    }

    @classmethod
    def setUpClass(cls):
        # Show full diffs on assertion failures.
        cls.maxDiff = None

    def get_mock_service(
        self, service_class, section='unspecified',
        config_overrides=None, general_overrides=None
    ):
        """Instantiate *service_class* against a Mock config whose
        has_option/get/getint behave like ConfigParser over the merged
        option dicts."""
        options = {
            'general': self.GENERAL_CONFIG.copy(),
            section: self.SERVICE_CONFIG.copy(),
        }
        if config_overrides:
            options[section].update(config_overrides)
        if general_overrides:
            options['general'].update(general_overrides)

        def has_option(section, name):
            # Mirrors ConfigParser.has_option, but returns the value itself
            # (truthy) instead of True when present.
            try:
                return options[section][name]
            except KeyError:
                return False

        def get_option(section, name):
            try:
                return options[section][name]
            except KeyError:
                raise configparser.NoOptionError(section, name)

        def get_int(section, name):
            return int(get_option(section, name))

        config = mock.Mock()
        config.has_option = mock.Mock(side_effect=has_option)
        config.get = mock.Mock(side_effect=get_option)
        config.getint = mock.Mock(side_effect=get_int)
        config.data = BugwarriorData(self.lists_path)

        service_instance = service_class(config, 'general', section)

        return service_instance

    @staticmethod
    def add_response(url, **kwargs):
        # Register a mocked GET response (exact query string match).
        responses.add(responses.GET, url, match_querystring=True, **kwargs)
| gpl-3.0 |
slohse/ansible | test/units/modules/network/f5/test_bigiq_application_fastl4_udp.py | 21 | 5491 | # -*- coding: utf-8 -*-
#
# Copyright: (c) 2017, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
import sys
from nose.plugins.skip import SkipTest
if sys.version_info < (2, 7):
raise SkipTest("F5 Ansible modules require Python >= 2.7")
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import Mock
from ansible.compat.tests.mock import patch
from ansible.module_utils.basic import AnsibleModule
try:
from library.modules.bigiq_application_fastl4_udp import ApiParameters
from library.modules.bigiq_application_fastl4_udp import ModuleParameters
from library.modules.bigiq_application_fastl4_udp import ModuleManager
from library.modules.bigiq_application_fastl4_udp import ArgumentSpec
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import iControlUnexpectedHTTPError
from test.unit.modules.utils import set_module_args
except ImportError:
try:
from ansible.modules.network.f5.bigiq_application_fastl4_udp import ApiParameters
from ansible.modules.network.f5.bigiq_application_fastl4_udp import ModuleParameters
from ansible.modules.network.f5.bigiq_application_fastl4_udp import ModuleManager
from ansible.modules.network.f5.bigiq_application_fastl4_udp import ArgumentSpec
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import iControlUnexpectedHTTPError
from units.modules.utils import set_module_args
except ImportError:
raise SkipTest("F5 Ansible modules require the f5-sdk Python library")
# Fixture files live in a 'fixtures' directory next to this module; parsed
# results are memoized in fixture_data keyed by absolute path.
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}


def load_fixture(name):
    """Return the (cached) contents of fixture *name*, parsed as JSON when
    possible and returned as raw text otherwise."""
    path = os.path.join(fixture_path, name)
    if path in fixture_data:
        return fixture_data[path]

    with open(path) as handle:
        payload = handle.read()

    try:
        payload = json.loads(payload)
    except Exception:
        # Not JSON — keep the raw text.
        pass

    fixture_data[path] = payload
    return payload
class TestParameters(unittest.TestCase):
    """Unit tests for the module's parameter-adapter classes."""

    def test_module_parameters(self):
        # A representative fastL4 UDP application definition, as a user
        # would supply it in a playbook.
        args = dict(
            name='foo',
            description='my description',
            service_environment='bar',
            servers=[
                dict(
                    address='1.2.3.4',
                    port=8080
                ),
                dict(
                    address='5.6.7.8',
                    port=8000
                )
            ],
            inbound_virtual=dict(
                address='2.2.2.2',
                netmask='255.255.255.255',
                port=80
            )
        )

        p = ModuleParameters(params=args)
        # Derived names should all track the application name.
        assert p.name == 'foo'
        assert p.config_set_name == 'foo'
        assert p.sub_path == 'foo'
        assert p.http_profile == 'profile_http'
        assert p.service_environment == 'bar'
        # Server list passes through with address/port preserved.
        assert len(p.servers) == 2
        assert 'address' in p.servers[0]
        assert 'port' in p.servers[0]
        assert 'address' in p.servers[1]
        assert 'port' in p.servers[1]
        assert p.servers[0]['address'] == '1.2.3.4'
        assert p.servers[0]['port'] == 8080
        assert p.servers[1]['address'] == '5.6.7.8'
        assert p.servers[1]['port'] == 8000
        # Inbound virtual server settings pass through unchanged.
        assert 'address' in p.inbound_virtual
        assert 'netmask' in p.inbound_virtual
        assert 'port' in p.inbound_virtual
        assert p.inbound_virtual['address'] == '2.2.2.2'
        assert p.inbound_virtual['netmask'] == '255.255.255.255'
        assert p.inbound_virtual['port'] == 80
# Patch out the BIG-IQ management connection for every test in the class.
@patch('ansible.module_utils.f5_utils.AnsibleF5Client._get_mgmt_root',
       return_value=True)
class TestManager(unittest.TestCase):
    """Tests for ModuleManager.exec_module with device I/O mocked out."""

    def setUp(self):
        self.spec = ArgumentSpec()
        # Patch time.sleep so the module's polling loops return immediately.
        self.patcher1 = patch('time.sleep')
        self.patcher1.start()

    def tearDown(self):
        self.patcher1.stop()

    def test_create(self, *args):
        # *args receives the class-level _get_mgmt_root patch mock.
        set_module_args(dict(
            name='foo',
            description='my description',
            service_environment='bar',
            servers=[
                dict(
                    address='1.2.3.4',
                    port=8080
                ),
                dict(
                    address='5.6.7.8',
                    port=8000
                )
            ],
            inbound_virtual=dict(
                address='2.2.2.2',
                netmask='255.255.255.255',
                port=80
            ),
            provider=dict(
                server='localhost',
                password='password',
                user='admin'
            )
        ))

        module = AnsibleModule(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode
        )

        # Override methods to force specific logic in the module to happen
        mm = ModuleManager(module=module)
        mm.has_no_service_environment = Mock(return_value=False)
        mm.wait_for_apply_template_task = Mock(return_value=True)
        mm.create_on_device = Mock(return_value=True)
        # First exists() call: app absent; second: created successfully.
        mm.exists = Mock(side_effect=[False, True])

        results = mm.exec_module()

        assert results['changed'] is True
        assert results['description'] == 'my description'
Neural-Network/TicTacToe | pybrain/rl/environments/shipsteer/shipsteer.py | 31 | 4170 | __author__ = 'Martin Felder, felder@in.tum.de'
from scipy import random
from pybrain.tools.networking.udpconnection import UDPServer
import threading
from pybrain.utilities import threaded
from time import sleep
from pybrain.rl.environments.environment import Environment
class ShipSteeringEnvironment(Environment):
    """
    Simulates an ocean going ship with substantial inertia in both forward
    motion and rotation, plus noise.

    State space (continuous):
        h       heading of ship in degrees (North=0)
        hdot    angular velocity of heading in degrees/minute
        v       velocity of ship in knots
    Action space (continuous):
        rudder  angle of rudder
        thrust  propulsion of ship forward
    """

    # some (more or less) physical constants
    dt = 4.  # simulated time (in seconds) per step
    mass = 1000.  # mass of ship in unclear units
    I = 1000.  # rotational inertia of ship in unclear units

    def __init__(self, render=True, ip="127.0.0.1", port="21580", numdir=1):
        # initialize the environment (randomly)
        self.action = [0.0, 0.0]  # [thrust, rudder]
        self.delay = False
        self.numdir = numdir  # number of directions in which ship starts
        self.render = render
        if self.render:
            # UDP server used to stream state updates to external renderers.
            self.updateDone = True
            self.updateLock = threading.Lock()
            self.server = UDPServer(ip, port)
        self.reset()

    def step(self):
        """ integrate state using simple rectangle rule """
        thrust = float(self.action[0])
        rudder = float(self.action[1])
        h, hdot, v = self.sensors
        # Three independent Gaussian noise samples: drag, force, torque.
        rnd = random.normal(0, 1.0, size=3)

        # Clamp commanded actions to their allowed ranges.
        thrust = min(max(thrust, -1), +2)
        rudder = min(max(rudder, -90), +90)
        # Simplified hydrodynamics.  NOTE(review): the ``5 * h`` term makes
        # drag depend on *heading* rather than speed — confirm this is
        # intentional rather than a transcription of a different variable.
        drag = 5 * h + (rudder ** 2 + rnd[0])
        force = 30.0 * thrust - 2.0 * v - 0.02 * v * drag + rnd[1] * 3.0
        v = v + self.dt * force / self.mass
        v = min(max(v, -10), +40)

        torque = -v * (rudder + h + 1.0 * hdot + rnd[2] * 10.)
        last_hdot = hdot
        hdot += torque / self.I
        hdot = min(max(hdot, -180), 180)
        # Average of old and new angular velocity, then wrap into [-180, 180].
        h += (hdot + last_hdot) / 2.0
        if h > 180.:
            h -= 360.
        elif h < -180.:
            h += 360.
        self.sensors = (h, hdot, v)

    def closeSocket(self):
        # Close the renderer's inbound socket and give clients time to notice.
        self.server.UDPInSock.close()
        sleep(10)

    def reset(self):
        """ re-initializes the environment, setting the ship to rest at a random orientation.
        """
        # [h, hdot, v]
        self.sensors = [random.uniform(-30., 30.), 0.0, 0.0]
        if self.render:
            if self.server.clients > 0:
                # If there are clients send them reset signal
                self.server.send(["r", "r", "r"])

    def getHeading(self):
        """ auxiliary access to just the heading, to be used by GoNorthwardTask """
        return self.sensors[0]

    def getSpeed(self):
        """ auxiliary access to just the speed, to be used by GoNorthwardTask """
        return self.sensors[2]

    def getSensors(self):
        """ returns the state one step (dt) ahead in the future. stores the state in
            self.sensors because it is needed for the next calculation.
        """
        return self.sensors

    def performAction(self, action):
        """ stores the desired action for the next time step.
        """
        self.action = action
        self.step()
        if self.render:
            if self.updateDone:
                self.updateRenderer()
                # Throttle the simulation while renderers are attached.
                if self.server.clients > 0:
                    sleep(0.2)

    @threaded()
    def updateRenderer(self):
        """Push the current state to connected render clients (background thread)."""
        self.updateDone = False
        # Non-blocking acquire: skip this update if one is already in flight.
        if not self.updateLock.acquire(False): return
        # Listen for clients
        self.server.listen()
        if self.server.clients > 0:
            # If there are clients send them the new data
            self.server.send(self.sensors)
        sleep(0.02)
        self.updateLock.release()
        self.updateDone = True

    @property
    def indim(self):
        # Dimensionality of the action vector (thrust, rudder).
        return len(self.action)

    @property
    def outdim(self):
        # Dimensionality of the sensor vector (h, hdot, v).
        return len(self.sensors)
| bsd-3-clause |
thoughtpalette/thoughts.thoughtpalette.com | node_modules/grunt-docker/node_modules/docker/node_modules/pygmentize-bundled/vendor/pygments/build-3.3/pygments/styles/monokai.py | 364 | 5080 | # -*- coding: utf-8 -*-
"""
pygments.styles.monokai
~~~~~~~~~~~~~~~~~~~~~~~
Mimic the Monokai color scheme. Based on tango.py.
http://www.monokai.nl/blog/2006/07/15/textmate-color-theme/
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, Text, \
Number, Operator, Generic, Whitespace, Punctuation, Other, Literal
class MonokaiStyle(Style):
    """
    This style mimics the Monokai color scheme.
    """
    # Editor chrome colors.
    background_color = "#272822"
    highlight_color = "#49483e"

    # Token -> style-string map. An empty string means "inherit from the
    # parent token type"; entries list the pygments short class name they
    # affect in generated HTML.
    styles = {
        # No corresponding class for the following:
        Text:                      "#f8f8f2", # class:  ''
        Whitespace:                "",        # class: 'w'
        Error:                     "#960050 bg:#1e0010", # class: 'err'
        Other:                     "",        # class 'x'

        Comment:                   "#75715e", # class: 'c'
        Comment.Multiline:         "",        # class: 'cm'
        Comment.Preproc:           "",        # class: 'cp'
        Comment.Single:            "",        # class: 'c1'
        Comment.Special:           "",        # class: 'cs'

        Keyword:                   "#66d9ef", # class: 'k'
        Keyword.Constant:          "",        # class: 'kc'
        Keyword.Declaration:       "",        # class: 'kd'
        Keyword.Namespace:         "#f92672", # class: 'kn'
        Keyword.Pseudo:            "",        # class: 'kp'
        Keyword.Reserved:          "",        # class: 'kr'
        Keyword.Type:              "",        # class: 'kt'

        Operator:                  "#f92672", # class: 'o'
        Operator.Word:             "",        # class: 'ow' - like keywords

        Punctuation:               "#f8f8f2", # class: 'p'

        Name:                      "#f8f8f2", # class: 'n'
        Name.Attribute:            "#a6e22e", # class: 'na' - to be revised
        Name.Builtin:              "",        # class: 'nb'
        Name.Builtin.Pseudo:       "",        # class: 'bp'
        Name.Class:                "#a6e22e", # class: 'nc' - to be revised
        Name.Constant:             "#66d9ef", # class: 'no' - to be revised
        Name.Decorator:            "#a6e22e", # class: 'nd' - to be revised
        Name.Entity:               "",        # class: 'ni'
        Name.Exception:            "#a6e22e", # class: 'ne'
        Name.Function:             "#a6e22e", # class: 'nf'
        Name.Property:             "",        # class: 'py'
        Name.Label:                "",        # class: 'nl'
        Name.Namespace:            "",        # class: 'nn' - to be revised
        Name.Other:                "#a6e22e", # class: 'nx'
        Name.Tag:                  "#f92672", # class: 'nt' - like a keyword
        Name.Variable:             "",        # class: 'nv' - to be revised
        Name.Variable.Class:       "",        # class: 'vc' - to be revised
        Name.Variable.Global:      "",        # class: 'vg' - to be revised
        Name.Variable.Instance:    "",        # class: 'vi' - to be revised

        Number:                    "#ae81ff", # class: 'm'
        Number.Float:              "",        # class: 'mf'
        Number.Hex:                "",        # class: 'mh'
        Number.Integer:            "",        # class: 'mi'
        Number.Integer.Long:       "",        # class: 'il'
        Number.Oct:                "",        # class: 'mo'

        Literal:                   "#ae81ff", # class: 'l'
        Literal.Date:              "#e6db74", # class: 'ld'

        String:                    "#e6db74", # class: 's'
        String.Backtick:           "",        # class: 'sb'
        String.Char:               "",        # class: 'sc'
        String.Doc:                "",        # class: 'sd' - like a comment
        String.Double:             "",        # class: 's2'
        String.Escape:             "#ae81ff", # class: 'se'
        String.Heredoc:            "",        # class: 'sh'
        String.Interpol:           "",        # class: 'si'
        String.Other:              "",        # class: 'sx'
        String.Regex:              "",        # class: 'sr'
        String.Single:             "",        # class: 's1'
        String.Symbol:             "",        # class: 'ss'

        Generic:                   "",        # class: 'g'
        Generic.Deleted:           "",        # class: 'gd',
        Generic.Emph:              "italic",  # class: 'ge'
        Generic.Error:             "",        # class: 'gr'
        Generic.Heading:           "",        # class: 'gh'
        Generic.Inserted:          "",        # class: 'gi'
        Generic.Output:            "",        # class: 'go'
        Generic.Prompt:            "",        # class: 'gp'
        Generic.Strong:            "bold",    # class: 'gs'
        Generic.Subheading:        "",        # class: 'gu'
        Generic.Traceback:         "",        # class: 'gt'
    }
| mit |
comealong/skyeye-plus | android/tools/gen-hw-config.py | 25 | 4283 | #!/usr/bin/env python
#
# This software is licensed under the terms of the GNU General Public
# License version 2, as published by the Free Software Foundation, and
# may be copied, distributed, and modified under those terms.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# this script is used to generate 'android/avd/hw-config.h' by
# parsing 'android/avd/hardware-properties.ini'
#
#
import sys, os, string, re
# Default locations of the .ini source and the generated header, relative
# to this script's directory (informational; actual paths come from argv).
# location of source file, relative to current program directory
relativeSourcePath = "../avd/hardware-properties.ini"
# location of target file, relative to current program directory
relativeTargetPath = "../avd/hw-config-defs.h"
def quoteStringForC(str):
    """quote a string so it can be used in C

    Wraps the text in double quotes; any embedded double quote is emitted
    as a closing quote, an escaped quote, and a reopening quote, so the
    result is a valid concatenation of C string literals.
    """
    return '"' + str.replace('"', '"\\""') + '"'
# a dictionary that maps item types as they appear in the .ini
# file into macro names in the generated C header
#
typesToMacros = {
    'integer': 'HWCFG_INT',
    'string': 'HWCFG_STRING',
    'boolean': 'HWCFG_BOOL',
    'diskSize': 'HWCFG_DISKSIZE',
    'double': 'HWCFG_DOUBLE'
    }

# the list of macro names
macroNames = typesToMacros.values()

# target program header
targetHeader = """\
/* this file is automatically generated from 'hardware-properties.ini'
 * DO NOT EDIT IT. To re-generate it, use android/tools/gen-hw-config.py'
 */
"""

# locate source and target
programDir = os.path.dirname(sys.argv[0])

# Exactly two positional arguments are required: source .ini and target .h.
# (Python 2 script: print statements are intentional.)
if len(sys.argv) != 3:
    print "Usage: %s source target\n" % os.path.basename(sys.argv[0])
    sys.exit(1)

sourceFile = sys.argv[1]
targetFile = sys.argv[2]

# parse the source file and record items
# I would love to use Python's ConfigParser, but it doesn't
# support files without sections, or multiply defined items
#
# Accumulated Item objects, and the item currently being filled in.
items = []
lastItem = None
class Item:
    """One hardware property parsed from hardware-properties.ini.

    Collects the 'type', 'default', 'abstract' and 'description' keys
    that follow a 'name = ...' line.
    """

    # The keys add() accepts; anything else is silently ignored,
    # matching the original elif-chain behaviour.
    _KEYS = ('type', 'default', 'abstract', 'description')

    def __init__(self, name):
        self.name = name
        # BUG FIX: this used to read `self.type = type`, which stored the
        # *builtin* type object, so the downstream `item.type == None`
        # "config item with no type" check could never trigger.
        self.type = None
        self.default = None
        self.abstract = ""
        self.description = ""

    def add(self, key, val):
        """Record one `key = val` pair from the .ini file."""
        if key in self._KEYS:
            setattr(self, key, val)
# Parse the .ini file: a 'name = ...' line starts a new item; every other
# 'key = value' line is added to the item currently being built.
for line in open(sourceFile):
    line = line.strip()
    # ignore empty lines and comments
    if len(line) == 0 or line[0] in ";#":
        continue
    # FIX: split on the first '=' only, so values (e.g. descriptions)
    # may themselves contain '=' without crashing the unpack.
    key, value = line.split('=', 1)
    key = key.strip()
    value = value.strip()
    if key == 'name':
        # Flush the previous item before starting a new one.
        if lastItem: items.append(lastItem)
        lastItem = Item(value)
    else:
        lastItem.add(key, value)

# Don't forget the final item.
if lastItem:
    items.append(lastItem)
# Open the output stream; '--' means write the header to stdout.
if targetFile == '--':
    out = sys.stdout
else:
    out = open(targetFile, "wb")

out.write(targetHeader)

# write guards to prevent bad compiles
for m in macroNames:
    out.write("""\
#ifndef %(macro)s
#error %(macro)s not defined
#endif
""" % {'macro': m})
out.write("\n")

for item in items:
    # Skip unusable items, warning on stderr.
    if item.type is None:
        sys.stderr.write("ignoring config item with no type '%s'\n" % item.name)
        continue
    # FIX: `typesToMacros.has_key(...)` replaced with `in` (works on both
    # Python 2 and 3, and has_key is long deprecated).
    if item.type not in typesToMacros:
        sys.stderr.write("ignoring config item with unknown type '%s': '%s'\n" % \
            (item.type, item.name))
        continue
    if item.default is None:
        # FIX: message used to end with a stray '*/' and had no newline.
        sys.stderr.write("ignoring config item with no default '%s'\n" % item.name)
        continue

    # convert dots into underscores
    varMacro = typesToMacros[item.type]
    varNameStr = quoteStringForC(item.name)
    varName = item.name.replace(".", "_")
    varDefault = item.default
    varAbstract = quoteStringForC(item.abstract)
    varDesc = quoteStringForC(item.description)

    if item.type in ['string', 'boolean', 'diskSize']:
        # quote default value for strings
        varDefault = quoteStringForC(varDefault)

    out.write("%s(\n %s,\n %s,\n %s,\n %s,\n %s)\n\n" % \
        (varMacro, varName, varNameStr, varDefault, varAbstract, varDesc))

# Undefine the guard macros so the header can be included repeatedly.
for m in macroNames:
    out.write("#undef %s\n" % m)

out.write("/* end of auto-generated file */\n")
out.close()
| gpl-2.0 |
maohongyuan/kbengine | kbe/src/lib/python/Lib/encodings/rot_13.py | 155 | 2428 | #!/usr/bin/env python
""" Python Character Mapping Codec for ROT13.
This codec de/encodes from str to str.
Written by Marc-Andre Lemburg (mal@lemburg.com).
"""
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Stateless ROT13 codec. Encoding and decoding are the same
    str-to-str translation, since ROT13 is its own inverse."""

    def encode(self, input, errors='strict'):
        transformed = input.translate(rot13_map)
        return (transformed, len(input))

    def decode(self, input, errors='strict'):
        transformed = input.translate(rot13_map)
        return (transformed, len(input))
class IncrementalEncoder(codecs.IncrementalEncoder):
    """Incremental encoder; ROT13 is stateless, so every chunk is
    translated independently and *final* is irrelevant."""

    def encode(self, input, final=False):
        return input.translate(rot13_map)
class IncrementalDecoder(codecs.IncrementalDecoder):
    """Incremental decoder; identical to encoding because ROT13 is an
    involution."""

    def decode(self, input, final=False):
        return input.translate(rot13_map)
class StreamWriter(Codec,codecs.StreamWriter):
    # All behaviour comes from Codec.encode; no extra state is needed.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # All behaviour comes from Codec.decode; no extra state is needed.
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo used to register this codec as 'rot-13'."""
    return codecs.CodecInfo(
        name='rot-13',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamwriter=StreamWriter,
        streamreader=StreamReader,
        # str-to-str transform, not a text<->bytes encoding, so plain
        # str.encode()/bytes.decode() must not pick it up.
        _is_text_encoding=False,
    )
### Map
### Map
# Identity over all 256 code points 0..255, with the 52 ASCII letters
# rotated 13 places within their own case. Generated programmatically
# instead of spelling out the 52 literal entries.
rot13_map = codecs.make_identity_dict(range(256))
for _base in (ord('A'), ord('a')):
    for _offset in range(26):
        rot13_map[_base + _offset] = _base + (_offset + 13) % 26
del _base, _offset
### Filter API
def rot13(infile, outfile):
    """Read all of *infile*, ROT13-transform it, and write the result
    to *outfile* (both are text streams)."""
    text = infile.read()
    outfile.write(codecs.encode(text, 'rot-13'))
if __name__ == '__main__':
    # Simple CLI filter: ROT13 stdin to stdout.
    import sys
    rot13(sys.stdin, sys.stdout)
| lgpl-3.0 |
yakky/django | django/forms/forms.py | 141 | 19457 | """
Form classes
"""
from __future__ import unicode_literals
import copy
from collections import OrderedDict
from django.core.exceptions import NON_FIELD_ERRORS, ValidationError
# BoundField is imported for backwards compatibility in Django 1.9
from django.forms.boundfield import BoundField # NOQA
from django.forms.fields import Field, FileField
# pretty_name is imported for backwards compatibility in Django 1.9
from django.forms.utils import ErrorDict, ErrorList, pretty_name # NOQA
from django.forms.widgets import Media, MediaDefiningClass
from django.utils import six
from django.utils.encoding import force_text, python_2_unicode_compatible
from django.utils.functional import cached_property
from django.utils.html import conditional_escape, html_safe
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
__all__ = ('BaseForm', 'Form')
class DeclarativeFieldsMetaclass(MediaDefiningClass):
    """
    Metaclass that collects Fields declared on the base classes.
    """
    def __new__(mcs, name, bases, attrs):
        # Collect fields from current class.
        current_fields = []
        for key, value in list(attrs.items()):
            if isinstance(value, Field):
                current_fields.append((key, value))
                # Remove the Field from the class namespace; it lives in
                # declared_fields instead.
                attrs.pop(key)
        # creation_counter preserves the order fields were written in the
        # class body.
        current_fields.sort(key=lambda x: x[1].creation_counter)
        attrs['declared_fields'] = OrderedDict(current_fields)

        new_class = (super(DeclarativeFieldsMetaclass, mcs)
            .__new__(mcs, name, bases, attrs))

        # Walk through the MRO.
        declared_fields = OrderedDict()
        for base in reversed(new_class.__mro__):
            # Collect fields from base class.
            if hasattr(base, 'declared_fields'):
                declared_fields.update(base.declared_fields)

            # Field shadowing: assigning None to a field name in a
            # subclass removes the inherited field.
            for attr, value in base.__dict__.items():
                if value is None and attr in declared_fields:
                    declared_fields.pop(attr)

        new_class.base_fields = declared_fields
        new_class.declared_fields = declared_fields

        return new_class
@html_safe
@python_2_unicode_compatible
class BaseForm(object):
# This is the main implementation of all the Form logic. Note that this
# class is different than Form. See the comments by the Form class for more
# information. Any improvements to the form API should be made to *this*
# class, not to the Form class.
# Class-level defaults; subclasses may override to control field
# ordering and HTML name prefixing.
field_order = None
prefix = None

def __init__(self, data=None, files=None, auto_id='id_%s', prefix=None,
             initial=None, error_class=ErrorList, label_suffix=None,
             empty_permitted=False, field_order=None):
    """
    Bind the form to *data*/*files* (if given) and build per-instance
    state. The form is "bound" when either data or files is not None.
    """
    self.is_bound = data is not None or files is not None
    self.data = data or {}
    self.files = files or {}
    self.auto_id = auto_id
    if prefix is not None:
        self.prefix = prefix
    self.initial = initial or {}
    self.error_class = error_class
    # Translators: This is the default suffix added to form field labels
    self.label_suffix = label_suffix if label_suffix is not None else _(':')
    self.empty_permitted = empty_permitted
    self._errors = None  # Stores the errors after clean() has been called.

    # The base_fields class attribute is the *class-wide* definition of
    # fields. Because a particular *instance* of the class might want to
    # alter self.fields, we create self.fields here by copying base_fields.
    # Instances should always modify self.fields; they should not modify
    # self.base_fields.
    self.fields = copy.deepcopy(self.base_fields)
    self._bound_fields_cache = {}
    self.order_fields(self.field_order if field_order is None else field_order)
def order_fields(self, field_order):
    """
    Rearrange self.fields to follow *field_order*.

    *field_order* is a list of field names giving the desired order.
    Names absent from the list keep their relative order and are
    appended after the listed ones (backward compatibility for
    subclasses that don't override field_order). Names that don't
    exist on the form are silently skipped, so subclasses can disable
    fields without redefining the ordering. None leaves the class
    order untouched.
    """
    if field_order is None:
        return
    reordered = OrderedDict()
    for name in field_order:
        try:
            reordered[name] = self.fields.pop(name)
        except KeyError:  # ignore unknown fields
            pass
    reordered.update(self.fields)  # add remaining fields in original order
    self.fields = reordered
def __str__(self):
    # Default HTML rendering of a form is the table layout.
    return self.as_table()
def __repr__(self):
    """Debug representation: bound/valid state plus the field names."""
    # Validation hasn't run until _errors is populated, so validity is
    # genuinely unknown before then.
    validity = (
        "Unknown" if self._errors is None
        else self.is_bound and not bool(self._errors)
    )
    return '<%(cls)s bound=%(bound)s, valid=%(valid)s, fields=(%(fields)s)>' % {
        'cls': self.__class__.__name__,
        'bound': self.is_bound,
        'valid': validity,
        'fields': ';'.join(self.fields),
    }
def __iter__(self):
    """Iterate over the form's BoundFields, in field order."""
    return iter(self[name] for name in self.fields)
def __getitem__(self, name):
    """Return the BoundField for *name*, creating and caching it on
    first access."""
    try:
        field = self.fields[name]
    except KeyError:
        raise KeyError(
            "Key %r not found in '%s'" % (name, self.__class__.__name__))
    cache = self._bound_fields_cache
    if name in cache:
        return cache[name]
    bound = field.get_bound_field(self, name)
    cache[name] = bound
    return bound
@property
def errors(self):
    "Returns an ErrorDict for the data provided for the form"
    # Lazily triggers full validation the first time it is accessed.
    if self._errors is None:
        self.full_clean()
    return self._errors
def is_valid(self):
    """
    True when the form is bound and validation produced no errors.
    Unbound forms are never valid.
    """
    if not self.is_bound:
        return False
    return not self.errors
def add_prefix(self, field_name):
    """
    Return *field_name* namespaced with this form's prefix, if one is
    set; otherwise the name unchanged.

    Subclasses may wish to override.
    """
    if self.prefix:
        return '%s-%s' % (self.prefix, field_name)
    return field_name
def add_initial_prefix(self, field_name):
    """Return the hidden-input name used to carry *field_name*'s initial
    value (for detecting dynamic initial values that changed)."""
    return 'initial-%s' % self.add_prefix(field_name)
def _html_output(self, normal_row, error_row, row_ender, help_text_html, errors_on_separate_row):
    """
    Helper function for outputting HTML. Used by as_table(), as_ul(), as_p().

    The *_row/*_html arguments are %-format templates supplied by the
    caller. Hidden fields are collected separately and spliced into the
    last visible row so they still sit inside valid markup.
    """
    top_errors = self.non_field_errors()  # Errors that should be displayed above all fields.
    output, hidden_fields = [], []

    for name, field in self.fields.items():
        html_class_attr = ''
        bf = self[name]
        # Escape and cache in local variable.
        bf_errors = self.error_class([conditional_escape(error) for error in bf.errors])
        if bf.is_hidden:
            # Hidden fields can't show errors inline; promote them to
            # the top-of-form error list with the field name attached.
            if bf_errors:
                top_errors.extend(
                    [_('(Hidden field %(name)s) %(error)s') % {'name': name, 'error': force_text(e)}
                     for e in bf_errors])
            hidden_fields.append(six.text_type(bf))
        else:
            # Create a 'class="..."' attribute if the row should have any
            # CSS classes applied.
            css_classes = bf.css_classes()
            if css_classes:
                html_class_attr = ' class="%s"' % css_classes

            if errors_on_separate_row and bf_errors:
                output.append(error_row % force_text(bf_errors))

            if bf.label:
                label = conditional_escape(force_text(bf.label))
                label = bf.label_tag(label) or ''
            else:
                label = ''

            if field.help_text:
                help_text = help_text_html % force_text(field.help_text)
            else:
                help_text = ''

            output.append(normal_row % {
                'errors': force_text(bf_errors),
                'label': force_text(label),
                'field': six.text_type(bf),
                'help_text': help_text,
                'html_class_attr': html_class_attr,
                'css_classes': css_classes,
                'field_name': bf.html_name,
            })

    if top_errors:
        output.insert(0, error_row % force_text(top_errors))

    if hidden_fields:  # Insert any hidden fields in the last row.
        str_hidden = ''.join(hidden_fields)
        if output:
            last_row = output[-1]
            # Chop off the trailing row_ender (e.g. '</td></tr>') and
            # insert the hidden fields.
            if not last_row.endswith(row_ender):
                # This can happen in the as_p() case (and possibly others
                # that users write): if there are only top errors, we may
                # not be able to conscript the last row for our purposes,
                # so insert a new, empty row.
                last_row = (normal_row % {
                    'errors': '',
                    'label': '',
                    'field': '',
                    'help_text': '',
                    'html_class_attr': html_class_attr,
                    'css_classes': '',
                    'field_name': '',
                })
                output.append(last_row)
            output[-1] = last_row[:-len(row_ender)] + str_hidden + row_ender
        else:
            # If there aren't any rows in the output, just append the
            # hidden fields.
            output.append(str_hidden)
    return mark_safe('\n'.join(output))
def as_table(self):
    "Returns this form rendered as HTML <tr>s -- excluding the <table></table>."
    # Errors render inline inside the field's <td>.
    return self._html_output(
        normal_row='<tr%(html_class_attr)s><th>%(label)s</th><td>%(errors)s%(field)s%(help_text)s</td></tr>',
        error_row='<tr><td colspan="2">%s</td></tr>',
        row_ender='</td></tr>',
        help_text_html='<br /><span class="helptext">%s</span>',
        errors_on_separate_row=False)
def as_ul(self):
    "Returns this form rendered as HTML <li>s -- excluding the <ul></ul>."
    # Errors render inline inside the field's <li>.
    return self._html_output(
        normal_row='<li%(html_class_attr)s>%(errors)s%(label)s %(field)s%(help_text)s</li>',
        error_row='<li>%s</li>',
        row_ender='</li>',
        help_text_html=' <span class="helptext">%s</span>',
        errors_on_separate_row=False)
def as_p(self):
    "Returns this form rendered as HTML <p>s."
    # Unlike as_table/as_ul, errors get their own row above each field.
    return self._html_output(
        normal_row='<p%(html_class_attr)s>%(label)s %(field)s%(help_text)s</p>',
        error_row='%s',
        row_ender='</p>',
        help_text_html=' <span class="helptext">%s</span>',
        errors_on_separate_row=True)
def non_field_errors(self):
    """
    Returns an ErrorList of errors that aren't associated with a particular
    field -- i.e., from Form.clean(). Returns an empty ErrorList if there
    are none.
    """
    # 'nonfield' becomes the CSS class on the rendered error list.
    return self.errors.get(NON_FIELD_ERRORS, self.error_class(error_class='nonfield'))
def add_error(self, field, error):
    """
    Update the content of `self._errors`.

    The `field` argument is the name of the field to which the errors
    should be added. If its value is None the errors will be treated as
    NON_FIELD_ERRORS.

    The `error` argument can be a single error, a list of errors, or a
    dictionary that maps field names to lists of errors. What we define as
    an "error" can be either a simple string or an instance of
    ValidationError with its message attribute set and what we define as
    list or dictionary can be an actual `list` or `dict` or an instance
    of ValidationError with its `error_list` or `error_dict` attribute set.

    If `error` is a dictionary, the `field` argument *must* be None and
    errors will be added to the fields that correspond to the keys of the
    dictionary.
    """
    if not isinstance(error, ValidationError):
        # Normalize to ValidationError and let its constructor
        # do the hard work of making sense of the input.
        error = ValidationError(error)

    # Reduce every accepted shape to a {field_name: error_list} dict.
    if hasattr(error, 'error_dict'):
        if field is not None:
            raise TypeError(
                "The argument `field` must be `None` when the `error` "
                "argument contains errors for multiple fields."
            )
        else:
            error = error.error_dict
    else:
        error = {field or NON_FIELD_ERRORS: error.error_list}

    for field, error_list in error.items():
        if field not in self.errors:
            if field != NON_FIELD_ERRORS and field not in self.fields:
                raise ValueError(
                    "'%s' has no field named '%s'." % (self.__class__.__name__, field))
            if field == NON_FIELD_ERRORS:
                self._errors[field] = self.error_class(error_class='nonfield')
            else:
                self._errors[field] = self.error_class()
        self._errors[field].extend(error_list)
        # A value that failed validation must not survive in cleaned_data.
        if field in self.cleaned_data:
            del self.cleaned_data[field]
def has_error(self, field, code=None):
    """
    True if *field* has any error; when *code* is given, only errors
    carrying that error code count.
    """
    if field not in self.errors:
        return False
    if code is None:
        return True
    return any(error.code == code for error in self.errors.as_data()[field])
def full_clean(self):
    """
    Cleans all of self.data and populates self._errors and
    self.cleaned_data.
    """
    self._errors = ErrorDict()
    if not self.is_bound:  # Stop further processing.
        return
    self.cleaned_data = {}
    # If the form is permitted to be empty, and none of the form data has
    # changed from the initial data, short circuit any validation.
    if self.empty_permitted and not self.has_changed():
        return

    # Three phases: per-field cleaning, form-wide clean(), then the
    # _post_clean() hook (used by ModelForm for model validation).
    self._clean_fields()
    self._clean_form()
    self._post_clean()
def _clean_fields(self):
    # Run each field's own cleaning, then any clean_<name>() hook the
    # form defines, collecting ValidationErrors instead of raising.
    for name, field in self.fields.items():
        # value_from_datadict() gets the data from the data dictionaries.
        # Each widget type knows how to retrieve its own data, because some
        # widgets split data over several HTML fields.
        if field.disabled:
            # Disabled fields ignore submitted data entirely.
            value = self.initial.get(name, field.initial)
        else:
            value = field.widget.value_from_datadict(self.data, self.files, self.add_prefix(name))
        try:
            if isinstance(field, FileField):
                # FileFields need the initial value to support
                # "keep the current file" semantics.
                initial = self.initial.get(name, field.initial)
                value = field.clean(value, initial)
            else:
                value = field.clean(value)
            self.cleaned_data[name] = value
            if hasattr(self, 'clean_%s' % name):
                value = getattr(self, 'clean_%s' % name)()
                self.cleaned_data[name] = value
        except ValidationError as e:
            self.add_error(name, e)
def _clean_form(self):
    """Run the form-wide clean() hook and fold its result (or its
    ValidationError) back into the form's state."""
    try:
        cleaned = self.clean()
    except ValidationError as exc:
        self.add_error(None, exc)
        return
    if cleaned is not None:
        self.cleaned_data = cleaned
def _post_clean(self):
    """
    An internal hook for performing additional cleaning after form cleaning
    is complete. Used for model validation in model forms.
    """
    # Deliberately a no-op here; ModelForm overrides it.
    pass
def clean(self):
    """
    Hook for doing any extra form-wide cleaning after Field.clean() has been
    called on every field. Any ValidationError raised by this method will
    not be associated with a particular field; it will have a special-case
    association with the field named '__all__'.
    """
    # Default implementation performs no cross-field validation.
    return self.cleaned_data
def has_changed(self):
    """True when the submitted data differs from the initial data."""
    return len(self.changed_data) > 0
@cached_property
def changed_data(self):
    # Names of fields whose submitted value differs from their initial
    # value. Computed once per instance (cached_property).
    data = []
    for name, field in self.fields.items():
        prefixed_name = self.add_prefix(name)
        data_value = field.widget.value_from_datadict(self.data, self.files, prefixed_name)
        if not field.show_hidden_initial:
            initial_value = self.initial.get(name, field.initial)
            if callable(initial_value):
                initial_value = initial_value()
        else:
            # The initial value was rendered into a hidden widget, so
            # read it back out of the submitted data.
            initial_prefixed_name = self.add_initial_prefix(name)
            hidden_widget = field.hidden_widget()
            try:
                initial_value = field.to_python(hidden_widget.value_from_datadict(
                    self.data, self.files, initial_prefixed_name))
            except ValidationError:
                # Always assume data has changed if validation fails.
                data.append(name)
                continue
        if field.has_changed(initial_value, data_value):
            data.append(name)
    return data
@property
def media(self):
    """
    Provide a description of all media required to render the widgets on this form
    """
    combined = Media()
    for field in self.fields.values():
        combined = combined + field.widget.media
    return combined
def is_multipart(self):
    """
    Returns True if the form needs to be multipart-encoded, i.e. it has
    FileInput. Otherwise, False.
    """
    return any(
        field.widget.needs_multipart_form
        for field in self.fields.values()
    )
def hidden_fields(self):
    """
    Returns a list of all the BoundField objects that are hidden fields.
    Useful for manual form layout in templates.
    """
    return [bound for bound in iter(self) if bound.is_hidden]
def visible_fields(self):
    """
    Returns a list of BoundField objects that aren't hidden fields.
    The opposite of the hidden_fields() method.
    """
    return [bound for bound in iter(self) if not bound.is_hidden]
class Form(six.with_metaclass(DeclarativeFieldsMetaclass, BaseForm)):
    "A collection of Fields, plus their associated data."
    # This is a separate class from BaseForm in order to abstract the way
    # self.fields is specified. This class (Form) is the one that does the
    # fancy metaclass stuff purely for the semantic sugar -- it allows one
    # to define a form using declarative syntax.
    # BaseForm itself has no way of designating self.fields.
    # No body needed: DeclarativeFieldsMetaclass populates
    # base_fields/declared_fields from the subclass's declarations.
| bsd-3-clause |
py-chemist/web_apps | mol2chemfig/options.py | 3 | 9150 | '''
option declarations. The options will be used to update the
settings dict in module common.
'''
from optionparser import *
def getParser():
    '''
    make sure the parser is created anew on each request

    Returns an OptionParser populated with every mol2chemfig option;
    these options later update the settings dict in module common.
    NOTE(review): leading whitespace inside the multi-line help_text
    strings was lost upstream and is reconstructed here -- presumably
    harmless since the help formatter re-wraps text; confirm.
    '''
    parser = OptionParser()

    # --- program behaviour -------------------------------------------
    parser.append(BoolOption(
        "help",
        "h",
        default=False,
        help_text="Print help message and exit"))

    parser.append(BoolOption(
        "version",
        "b",
        default=False,
        help_text="Print program version and exit"))

    # --- input handling ----------------------------------------------
    parser.append(SelectOption(
        "input",
        "i",
        key="input",
        default="file",
        valid_range="direct file pubchem".split(),
        help_text="""How to interpret the argument. With 'file', mol2chemfig
expects a filename. With 'direct', the argument is
intrepreted directly; don't forget to put quotes around
it. With 'pubchem', the argument is treated as an
identifier for the PubChem database."""))

    parser.append(BoolOption(
        "terse",
        "z",
        default=False,
        help_text="""Remove all whitespace and comments from the output.
If you can still read it afterwards, Bill Gates
wants your resume"""))

    parser.append(BoolOption(
        "strict",
        "r",
        default=True,
        help_text="""Abide by Indigo's chemical structure validation.
If true, mol2chemfig will fail if Indigo reports
that something is wrong with the molecule, like
a carbon with five bonds. If false, mol2chemfig
will ignore such errors"""))

    parser.append(IntOption(
        "indent",
        "d",
        default=4,
        help_text="""Number of spaces to use for indenting molecule
branches in generated code. Without effect when
'terse' option is passed. Affects only the generated tex code, not the rendered molecule"""))

    parser.append(BoolOption(
        "recalculate-coordinates",
        "u",
        key="recalculate_coordinates",
        help_text="""Discard existing coordinate and calculate new
ones from covalent structure. For smiles input,
this is performed implicitly"""))

    # --- molecule geometry -------------------------------------------
    parser.append(FloatOption(
        "angle",
        "a",
        key="rotate",
        default=0.0,
        help_text="Rotate molecule counterclockwise by this angle"))

    parser.append(BoolOption(
        "relative-angles",
        "v",
        key="relative_angles",
        default=False,
        help_text="Use relative bond angles"))

    parser.append(BoolOption(
        "flip",
        "p",
        key="flip_horizontal",
        default=False,
        help_text="Flip the structure horizontally"))

    parser.append(BoolOption(
        "flop",
        "q",
        key="flip_vertical",
        default=False,
        help_text="Flip the structure vertically"))

    # --- atom and bond display ---------------------------------------
    parser.append(BoolOption(
        "show-carbons",
        "c",
        key="show_carbons",
        help_text="Show element symbol for carbon atoms"))

    parser.append(BoolOption(
        "show-methyls",
        "m",
        key="show_methyls",
        help_text='''Show element symbols for methyl groups
(implied if show-carbons is True)'''))

    parser.append(SelectOption(
        "hydrogens",
        "y",
        key="hydrogens",
        # default="keep",
        valid_range="keep add delete".split(),
        help_text="""How to deal with explicit hydrogen atoms.
One of 'keep', 'add' or 'delete'. Note that
'add' will also trigger calculation of new
coordinates for the entire molecule.
Option 'keep' does nothing"""))

    parser.append(BoolOption(
        "aromatic-circles",
        "o",
        key="aromatic_circles",
        default=False,
        help_text="Draw circles instead of double bonds inside aromatic rings"))

    parser.append(BoolOption(
        "fancy-bonds",
        "f",
        key="fancy_bonds",
        default=False,
        help_text="Draw fancier double and triple bonds"))

    parser.append(StringOption(
        "markers",
        "g",
        help_text="""Give each atom and each bond a unique
marker that can be used for attaching
electron movement arrows.
With value 'a', atom 2 will be labeled
@{a2}, and its bond to atom 5 @{a2-5}."""))

    parser.append(BoolOption(
        "atom-numbers",
        "n",
        key="atom_numbers",
        default=False,
        help_text="""Show the molfile number of each atom next to it.
When this option is set, charges and implicit
hydrogens will not be shown"""))

    parser.append(SelectOption(
        "bond-scale",
        "s",
        key="bond_scale",
        # default="normalize",
        valid_range="normalize keep scale".split(),
        help_text="""How to scale the lengths of bonds
(one of 'keep', 'scale', or 'normalize')"""))

    parser.append(FloatOption(
        "bond-stretch",
        "t",
        key="bond_stretch",
        default=1.0,
        help_text="""Used as scaling factor (with --bond-scale=scale)
or average (with --bond-scale=normalize) for bond
lengths"""))

    # --- output / submolecule wrapping -------------------------------
    parser.append(BoolOption(
        "wrap-chemfig",
        "w",
        key="chemfig_command",
        help_text=r"Wrap generated code into \chemfig{...} command"))

    parser.append(StringOption(
        "submol-name",
        "l",
        key="submol_name",
        help_text=r"""If a name is given, wrap generated code into
chemfig \definesubmol{name}{...} command"""))

    parser.append(IntOption(
        "entry-atom",
        "e",
        key="entry_atom",
        default=None,
        help_text="""Number of first atom to be rendered. Relevant only
if generated code is to be used as sub-molecule"""))

    parser.append(IntOption(
        "exit-atom",
        "x",
        key="exit_atom",
        default=None,
        help_text="""Number of last atom to be rendered. Relevant only
if generated code is to be used as sub-molecule"""))

    parser.append(RangeOption(
        "cross-bond",
        "k",
        key="cross_bond",
        default=None,
        help_text="""Specify bonds that should be drawn on top of others
they cross over. Give the start and the end atoms.
Example for one bond: --cross-bond=5-6
Example for two bonds: --crossbond=4-8,12-13"""))

    return parser
if __name__ == '__main__':  # test code
    # Ad-hoc smoke test (Python 2): render the help text, show the
    # getopt spec, and report which short-option letters remain free.
    parser = getParser()
    print parser.format_help(indent=32,linewidth=80,separator='')
    print
    shorts, longs = parser.format_for_getopt()
    print longs
    print shorts
    # list unused option letters
    from string import ascii_lowercase as letters
    print "unused short options:", ','.join(set(letters) - set(shorts))
    #print
    #tags = parser.form_tags()
    #print tags
    #for tag in tags:
    #print tag
    #print
michalliu/OpenWrt-Firefly-Libraries | staging_dir/target-mipsel_1004kc+dsp_uClibc-0.9.33.2/usr/lib/python3.4/test/test_asyncio/test_events.py | 17 | 87937 | """Tests for events.py."""
import functools
import gc
import io
import os
import platform
import re
import signal
import socket
try:
import ssl
except ImportError:
ssl = None
import subprocess
import sys
import threading
import time
import errno
import unittest
from unittest import mock
import weakref
import asyncio
from asyncio import proactor_events
from asyncio import selector_events
from asyncio import sslproto
from asyncio import test_utils
try:
from test import support
except ImportError:
from asyncio import test_support as support
def data_file(filename):
    """Locate a test-data file, preferring the stdlib test home directory
    when the `support` module exposes one; raise FileNotFoundError if the
    file exists in neither location."""
    candidates = []
    if hasattr(support, 'TEST_HOME_DIR'):
        candidates.append(os.path.join(support.TEST_HOME_DIR, filename))
    candidates.append(os.path.join(os.path.dirname(__file__), filename))
    for fullname in candidates:
        if os.path.isfile(fullname):
            return fullname
    raise FileNotFoundError(filename)
def osx_tiger():
    """Return True if the platform is Mac OS 10.4 or older."""
    if sys.platform != 'darwin':
        return False
    release = tuple(int(part) for part in platform.mac_ver()[0].split('.'))
    return release < (10, 5)
# Certificate/key fixtures used by the SSL tests; resolved at import
# time, so a missing fixture fails fast with FileNotFoundError.
ONLYCERT = data_file('ssl_cert.pem')
ONLYKEY = data_file('ssl_key.pem')
SIGNED_CERTFILE = data_file('keycert3.pem')
SIGNING_CA = data_file('pycacert.pem')
class MyBaseProto(asyncio.Protocol):
    """Test protocol that records its lifecycle in .state
    (INITIAL -> CONNECTED [-> EOF] -> CLOSED), counts received bytes in
    .nbytes, and -- when a loop is supplied -- resolves the .connected
    and .done futures at the matching transitions."""

    connected = None
    done = None

    def __init__(self, loop=None):
        self.transport = None
        self.state = 'INITIAL'
        self.nbytes = 0
        if loop is not None:
            self.connected = asyncio.Future(loop=loop)
            self.done = asyncio.Future(loop=loop)

    def connection_made(self, transport):
        self.transport = transport
        # Each callback asserts the transition it expects.
        assert self.state == 'INITIAL', self.state
        self.state = 'CONNECTED'
        if self.connected:
            self.connected.set_result(None)

    def data_received(self, data):
        assert self.state == 'CONNECTED', self.state
        self.nbytes += len(data)

    def eof_received(self):
        assert self.state == 'CONNECTED', self.state
        self.state = 'EOF'

    def connection_lost(self, exc):
        assert self.state in ('CONNECTED', 'EOF'), self.state
        self.state = 'CLOSED'
        if self.done:
            self.done.set_result(None)
class MyProto(MyBaseProto):
    """MyBaseProto that fires a canned HTTP/1.0 request as soon as the
    connection is established."""

    def connection_made(self, transport):
        super().connection_made(transport)
        request = b'GET / HTTP/1.0\r\nHost: example.com\r\n\r\n'
        transport.write(request)
class MyDatagramProto(asyncio.DatagramProtocol):
    """Datagram test protocol: tracks lifecycle in .state
    (INITIAL -> INITIALIZED -> CLOSED), counts payload bytes in .nbytes,
    and resolves .done on connection_lost when a loop was supplied."""

    done = None

    def __init__(self, loop=None):
        self.state = 'INITIAL'
        self.nbytes = 0
        if loop is not None:
            self.done = asyncio.Future(loop=loop)

    def connection_made(self, transport):
        self.transport = transport
        assert self.state == 'INITIAL', self.state
        self.state = 'INITIALIZED'

    def datagram_received(self, data, addr):
        assert self.state == 'INITIALIZED', self.state
        self.nbytes += len(data)

    def error_received(self, exc):
        # Errors are tolerated; only the state invariant is checked.
        assert self.state == 'INITIALIZED', self.state

    def connection_lost(self, exc):
        assert self.state == 'INITIALIZED', self.state
        self.state = 'CLOSED'
        if self.done:
            self.done.set_result(None)
class MyReadPipeProto(asyncio.Protocol):
    """Protocol for the read-pipe tests; ``state`` is a list of phases.

    The state history ends up as ['INITIAL', 'CONNECTED', 'EOF', 'CLOSED'];
    a missed EOF is tolerated and back-filled in connection_lost().
    """

    done = None

    def __init__(self, loop=None):
        self.state = ['INITIAL']
        self.nbytes = 0
        self.transport = None
        if loop is None:
            return
        self.done = asyncio.Future(loop=loop)

    def connection_made(self, transport):
        self.transport = transport
        assert self.state == ['INITIAL'], self.state
        self.state.append('CONNECTED')

    def data_received(self, data):
        assert self.state == ['INITIAL', 'CONNECTED'], self.state
        self.nbytes += len(data)

    def eof_received(self):
        assert self.state == ['INITIAL', 'CONNECTED'], self.state
        self.state.append('EOF')

    def connection_lost(self, exc):
        if 'EOF' not in self.state:
            self.state.append('EOF')  # It is okay if EOF is missed.
        assert self.state == ['INITIAL', 'CONNECTED', 'EOF'], self.state
        self.state.append('CLOSED')
        if self.done is not None:
            self.done.set_result(None)
class MyWritePipeProto(asyncio.BaseProtocol):
    """Minimal protocol for the write-pipe tests: connect then close."""

    done = None

    def __init__(self, loop=None):
        self.state = 'INITIAL'
        self.transport = None
        if loop is None:
            return
        self.done = asyncio.Future(loop=loop)

    def connection_made(self, transport):
        self.transport = transport
        assert self.state == 'INITIAL', self.state
        self.state = 'CONNECTED'

    def connection_lost(self, exc):
        assert self.state == 'CONNECTED', self.state
        self.state = 'CLOSED'
        if self.done is not None:
            self.done.set_result(None)
class MySubprocessProtocol(asyncio.SubprocessProtocol):
    """Subprocess protocol collecting pipe output and exit information.

    ``connected``/``completed`` resolve on connect/disconnect; per-fd
    disconnect futures and got_data events track stdout (1) / stderr (2).
    """

    def __init__(self, loop):
        self.state = 'INITIAL'
        self.transport = None
        self.connected = asyncio.Future(loop=loop)
        self.completed = asyncio.Future(loop=loop)
        self.disconnects = {fd: asyncio.Future(loop=loop) for fd in range(3)}
        self.data = {1: b'', 2: b''}
        self.returncode = None
        self.got_data = {fd: asyncio.Event(loop=loop) for fd in (1, 2)}

    def connection_made(self, transport):
        self.transport = transport
        assert self.state == 'INITIAL', self.state
        self.state = 'CONNECTED'
        self.connected.set_result(None)

    def connection_lost(self, exc):
        assert self.state == 'CONNECTED', self.state
        self.state = 'CLOSED'
        self.completed.set_result(None)

    def pipe_data_received(self, fd, data):
        assert self.state == 'CONNECTED', self.state
        self.data[fd] += data
        self.got_data[fd].set()

    def pipe_connection_lost(self, fd, exc):
        assert self.state == 'CONNECTED', self.state
        future = self.disconnects[fd]
        if exc:
            future.set_exception(exc)
        else:
            future.set_result(exc)

    def process_exited(self):
        assert self.state == 'CONNECTED', self.state
        self.returncode = self.transport.get_returncode()
class EventLoopTestsMixin:
    def setUp(self):
        # Create the loop via the subclass hook and install it as current.
        super().setUp()
        self.loop = self.create_event_loop()
        self.set_event_loop(self.loop)

    def tearDown(self):
        # just in case if we have transport close callbacks
        if not self.loop.is_closed():
            test_utils.run_briefly(self.loop)

        self.loop.close()
        gc.collect()
        super().tearDown()
    def test_run_until_complete_nesting(self):
        """run_until_complete() must refuse to be re-entered while running."""
        @asyncio.coroutine
        def coro1():
            yield

        @asyncio.coroutine
        def coro2():
            self.assertTrue(self.loop.is_running())
            self.loop.run_until_complete(coro1())

        self.assertRaises(
            RuntimeError, self.loop.run_until_complete, coro2())

    # Note: because of the default Windows timing granularity of
    # 15.6 msec, we use fairly long sleep times here (~100 msec).

    def test_run_until_complete(self):
        """sleep(0.1) driven by run_until_complete() takes roughly 0.1 s."""
        t0 = self.loop.time()
        self.loop.run_until_complete(asyncio.sleep(0.1, loop=self.loop))
        t1 = self.loop.time()
        self.assertTrue(0.08 <= t1-t0 <= 0.8, t1-t0)

    def test_run_until_complete_stopped(self):
        """Stopping the loop mid-task makes run_until_complete() raise."""
        @asyncio.coroutine
        def cb():
            self.loop.stop()
            yield from asyncio.sleep(0.1, loop=self.loop)
        task = cb()
        self.assertRaises(RuntimeError,
                          self.loop.run_until_complete, task)
    def test_call_later(self):
        """call_later() fires once after roughly the requested delay."""
        results = []

        def callback(arg):
            results.append(arg)
            self.loop.stop()

        self.loop.call_later(0.1, callback, 'hello world')
        t0 = time.monotonic()
        self.loop.run_forever()
        t1 = time.monotonic()
        self.assertEqual(results, ['hello world'])
        self.assertTrue(0.08 <= t1-t0 <= 0.8, t1-t0)

    def test_call_soon(self):
        """call_soon() runs the callback with its positional arguments."""
        results = []

        def callback(arg1, arg2):
            results.append((arg1, arg2))
            self.loop.stop()

        self.loop.call_soon(callback, 'hello', 'world')
        self.loop.run_forever()
        self.assertEqual(results, [('hello', 'world')])

    def test_call_soon_threadsafe(self):
        """call_soon_threadsafe() wakes the loop from another thread."""
        results = []
        # The lock delays the helper thread until the loop-side call_soon()
        # has been scheduled, fixing the callback ordering.
        lock = threading.Lock()

        def callback(arg):
            results.append(arg)
            if len(results) >= 2:
                self.loop.stop()

        def run_in_thread():
            self.loop.call_soon_threadsafe(callback, 'hello')
            lock.release()

        lock.acquire()
        t = threading.Thread(target=run_in_thread)
        t.start()

        with lock:
            self.loop.call_soon(callback, 'world')
            self.loop.run_forever()
        t.join()
        self.assertEqual(results, ['hello', 'world'])

    def test_call_soon_threadsafe_same_thread(self):
        """call_soon_threadsafe() also works from the loop's own thread."""
        results = []

        def callback(arg):
            results.append(arg)
            if len(results) >= 2:
                self.loop.stop()

        self.loop.call_soon_threadsafe(callback, 'hello')
        self.loop.call_soon(callback, 'world')
        self.loop.run_forever()
        self.assertEqual(results, ['hello', 'world'])

    def test_run_in_executor(self):
        """run_in_executor(None, ...) runs the callable in another thread."""
        def run(arg):
            return (arg, threading.get_ident())
        f2 = self.loop.run_in_executor(None, run, 'yo')
        res, thread_id = self.loop.run_until_complete(f2)
        self.assertEqual(res, 'yo')
        self.assertNotEqual(thread_id, threading.get_ident())
    def test_reader_callback(self):
        """add_reader() delivers data; remove_reader() detaches the fd."""
        r, w = test_utils.socketpair()
        r.setblocking(False)
        bytes_read = bytearray()

        def reader():
            try:
                data = r.recv(1024)
            except BlockingIOError:
                # Spurious readiness notifications are possible
                # at least on Linux -- see man select.
                return
            if data:
                bytes_read.extend(data)
            else:
                # EOF: detach and close the reading end.
                self.assertTrue(self.loop.remove_reader(r.fileno()))
                r.close()

        self.loop.add_reader(r.fileno(), reader)
        self.loop.call_soon(w.send, b'abc')
        test_utils.run_until(self.loop, lambda: len(bytes_read) >= 3)
        self.loop.call_soon(w.send, b'def')
        test_utils.run_until(self.loop, lambda: len(bytes_read) >= 6)
        self.loop.call_soon(w.close)
        self.loop.call_soon(self.loop.stop)
        self.loop.run_forever()
        self.assertEqual(bytes_read, b'abcdef')

    def test_writer_callback(self):
        """add_writer() fires when writable; remove_writer() is idempotent."""
        r, w = test_utils.socketpair()
        w.setblocking(False)

        def writer(data):
            w.send(data)
            self.loop.stop()

        data = b'x' * 1024
        self.loop.add_writer(w.fileno(), writer, data)
        self.loop.run_forever()
        # First removal succeeds, second returns False.
        self.assertTrue(self.loop.remove_writer(w.fileno()))
        self.assertFalse(self.loop.remove_writer(w.fileno()))
        w.close()
        read = r.recv(len(data) * 2)
        r.close()
        self.assertEqual(read, data)
    def _basetest_sock_client_ops(self, httpd, sock):
        """Exercise sock_connect/sendall/recv/accept against *httpd*."""
        if not isinstance(self.loop, proactor_events.BaseProactorEventLoop):
            # in debug mode, socket operations must fail
            # if the socket is not in blocking mode
            self.loop.set_debug(True)
            sock.setblocking(True)
            with self.assertRaises(ValueError):
                self.loop.run_until_complete(
                    self.loop.sock_connect(sock, httpd.address))
            with self.assertRaises(ValueError):
                self.loop.run_until_complete(
                    self.loop.sock_sendall(sock, b'GET / HTTP/1.0\r\n\r\n'))
            with self.assertRaises(ValueError):
                self.loop.run_until_complete(
                    self.loop.sock_recv(sock, 1024))
            with self.assertRaises(ValueError):
                self.loop.run_until_complete(
                    self.loop.sock_accept(sock))

        # test in non-blocking mode
        sock.setblocking(False)
        self.loop.run_until_complete(
            self.loop.sock_connect(sock, httpd.address))
        self.loop.run_until_complete(
            self.loop.sock_sendall(sock, b'GET / HTTP/1.0\r\n\r\n'))
        data = self.loop.run_until_complete(
            self.loop.sock_recv(sock, 1024))
        # consume data
        self.loop.run_until_complete(
            self.loop.sock_recv(sock, 1024))
        sock.close()
        self.assertTrue(data.startswith(b'HTTP/1.0 200 OK'))

    def test_sock_client_ops(self):
        """TCP variant of the low-level socket operations test."""
        with test_utils.run_test_server() as httpd:
            sock = socket.socket()
            self._basetest_sock_client_ops(httpd, sock)

    @unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets')
    def test_unix_sock_client_ops(self):
        """AF_UNIX variant of the low-level socket operations test."""
        with test_utils.run_test_unix_server() as httpd:
            sock = socket.socket(socket.AF_UNIX)
            self._basetest_sock_client_ops(httpd, sock)
def test_sock_client_fail(self):
# Make sure that we will get an unused port
address = None
try:
s = socket.socket()
s.bind(('127.0.0.1', 0))
address = s.getsockname()
finally:
s.close()
sock = socket.socket()
sock.setblocking(False)
with self.assertRaises(ConnectionRefusedError):
self.loop.run_until_complete(
self.loop.sock_connect(sock, address))
sock.close()
    def test_sock_accept(self):
        """sock_accept() yields a non-blocking conn and the peer address."""
        listener = socket.socket()
        listener.setblocking(False)
        listener.bind(('127.0.0.1', 0))
        listener.listen(1)
        client = socket.socket()
        client.connect(listener.getsockname())

        f = self.loop.sock_accept(listener)
        conn, addr = self.loop.run_until_complete(f)
        # The accepted socket must be non-blocking (timeout == 0).
        self.assertEqual(conn.gettimeout(), 0)
        self.assertEqual(addr, client.getsockname())
        self.assertEqual(client.getpeername(), listener.getsockname())
        client.close()
        conn.close()
        listener.close()
    @unittest.skipUnless(hasattr(signal, 'SIGKILL'), 'No SIGKILL')
    def test_add_signal_handler(self):
        """Validate argument checking and add/remove semantics for signals."""
        caught = 0

        def my_handler():
            nonlocal caught
            caught += 1

        # Check error behavior first.
        self.assertRaises(
            TypeError, self.loop.add_signal_handler, 'boom', my_handler)
        self.assertRaises(
            TypeError, self.loop.remove_signal_handler, 'boom')
        self.assertRaises(
            ValueError, self.loop.add_signal_handler, signal.NSIG+1,
            my_handler)
        self.assertRaises(
            ValueError, self.loop.remove_signal_handler, signal.NSIG+1)
        self.assertRaises(
            ValueError, self.loop.add_signal_handler, 0, my_handler)
        self.assertRaises(
            ValueError, self.loop.remove_signal_handler, 0)
        self.assertRaises(
            ValueError, self.loop.add_signal_handler, -1, my_handler)
        self.assertRaises(
            ValueError, self.loop.remove_signal_handler, -1)
        self.assertRaises(
            RuntimeError, self.loop.add_signal_handler, signal.SIGKILL,
            my_handler)
        # Removing SIGKILL doesn't raise, since we don't call signal().
        self.assertFalse(self.loop.remove_signal_handler(signal.SIGKILL))
        # Now set a handler and handle it.
        self.loop.add_signal_handler(signal.SIGINT, my_handler)

        os.kill(os.getpid(), signal.SIGINT)
        test_utils.run_until(self.loop, lambda: caught)

        # Removing it should restore the default handler.
        self.assertTrue(self.loop.remove_signal_handler(signal.SIGINT))
        self.assertEqual(signal.getsignal(signal.SIGINT),
                         signal.default_int_handler)
        # Removing again returns False.
        self.assertFalse(self.loop.remove_signal_handler(signal.SIGINT))

    @unittest.skipUnless(hasattr(signal, 'SIGALRM'), 'No SIGALRM')
    def test_signal_handling_while_selecting(self):
        # Test with a signal actually arriving during a select() call.
        caught = 0

        def my_handler():
            nonlocal caught
            caught += 1
            self.loop.stop()

        self.loop.add_signal_handler(signal.SIGALRM, my_handler)

        signal.setitimer(signal.ITIMER_REAL, 0.01, 0)  # Send SIGALRM once.
        self.loop.run_forever()
        self.assertEqual(caught, 1)

    @unittest.skipUnless(hasattr(signal, 'SIGALRM'), 'No SIGALRM')
    def test_signal_handling_args(self):
        """Extra positional args are forwarded to the signal handler."""
        some_args = (42,)
        caught = 0

        def my_handler(*args):
            nonlocal caught
            caught += 1
            self.assertEqual(args, some_args)

        self.loop.add_signal_handler(signal.SIGALRM, my_handler, *some_args)

        signal.setitimer(signal.ITIMER_REAL, 0.1, 0)  # Send SIGALRM once.
        self.loop.call_later(0.5, self.loop.stop)
        self.loop.run_forever()
        self.assertEqual(caught, 1)
    def _basetest_create_connection(self, connection_fut, check_sockname=True):
        """Await *connection_fut* and sanity-check transport and protocol."""
        tr, pr = self.loop.run_until_complete(connection_fut)
        self.assertIsInstance(tr, asyncio.Transport)
        self.assertIsInstance(pr, asyncio.Protocol)
        self.assertIs(pr.transport, tr)
        if check_sockname:
            self.assertIsNotNone(tr.get_extra_info('sockname'))
        self.loop.run_until_complete(pr.done)
        self.assertGreater(pr.nbytes, 0)
        tr.close()

    def test_create_connection(self):
        """create_connection() to a TCP test server completes an exchange."""
        with test_utils.run_test_server() as httpd:
            conn_fut = self.loop.create_connection(
                lambda: MyProto(loop=self.loop), *httpd.address)
            self._basetest_create_connection(conn_fut)

    @unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets')
    def test_create_unix_connection(self):
        # Issue #20682: On Mac OS X Tiger, getsockname() returns a
        # zero-length address for UNIX socket.
        check_sockname = not osx_tiger()

        with test_utils.run_test_unix_server() as httpd:
            conn_fut = self.loop.create_unix_connection(
                lambda: MyProto(loop=self.loop), httpd.address)
            self._basetest_create_connection(conn_fut, check_sockname)
def test_create_connection_sock(self):
with test_utils.run_test_server() as httpd:
sock = None
infos = self.loop.run_until_complete(
self.loop.getaddrinfo(
*httpd.address, type=socket.SOCK_STREAM))
for family, type, proto, cname, address in infos:
try:
sock = socket.socket(family=family, type=type, proto=proto)
sock.setblocking(False)
self.loop.run_until_complete(
self.loop.sock_connect(sock, address))
except:
pass
else:
break
else:
assert False, 'Can not create socket.'
f = self.loop.create_connection(
lambda: MyProto(loop=self.loop), sock=sock)
tr, pr = self.loop.run_until_complete(f)
self.assertIsInstance(tr, asyncio.Transport)
self.assertIsInstance(pr, asyncio.Protocol)
self.loop.run_until_complete(pr.done)
self.assertGreater(pr.nbytes, 0)
tr.close()
    def _basetest_create_ssl_connection(self, connection_fut,
                                        check_sockname=True):
        """Await an SSL connection future and verify the SSL transport."""
        tr, pr = self.loop.run_until_complete(connection_fut)
        self.assertIsInstance(tr, asyncio.Transport)
        self.assertIsInstance(pr, asyncio.Protocol)
        # The transport class name identifies the SSL implementation used.
        self.assertTrue('ssl' in tr.__class__.__name__.lower())
        if check_sockname:
            self.assertIsNotNone(tr.get_extra_info('sockname'))
        self.loop.run_until_complete(pr.done)
        self.assertGreater(pr.nbytes, 0)
        tr.close()

    def _test_create_ssl_connection(self, httpd, create_connection,
                                    check_sockname=True):
        """Drive *create_connection* with dummy and default SSL contexts."""
        conn_fut = create_connection(ssl=test_utils.dummy_ssl_context())
        self._basetest_create_ssl_connection(conn_fut, check_sockname)

        # ssl.Purpose was introduced in Python 3.4
        if hasattr(ssl, 'Purpose'):
            def _dummy_ssl_create_context(purpose=ssl.Purpose.SERVER_AUTH, *,
                                          cafile=None, capath=None,
                                          cadata=None):
                """
                A ssl.create_default_context() replacement that doesn't enable
                cert validation.
                """
                self.assertEqual(purpose, ssl.Purpose.SERVER_AUTH)
                return test_utils.dummy_ssl_context()

            # With ssl=True, ssl.create_default_context() should be called
            with mock.patch('ssl.create_default_context',
                            side_effect=_dummy_ssl_create_context) as m:
                conn_fut = create_connection(ssl=True)
                self._basetest_create_ssl_connection(conn_fut, check_sockname)
                self.assertEqual(m.call_count, 1)

        # With the real ssl.create_default_context(), certificate
        # validation will fail
        with self.assertRaises(ssl.SSLError) as cm:
            conn_fut = create_connection(ssl=True)
            # Ignore the "SSL handshake failed" log in debug mode
            with test_utils.disable_logger():
                self._basetest_create_ssl_connection(conn_fut, check_sockname)

        self.assertEqual(cm.exception.reason, 'CERTIFICATE_VERIFY_FAILED')

    @unittest.skipIf(ssl is None, 'No ssl module')
    def test_create_ssl_connection(self):
        """create_connection() with SSL against the HTTPS test server."""
        with test_utils.run_test_server(use_ssl=True) as httpd:
            create_connection = functools.partial(
                self.loop.create_connection,
                lambda: MyProto(loop=self.loop),
                *httpd.address)
            self._test_create_ssl_connection(httpd, create_connection)

    def test_legacy_create_ssl_connection(self):
        """Same as test_create_ssl_connection(), without sslproto support."""
        with test_utils.force_legacy_ssl_support():
            self.test_create_ssl_connection()
    @unittest.skipIf(ssl is None, 'No ssl module')
    @unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets')
    def test_create_ssl_unix_connection(self):
        # Issue #20682: On Mac OS X Tiger, getsockname() returns a
        # zero-length address for UNIX socket.
        check_sockname = not osx_tiger()

        with test_utils.run_test_unix_server(use_ssl=True) as httpd:
            create_connection = functools.partial(
                self.loop.create_unix_connection,
                lambda: MyProto(loop=self.loop), httpd.address,
                server_hostname='127.0.0.1')

            self._test_create_ssl_connection(httpd, create_connection,
                                             check_sockname)

    def test_legacy_create_ssl_unix_connection(self):
        """AF_UNIX SSL connection with the legacy (non-sslproto) code path."""
        with test_utils.force_legacy_ssl_support():
            self.test_create_ssl_unix_connection()

    def test_create_connection_local_addr(self):
        """local_addr pins the client socket to the requested local port."""
        with test_utils.run_test_server() as httpd:
            port = support.find_unused_port()
            f = self.loop.create_connection(
                lambda: MyProto(loop=self.loop),
                *httpd.address, local_addr=(httpd.address[0], port))
            tr, pr = self.loop.run_until_complete(f)
            expected = pr.transport.get_extra_info('sockname')[1]
            self.assertEqual(port, expected)
            tr.close()

    def test_create_connection_local_addr_in_use(self):
        """Binding local_addr to the server's address raises EADDRINUSE."""
        with test_utils.run_test_server() as httpd:
            f = self.loop.create_connection(
                lambda: MyProto(loop=self.loop),
                *httpd.address, local_addr=httpd.address)
            with self.assertRaises(OSError) as cm:
                self.loop.run_until_complete(f)
            self.assertEqual(cm.exception.errno, errno.EADDRINUSE)
            self.assertIn(str(httpd.address), cm.exception.strerror)
    def test_create_server(self):
        """create_server() accepts a TCP client and exposes extra info."""
        proto = MyProto(self.loop)
        f = self.loop.create_server(lambda: proto, '0.0.0.0', 0)
        server = self.loop.run_until_complete(f)
        self.assertEqual(len(server.sockets), 1)
        sock = server.sockets[0]
        host, port = sock.getsockname()
        self.assertEqual(host, '0.0.0.0')
        client = socket.socket()
        client.connect(('127.0.0.1', port))
        client.sendall(b'xxx')

        self.loop.run_until_complete(proto.connected)
        self.assertEqual('CONNECTED', proto.state)

        test_utils.run_until(self.loop, lambda: proto.nbytes > 0)
        self.assertEqual(3, proto.nbytes)

        # extra info is available
        self.assertIsNotNone(proto.transport.get_extra_info('sockname'))
        self.assertEqual('127.0.0.1',
                         proto.transport.get_extra_info('peername')[0])

        # close connection
        proto.transport.close()
        self.loop.run_until_complete(proto.done)
        self.assertEqual('CLOSED', proto.state)

        # the client socket must be closed after to avoid ECONNRESET upon
        # recv()/send() on the serving socket
        client.close()

        # close server
        server.close()

    def _make_unix_server(self, factory, **kwargs):
        """Start a unix server on a fresh socket path; return (server, path)."""
        path = test_utils.gen_unix_socket_path()
        # Remove the socket file on test teardown.
        self.addCleanup(lambda: os.path.exists(path) and os.unlink(path))

        f = self.loop.create_unix_server(factory, path, **kwargs)
        server = self.loop.run_until_complete(f)

        return server, path

    @unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets')
    def test_create_unix_server(self):
        """create_unix_server() accepts an AF_UNIX client connection."""
        proto = MyProto(loop=self.loop)
        server, path = self._make_unix_server(lambda: proto)
        self.assertEqual(len(server.sockets), 1)

        client = socket.socket(socket.AF_UNIX)
        client.connect(path)
        client.sendall(b'xxx')

        self.loop.run_until_complete(proto.connected)
        self.assertEqual('CONNECTED', proto.state)
        test_utils.run_until(self.loop, lambda: proto.nbytes > 0)
        self.assertEqual(3, proto.nbytes)

        # close connection
        proto.transport.close()
        self.loop.run_until_complete(proto.done)
        self.assertEqual('CLOSED', proto.state)

        # the client socket must be closed after to avoid ECONNRESET upon
        # recv()/send() on the serving socket
        client.close()

        # close server
        server.close()

    @unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets')
    def test_create_unix_server_path_socket_error(self):
        """Passing both path and sock to create_unix_server() is rejected."""
        proto = MyProto(loop=self.loop)
        sock = socket.socket()
        with sock:
            f = self.loop.create_unix_server(lambda: proto, '/test', sock=sock)
            with self.assertRaisesRegex(ValueError,
                                        'path and sock can not be specified '
                                        'at the same time'):
                self.loop.run_until_complete(f)
    def _create_ssl_context(self, certfile, keyfile=None):
        """Build a server-side SSLContext loaded with *certfile*/*keyfile*."""
        sslcontext = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
        sslcontext.options |= ssl.OP_NO_SSLv2
        sslcontext.load_cert_chain(certfile, keyfile)
        return sslcontext

    def _make_ssl_server(self, factory, certfile, keyfile=None):
        """Start an SSL TCP server on 127.0.0.1; return (server, host, port)."""
        sslcontext = self._create_ssl_context(certfile, keyfile)

        f = self.loop.create_server(factory, '127.0.0.1', 0, ssl=sslcontext)
        server = self.loop.run_until_complete(f)

        sock = server.sockets[0]
        host, port = sock.getsockname()
        self.assertEqual(host, '127.0.0.1')
        return server, host, port

    def _make_ssl_unix_server(self, factory, certfile, keyfile=None):
        """Start an SSL AF_UNIX server; return (server, path)."""
        sslcontext = self._create_ssl_context(certfile, keyfile)
        return self._make_unix_server(factory, ssl=sslcontext)

    @unittest.skipIf(ssl is None, 'No ssl module')
    def test_create_server_ssl(self):
        """SSL server accepts an SSL client and completes a data exchange."""
        proto = MyProto(loop=self.loop)
        server, host, port = self._make_ssl_server(
            lambda: proto, ONLYCERT, ONLYKEY)

        f_c = self.loop.create_connection(MyBaseProto, host, port,
                                          ssl=test_utils.dummy_ssl_context())
        client, pr = self.loop.run_until_complete(f_c)

        client.write(b'xxx')
        self.loop.run_until_complete(proto.connected)
        self.assertEqual('CONNECTED', proto.state)

        test_utils.run_until(self.loop, lambda: proto.nbytes > 0)
        self.assertEqual(3, proto.nbytes)

        # extra info is available
        self.assertIsNotNone(proto.transport.get_extra_info('sockname'))
        self.assertEqual('127.0.0.1',
                         proto.transport.get_extra_info('peername')[0])

        # close connection
        proto.transport.close()
        self.loop.run_until_complete(proto.done)
        self.assertEqual('CLOSED', proto.state)

        # the client socket must be closed after to avoid ECONNRESET upon
        # recv()/send() on the serving socket
        client.close()

        # stop serving
        server.close()

    def test_legacy_create_server_ssl(self):
        """SSL server test using the legacy (non-sslproto) implementation."""
        with test_utils.force_legacy_ssl_support():
            self.test_create_server_ssl()
    @unittest.skipIf(ssl is None, 'No ssl module')
    @unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets')
    def test_create_unix_server_ssl(self):
        """SSL AF_UNIX server accepts an SSL client and exchanges data."""
        proto = MyProto(loop=self.loop)
        server, path = self._make_ssl_unix_server(
            lambda: proto, ONLYCERT, ONLYKEY)

        f_c = self.loop.create_unix_connection(
            MyBaseProto, path, ssl=test_utils.dummy_ssl_context(),
            server_hostname='')

        client, pr = self.loop.run_until_complete(f_c)

        client.write(b'xxx')
        self.loop.run_until_complete(proto.connected)
        self.assertEqual('CONNECTED', proto.state)
        test_utils.run_until(self.loop, lambda: proto.nbytes > 0)
        self.assertEqual(3, proto.nbytes)

        # close connection
        proto.transport.close()
        self.loop.run_until_complete(proto.done)
        self.assertEqual('CLOSED', proto.state)

        # the client socket must be closed after to avoid ECONNRESET upon
        # recv()/send() on the serving socket
        client.close()

        # stop serving
        server.close()

    def test_legacy_create_unix_server_ssl(self):
        """AF_UNIX SSL server test with the legacy implementation."""
        with test_utils.force_legacy_ssl_support():
            self.test_create_unix_server_ssl()

    @unittest.skipIf(ssl is None, 'No ssl module')
    def test_create_server_ssl_verify_failed(self):
        """Client with CERT_REQUIRED but no CA must fail the handshake."""
        proto = MyProto(loop=self.loop)
        server, host, port = self._make_ssl_server(
            lambda: proto, SIGNED_CERTFILE)

        sslcontext_client = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
        sslcontext_client.options |= ssl.OP_NO_SSLv2
        sslcontext_client.verify_mode = ssl.CERT_REQUIRED
        if hasattr(sslcontext_client, 'check_hostname'):
            sslcontext_client.check_hostname = True

        # no CA loaded
        f_c = self.loop.create_connection(MyProto, host, port,
                                          ssl=sslcontext_client)
        with mock.patch.object(self.loop, 'call_exception_handler'):
            with test_utils.disable_logger():
                with self.assertRaisesRegex(ssl.SSLError,
                                            'certificate verify failed '):
                    self.loop.run_until_complete(f_c)

            # execute the loop to log the connection error
            test_utils.run_briefly(self.loop)

        # close connection
        self.assertIsNone(proto.transport)
        server.close()

    def test_legacy_create_server_ssl_verify_failed(self):
        """Verify-failure test with the legacy SSL implementation."""
        with test_utils.force_legacy_ssl_support():
            self.test_create_server_ssl_verify_failed()
    @unittest.skipIf(ssl is None, 'No ssl module')
    @unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets')
    def test_create_unix_server_ssl_verify_failed(self):
        """AF_UNIX client with CERT_REQUIRED but no CA fails the handshake."""
        proto = MyProto(loop=self.loop)
        server, path = self._make_ssl_unix_server(
            lambda: proto, SIGNED_CERTFILE)

        sslcontext_client = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
        sslcontext_client.options |= ssl.OP_NO_SSLv2
        sslcontext_client.verify_mode = ssl.CERT_REQUIRED
        if hasattr(sslcontext_client, 'check_hostname'):
            sslcontext_client.check_hostname = True

        # no CA loaded
        f_c = self.loop.create_unix_connection(MyProto, path,
                                               ssl=sslcontext_client,
                                               server_hostname='invalid')
        with mock.patch.object(self.loop, 'call_exception_handler'):
            with test_utils.disable_logger():
                with self.assertRaisesRegex(ssl.SSLError,
                                            'certificate verify failed '):
                    self.loop.run_until_complete(f_c)

            # execute the loop to log the connection error
            test_utils.run_briefly(self.loop)

        # close connection
        self.assertIsNone(proto.transport)
        server.close()

    def test_legacy_create_unix_server_ssl_verify_failed(self):
        """AF_UNIX verify-failure test with the legacy SSL implementation."""
        with test_utils.force_legacy_ssl_support():
            self.test_create_unix_server_ssl_verify_failed()

    @unittest.skipIf(ssl is None, 'No ssl module')
    def test_create_server_ssl_match_failed(self):
        """Hostname mismatch raises CertificateError despite a valid CA."""
        proto = MyProto(loop=self.loop)
        server, host, port = self._make_ssl_server(
            lambda: proto, SIGNED_CERTFILE)

        sslcontext_client = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
        sslcontext_client.options |= ssl.OP_NO_SSLv2
        sslcontext_client.verify_mode = ssl.CERT_REQUIRED
        sslcontext_client.load_verify_locations(
            cafile=SIGNING_CA)
        if hasattr(sslcontext_client, 'check_hostname'):
            sslcontext_client.check_hostname = True

        # incorrect server_hostname
        f_c = self.loop.create_connection(MyProto, host, port,
                                          ssl=sslcontext_client)
        with mock.patch.object(self.loop, 'call_exception_handler'):
            with test_utils.disable_logger():
                with self.assertRaisesRegex(
                        ssl.CertificateError,
                        "hostname '127.0.0.1' doesn't match 'localhost'"):
                    self.loop.run_until_complete(f_c)

        # close connection
        proto.transport.close()
        server.close()

    def test_legacy_create_server_ssl_match_failed(self):
        """Hostname-mismatch test with the legacy SSL implementation."""
        with test_utils.force_legacy_ssl_support():
            self.test_create_server_ssl_match_failed()
    @unittest.skipIf(ssl is None, 'No ssl module')
    @unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets')
    def test_create_unix_server_ssl_verified(self):
        """AF_UNIX SSL handshake succeeds with correct CA and hostname."""
        proto = MyProto(loop=self.loop)
        server, path = self._make_ssl_unix_server(
            lambda: proto, SIGNED_CERTFILE)

        sslcontext_client = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
        sslcontext_client.options |= ssl.OP_NO_SSLv2
        sslcontext_client.verify_mode = ssl.CERT_REQUIRED
        sslcontext_client.load_verify_locations(cafile=SIGNING_CA)
        if hasattr(sslcontext_client, 'check_hostname'):
            sslcontext_client.check_hostname = True

        # Connection succeeds with correct CA and server hostname.
        f_c = self.loop.create_unix_connection(MyProto, path,
                                               ssl=sslcontext_client,
                                               server_hostname='localhost')
        client, pr = self.loop.run_until_complete(f_c)

        # close connection
        proto.transport.close()
        client.close()
        server.close()
        self.loop.run_until_complete(proto.done)

    def test_legacy_create_unix_server_ssl_verified(self):
        """AF_UNIX verified-handshake test with the legacy implementation."""
        with test_utils.force_legacy_ssl_support():
            self.test_create_unix_server_ssl_verified()

    @unittest.skipIf(ssl is None, 'No ssl module')
    def test_create_server_ssl_verified(self):
        """TCP SSL handshake succeeds with correct CA and hostname."""
        proto = MyProto(loop=self.loop)
        server, host, port = self._make_ssl_server(
            lambda: proto, SIGNED_CERTFILE)

        sslcontext_client = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
        sslcontext_client.options |= ssl.OP_NO_SSLv2
        sslcontext_client.verify_mode = ssl.CERT_REQUIRED
        sslcontext_client.load_verify_locations(cafile=SIGNING_CA)
        if hasattr(sslcontext_client, 'check_hostname'):
            sslcontext_client.check_hostname = True

        # Connection succeeds with correct CA and server hostname.
        f_c = self.loop.create_connection(MyProto, host, port,
                                          ssl=sslcontext_client,
                                          server_hostname='localhost')
        client, pr = self.loop.run_until_complete(f_c)

        # close connection
        proto.transport.close()
        client.close()
        server.close()
        self.loop.run_until_complete(proto.done)

    def test_legacy_create_server_ssl_verified(self):
        """Verified-handshake test with the legacy SSL implementation."""
        with test_utils.force_legacy_ssl_support():
            self.test_create_server_ssl_verified()
    def test_create_server_sock(self):
        """create_server(sock=...) serves on a pre-bound socket."""
        proto = asyncio.Future(loop=self.loop)

        class TestMyProto(MyProto):
            def connection_made(self, transport):
                super().connection_made(transport)
                proto.set_result(self)

        sock_ob = socket.socket(type=socket.SOCK_STREAM)
        sock_ob.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        sock_ob.bind(('0.0.0.0', 0))

        f = self.loop.create_server(TestMyProto, sock=sock_ob)
        server = self.loop.run_until_complete(f)
        sock = server.sockets[0]
        # The server must use the very socket object it was given.
        self.assertIs(sock, sock_ob)

        host, port = sock.getsockname()
        self.assertEqual(host, '0.0.0.0')
        client = socket.socket()
        client.connect(('127.0.0.1', port))
        client.send(b'xxx')
        client.close()
        server.close()

    def test_create_server_addr_in_use(self):
        """Binding a second server to an occupied port raises EADDRINUSE."""
        sock_ob = socket.socket(type=socket.SOCK_STREAM)
        sock_ob.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        sock_ob.bind(('0.0.0.0', 0))

        f = self.loop.create_server(MyProto, sock=sock_ob)
        server = self.loop.run_until_complete(f)
        sock = server.sockets[0]
        host, port = sock.getsockname()

        f = self.loop.create_server(MyProto, host=host, port=port)
        with self.assertRaises(OSError) as cm:
            self.loop.run_until_complete(f)
        self.assertEqual(cm.exception.errno, errno.EADDRINUSE)

        server.close()

    @unittest.skipUnless(support.IPV6_ENABLED, 'IPv6 not supported or enabled')
    def test_create_server_dual_stack(self):
        """host=None serves both IPv4 and IPv6 clients on the same port."""
        f_proto = asyncio.Future(loop=self.loop)

        class TestMyProto(MyProto):
            def connection_made(self, transport):
                super().connection_made(transport)
                f_proto.set_result(self)

        # find_unused_port() is racy; retry (at most 5 times) if another
        # process grabs the port before we bind it.
        try_count = 0
        while True:
            try:
                port = support.find_unused_port()
                f = self.loop.create_server(TestMyProto, host=None, port=port)
                server = self.loop.run_until_complete(f)
            except OSError as ex:
                if ex.errno == errno.EADDRINUSE:
                    try_count += 1
                    self.assertGreaterEqual(5, try_count)
                    continue
                else:
                    raise
            else:
                break
        client = socket.socket()
        client.connect(('127.0.0.1', port))
        client.send(b'xxx')
        proto = self.loop.run_until_complete(f_proto)
        proto.transport.close()
        client.close()

        f_proto = asyncio.Future(loop=self.loop)
        client = socket.socket(socket.AF_INET6)
        client.connect(('::1', port))
        client.send(b'xxx')
        proto = self.loop.run_until_complete(f_proto)
        proto.transport.close()
        client.close()

        server.close()

    def test_server_close(self):
        """After server.close(), new connections are refused."""
        f = self.loop.create_server(MyProto, '0.0.0.0', 0)
        server = self.loop.run_until_complete(f)
        sock = server.sockets[0]
        host, port = sock.getsockname()

        client = socket.socket()
        client.connect(('127.0.0.1', port))
        client.send(b'xxx')
        client.close()

        server.close()

        client = socket.socket()
        self.assertRaises(
            ConnectionRefusedError, client.connect, ('127.0.0.1', port))
        client.close()
    def test_create_datagram_endpoint(self):
        """UDP endpoints exchange a datagram and its 'resp:'-prefixed echo."""
        class TestMyDatagramProto(MyDatagramProto):
            # NOTE: uses ``inner_self`` so the enclosing test's ``self``
            # (the TestCase) stays visible for self.loop.
            def __init__(inner_self):
                super().__init__(loop=self.loop)

            def datagram_received(self, data, addr):
                super().datagram_received(data, addr)
                self.transport.sendto(b'resp:'+data, addr)

        coro = self.loop.create_datagram_endpoint(
            TestMyDatagramProto, local_addr=('127.0.0.1', 0))
        s_transport, server = self.loop.run_until_complete(coro)
        host, port = s_transport.get_extra_info('sockname')

        self.assertIsInstance(s_transport, asyncio.Transport)
        self.assertIsInstance(server, TestMyDatagramProto)
        self.assertEqual('INITIALIZED', server.state)
        self.assertIs(server.transport, s_transport)

        coro = self.loop.create_datagram_endpoint(
            lambda: MyDatagramProto(loop=self.loop),
            remote_addr=(host, port))
        transport, client = self.loop.run_until_complete(coro)

        self.assertIsInstance(transport, asyncio.Transport)
        self.assertIsInstance(client, MyDatagramProto)
        self.assertEqual('INITIALIZED', client.state)
        self.assertIs(client.transport, transport)

        transport.sendto(b'xxx')
        test_utils.run_until(self.loop, lambda: server.nbytes)
        self.assertEqual(3, server.nbytes)
        test_utils.run_until(self.loop, lambda: client.nbytes)

        # received
        self.assertEqual(8, client.nbytes)

        # extra info is available
        self.assertIsNotNone(transport.get_extra_info('sockname'))

        # close connection
        transport.close()
        self.loop.run_until_complete(client.done)
        self.assertEqual('CLOSED', client.state)
        server.transport.close()

    def test_internal_fds(self):
        """Selector loops own one internal fd pair, released on close()."""
        loop = self.create_event_loop()
        if not isinstance(loop, selector_events.BaseSelectorEventLoop):
            loop.close()
            self.skipTest('loop is not a BaseSelectorEventLoop')

        self.assertEqual(1, loop._internal_fds)
        loop.close()
        self.assertEqual(0, loop._internal_fds)
        self.assertIsNone(loop._csock)
        self.assertIsNone(loop._ssock)
@unittest.skipUnless(sys.platform != 'win32',
                     "Don't support pipes for Windows")
def test_read_pipe(self):
    """connect_read_pipe() delivers bytes written to an os.pipe()."""
    proto = MyReadPipeProto(loop=self.loop)

    rpipe, wpipe = os.pipe()
    pipeobj = io.open(rpipe, 'rb', 1024)

    @asyncio.coroutine
    def connect():
        t, p = yield from self.loop.connect_read_pipe(
            lambda: proto, pipeobj)
        self.assertIs(p, proto)
        self.assertIs(t, proto.transport)
        self.assertEqual(['INITIAL', 'CONNECTED'], proto.state)
        self.assertEqual(0, proto.nbytes)

    self.loop.run_until_complete(connect())

    os.write(wpipe, b'1')
    test_utils.run_until(self.loop, lambda: proto.nbytes >= 1)
    self.assertEqual(1, proto.nbytes)

    os.write(wpipe, b'2345')
    test_utils.run_until(self.loop, lambda: proto.nbytes >= 5)
    self.assertEqual(['INITIAL', 'CONNECTED'], proto.state)
    self.assertEqual(5, proto.nbytes)

    # closing the write end must produce EOF then CLOSED on the reader
    os.close(wpipe)
    self.loop.run_until_complete(proto.done)
    self.assertEqual(
        ['INITIAL', 'CONNECTED', 'EOF', 'CLOSED'], proto.state)
    # extra info is available
    self.assertIsNotNone(proto.transport.get_extra_info('pipe'))
@unittest.skipUnless(sys.platform != 'win32',
                     "Don't support pipes for Windows")
# select, poll and kqueue don't support character devices (PTY) on Mac OS X
# older than 10.6 (Snow Leopard)
@support.requires_mac_ver(10, 6)
# Issue #20495: The test hangs on FreeBSD 7.2 but pass on FreeBSD 9
@support.requires_freebsd_version(8)
def test_read_pty_output(self):
    """Like test_read_pipe, but reading the master side of a PTY pair."""
    proto = MyReadPipeProto(loop=self.loop)

    master, slave = os.openpty()
    # unbuffered so bytes become visible to the transport immediately
    master_read_obj = io.open(master, 'rb', 0)

    @asyncio.coroutine
    def connect():
        t, p = yield from self.loop.connect_read_pipe(lambda: proto,
                                                      master_read_obj)
        self.assertIs(p, proto)
        self.assertIs(t, proto.transport)
        self.assertEqual(['INITIAL', 'CONNECTED'], proto.state)
        self.assertEqual(0, proto.nbytes)

    self.loop.run_until_complete(connect())

    os.write(slave, b'1')
    test_utils.run_until(self.loop, lambda: proto.nbytes)
    self.assertEqual(1, proto.nbytes)

    os.write(slave, b'2345')
    test_utils.run_until(self.loop, lambda: proto.nbytes >= 5)
    self.assertEqual(['INITIAL', 'CONNECTED'], proto.state)
    self.assertEqual(5, proto.nbytes)

    # closing the slave must produce EOF then CLOSED on the reader
    os.close(slave)
    self.loop.run_until_complete(proto.done)
    self.assertEqual(
        ['INITIAL', 'CONNECTED', 'EOF', 'CLOSED'], proto.state)
    # extra info is available
    self.assertIsNotNone(proto.transport.get_extra_info('pipe'))
@unittest.skipUnless(sys.platform != 'win32',
                     "Don't support pipes for Windows")
def test_write_pipe(self):
    """connect_write_pipe() forwards transport.write() to an os.pipe()."""
    rpipe, wpipe = os.pipe()
    pipeobj = io.open(wpipe, 'wb', 1024)

    proto = MyWritePipeProto(loop=self.loop)
    connect = self.loop.connect_write_pipe(lambda: proto, pipeobj)
    transport, p = self.loop.run_until_complete(connect)
    self.assertIs(p, proto)
    self.assertIs(transport, proto.transport)
    self.assertEqual('CONNECTED', proto.state)

    transport.write(b'1')

    data = bytearray()
    def reader(data):
        # drain whatever is currently available from the read end
        chunk = os.read(rpipe, 1024)
        data += chunk
        return len(data)

    test_utils.run_until(self.loop, lambda: reader(data) >= 1)
    self.assertEqual(b'1', data)

    transport.write(b'2345')
    test_utils.run_until(self.loop, lambda: reader(data) >= 5)
    self.assertEqual(b'12345', data)
    self.assertEqual('CONNECTED', proto.state)

    os.close(rpipe)

    # extra info is available
    self.assertIsNotNone(proto.transport.get_extra_info('pipe'))

    # close connection
    proto.transport.close()
    self.loop.run_until_complete(proto.done)
    self.assertEqual('CLOSED', proto.state)
@unittest.skipUnless(sys.platform != 'win32',
                     "Don't support pipes for Windows")
def test_write_pipe_disconnect_on_close(self):
    """Closing the peer of a write pipe must drive the protocol to CLOSED."""
    rsock, wsock = test_utils.socketpair()
    rsock.setblocking(False)
    # detach() hands the fd to the file object so wsock itself won't close it
    pipeobj = io.open(wsock.detach(), 'wb', 1024)

    proto = MyWritePipeProto(loop=self.loop)
    connect = self.loop.connect_write_pipe(lambda: proto, pipeobj)
    transport, p = self.loop.run_until_complete(connect)
    self.assertIs(p, proto)
    self.assertIs(transport, proto.transport)
    self.assertEqual('CONNECTED', proto.state)

    transport.write(b'1')
    data = self.loop.run_until_complete(self.loop.sock_recv(rsock, 1024))
    self.assertEqual(b'1', data)

    # closing the read side disconnects the writer
    rsock.close()

    self.loop.run_until_complete(proto.done)
    self.assertEqual('CLOSED', proto.state)
@unittest.skipUnless(sys.platform != 'win32',
                     "Don't support pipes for Windows")
# select, poll and kqueue don't support character devices (PTY) on Mac OS X
# older than 10.6 (Snow Leopard)
@support.requires_mac_ver(10, 6)
def test_write_pty(self):
    """Like test_write_pipe, but writing the slave side of a PTY pair."""
    master, slave = os.openpty()
    slave_write_obj = io.open(slave, 'wb', 0)

    proto = MyWritePipeProto(loop=self.loop)
    connect = self.loop.connect_write_pipe(lambda: proto, slave_write_obj)
    transport, p = self.loop.run_until_complete(connect)
    self.assertIs(p, proto)
    self.assertIs(transport, proto.transport)
    self.assertEqual('CONNECTED', proto.state)

    transport.write(b'1')

    data = bytearray()
    def reader(data):
        # drain whatever is currently available from the master side
        chunk = os.read(master, 1024)
        data += chunk
        return len(data)

    # PTY I/O can be slow on loaded machines; allow a generous timeout
    test_utils.run_until(self.loop, lambda: reader(data) >= 1,
                         timeout=10)
    self.assertEqual(b'1', data)

    transport.write(b'2345')
    test_utils.run_until(self.loop, lambda: reader(data) >= 5,
                         timeout=10)
    self.assertEqual(b'12345', data)
    self.assertEqual('CONNECTED', proto.state)

    os.close(master)

    # extra info is available
    self.assertIsNotNone(proto.transport.get_extra_info('pipe'))

    # close connection
    proto.transport.close()
    self.loop.run_until_complete(proto.done)
    self.assertEqual('CLOSED', proto.state)
def test_prompt_cancellation(self):
    """Cancelling a pending sock_recv() future must return promptly."""
    r, w = test_utils.socketpair()
    r.setblocking(False)
    f = self.loop.sock_recv(r, 1)
    # proactor loops expose the overlapped operation as f.ov; selector
    # loops have no such attribute, so the ov checks are conditional
    ov = getattr(f, 'ov', None)
    if ov is not None:
        self.assertTrue(ov.pending)

    @asyncio.coroutine
    def main():
        try:
            # schedule the cancel before awaiting so it races the recv
            self.loop.call_soon(f.cancel)
            yield from f
        except asyncio.CancelledError:
            res = 'cancelled'
        else:
            res = None
        finally:
            self.loop.stop()
        return res

    start = time.monotonic()
    t = asyncio.Task(main(), loop=self.loop)
    self.loop.run_forever()
    elapsed = time.monotonic() - start

    # cancellation must not wait for data that will never arrive
    self.assertLess(elapsed, 0.1)
    self.assertEqual(t.result(), 'cancelled')
    self.assertRaises(asyncio.CancelledError, f.result)
    if ov is not None:
        self.assertFalse(ov.pending)
    self.loop._stop_serving(r)

    r.close()
    w.close()
def test_timeout_rounding(self):
    """Tiny sleep timeouts must not trigger extra _run_once iterations."""
    def _run_once():
        # count loop iterations by wrapping the real _run_once
        self.loop._run_once_counter += 1
        orig_run_once()

    orig_run_once = self.loop._run_once
    self.loop._run_once_counter = 0
    self.loop._run_once = _run_once

    @asyncio.coroutine
    def wait():
        loop = self.loop
        yield from asyncio.sleep(1e-2, loop=loop)
        yield from asyncio.sleep(1e-4, loop=loop)
        yield from asyncio.sleep(1e-6, loop=loop)
        yield from asyncio.sleep(1e-8, loop=loop)
        yield from asyncio.sleep(1e-10, loop=loop)

    self.loop.run_until_complete(wait())
    # The ideal number of call is 12, but on some platforms, the selector
    # may sleep at little bit less than timeout depending on the resolution
    # of the clock used by the kernel. Tolerate a few useless calls on
    # these platforms.
    self.assertLessEqual(self.loop._run_once_counter, 20,
        {'clock_resolution': self.loop._clock_resolution,
         'selector': self.loop._selector.__class__.__name__})
def test_sock_connect_address(self):
    # In debug mode, sock_connect() must ensure that the address is already
    # resolved (call _check_resolved_address())
    self.loop.set_debug(True)

    # hostname (not IP) addresses, in 2-tuple and 4-tuple (IPv6) forms
    addresses = [(socket.AF_INET, ('www.python.org', 80))]
    if support.IPV6_ENABLED:
        addresses.extend((
            (socket.AF_INET6, ('www.python.org', 80)),
            (socket.AF_INET6, ('www.python.org', 80, 0, 0)),
        ))

    for family, address in addresses:
        for sock_type in (socket.SOCK_STREAM, socket.SOCK_DGRAM):
            sock = socket.socket(family, sock_type)
            with sock:
                sock.setblocking(False)
                connect = self.loop.sock_connect(sock, address)
                with self.assertRaises(ValueError) as cm:
                    self.loop.run_until_complete(connect)
                self.assertIn('address must be resolved',
                              str(cm.exception))
def test_remove_fds_after_closing(self):
    """remove_reader/writer on a closed loop return False, not raise."""
    loop = self.create_event_loop()
    callback = lambda: None
    r, w = test_utils.socketpair()
    self.addCleanup(r.close)
    self.addCleanup(w.close)
    loop.add_reader(r, callback)
    loop.add_writer(w, callback)
    loop.close()
    self.assertFalse(loop.remove_reader(r))
    self.assertFalse(loop.remove_writer(w))
def test_add_fds_after_closing(self):
    """add_reader/writer on a closed loop must raise RuntimeError."""
    loop = self.create_event_loop()
    callback = lambda: None
    r, w = test_utils.socketpair()
    self.addCleanup(r.close)
    self.addCleanup(w.close)
    loop.close()
    with self.assertRaises(RuntimeError):
        loop.add_reader(r, callback)
    with self.assertRaises(RuntimeError):
        loop.add_writer(w, callback)
def test_close_running_event_loop(self):
    """close() must be rejected while the loop is running."""
    @asyncio.coroutine
    def close_loop(loop):
        self.loop.close()

    coro = close_loop(self.loop)
    with self.assertRaises(RuntimeError):
        self.loop.run_until_complete(coro)
def test_close(self):
    """Every scheduling/run entry point must raise once the loop is closed."""
    self.loop.close()

    @asyncio.coroutine
    def test():
        pass

    func = lambda: False
    coro = test()
    # the coroutine is never awaited; close it to avoid a warning
    self.addCleanup(coro.close)

    # operation blocked when the loop is closed
    with self.assertRaises(RuntimeError):
        self.loop.run_forever()
    with self.assertRaises(RuntimeError):
        fut = asyncio.Future(loop=self.loop)
        self.loop.run_until_complete(fut)
    with self.assertRaises(RuntimeError):
        self.loop.call_soon(func)
    with self.assertRaises(RuntimeError):
        self.loop.call_soon_threadsafe(func)
    with self.assertRaises(RuntimeError):
        self.loop.call_later(1.0, func)
    with self.assertRaises(RuntimeError):
        self.loop.call_at(self.loop.time() + .0, func)
    with self.assertRaises(RuntimeError):
        self.loop.run_in_executor(None, func)
    with self.assertRaises(RuntimeError):
        self.loop.create_task(coro)
    with self.assertRaises(RuntimeError):
        self.loop.add_signal_handler(signal.SIGTERM, func)
class SubprocessTestsMixin:
    """Subprocess transport/protocol tests, mixed into the loop test classes.

    Expects the host TestCase to provide ``self.loop``.
    """

    def check_terminated(self, returncode):
        """Assert *returncode* reflects SIGTERM (platform-dependent)."""
        if sys.platform == 'win32':
            self.assertIsInstance(returncode, int)
            # expect 1 but sometimes get 0
        else:
            self.assertEqual(-signal.SIGTERM, returncode)

    def check_killed(self, returncode):
        """Assert *returncode* reflects SIGKILL (platform-dependent)."""
        if sys.platform == 'win32':
            self.assertIsInstance(returncode, int)
            # expect 1 but sometimes get 0
        else:
            self.assertEqual(-signal.SIGKILL, returncode)

    def test_subprocess_exec(self):
        """subprocess_exec() wires stdin/stdout pipes to the protocol."""
        prog = os.path.join(os.path.dirname(__file__), 'echo.py')

        connect = self.loop.subprocess_exec(
            functools.partial(MySubprocessProtocol, self.loop),
            sys.executable, prog)
        transp, proto = self.loop.run_until_complete(connect)
        self.assertIsInstance(proto, MySubprocessProtocol)
        self.loop.run_until_complete(proto.connected)
        self.assertEqual('CONNECTED', proto.state)

        stdin = transp.get_pipe_transport(0)
        stdin.write(b'Python The Winner')
        self.loop.run_until_complete(proto.got_data[1].wait())
        with test_utils.disable_logger():
            transp.close()
        self.loop.run_until_complete(proto.completed)
        self.check_killed(proto.returncode)
        self.assertEqual(b'Python The Winner', proto.data[1])

    def test_subprocess_interactive(self):
        """Two sequential writes each round-trip through the echo child."""
        prog = os.path.join(os.path.dirname(__file__), 'echo.py')

        connect = self.loop.subprocess_exec(
            functools.partial(MySubprocessProtocol, self.loop),
            sys.executable, prog)
        transp, proto = self.loop.run_until_complete(connect)
        self.assertIsInstance(proto, MySubprocessProtocol)
        self.loop.run_until_complete(proto.connected)
        self.assertEqual('CONNECTED', proto.state)

        stdin = transp.get_pipe_transport(0)
        stdin.write(b'Python ')
        self.loop.run_until_complete(proto.got_data[1].wait())
        proto.got_data[1].clear()
        self.assertEqual(b'Python ', proto.data[1])

        stdin.write(b'The Winner')
        self.loop.run_until_complete(proto.got_data[1].wait())
        self.assertEqual(b'Python The Winner', proto.data[1])

        with test_utils.disable_logger():
            transp.close()
        self.loop.run_until_complete(proto.completed)
        self.check_killed(proto.returncode)

    def test_subprocess_shell(self):
        """subprocess_shell() runs a shell command and captures stdout."""
        connect = self.loop.subprocess_shell(
            functools.partial(MySubprocessProtocol, self.loop),
            'echo Python')
        transp, proto = self.loop.run_until_complete(connect)
        self.assertIsInstance(proto, MySubprocessProtocol)
        self.loop.run_until_complete(proto.connected)

        transp.get_pipe_transport(0).close()
        self.loop.run_until_complete(proto.completed)
        self.assertEqual(0, proto.returncode)
        self.assertTrue(all(f.done() for f in proto.disconnects.values()))
        # rstrip tolerates \n vs \r\n line endings across platforms
        self.assertEqual(proto.data[1].rstrip(b'\r\n'), b'Python')
        self.assertEqual(proto.data[2], b'')
        transp.close()

    def test_subprocess_exitcode(self):
        """A nonzero shell exit status is reported as the returncode."""
        connect = self.loop.subprocess_shell(
            functools.partial(MySubprocessProtocol, self.loop),
            'exit 7', stdin=None, stdout=None, stderr=None)
        transp, proto = self.loop.run_until_complete(connect)
        self.assertIsInstance(proto, MySubprocessProtocol)
        self.loop.run_until_complete(proto.completed)
        self.assertEqual(7, proto.returncode)
        transp.close()

    def test_subprocess_close_after_finish(self):
        """close() after the child exited returns None and doesn't raise."""
        connect = self.loop.subprocess_shell(
            functools.partial(MySubprocessProtocol, self.loop),
            'exit 7', stdin=None, stdout=None, stderr=None)
        transp, proto = self.loop.run_until_complete(connect)
        self.assertIsInstance(proto, MySubprocessProtocol)
        # no pipes were requested, so all pipe transports are None
        self.assertIsNone(transp.get_pipe_transport(0))
        self.assertIsNone(transp.get_pipe_transport(1))
        self.assertIsNone(transp.get_pipe_transport(2))
        self.loop.run_until_complete(proto.completed)
        self.assertEqual(7, proto.returncode)
        self.assertIsNone(transp.close())

    def test_subprocess_kill(self):
        """kill() ends the child with SIGKILL semantics."""
        prog = os.path.join(os.path.dirname(__file__), 'echo.py')

        connect = self.loop.subprocess_exec(
            functools.partial(MySubprocessProtocol, self.loop),
            sys.executable, prog)
        transp, proto = self.loop.run_until_complete(connect)
        self.assertIsInstance(proto, MySubprocessProtocol)
        self.loop.run_until_complete(proto.connected)

        transp.kill()
        self.loop.run_until_complete(proto.completed)
        self.check_killed(proto.returncode)
        transp.close()

    def test_subprocess_terminate(self):
        """terminate() ends the child with SIGTERM semantics."""
        prog = os.path.join(os.path.dirname(__file__), 'echo.py')

        connect = self.loop.subprocess_exec(
            functools.partial(MySubprocessProtocol, self.loop),
            sys.executable, prog)
        transp, proto = self.loop.run_until_complete(connect)
        self.assertIsInstance(proto, MySubprocessProtocol)
        self.loop.run_until_complete(proto.connected)

        transp.terminate()
        self.loop.run_until_complete(proto.completed)
        self.check_terminated(proto.returncode)
        transp.close()

    @unittest.skipIf(sys.platform == 'win32', "Don't have SIGHUP")
    def test_subprocess_send_signal(self):
        """send_signal() delivers an arbitrary signal to the child."""
        prog = os.path.join(os.path.dirname(__file__), 'echo.py')

        connect = self.loop.subprocess_exec(
            functools.partial(MySubprocessProtocol, self.loop),
            sys.executable, prog)
        transp, proto = self.loop.run_until_complete(connect)
        self.assertIsInstance(proto, MySubprocessProtocol)
        self.loop.run_until_complete(proto.connected)

        transp.send_signal(signal.SIGHUP)
        self.loop.run_until_complete(proto.completed)
        self.assertEqual(-signal.SIGHUP, proto.returncode)
        transp.close()

    def test_subprocess_stderr(self):
        """stdout and stderr arrive on their respective pipe channels."""
        prog = os.path.join(os.path.dirname(__file__), 'echo2.py')

        connect = self.loop.subprocess_exec(
            functools.partial(MySubprocessProtocol, self.loop),
            sys.executable, prog)
        transp, proto = self.loop.run_until_complete(connect)
        self.assertIsInstance(proto, MySubprocessProtocol)
        self.loop.run_until_complete(proto.connected)

        stdin = transp.get_pipe_transport(0)
        stdin.write(b'test')

        self.loop.run_until_complete(proto.completed)

        transp.close()
        self.assertEqual(b'OUT:test', proto.data[1])
        self.assertTrue(proto.data[2].startswith(b'ERR:test'), proto.data[2])
        self.assertEqual(0, proto.returncode)

    def test_subprocess_stderr_redirect_to_stdout(self):
        """stderr=subprocess.STDOUT merges both streams onto channel 1."""
        prog = os.path.join(os.path.dirname(__file__), 'echo2.py')

        connect = self.loop.subprocess_exec(
            functools.partial(MySubprocessProtocol, self.loop),
            sys.executable, prog, stderr=subprocess.STDOUT)
        transp, proto = self.loop.run_until_complete(connect)
        self.assertIsInstance(proto, MySubprocessProtocol)
        self.loop.run_until_complete(proto.connected)

        stdin = transp.get_pipe_transport(0)
        self.assertIsNotNone(transp.get_pipe_transport(1))
        # stderr was redirected, so there is no separate channel 2 transport
        self.assertIsNone(transp.get_pipe_transport(2))

        stdin.write(b'test')
        self.loop.run_until_complete(proto.completed)
        self.assertTrue(proto.data[1].startswith(b'OUT:testERR:test'),
                        proto.data[1])
        self.assertEqual(b'', proto.data[2])

        transp.close()
        self.assertEqual(0, proto.returncode)

    def test_subprocess_close_client_stream(self):
        """Closing the child's stdout makes its next write fail visibly."""
        prog = os.path.join(os.path.dirname(__file__), 'echo3.py')

        connect = self.loop.subprocess_exec(
            functools.partial(MySubprocessProtocol, self.loop),
            sys.executable, prog)
        transp, proto = self.loop.run_until_complete(connect)
        self.assertIsInstance(proto, MySubprocessProtocol)
        self.loop.run_until_complete(proto.connected)

        stdin = transp.get_pipe_transport(0)
        stdout = transp.get_pipe_transport(1)
        stdin.write(b'test')
        self.loop.run_until_complete(proto.got_data[1].wait())
        self.assertEqual(b'OUT:test', proto.data[1])

        stdout.close()
        self.loop.run_until_complete(proto.disconnects[1])
        stdin.write(b'xxx')
        self.loop.run_until_complete(proto.got_data[2].wait())
        if sys.platform != 'win32':
            self.assertEqual(b'ERR:BrokenPipeError', proto.data[2])
        else:
            # After closing the read-end of a pipe, writing to the
            # write-end using os.write() fails with errno==EINVAL and
            # GetLastError()==ERROR_INVALID_NAME on Windows!?! (Using
            # WriteFile() we get ERROR_BROKEN_PIPE as expected.)
            self.assertEqual(b'ERR:OSError', proto.data[2])
        with test_utils.disable_logger():
            transp.close()
        self.loop.run_until_complete(proto.completed)
        self.check_killed(proto.returncode)

    def test_subprocess_wait_no_same_group(self):
        """The exit of a child started in a new session is still observed."""
        # start the new process in a new session
        connect = self.loop.subprocess_shell(
            functools.partial(MySubprocessProtocol, self.loop),
            'exit 7', stdin=None, stdout=None, stderr=None,
            start_new_session=True)
        # BUGFIX: a stray ``yield`` before run_until_complete() used to turn
        # this test method into a generator function, so unittest collected
        # it but never executed its body.
        _, proto = self.loop.run_until_complete(connect)
        self.assertIsInstance(proto, MySubprocessProtocol)
        self.loop.run_until_complete(proto.completed)
        self.assertEqual(7, proto.returncode)

    def test_subprocess_exec_invalid_args(self):
        """subprocess_exec() rejects Popen options it cannot honor."""
        @asyncio.coroutine
        def connect(**kwds):
            yield from self.loop.subprocess_exec(
                asyncio.SubprocessProtocol,
                'pwd', **kwds)

        with self.assertRaises(ValueError):
            self.loop.run_until_complete(connect(universal_newlines=True))
        with self.assertRaises(ValueError):
            self.loop.run_until_complete(connect(bufsize=4096))
        with self.assertRaises(ValueError):
            self.loop.run_until_complete(connect(shell=True))

    def test_subprocess_shell_invalid_args(self):
        """subprocess_shell() rejects list commands and bad Popen options."""
        @asyncio.coroutine
        def connect(cmd=None, **kwds):
            if not cmd:
                cmd = 'pwd'
            yield from self.loop.subprocess_shell(
                asyncio.SubprocessProtocol,
                cmd, **kwds)

        with self.assertRaises(ValueError):
            self.loop.run_until_complete(connect(['ls', '-l']))
        with self.assertRaises(ValueError):
            self.loop.run_until_complete(connect(universal_newlines=True))
        with self.assertRaises(ValueError):
            self.loop.run_until_complete(connect(bufsize=4096))
        with self.assertRaises(ValueError):
            self.loop.run_until_complete(connect(shell=False))
# Concrete loop test classes: on Windows, select- and proactor-based loops;
# elsewhere, one class per available selector implementation.
if sys.platform == 'win32':

    class SelectEventLoopTests(EventLoopTestsMixin, test_utils.TestCase):

        def create_event_loop(self):
            return asyncio.SelectorEventLoop()

    class ProactorEventLoopTests(EventLoopTestsMixin,
                                 SubprocessTestsMixin,
                                 test_utils.TestCase):

        def create_event_loop(self):
            return asyncio.ProactorEventLoop()

        # SSL over proactor needs the MemoryBIO-based ssl protocol
        if not sslproto._is_sslproto_available():
            def test_create_ssl_connection(self):
                raise unittest.SkipTest("need python 3.5 (ssl.MemoryBIO)")

            def test_create_server_ssl(self):
                raise unittest.SkipTest("need python 3.5 (ssl.MemoryBIO)")

            def test_create_server_ssl_verify_failed(self):
                raise unittest.SkipTest("need python 3.5 (ssl.MemoryBIO)")

            def test_create_server_ssl_match_failed(self):
                raise unittest.SkipTest("need python 3.5 (ssl.MemoryBIO)")

            def test_create_server_ssl_verified(self):
                raise unittest.SkipTest("need python 3.5 (ssl.MemoryBIO)")

        # the remaining inherited tests rely on APIs IocpEventLoop lacks
        def test_legacy_create_ssl_connection(self):
            raise unittest.SkipTest("IocpEventLoop incompatible with legacy SSL")

        def test_legacy_create_server_ssl(self):
            raise unittest.SkipTest("IocpEventLoop incompatible with legacy SSL")

        def test_legacy_create_server_ssl_verify_failed(self):
            raise unittest.SkipTest("IocpEventLoop incompatible with legacy SSL")

        def test_legacy_create_server_ssl_match_failed(self):
            raise unittest.SkipTest("IocpEventLoop incompatible with legacy SSL")

        def test_legacy_create_server_ssl_verified(self):
            raise unittest.SkipTest("IocpEventLoop incompatible with legacy SSL")

        def test_reader_callback(self):
            raise unittest.SkipTest("IocpEventLoop does not have add_reader()")

        def test_reader_callback_cancel(self):
            raise unittest.SkipTest("IocpEventLoop does not have add_reader()")

        def test_writer_callback(self):
            raise unittest.SkipTest("IocpEventLoop does not have add_writer()")

        def test_writer_callback_cancel(self):
            raise unittest.SkipTest("IocpEventLoop does not have add_writer()")

        def test_create_datagram_endpoint(self):
            raise unittest.SkipTest(
                "IocpEventLoop does not have create_datagram_endpoint()")

        def test_remove_fds_after_closing(self):
            raise unittest.SkipTest("IocpEventLoop does not have add_reader()")
else:
    from asyncio import selectors

    class UnixEventLoopTestsMixin(EventLoopTestsMixin):
        def setUp(self):
            super().setUp()
            # install a fresh child watcher bound to this test's loop
            watcher = asyncio.SafeChildWatcher()
            watcher.attach_loop(self.loop)
            asyncio.set_child_watcher(watcher)

        def tearDown(self):
            asyncio.set_child_watcher(None)
            super().tearDown()

    if hasattr(selectors, 'KqueueSelector'):
        class KqueueEventLoopTests(UnixEventLoopTestsMixin,
                                   SubprocessTestsMixin,
                                   test_utils.TestCase):

            def create_event_loop(self):
                return asyncio.SelectorEventLoop(
                    selectors.KqueueSelector())

            # kqueue doesn't support character devices (PTY) on Mac OS X older
            # than 10.9 (Maverick)
            @support.requires_mac_ver(10, 9)
            # Issue #20667: KqueueEventLoopTests.test_read_pty_output()
            # hangs on OpenBSD 5.5
            @unittest.skipIf(sys.platform.startswith('openbsd'),
                             'test hangs on OpenBSD')
            def test_read_pty_output(self):
                super().test_read_pty_output()

            # kqueue doesn't support character devices (PTY) on Mac OS X older
            # than 10.9 (Maverick)
            @support.requires_mac_ver(10, 9)
            def test_write_pty(self):
                super().test_write_pty()

    if hasattr(selectors, 'EpollSelector'):
        class EPollEventLoopTests(UnixEventLoopTestsMixin,
                                  SubprocessTestsMixin,
                                  test_utils.TestCase):

            def create_event_loop(self):
                return asyncio.SelectorEventLoop(selectors.EpollSelector())

    if hasattr(selectors, 'PollSelector'):
        class PollEventLoopTests(UnixEventLoopTestsMixin,
                                 SubprocessTestsMixin,
                                 test_utils.TestCase):

            def create_event_loop(self):
                return asyncio.SelectorEventLoop(selectors.PollSelector())

    # Should always exist.
    class SelectEventLoopTests(UnixEventLoopTestsMixin,
                               SubprocessTestsMixin,
                               test_utils.TestCase):

        def create_event_loop(self):
            return asyncio.SelectorEventLoop(selectors.SelectSelector())
def noop(*args):
    """Accept any positional arguments and do nothing (returns None)."""
    return None
class HandleTests(test_utils.TestCase):
    """Unit tests for asyncio.Handle: cancel, repr, and traceback capture."""

    def setUp(self):
        self.loop = mock.Mock()
        self.loop.get_debug.return_value = True

    def test_handle(self):
        """A Handle stores callback/args and can be cancelled."""
        def callback(*args):
            return args

        args = ()
        h = asyncio.Handle(callback, args, self.loop)
        self.assertIs(h._callback, callback)
        self.assertIs(h._args, args)
        self.assertFalse(h._cancelled)

        h.cancel()
        self.assertTrue(h._cancelled)

    def test_handle_from_handle(self):
        """Passing a Handle as the callback of another Handle is rejected."""
        def callback(*args):
            return args
        h1 = asyncio.Handle(callback, (), loop=self.loop)
        self.assertRaises(
            AssertionError, asyncio.Handle, h1, (), self.loop)

    def test_callback_with_exception(self):
        """An exception in the callback goes to the loop exception handler."""
        def callback():
            raise ValueError()

        self.loop = mock.Mock()
        self.loop.call_exception_handler = mock.Mock()

        h = asyncio.Handle(callback, (), self.loop)
        h._run()

        self.loop.call_exception_handler.assert_called_with({
            'message': test_utils.MockPattern('Exception in callback.*'),
            'exception': mock.ANY,
            'handle': h,
            'source_traceback': h._source_traceback,
        })

    def test_handle_weakref(self):
        """Handle instances must be weak-referenceable."""
        wd = weakref.WeakValueDictionary()
        h = asyncio.Handle(lambda: None, (), self.loop)
        wd['h'] = h  # Would fail without __weakref__ slot.

    def test_handle_repr(self):
        """repr() shows the callback, args and source location (non-debug)."""
        self.loop.get_debug.return_value = False

        # simple function
        h = asyncio.Handle(noop, (1, 2), self.loop)
        filename, lineno = test_utils.get_function_source(noop)
        self.assertEqual(repr(h),
                        '<Handle noop(1, 2) at %s:%s>'
                        % (filename, lineno))

        # cancelled handle
        h.cancel()
        self.assertEqual(repr(h),
                        '<Handle cancelled>')

        # decorated function
        cb = asyncio.coroutine(noop)
        h = asyncio.Handle(cb, (), self.loop)
        self.assertEqual(repr(h),
                        '<Handle noop() at %s:%s>'
                        % (filename, lineno))

        # partial function
        cb = functools.partial(noop, 1, 2)
        h = asyncio.Handle(cb, (3,), self.loop)
        regex = (r'^<Handle noop\(1, 2\)\(3\) at %s:%s>$'
                 % (re.escape(filename), lineno))
        self.assertRegex(repr(h), regex)

        # partial method (functools.partialmethod is new in 3.4)
        if sys.version_info >= (3, 4):
            method = HandleTests.test_handle_repr
            cb = functools.partialmethod(method)
            filename, lineno = test_utils.get_function_source(method)
            h = asyncio.Handle(cb, (), self.loop)

            cb_regex = r'<function HandleTests.test_handle_repr .*>'
            cb_regex = (r'functools.partialmethod\(%s, , \)\(\)' % cb_regex)
            regex = (r'^<Handle %s at %s:%s>$'
                     % (cb_regex, re.escape(filename), lineno))
            self.assertRegex(repr(h), regex)

    def test_handle_repr_debug(self):
        """In debug mode repr() also shows where the handle was created."""
        self.loop.get_debug.return_value = True

        # simple function
        create_filename = __file__
        create_lineno = sys._getframe().f_lineno + 1
        h = asyncio.Handle(noop, (1, 2), self.loop)
        filename, lineno = test_utils.get_function_source(noop)
        self.assertEqual(repr(h),
                        '<Handle noop(1, 2) at %s:%s created at %s:%s>'
                        % (filename, lineno, create_filename, create_lineno))

        # cancelled handle
        h.cancel()
        self.assertEqual(
            repr(h),
            '<Handle cancelled noop(1, 2) at %s:%s created at %s:%s>'
            % (filename, lineno, create_filename, create_lineno))

        # double cancellation won't overwrite _repr
        h.cancel()
        self.assertEqual(
            repr(h),
            '<Handle cancelled noop(1, 2) at %s:%s created at %s:%s>'
            % (filename, lineno, create_filename, create_lineno))

    def test_handle_source_traceback(self):
        """In debug mode every scheduling API records its call site."""
        loop = asyncio.get_event_loop_policy().new_event_loop()
        loop.set_debug(True)
        self.set_event_loop(loop)

        def check_source_traceback(h):
            # the handle must have been created on the line just above the call
            lineno = sys._getframe(1).f_lineno - 1
            self.assertIsInstance(h._source_traceback, list)
            self.assertEqual(h._source_traceback[-1][:3],
                             (__file__,
                              lineno,
                              'test_handle_source_traceback'))

        # call_soon
        h = loop.call_soon(noop)
        check_source_traceback(h)

        # call_soon_threadsafe
        h = loop.call_soon_threadsafe(noop)
        check_source_traceback(h)

        # call_later
        h = loop.call_later(0, noop)
        check_source_traceback(h)

        # call_at
        # BUGFIX: this section previously duplicated call_later() and never
        # actually exercised call_at().
        h = loop.call_at(loop.time(), noop)
        check_source_traceback(h)
class TimerTests(unittest.TestCase):
    """Unit tests for asyncio.TimerHandle: hashing, repr, and ordering."""

    def setUp(self):
        self.loop = mock.Mock()

    def test_hash(self):
        """A TimerHandle hashes by its scheduled time."""
        when = time.monotonic()
        h = asyncio.TimerHandle(when, lambda: False, (),
                                mock.Mock())
        self.assertEqual(hash(h), hash(when))

    def test_timer(self):
        """Construction, cancellation, and the when-is-required invariant."""
        def callback(*args):
            return args

        args = (1, 2, 3)
        when = time.monotonic()
        h = asyncio.TimerHandle(when, callback, args, mock.Mock())
        self.assertIs(h._callback, callback)
        self.assertIs(h._args, args)
        self.assertFalse(h._cancelled)

        # cancel clears the callback and args references
        h.cancel()
        self.assertTrue(h._cancelled)
        self.assertIsNone(h._callback)
        self.assertIsNone(h._args)

        # when cannot be None
        self.assertRaises(AssertionError,
                          asyncio.TimerHandle, None, callback, args,
                          self.loop)

    def test_timer_repr(self):
        """repr() shows the deadline and callback (non-debug mode)."""
        self.loop.get_debug.return_value = False

        # simple function
        h = asyncio.TimerHandle(123, noop, (), self.loop)
        src = test_utils.get_function_source(noop)
        self.assertEqual(repr(h),
                        '<TimerHandle when=123 noop() at %s:%s>' % src)

        # cancelled handle
        h.cancel()
        self.assertEqual(repr(h),
                        '<TimerHandle cancelled when=123>')

    def test_timer_repr_debug(self):
        """In debug mode repr() also shows where the handle was created."""
        self.loop.get_debug.return_value = True

        # simple function
        create_filename = __file__
        create_lineno = sys._getframe().f_lineno + 1
        h = asyncio.TimerHandle(123, noop, (), self.loop)
        filename, lineno = test_utils.get_function_source(noop)
        self.assertEqual(repr(h),
                        '<TimerHandle when=123 noop() '
                        'at %s:%s created at %s:%s>'
                        % (filename, lineno, create_filename, create_lineno))

        # cancelled handle
        h.cancel()
        self.assertEqual(repr(h),
                        '<TimerHandle cancelled when=123 noop() '
                        'at %s:%s created at %s:%s>'
                        % (filename, lineno, create_filename, create_lineno))

    def test_timer_comparison(self):
        """Ordering follows 'when'; equality also considers cancellation."""
        def callback(*args):
            return args

        when = time.monotonic()

        h1 = asyncio.TimerHandle(when, callback, (), self.loop)
        h2 = asyncio.TimerHandle(when, callback, (), self.loop)
        # TODO: Use assertLess etc.
        self.assertFalse(h1 < h2)
        self.assertFalse(h2 < h1)
        self.assertTrue(h1 <= h2)
        self.assertTrue(h2 <= h1)
        self.assertFalse(h1 > h2)
        self.assertFalse(h2 > h1)
        self.assertTrue(h1 >= h2)
        self.assertTrue(h2 >= h1)
        self.assertTrue(h1 == h2)
        self.assertFalse(h1 != h2)

        # cancelling one side breaks equality even with identical deadlines
        h2.cancel()
        self.assertFalse(h1 == h2)

        h1 = asyncio.TimerHandle(when, callback, (), self.loop)
        h2 = asyncio.TimerHandle(when + 10.0, callback, (), self.loop)
        self.assertTrue(h1 < h2)
        self.assertFalse(h2 < h1)
        self.assertTrue(h1 <= h2)
        self.assertFalse(h2 <= h1)
        self.assertFalse(h1 > h2)
        self.assertTrue(h2 > h1)
        self.assertFalse(h1 >= h2)
        self.assertTrue(h2 >= h1)
        self.assertFalse(h1 == h2)
        self.assertTrue(h1 != h2)

        # comparison with a plain Handle is NotImplemented, not an error
        h3 = asyncio.Handle(callback, (), self.loop)
        self.assertIs(NotImplemented, h1.__eq__(h3))
        self.assertIs(NotImplemented, h1.__ne__(h3))
class AbstractEventLoopTests(unittest.TestCase):
    """Every AbstractEventLoop method must raise NotImplementedError."""

    def test_not_implemented(self):
        f = mock.Mock()
        loop = asyncio.AbstractEventLoop()
        self.assertRaises(
            NotImplementedError, loop.run_forever)
        self.assertRaises(
            NotImplementedError, loop.run_until_complete, None)
        self.assertRaises(
            NotImplementedError, loop.stop)
        self.assertRaises(
            NotImplementedError, loop.is_running)
        self.assertRaises(
            NotImplementedError, loop.is_closed)
        self.assertRaises(
            NotImplementedError, loop.close)
        self.assertRaises(
            NotImplementedError, loop.create_task, None)
        self.assertRaises(
            NotImplementedError, loop.call_later, None, None)
        self.assertRaises(
            NotImplementedError, loop.call_at, f, f)
        self.assertRaises(
            NotImplementedError, loop.call_soon, None)
        self.assertRaises(
            NotImplementedError, loop.time)
        self.assertRaises(
            NotImplementedError, loop.call_soon_threadsafe, None)
        self.assertRaises(
            NotImplementedError, loop.run_in_executor, f, f)
        self.assertRaises(
            NotImplementedError, loop.set_default_executor, f)
        self.assertRaises(
            NotImplementedError, loop.getaddrinfo, 'localhost', 8080)
        self.assertRaises(
            NotImplementedError, loop.getnameinfo, ('localhost', 8080))
        self.assertRaises(
            NotImplementedError, loop.create_connection, f)
        self.assertRaises(
            NotImplementedError, loop.create_server, f)
        self.assertRaises(
            NotImplementedError, loop.create_datagram_endpoint, f)
        self.assertRaises(
            NotImplementedError, loop.add_reader, 1, f)
        self.assertRaises(
            NotImplementedError, loop.remove_reader, 1)
        self.assertRaises(
            NotImplementedError, loop.add_writer, 1, f)
        self.assertRaises(
            NotImplementedError, loop.remove_writer, 1)
        self.assertRaises(
            NotImplementedError, loop.sock_recv, f, 10)
        self.assertRaises(
            NotImplementedError, loop.sock_sendall, f, 10)
        self.assertRaises(
            NotImplementedError, loop.sock_connect, f, f)
        self.assertRaises(
            NotImplementedError, loop.sock_accept, f)
        self.assertRaises(
            NotImplementedError, loop.add_signal_handler, 1, f)
        self.assertRaises(
            NotImplementedError, loop.remove_signal_handler, 1)
        self.assertRaises(
            NotImplementedError, loop.remove_signal_handler, 1)
        self.assertRaises(
            NotImplementedError, loop.connect_read_pipe, f,
            mock.sentinel.pipe)
        self.assertRaises(
            NotImplementedError, loop.connect_write_pipe, f,
            mock.sentinel.pipe)
        self.assertRaises(
            NotImplementedError, loop.subprocess_shell, f,
            mock.sentinel)
        self.assertRaises(
            NotImplementedError, loop.subprocess_exec, f)
        self.assertRaises(
            NotImplementedError, loop.set_exception_handler, f)
        self.assertRaises(
            NotImplementedError, loop.default_exception_handler, f)
        self.assertRaises(
            NotImplementedError, loop.call_exception_handler, f)
        self.assertRaises(
            NotImplementedError, loop.get_debug)
        self.assertRaises(
            NotImplementedError, loop.set_debug, f)
class ProtocolsAbsTests(unittest.TestCase):
    """The base protocol classes must be no-ops (all methods return None)."""

    def test_empty(self):
        f = mock.Mock()
        p = asyncio.Protocol()
        self.assertIsNone(p.connection_made(f))
        self.assertIsNone(p.connection_lost(f))
        self.assertIsNone(p.data_received(f))
        self.assertIsNone(p.eof_received())

        dp = asyncio.DatagramProtocol()
        self.assertIsNone(dp.connection_made(f))
        self.assertIsNone(dp.connection_lost(f))
        self.assertIsNone(dp.error_received(f))
        self.assertIsNone(dp.datagram_received(f, f))

        sp = asyncio.SubprocessProtocol()
        self.assertIsNone(sp.connection_made(f))
        self.assertIsNone(sp.connection_lost(f))
        self.assertIsNone(sp.pipe_data_received(1, f))
        self.assertIsNone(sp.pipe_connection_lost(1, f))
        self.assertIsNone(sp.process_exited())
class PolicyTests(unittest.TestCase):
    """Tests for the asyncio event-loop policy machinery."""

    def test_event_loop_policy(self):
        # The abstract policy must leave every operation unimplemented.
        policy = asyncio.AbstractEventLoopPolicy()
        self.assertRaises(NotImplementedError, policy.get_event_loop)
        self.assertRaises(NotImplementedError, policy.set_event_loop, object())
        self.assertRaises(NotImplementedError, policy.new_event_loop)
        self.assertRaises(NotImplementedError, policy.get_child_watcher)
        self.assertRaises(NotImplementedError, policy.set_child_watcher,
                          object())

    def test_get_event_loop(self):
        policy = asyncio.DefaultEventLoopPolicy()
        self.assertIsNone(policy._local._loop)

        # The first call lazily creates the loop; subsequent calls
        # return the exact same instance.
        loop = policy.get_event_loop()
        self.assertIsInstance(loop, asyncio.AbstractEventLoop)

        self.assertIs(policy._local._loop, loop)
        self.assertIs(loop, policy.get_event_loop())
        loop.close()

    def test_get_event_loop_calls_set_event_loop(self):
        policy = asyncio.DefaultEventLoopPolicy()

        with mock.patch.object(
                policy, "set_event_loop",
                wraps=policy.set_event_loop) as m_set_event_loop:

            loop = policy.get_event_loop()

            # policy._local._loop must be set through .set_event_loop()
            # (the unix DefaultEventLoopPolicy needs this call to attach
            # the child watcher correctly)
            m_set_event_loop.assert_called_with(loop)

        loop.close()

    def test_get_event_loop_after_set_none(self):
        # Once the loop has been explicitly cleared, get_event_loop()
        # must not silently create a new one.
        policy = asyncio.DefaultEventLoopPolicy()
        policy.set_event_loop(None)
        self.assertRaises(RuntimeError, policy.get_event_loop)

    @mock.patch('asyncio.events.threading.current_thread')
    def test_get_event_loop_thread(self, m_current_thread):
        # In a non-main thread no event loop is created implicitly.
        def f():
            policy = asyncio.DefaultEventLoopPolicy()
            self.assertRaises(RuntimeError, policy.get_event_loop)

        th = threading.Thread(target=f)
        th.start()
        th.join()

    def test_new_event_loop(self):
        policy = asyncio.DefaultEventLoopPolicy()

        loop = policy.new_event_loop()
        self.assertIsInstance(loop, asyncio.AbstractEventLoop)
        loop.close()

    def test_set_event_loop(self):
        policy = asyncio.DefaultEventLoopPolicy()
        old_loop = policy.get_event_loop()

        # Only event loops (or None) may be installed.
        self.assertRaises(AssertionError, policy.set_event_loop, object())

        loop = policy.new_event_loop()
        policy.set_event_loop(loop)
        self.assertIs(loop, policy.get_event_loop())
        self.assertIsNot(old_loop, policy.get_event_loop())
        loop.close()
        old_loop.close()

    def test_get_event_loop_policy(self):
        # The process-global policy is created once and then cached.
        policy = asyncio.get_event_loop_policy()
        self.assertIsInstance(policy, asyncio.AbstractEventLoopPolicy)
        self.assertIs(policy, asyncio.get_event_loop_policy())

    def test_set_event_loop_policy(self):
        self.assertRaises(
            AssertionError, asyncio.set_event_loop_policy, object())

        old_policy = asyncio.get_event_loop_policy()

        policy = asyncio.DefaultEventLoopPolicy()
        asyncio.set_event_loop_policy(policy)
        self.assertIs(policy, asyncio.get_event_loop_policy())
        self.assertIsNot(policy, old_policy)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| gpl-2.0 |
arpho/mmasgis5 | mmasgis/albero.py | 1 | 16405 | """
/***************************************************************************
Albero
A QGIS plugin
permette di selezionare rapidamente gli elementi di un progetto qgis tramite una struttura ad albero
-------------------
begin : 2012-04-06
author : (C) 2012 by Giuseppe D'Amico
email : damicogiuseppe77@gmail.com
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
# Import the PyQt and QGIS libraries
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from tree_sqlalchemy_class import *
from sqlalchemy import MetaData, Column, Table, ForeignKey
from sqlalchemy import *
from sqlalchemy.orm import backref, mapper, relation, sessionmaker
from tree_sqlalchemy_class import *
import PyQt4
import re
from qgis.core import *
from Tree import *
from progressBar import *
from functools import partial
# Initialize Qt resources from file resources.py
import resources
# Import the code for the dialog
from alberodialog import *
import MySQLdb
class Albero:
def makeList(self):
""""
genera la lista delle selezioni e' invocato direttamente dal metodo run
"""
#ottengo l'elenco dei layers caricati nel progetto
layersmap=QgsMapLayerRegistry.instance().mapLayers()
curLayer = self.iface.mapCanvas().currentLayer()
mc=self.iface.mapCanvas()
#self.log("nLayers test.py 194",self.nLayers)
# rimuovo il layer aggiunto dal plugin openLayers
#non posso cambiare il dict mentre sono in un ciclo
#quindi creo un dict con i soli layers che mi interessano
dummymap={}
print "inizio generazione albero"
for key in layersmap.iterkeys():
if (self.search(str(key),"_g"))or(self.match(str(key),'Cap')):
dummymap[key]=layersmap[key]
#self.log("dummymap={0}".format(dummymap),"203 test.py")
#self.log("layersmap={0}".format(layersmap),"204 test.py")
#self.log("lunghezza dummy={0}".format(len(dummymap)),"lunghezza original={0}".format(len(layersmap)))
for key in dummymap.iterkeys():
# self.log("esamino layers {0}".format(key),"test.py 212")
# self.log("type(key {0}".format(type(key))," ###")
curLayer=layersmap[key]
#print "curLayer {0} for key {1}".format(curLayer,key)
fProvider = curLayer.dataProvider()
myFields = fProvider.fields()
####settrace()()
#self.log("attributi nel layer {0}: {1}".format(curLayer.name(),[f.name() for f in myFields.values()]),"mmasgis.py 245")
# verificato cicla una volta
#aggiungo la lista dei campi selezionati sul layer
chiave=str(key[0:3])
#self.log("full key {0}".format(key),"test.py 214")
#self.log("key troncata test.py 215",key[0:3])
#self.log("chiave convertita test.py 216",self.cons.typeItem[chiave.lower()])
self.ui_tree.addLayer(str(key)[0:3], curLayer)
self.analizeLayers(curLayer,chiave.lower())
####settrace()()
#self.log("nome layer for key={0} in selectionList test.py 161".format(key),self.selectionListget[0])
# self.log("selectionList mmasgist.py 283",self.selectionList.getList())
    def analizeLayers(self,curLayer,key,nameLayer):
        """Query the given layer and register a selection node per feature.

        @param curLayer: QGIS layer object
        @param key: selection type, one of 'reg'/'com'/'pro'/'cap'
        @param nameLayer: layer name, used only for progress reporting
        """
        # Human-readable layer descriptions for the progress-bar label
        # (runtime strings kept verbatim, Italian UI).
        layersName={}
        layersName['reg']="delle regioni"
        layersName['com']=" dei comuni"
        layersName['cap']=" dei cap"
        layersName['pro']="delle province"
        screen = QtGui.QDesktopWidget().screenGeometry()
        size = self.progressBar.geometry()
        self.progressBar.setValueLabel("analisi del layer"+layersName[key] +" in corso")
        # Centre the progress bar on the screen.
        self.progressBar.move((screen.width()/2)-size.width()/2,(screen.height()/2)-size.height())
        self.progressBar.show()
        # Ids of the items currently selected on the layer.
        featids=curLayer.selectedFeaturesIds()
        fProvider = curLayer.dataProvider()
        self.progressBar.setMaximumPartial(fProvider.featureCount())
        feat = QgsFeature()
        allAttrs = fProvider.attributeIndexes()
        fProvider.select(allAttrs)
        attrmap=QgsFeature().attributeMap()
        # Dict of the categories to set on each Nodo.
        c={}
        c['pro']="provincia"
        c['com']='comune'
        c['Cap']='Cap'
        c['reg']='regione'
        # Dispatch table: which Tree method adds a node of each category.
        adder={}
        adder['pro']=self.tree.addProvincia
        adder['com']=self.tree.addComune
        adder['Cap']=self.tree.addCap
        adder['reg']=self.tree.addRegione
        # Per-category extractors mapping the raw attribute list to
        # header / father id / node id.
        fields={}
        fields['pro']={'header':lambda x:str(x[2][1]),'father':lambda x:x[0][1].toInt()[0],'Id':lambda x:x[1][1].toInt()[0]}
        fields['com']={'header':lambda x: unicode(x[3][1]),'father':lambda x : x[1][1].toInt()[0],'Id':lambda x: unicode( x[3][1])}# the Id lambda would be x[2][1].toInt()[0], but the cap's father_id is the comune name, so the comune header doubles as its id
        fields['Cap']={'header':lambda x: unicode(x[1][1]),'father':lambda x: unicode(x[6][1]),'Id':lambda x: x[0][1].toInt()[0]}
        fields['reg']={'header':lambda x:str(x[1][1]),'father':lambda x:0,'Id':lambda x:x[0][1].toInt()[0]}
        n=0
        while fProvider.nextFeature(feat):
            # Feature attributes differ between layers; normalise them to
            # a list of (index, string value) pairs.
            d=[(key1,feat.attributeMap()[key1].toString()) for key1 in feat.attributeMap().iterkeys()]
            cat=str(feat.typeName())[0:3]
            header=fields[cat]['header'](d)
            father=fields[cat]['father'](d)
            Id=fields[cat]['Id'](d)
            nodo=Nodo(header,Id,c[cat],father,feat.id())#(header,Id,cathegory,father)
            adder[cat](nodo)
            n=n+1
            self.progressBar.setValueParticular(n)
def match(self,string,pattern):
match=re.compile(pattern)
b=False
if match.match(string):
b=True
return b
"""
@param iface: e' l'oggetto richiesto da qgis per avviare il plugin dell'albero
@param user: oggetto user creato dalla procedura di login contiene le informazioni relative al db attivo
@note: con l'introduzione del secondo parametro la classe Albero non puo' esssere usata per il plugin albero
"""
    def __init__(self, iface,user):
        """@param iface: object required by QGIS to start the plugin
        @param user: user object created by the login procedure; holds
            the information about the active database
        """
        # Save reference to the QGIS interface
        self.iface = iface
        self.message=None
        self.tree=Tree(user)
        self.message="selected"
        self.progressBar=ProgressBarWindow(True,True)# progress window with two bars
        self.progressBar.setMaximumOverall(4)
        self.ui_tree=MainWindowAlbero(iface,self.tree)
def testExport(self,L):
f=open("/home/giuseppe/Scrivania/treeNodes.json","w")
#self.t.childrenAdder(self.t.getProvincia(), self.t.getComune())
#f.write("var treeNodes={text:'root','expanded':true,'children'"+":[") #file js
f.write('{"text":"root","expanded":"true","children"'+":[")
l=L[0:1]#prendo un sottoinsieme di regioni
#l.export(f)
for p in l:
p.exportJson(f)
f.write("]}")
    def exportDb(self,L):
        """Export the node tree into the MySQL ``mmasgisDB`` database.

        SECURITY NOTE(review): database credentials are hard-coded below;
        they should be moved to configuration.

        @param L: list of root nodes; each must implement
            ``exportDb(session, parent_id)``
        """
        dbfile='root'+":"+'mmasgisDB'+'@'+'localhost'+":"+'3306'
        engine = create_engine('mysql://root:vilu7240@localhost/mmasgisDB?charset=utf8&use_unicode=0', pool_recycle=3600)
        connection =MySQLdb.connect(user='root',passwd='vilu7240',db='mmasgisDB',host='localhost')# engine.connect()
        cur=connection.cursor()
        metadata = MetaData(bind=engine)
        Session = sessionmaker(bind=engine)
        session=Session()
        c=0
        # Every top-level node is exported with parent id -1 (root).
        for p in L:
            print " nodo ",c
            c+=1
            p.exportDb(session,-1)
        session.commit()
    def treeMaker(self):
        """Rebuild the whole tree from the layers loaded in the project
        and show the tree window."""
        self.tree.resetTree()
        layersmap=QgsMapLayerRegistry.instance().mapLayers()
        curLayer = self.iface.mapCanvas().currentLayer()
        mc=self.iface.mapCanvas()
        # The dict cannot be changed while iterating over it, so collect
        # the interesting layers ("_g" geographic layers plus 'Cap')
        # into a separate dict first.
        dummymap={}
        for key in layersmap.iterkeys():
            if (self.search(str(key),"_g"))or(self.match(str(key),'Cap')):
                dummymap[key]=layersmap[key]
        n=0
        for key in dummymap.iterkeys():
            self.progressBar.setValueOverall(n)
            curLayer=layersmap[key]
            chiave=str(key[0:3])
            # Register the layer so it can later be used to select
            # features on the map.
            self.ui_tree.addLayer(str(key)[0:3], curLayer)
            self.analizeLayers(curLayer,chiave.lower(),str(key))
            n+=1
        lista_nodi=self.tree.treeBuilder()
        for i in lista_nodi:
            if self.ui_tree.albero.topLevelItemCount()<20:
                self.ui_tree.albero.addTopLevelItem(i.getNode())
        self.ui_tree.etichetta.setText("Italia")
        # Centre the tree window on the screen.
        screen = QtGui.QDesktopWidget().screenGeometry()
        size = self.ui_tree.geometry()
        self.ui_tree.move((screen.width()/2)-size.width()*2,(screen.height()/2)-size.width())
        self.ui_tree.show()
        self.progressBar.close()
    def initGui(self):
        """Create the plugin toolbar action and hook it to :meth:`run`
        (standard QGIS plugin entry point)."""
        # Create action that will start plugin configuration
        self.action = QAction(QIcon(":/plugins/Albero/icon.png"), \
            "visualizzazione ad albero", self.iface.mainWindow())
        # connect the action to the run method
        QObject.connect(self.action, SIGNAL("triggered()"), self.run)
        # Add toolbar button and menu item
        self.iface.addToolBarIcon(self.action)
        self.iface.addPluginToMenu("&visualizzazione ad albero", self.action)
def search(self,string,pattern):
"""
esegue il metodo search su una stringa:
@param string: stringa da analizzare
@param string: pattern dell' espressione regolare
@return: boolean
"""
match=re.compile(pattern)
b=False
if match.search(string):
b=True
return b
    def putNode(self,nodo):
        """Insert a node into the geographic-selection list.

        @param nodo: Nodo instance to show in the selection GUI
        """
        self.ui_tree.selectNode(nodo)
    def getSelectedNodes(self,selections):
        """Return the tree nodes matching the given feature-id list.

        @param selections: [(feat_id, layer_alias)] where the alias is
            one of 'cap', 'regione', 'comune', 'provincia'
        @return: [Nodo]
        """
        selectedNodes=[]
        for i in selections:
            print "selezioni",i
            nodo=self.tree.fetchNode(i[1], i[0])
            selectedNodes.append(nodo)
        return selectedNodes
    def showNodes(self,selectedNodes):
        """Select the given nodes and show them in the selection GUI.

        @param selectedNodes: [Nodo]
        """
        for nodo in selectedNodes:
            # Select the node together with all of its descendants...
            nodo.setSelected(True,True)
            # ...and make it visible.
            nodo.setVisible(True)
            # Show the geographic-selection window
            self.ui_tree.showSelectionGui()
            # and add the node to it.
            self.putNode(nodo)
    def slotSelectionChanged(self,*a):
        """Slot connected to the QGIS ``selectionChanged`` signal."""
        selections=self.getSelectionsByMetadataLayer(a[0],a[1])
        selectedNodes=self.getSelectedNodes(selections)
        # An empty selection resets the tree GUI.
        if len(selectedNodes)==0:
            self.ui_tree.reset()
        self.showNodes(selectedNodes)
    def getSelectionsByMetadataLayer(self, *a):
        """Return the features currently selected on the given layer.

        @param a: (message string, QgsVectorLayer)
        @return: [(long, string)] list of (feat_id, layer_alias) where
            the alias is one of 'cap', 'regione', 'comune', 'provincia'
        """
        metadata=a[1].name()
        # Map the layer name to its alias.
        layer=self.retrieveLayer(metadata)
        selectedFeatures=a[1].selectedFeaturesIds()
        selectedNodes=[]
        for i in selectedFeatures:
            selectedNodes.append((i,layer))
        return selectedNodes
"""
for n in selectedNodes:
#print "selezione "+str(n)+"\n"
#aggiungo il nodo a selectionGui
self.ui_tree.selectNode(n)
"""
#self.dlg.listWidget.addItem(item)
def retrieveLayer(self,metadata):
"""
determina il nome del layer dai suoi metadata
@param string:metadata del layer
@return: string sigla layer:<'cap',<'pro'>,<'reg'>,<'com'>
"""
layersTest=[]
layersTest.append(('regione','reg'))
layersTest.append(('provincia','prov'))
layersTest.append(('comune','comuni'))
layersTest.append(('cap','CAP'))
for i in layersTest:
if self.search(metadata, i[1]):
return i[0]
    def selectedChanged(self,t):
        """Debug/experimental slot: inspects the first registered layer.

        NOTE(review): every lookup result below is discarded — this
        method appears to be dead code kept for debugging.
        """
        layersmap=QgsMapLayerRegistry.instance().mapLayers()
        curLayer = curLayer=layersmap[layersmap.keys()[0]]#self.iface.mapCanvas().currentLayer()
        layersmap=QgsMapLayerRegistry.instance().mapLayers()
        fProvider = curLayer.dataProvider()
        myFields = fProvider.fields()
        featids=curLayer.selectedFeaturesIds()
        feat = QgsFeature()
    def unload(self):
        """Remove the plugin menu item and toolbar icon (QGIS plugin API)."""
        # Remove the plugin menu item and icon
        self.iface.removePluginMenu("&visualizzazione ad albero",self.action)
        self.iface.removeToolBarIcon(self.action)
    def getLayer(self):
        """Return the alias of the current layer.

        @return: string, one of 'reg', 'pro', 'Cap', 'com'
        """
        layersmap=QgsMapLayerRegistry.instance().mapLayers()
        curLayer= self.iface.mapCanvas().currentLayer()#layersmap[layersmap.keys()[0]]
        # Fall back to the first registered layer when none is current.
        if curLayer is None:
            curLayer=layersmap[layersmap.keys()[0]]
        fProvider = curLayer.dataProvider()
        fProvider = curLayer.dataProvider()
        fieldMap=fProvider.fieldNameMap()
        layers={}
        # Characteristic field name of each layer type.
        regione=PyQt4.QtCore.QString(unicode('NOME_REG'))#1
        provincia= PyQt4.QtCore.QString(unicode('SIGLA_PRO'))#5
        comune=PyQt4.QtCore.QString(unicode('NOME_COM'))# 6
        cap=PyQt4.QtCore.QString(unicode('nome1'))#2
        # (expected field index in the layer's field map, layer alias)
        layers[regione]=(1,"reg")
        layers[provincia]=(5,"pro")
        layers[comune]=(3,"com")
        layers[cap]=(2,"Cap")
        for k in layers.iterkeys():
            if fieldMap.has_key(k):
                print "chiave presente {0}".format(k)
                if layers[k][0]==fieldMap[k]:
                    return layers[k][1]
    def findAllSelections(self):
        """Collect the selections made on every loaded layer.

        @return: [(feat_id, layer_alias)]
        """
        selections=[]
        # All the layers loaded in the project.
        layersmap=QgsMapLayerRegistry.instance().mapLayers()
        for key in layersmap.iterkeys():
            curLayer=layersmap[key]
            selections+=self.getSelectionsByMetadataLayer("",curLayer)
        return selections
    def run(self):
        """Action entry point: rebuild the tree, hook the selection
        signal and restore any existing map selections."""
        self.ui_tree.reset()
        self.treeMaker()
        # Bind the (fixed) message as first argument of the slot.
        wrapped_slot = partial(self.slotSelectionChanged, self.message)
        self.iface.mapCanvas().selectionChanged.connect(wrapped_slot)
        # Look up the selections on every layer...
        selections= self.findAllSelections()
        # ...then show and select the matching nodes.
        nodes=self.getSelectedNodes(selections)
        self.showNodes(nodes)
| mit |
wweiradio/django | django/contrib/gis/gdal/driver.py | 526 | 3260 | from ctypes import c_void_p
from django.contrib.gis.gdal.base import GDALBase
from django.contrib.gis.gdal.error import GDALException
from django.contrib.gis.gdal.prototypes import ds as vcapi, raster as rcapi
from django.utils import six
from django.utils.encoding import force_bytes, force_text
class Driver(GDALBase):
    """
    Wraps a GDAL/OGR Data Source Driver.
    For more information, see the C API source code:
    http://www.gdal.org/gdal_8h.html - http://www.gdal.org/ogr__api_8h.html
    """

    # Case-insensitive aliases for some GDAL/OGR Drivers.
    # For a complete list of original driver names see
    # http://www.gdal.org/ogr_formats.html (vector)
    # http://www.gdal.org/formats_list.html (raster)
    _alias = {
        # vector
        'esri': 'ESRI Shapefile',
        'shp': 'ESRI Shapefile',
        'shape': 'ESRI Shapefile',
        'tiger': 'TIGER',
        'tiger/line': 'TIGER',
        # raster
        'tiff': 'GTiff',
        'tif': 'GTiff',
        'jpeg': 'JPEG',
        'jpg': 'JPEG',
    }

    def __init__(self, dr_input):
        """
        Initializes an GDAL/OGR driver on either a string or integer input.

        Accepts a driver name (string), a driver index (int) or an
        already-obtained driver pointer (ctypes ``c_void_p``).
        """
        if isinstance(dr_input, six.string_types):
            # If a string name of the driver was passed in
            self.ensure_registered()

            # Checking the alias dictionary (case-insensitive) to see if an
            # alias exists for the given driver.
            if dr_input.lower() in self._alias:
                name = self._alias[dr_input.lower()]
            else:
                name = dr_input

            # Attempting to get the GDAL/OGR driver by the string name:
            # try the vector API first, then the raster API.
            for iface in (vcapi, rcapi):
                driver = iface.get_driver_by_name(force_bytes(name))
                if driver:
                    break
        elif isinstance(dr_input, int):
            self.ensure_registered()
            for iface in (vcapi, rcapi):
                driver = iface.get_driver(dr_input)
                if driver:
                    break
        elif isinstance(dr_input, c_void_p):
            driver = dr_input
        else:
            raise GDALException('Unrecognized input type for GDAL/OGR Driver: %s' % str(type(dr_input)))

        # Making sure we get a valid pointer to the OGR Driver
        if not driver:
            raise GDALException('Could not initialize GDAL/OGR Driver on input: %s' % str(dr_input))
        self.ptr = driver

    def __str__(self):
        return self.name

    @classmethod
    def ensure_registered(cls):
        """
        Attempts to register all the data source drivers.
        """
        # Only register all if the driver count is 0 (or else all drivers
        # will be registered over and over again)
        if not cls.driver_count():
            vcapi.register_all()
            rcapi.register_all()

    @classmethod
    def driver_count(cls):
        """
        Returns the number of GDAL/OGR data source drivers registered.
        """
        return vcapi.get_driver_count() + rcapi.get_driver_count()

    @property
    def name(self):
        """
        Returns description/name string for this driver.
        """
        return force_text(rcapi.get_driver_description(self.ptr))
| bsd-3-clause |
saurabh6790/omn-app | accounts/doctype/gl_entry/gl_entry.py | 14 | 6905 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import webnotes
from webnotes.utils import flt, fmt_money, getdate
from webnotes import _
class DocType:
    """Controller for a single GL Entry document (webnotes/ERPNext)."""

    def __init__(self,d,dl):
        self.doc, self.doclist = d, dl

    def validate(self):
        # Run all field-level validations before save.
        self.check_mandatory()
        self.pl_must_have_cost_center()
        self.validate_posting_date()
        self.check_pl_account()
        self.validate_cost_center()

    def on_update_with_args(self, adv_adj, update_outstanding = 'Yes'):
        """Post-update hook.

        @param adv_adj: truthy while adjusting advances (relaxes checks)
        @param update_outstanding: 'Yes' to refresh the outstanding
            amount of the voucher this entry is booked against
        """
        self.validate_account_details(adv_adj)
        validate_frozen_account(self.doc.account, adv_adj)
        check_freezing_date(self.doc.posting_date, adv_adj)
        check_negative_balance(self.doc.account, adv_adj)

        # Update outstanding amt on against voucher
        if self.doc.against_voucher and self.doc.against_voucher_type != "POS" \
            and update_outstanding == 'Yes':
            update_outstanding_amt(self.doc.account, self.doc.against_voucher_type,
                self.doc.against_voucher)

    def check_mandatory(self):
        # These fields must be present on every GL entry.
        mandatory = ['account','remarks','voucher_type','voucher_no','fiscal_year','company']
        for k in mandatory:
            if not self.doc.fields.get(k):
                webnotes.throw(k + _(" is mandatory for GL Entry"))

        # Zero value transaction is not allowed
        if not (flt(self.doc.debit) or flt(self.doc.credit)):
            webnotes.throw(_("GL Entry: Debit or Credit amount is mandatory for ") +
                self.doc.account)

    def pl_must_have_cost_center(self):
        # P&L postings need a cost center (except year-end closing);
        # balance-sheet postings must not carry one.
        if webnotes.conn.get_value("Account", self.doc.account, "is_pl_account") == "Yes":
            if not self.doc.cost_center and self.doc.voucher_type != 'Period Closing Voucher':
                webnotes.throw(_("Cost Center must be specified for PL Account: ") +
                    self.doc.account)
        elif self.doc.cost_center:
            self.doc.cost_center = None

    def validate_posting_date(self):
        from accounts.utils import validate_fiscal_year
        validate_fiscal_year(self.doc.posting_date, self.doc.fiscal_year, "Posting Date")

    def check_pl_account(self):
        if self.doc.is_opening=='Yes' and \
            webnotes.conn.get_value("Account", self.doc.account, "is_pl_account") == "Yes":
            webnotes.throw(_("For opening balance entry account can not be a PL account"))

    def validate_account_details(self, adv_adj):
        """Account must be ledger, active and not freezed"""
        ret = webnotes.conn.sql("""select group_or_ledger, docstatus, company
            from tabAccount where name=%s""", self.doc.account, as_dict=1)[0]

        if ret.group_or_ledger=='Group':
            webnotes.throw(_("Account") + ": " + self.doc.account + _(" is not a ledger"))

        if ret.docstatus==2:
            webnotes.throw(_("Account") + ": " + self.doc.account + _(" is not active"))

        if ret.company != self.doc.company:
            webnotes.throw(_("Account") + ": " + self.doc.account +
                _(" does not belong to the company") + ": " + self.doc.company)

    def validate_cost_center(self):
        # Cache cost-center -> company lookups across repeated calls.
        if not hasattr(self, "cost_center_company"):
            self.cost_center_company = {}

        def _get_cost_center_company():
            if not self.cost_center_company.get(self.doc.cost_center):
                self.cost_center_company[self.doc.cost_center] = webnotes.conn.get_value(
                    "Cost Center", self.doc.cost_center, "company")

            return self.cost_center_company[self.doc.cost_center]

        if self.doc.cost_center and _get_cost_center_company() != self.doc.company:
            webnotes.throw(_("Cost Center") + ": " + self.doc.cost_center +
                _(" does not belong to the company") + ": " + self.doc.company)
def check_negative_balance(account, adv_adj=False):
    """Block the posting when the account would go negative and negative
    balances are not allowed for it (skipped during advance adjustment)."""
    if not adv_adj and account:
        account_details = webnotes.conn.get_value("Account", account,
            ["allow_negative_balance", "debit_or_credit"], as_dict=True)

        if not account_details["allow_negative_balance"]:
            balance = webnotes.conn.sql("""select sum(debit) - sum(credit) from `tabGL Entry`
                where account = %s""", account)
            # Normalise the sign so a positive value always means a
            # balance on the account's natural side.
            balance = account_details["debit_or_credit"] == "Debit" and \
                flt(balance[0][0]) or -1*flt(balance[0][0])

            if flt(balance) < 0:
                webnotes.throw(_("Negative balance is not allowed for account ") + account)
def check_freezing_date(posting_date, adv_adj=False):
    """
    Nobody can do GL Entries where posting date is before freezing date
    except authorized person
    """
    if not adv_adj:
        acc_frozen_upto = webnotes.conn.get_value('Accounts Settings', None, 'acc_frozen_upto')
        if acc_frozen_upto:
            # Role allowed to create back-dated entries.
            bde_auth_role = webnotes.conn.get_value( 'Accounts Settings', None,'bde_auth_role')
            if getdate(posting_date) <= getdate(acc_frozen_upto) \
                and not bde_auth_role in webnotes.user.get_roles():
                webnotes.throw(_("You are not authorized to do/modify back dated entries before ")
                    + getdate(acc_frozen_upto).strftime('%d-%m-%Y'))
def update_outstanding_amt(account, against_voucher_type, against_voucher, on_cancel=False):
    """Recompute and store the outstanding amount of a voucher.

    @param account: ledger account of the party
    @param against_voucher_type: "Sales Invoice", "Purchase Invoice" or
        "Journal Voucher"
    @param against_voucher: name of the voucher being settled
    @param on_cancel: True while cancelling the voucher (a negative
        outstanding is then tolerated)
    """
    # Net balance booked against the voucher.
    bal = flt(webnotes.conn.sql("""select sum(ifnull(debit, 0)) - sum(ifnull(credit, 0))
        from `tabGL Entry`
        where against_voucher_type=%s and against_voucher=%s and account = %s""",
        (against_voucher_type, against_voucher, account))[0][0] or 0.0)

    if against_voucher_type == 'Purchase Invoice':
        bal = -bal
    elif against_voucher_type == "Journal Voucher":
        against_voucher_amount = flt(webnotes.conn.sql("""
            select sum(ifnull(debit, 0)) - sum(ifnull(credit, 0))
            from `tabGL Entry` where voucher_type = 'Journal Voucher' and voucher_no = %s
            and account = %s and ifnull(against_voucher, '') = ''""",
            (against_voucher, account))[0][0])

        bal = against_voucher_amount + bal
        if against_voucher_amount < 0:
            bal = -bal

    # Validation : Outstanding can not be negative
    if bal < 0 and not on_cancel:
        webnotes.throw(_("Outstanding for Voucher ") + against_voucher + _(" will become ") +
            fmt_money(bal) + _(". Outstanding cannot be less than zero. \
            Please match exact outstanding."))

    # Update outstanding amt on against voucher.
    # BUG FIX: the values used to be spliced into the SQL string via
    # "%s"/'%s' formatting, which breaks on voucher names containing a
    # quote and is an SQL-injection vector. Only the table name (a
    # controlled value) is formatted in; the data travels as bound
    # parameters.
    if against_voucher_type in ["Sales Invoice", "Purchase Invoice"]:
        webnotes.conn.sql("update `tab%s` set outstanding_amount=%%s where name=%%s" %
            against_voucher_type, (bal, against_voucher))
def validate_frozen_account(account, adv_adj=None):
    """Reject postings to a frozen account unless the current user holds
    the configured "frozen accounts modifier" role."""
    frozen_account = webnotes.conn.get_value("Account", account, "freeze_account")
    if frozen_account == 'Yes' and not adv_adj:
        frozen_accounts_modifier = webnotes.conn.get_value( 'Accounts Settings', None,
            'frozen_accounts_modifier')

        if not frozen_accounts_modifier:
            webnotes.throw(account + _(" is a frozen account. \
                Either make the account active or assign role in Accounts Settings \
                who can create / modify entries against this account"))
        elif frozen_accounts_modifier not in webnotes.user.get_roles():
            webnotes.throw(account + _(" is a frozen account. ") +
                _("To create / edit transactions against this account, you need role") + ": " +
                frozen_accounts_modifier)
eruffaldi/protosim | protorec/protocols.py | 1 | 19699 | import struct
import array
class proto_env:
    """Environment message: simulation time plus camera position (3-D)."""
    id = 0
    name = "env"
    size = 4
    bytesize = 33
    hash = "a7c9bdbae9b7c692af5d65a935fa8c78"
    meta = (("time",1,"double"),("camera",3,"double"),)

    def __init__(self):
        # Scalars default to 0.0; vectors to zero-filled double arrays.
        self.time = 0.0
        self.camera = array.array('d', [0.0] * 3)

    def decode(self, data):
        """Populate the fields from *data*; False when too short."""
        if len(data) < 32:
            return False
        self.time = struct.unpack_from('d', data, 0)[0]
        self.camera = struct.unpack_from('3d', data, 8)
        return True

    def encode(self, io):
        """Write the packed representation to *io*; always returns True."""
        io.write(struct.pack('4d', self.time, *self.camera))
        return True
class proto_boat:
    """Boat message: id, 3-D position and heading (psi)."""
    id = 1
    name = "boat"
    size = 5
    bytesize = 41
    hash = "87aef19be1d945eb6ea21e42b88bdac2"
    meta = (("boatid",1,"double"),("position",3,"double"),("psi",1,"double"),)

    def __init__(self):
        self.boatid = 0.0
        self.position = array.array('d', [0.0] * 3)
        self.psi = 0.0

    def decode(self, data):
        """Populate the fields from *data*; False when too short."""
        if len(data) < 40:
            return False
        self.boatid = struct.unpack_from('d', data, 0)[0]
        self.position = struct.unpack_from('3d', data, 8)
        self.psi = struct.unpack_from('d', data, 32)[0]
        return True

    def encode(self, io):
        """Write the packed representation to *io*; always returns True."""
        io.write(struct.pack('5d', self.boatid, *self.position, self.psi))
        return True
class proto_avatar:
    """Avatar message: id, rowing angles (alphas/phis) and seat position."""
    id = 2
    name = "avatar"
    size = 6
    bytesize = 49
    hash = "2fcb99f1cb7c3d7f24fd4fe5f4e4089b"
    meta = (("avatarid",1,"double"),("alphas",2,"double"),("phis",2,"double"),("seat",1,"double"),)

    def __init__(self):
        self.avatarid = 0.0
        self.alphas = array.array('d', [0.0] * 2)
        self.phis = array.array('d', [0.0] * 2)
        self.seat = 0.0

    def decode(self, data):
        """Populate the fields from *data*; False when too short."""
        if len(data) < 48:
            return False
        self.avatarid = struct.unpack_from('d', data, 0)[0]
        self.alphas = struct.unpack_from('2d', data, 8)
        self.phis = struct.unpack_from('2d', data, 24)
        self.seat = struct.unpack_from('d', data, 40)[0]
        return True

    def encode(self, io):
        """Write the packed representation to *io*; always returns True."""
        io.write(struct.pack('6d', self.avatarid,
                             *self.alphas, *self.phis, self.seat))
        return True
class proto_visibility:
    """Visibility toggle message for a scene entity."""
    id = 3
    name = "visibility"
    size = 2
    bytesize = 17
    hash = "347aab2d90fdbcae3513fc823e5dbe28"
    meta = (("entityid",1,"double"),("show",1,"double"),)

    def __init__(self):
        self.entityid = 0.0
        self.show = 0.0

    def decode(self, data):
        """Populate the fields from *data*; False when too short."""
        if len(data) < 16:
            return False
        self.entityid, self.show = struct.unpack_from('2d', data, 0)
        return True

    def encode(self, io):
        """Write the packed representation to *io*; always returns True."""
        io.write(struct.pack('2d', self.entityid, self.show))
        return True
class proto_user_perf:
    """Per-avatar rowing performance metrics (12 consecutive doubles,
    in the order given by ``meta``)."""
    id = 4
    name = "user_perf"
    size = 12
    bytesize = 97
    hash = "5f99d875dcad77874748a4feab4a0b94"
    meta = (("avatarid",1,"double"),("tpass",1,"double"),("mpass",1,"double"),("trip",1,"double"),("mrip",1,"double"),("strokes",1,"double"),("tin",1,"double"),("tout",1,"double"),("vmean",1,"double"),("work",1,"double"),("efficiency",1,"double"),("powerout",1,"double"),)

    def __init__(self):
        # All metrics start at zero.
        for field_name, _count, _typ in self.meta:
            setattr(self, field_name, 0.0)

    def decode(self, data):
        """Populate all metrics from *data*; False when too short."""
        if len(data) < 96:
            return False
        values = struct.unpack_from('12d', data, 0)
        for (field_name, _count, _typ), value in zip(self.meta, values):
            setattr(self, field_name, value)
        return True

    def encode(self, io):
        """Write the packed representation to *io*; always returns True."""
        io.write(struct.pack(
            '12d', *(getattr(self, field_name) for field_name, _c, _t in self.meta)))
        return True
class proto_task:
    """Task definition message: distance to row."""
    id = 5
    name = "task"
    size = 1
    bytesize = 9
    hash = "3993512e4594c79305ef1397b145b569"
    meta = (("distance",1,"double"),)

    def __init__(self):
        self.distance = 0.0

    def decode(self, data):
        """Populate the field from *data*; False when too short."""
        if len(data) < 8:
            return False
        self.distance = struct.unpack_from('d', data, 0)[0]
        return True

    def encode(self, io):
        """Write the packed representation to *io*; always returns True."""
        io.write(struct.pack('d', self.distance))
        return True
class proto_multi2:
    """UI/plotting flags message (6 consecutive doubles, in ``meta`` order)."""
    id = 6
    name = "multi2"
    size = 6
    bytesize = 49
    hash = "fd979ae08bdb493287eeb0e19f503d44"
    meta = (("plotstat",1,"double"),("plotdyn",1,"double"),("resstatus",1,"double"),("xplotstat",1,"double"),("xplotdyn",1,"double"),("visualfeed",1,"double"),)

    def __init__(self):
        # Every flag starts at zero.
        for field_name, _count, _typ in self.meta:
            setattr(self, field_name, 0.0)

    def decode(self, data):
        """Populate all flags from *data*; False when too short."""
        if len(data) < 48:
            return False
        values = struct.unpack_from('6d', data, 0)
        for (field_name, _count, _typ), value in zip(self.meta, values):
            setattr(self, field_name, value)
        return True

    def encode(self, io):
        """Write the packed representation to *io*; always returns True."""
        io.write(struct.pack(
            '6d', *(getattr(self, field_name) for field_name, _c, _t in self.meta)))
        return True
class proto_widget:
    """Wire record toggling visibility of one widget (two doubles)."""
    id = 7
    name = "widget"
    size = 2
    bytesize = 17
    hash = "e4d4979b13b3cb0ebe20ff62663c2228"
    meta = (("widgetid",1,"double"),("visible",1,"double"),)

    def __init__(self):
        self.widgetid = 0.0
        self.visible = 0.0

    def decode(self, data):
        """Fill fields from packed *data*; False when *data* is truncated."""
        if len(data) < 16:
            return False
        self.widgetid, self.visible = struct.unpack('2d', data[:16])
        return True

    def encode(self, io):
        """Write the packed fields to the file-like *io*; returns True."""
        io.write(struct.pack('2d', self.widgetid, self.visible))
        return True
class proto_cameramode:
    """Wire record selecting a camera mode and its target entity."""
    id = 8
    name = "cameramode"
    size = 2
    bytesize = 17
    hash = "6436d9c7db23a2264cba398af7b0017c"
    meta = (("mode",1,"double"),("entityid",1,"double"),)

    def __init__(self):
        self.mode = 0.0
        self.entityid = 0.0

    def decode(self, data):
        """Fill fields from packed *data*; False when *data* is truncated."""
        if len(data) < 16:
            return False
        self.mode, self.entityid = struct.unpack('2d', data[:16])
        return True

    def encode(self, io):
        """Write the packed fields to the file-like *io*; returns True."""
        io.write(struct.pack('2d', self.mode, self.entityid))
        return True
class proto_cameraset:
    """Wire record placing the camera: mode plus 3-vector position/direction."""
    id = 9
    name = "cameraset"
    size = 7
    bytesize = 57
    hash = "62a457a83c57256c4329084b1bb6bd64"
    meta = (("mode",1,"double"),("position",3,"double"),("direction",3,"double"),)

    def __init__(self):
        self.mode = 0.0
        self.position = array.array('d', [0.0] * 3)
        self.direction = array.array('d', [0.0] * 3)

    def decode(self, data):
        """Fill fields from packed *data*; False when *data* is truncated."""
        if len(data) < 56:
            return False
        vals = struct.unpack('7d', data[:56])
        self.mode = vals[0]
        self.position = vals[1:4]
        self.direction = vals[4:7]
        return True

    def encode(self, io):
        """Write the packed fields to the file-like *io*; returns True."""
        fields = [self.mode]
        fields.extend(self.position)
        fields.extend(self.direction)
        io.write(struct.pack('7d', *fields))
        return True
class proto_subskills:
    """Wire record of sub-skill scores: 6 procedural values and 5 PDC values."""
    id = 10
    name = "subskills"
    size = 11
    bytesize = 89
    hash = "3cb1f1630664490e0afa47ced8429666"
    meta = (("procedural",6,"double"),("PDC",5,"double"),)

    def __init__(self):
        self.procedural = array.array('d', [0.0] * 6)
        self.PDC = array.array('d', [0.0] * 5)

    def decode(self, data):
        """Fill fields from packed *data*; False when *data* is truncated."""
        if len(data) < 88:
            return False
        vals = struct.unpack('11d', data[:88])
        self.procedural = vals[:6]
        self.PDC = vals[6:11]
        return True

    def encode(self, io):
        """Write the packed fields to the file-like *io*; returns True."""
        io.write(struct.pack('11d', *(list(self.procedural) + list(self.PDC))))
        return True
class proto_energy_measures:
    """Wire record carrying four energy measurements."""
    id = 11
    name = "energy_measures"
    size = 4
    bytesize = 33
    hash = "3dee8f7f266a54c1d12bf15574fe7325"
    meta = (("energy_measures",4,"double"),)

    def __init__(self):
        self.energy_measures = array.array('d', [0.0] * 4)

    def decode(self, data):
        """Fill fields from packed *data*; False when *data* is truncated."""
        if len(data) < 32:
            return False
        self.energy_measures = struct.unpack('4d', data[:32])
        return True

    def encode(self, io):
        """Write the packed fields to the file-like *io*; returns True."""
        io.write(struct.pack('4d', *self.energy_measures))
        return True
class proto_phases_times:
    """Wire record: eight phase times plus a drive/recovery ratio."""
    id = 12
    name = "phases_times"
    size = 9
    bytesize = 73
    hash = "6ee2b520cfdd6da26be89bb96b618c87"
    meta = (("phases_times",8,"double"),("drive_rec_ratio",1,"double"),)

    def __init__(self):
        self.phases_times = array.array('d', [0.0] * 8)
        self.drive_rec_ratio = 0.0

    def decode(self, data):
        """Fill fields from packed *data*; False when *data* is truncated."""
        if len(data) < 72:
            return False
        vals = struct.unpack('9d', data[:72])
        self.phases_times = vals[:8]
        self.drive_rec_ratio = vals[8]
        return True

    def encode(self, io):
        """Write the packed fields to the file-like *io*; returns True."""
        io.write(struct.pack('9d', *(list(self.phases_times) + [self.drive_rec_ratio])))
        return True
class proto_energy2:
    """Wire record of power-output error/tolerance plus HUD flag values."""
    id = 13
    name = "energy2"
    size = 10
    bytesize = 81
    hash = "038f889d0cd84b1ace00a2914eb42ef5"
    meta = (("PO_err",1,"double"),("PO_tol",1,"double"),("opp_dist",1,"double"),("target_dist",1,"double"),("hull_pos_in",1,"double"),("WU_flag",1,"double"),("VO2_flag",1,"double"),("arrow_flag",1,"double"),("race_flag",1,"double"),("opp_flag",1,"double"),)

    def __init__(self):
        self.PO_err = 0.0
        self.PO_tol = 0.0
        self.opp_dist = 0.0
        self.target_dist = 0.0
        self.hull_pos_in = 0.0
        self.WU_flag = 0.0
        self.VO2_flag = 0.0
        self.arrow_flag = 0.0
        self.race_flag = 0.0
        self.opp_flag = 0.0

    def decode(self, data):
        """Fill fields from packed *data*; False when *data* is truncated."""
        if len(data) < 80:
            return False
        (self.PO_err, self.PO_tol, self.opp_dist, self.target_dist,
         self.hull_pos_in, self.WU_flag, self.VO2_flag, self.arrow_flag,
         self.race_flag, self.opp_flag) = struct.unpack('10d', data[:80])
        return True

    def encode(self, io):
        """Write the packed fields to the file-like *io*; returns True."""
        io.write(struct.pack('10d', self.PO_err, self.PO_tol, self.opp_dist,
                             self.target_dist, self.hull_pos_in, self.WU_flag,
                             self.VO2_flag, self.arrow_flag, self.race_flag,
                             self.opp_flag))
        return True
class proto_techexp:
    """Wire record: display flags, eight point evaluations, one global score."""
    id = 14
    name = "techexp"
    size = 12
    bytesize = 97
    hash = "16bb28136d6dd73da800c44ef81996fa"
    meta = (("scenery_flag",1,"double"),("cube_flag",1,"double"),("VisFb_flag",1,"double"),("point_ev",8,"double"),("global_ev",1,"double"),)

    def __init__(self):
        self.scenery_flag = 0.0
        self.cube_flag = 0.0
        self.VisFb_flag = 0.0
        self.point_ev = array.array('d', [0.0] * 8)
        self.global_ev = 0.0

    def decode(self, data):
        """Fill fields from packed *data*; False when *data* is truncated."""
        if len(data) < 96:
            return False
        vals = struct.unpack('12d', data[:96])
        self.scenery_flag = vals[0]
        self.cube_flag = vals[1]
        self.VisFb_flag = vals[2]
        self.point_ev = vals[3:11]
        self.global_ev = vals[11]
        return True

    def encode(self, io):
        """Write the packed fields to the file-like *io*; returns True."""
        fields = [self.scenery_flag, self.cube_flag, self.VisFb_flag]
        fields.extend(self.point_ev)
        fields.append(self.global_ev)
        io.write(struct.pack('12d', *fields))
        return True
class proto_coord3:
    """Wire record of a coordination error value and a feedback flag."""
    id = 15
    name = "coord3"
    size = 2
    bytesize = 17
    hash = "55d8a8ec295e0ac413d8c2e7ca8d1f91"
    meta = (("error",1,"double"),("Fb_flag",1,"double"),)

    def __init__(self):
        self.error = 0.0
        self.Fb_flag = 0.0

    def decode(self, data):
        """Fill fields from packed *data*; False when *data* is truncated."""
        if len(data) < 16:
            return False
        self.error, self.Fb_flag = struct.unpack('2d', data[:16])
        return True

    def encode(self, io):
        """Write the packed fields to the file-like *io*; returns True."""
        io.write(struct.pack('2d', self.error, self.Fb_flag))
        return True
class proto_avatarcolor:
    """Wire record assigning a 4-component color to one avatar."""
    id = 16
    name = "avatarcolor"
    size = 5
    bytesize = 41
    hash = "e06e3fce2783c38b4f20b5369a680d54"
    meta = (("avatarid",1,"double"),("color",4,"double"),)

    def __init__(self):
        self.avatarid = 0.0
        self.color = array.array('d', [0.0] * 4)

    def decode(self, data):
        """Fill fields from packed *data*; False when *data* is truncated."""
        if len(data) < 40:
            return False
        vals = struct.unpack('5d', data[:40])
        self.avatarid = vals[0]
        self.color = vals[1:5]
        return True

    def encode(self, io):
        """Write the packed fields to the file-like *io*; returns True."""
        io.write(struct.pack('5d', self.avatarid, *self.color))
        return True
class proto_avatarext:
    """Wire record of extra avatar pose angles (back and head)."""
    id = 17
    name = "avatarext"
    size = 3
    bytesize = 25
    hash = "853d468c57ee9e4a62caadc9acbad829"
    meta = (("avatarid",1,"double"),("backangle",1,"double"),("headangle",1,"double"),)

    def __init__(self):
        self.avatarid = 0.0
        self.backangle = 0.0
        self.headangle = 0.0

    def decode(self, data):
        """Fill fields from packed *data*; False when *data* is truncated."""
        if len(data) < 24:
            return False
        self.avatarid, self.backangle, self.headangle = struct.unpack('3d', data[:24])
        return True

    def encode(self, io):
        """Write the packed fields to the file-like *io*; returns True."""
        io.write(struct.pack('3d', self.avatarid, self.backangle, self.headangle))
        return True
class proto_avatarshirt:
    """Wire record assigning a shirt style to one avatar."""
    id = 18
    name = "avatarshirt"
    size = 2
    bytesize = 17
    hash = "ea2e3cc921adfaa0551d98c4728b3e7e"
    meta = (("avatarid",1,"double"),("shirt",1,"double"),)

    def __init__(self):
        self.avatarid = 0.0
        self.shirt = 0.0

    def decode(self, data):
        """Fill fields from packed *data*; False when *data* is truncated."""
        if len(data) < 16:
            return False
        self.avatarid, self.shirt = struct.unpack('2d', data[:16])
        return True

    def encode(self, io):
        """Write the packed fields to the file-like *io*; returns True."""
        io.write(struct.pack('2d', self.avatarid, self.shirt))
        return True
class proto_rowconfig:
    """Wire record of the rowing configuration: boat and crew counts."""
    id = 19
    name = "rowconfig"
    size = 2
    bytesize = 17
    hash = "bf751387aef6d5fb50374ddab3dacee7"
    meta = (("boats",1,"double"),("crew",1,"double"),)

    def __init__(self):
        self.boats = 0.0
        self.crew = 0.0

    def decode(self, data):
        """Fill fields from packed *data*; False when *data* is truncated."""
        if len(data) < 16:
            return False
        self.boats, self.crew = struct.unpack('2d', data[:16])
        return True

    def encode(self, io):
        """Write the packed fields to the file-like *io*; returns True."""
        io.write(struct.pack('2d', self.boats, self.crew))
        return True
class proto_sprintdata:
    """Wire record of one sprint sample: time, two 3-vectors, seat position."""
    id = 20
    name = "sprintdata"
    size = 8
    bytesize = 65
    hash = "3cc5cb554740e50e6c97af3ab00fa63a"
    meta = (("time",1,"double"),("left_alpha_phi_fan",3,"double"),("right_alpha_phi_fan",3,"double"),("seat",1,"double"),)

    def __init__(self):
        self.time = 0.0
        self.left_alpha_phi_fan = array.array('d', [0.0] * 3)
        self.right_alpha_phi_fan = array.array('d', [0.0] * 3)
        self.seat = 0.0

    def decode(self, data):
        """Fill fields from packed *data*; False when *data* is truncated."""
        if len(data) < 64:
            return False
        vals = struct.unpack('8d', data[:64])
        self.time = vals[0]
        self.left_alpha_phi_fan = vals[1:4]
        self.right_alpha_phi_fan = vals[4:7]
        self.seat = vals[7]
        return True

    def encode(self, io):
        """Write the packed fields to the file-like *io*; returns True."""
        fields = [self.time]
        fields.extend(self.left_alpha_phi_fan)
        fields.extend(self.right_alpha_phi_fan)
        fields.append(self.seat)
        io.write(struct.pack('8d', *fields))
        return True
class proto_audio:
    """Wire record driving audio feedback: time, stroke rate, phase, volume."""
    id = 21
    name = "audio"
    size = 4
    bytesize = 33
    hash = "70877dc8e6924b74d77a0c046e3e15a2"
    meta = (("time",1,"double"),("strokes_min",1,"double"),("phase",1,"double"),("volume",1,"double"),)

    def __init__(self):
        self.time = 0.0
        self.strokes_min = 0.0
        self.phase = 0.0
        self.volume = 0.0

    def decode(self, data):
        """Fill fields from packed *data*; False when *data* is truncated."""
        if len(data) < 32:
            return False
        (self.time, self.strokes_min,
         self.phase, self.volume) = struct.unpack('4d', data[:32])
        return True

    def encode(self, io):
        """Write the packed fields to the file-like *io*; returns True."""
        io.write(struct.pack('4d', self.time, self.strokes_min, self.phase, self.volume))
        return True
class proto_vibrorun:
    """Wire record starting vibration on a device with a motor bitmask."""
    id = 22
    name = "vibrorun"
    size = 2
    bytesize = 17
    hash = "6f55ff4e1d316430e613565fa626fa1a"
    meta = (("device",1,"double"),("activemotors",1,"double"),)

    def __init__(self):
        self.device = 0.0
        self.activemotors = 0.0

    def decode(self, data):
        """Fill fields from packed *data*; False when *data* is truncated."""
        if len(data) < 16:
            return False
        self.device, self.activemotors = struct.unpack('2d', data[:16])
        return True

    def encode(self, io):
        """Write the packed fields to the file-like *io*; returns True."""
        io.write(struct.pack('2d', self.device, self.activemotors))
        return True
class proto_vibrosetup:
    """Wire record configuring one vibration motor (frequency/offset/duty)."""
    id = 23
    name = "vibrosetup"
    size = 5
    bytesize = 41
    hash = "599408ef77c229cc460666aa28b4d57e"
    meta = (("device",1,"double"),("motor",1,"double"),("frequency_Hz",1,"double"),("offset_0_100",1,"double"),("dutycycle_0_100",1,"double"),)

    def __init__(self):
        self.device = 0.0
        self.motor = 0.0
        self.frequency_Hz = 0.0
        self.offset_0_100 = 0.0
        self.dutycycle_0_100 = 0.0

    def decode(self, data):
        """Fill fields from packed *data*; False when *data* is truncated."""
        if len(data) < 40:
            return False
        (self.device, self.motor, self.frequency_Hz,
         self.offset_0_100, self.dutycycle_0_100) = struct.unpack('5d', data[:40])
        return True

    def encode(self, io):
        """Write the packed fields to the file-like *io*; returns True."""
        io.write(struct.pack('5d', self.device, self.motor, self.frequency_Hz,
                             self.offset_0_100, self.dutycycle_0_100))
        return True
class proto_start:
    """Wire record announcing a task start: a 36-slot task uuid (one double
    per character slot, per the meta declaration) plus session bookkeeping."""
    id = 24
    name = "start"
    size = 42
    bytesize = 337
    hash = "450c95b10a3baa75bf7769e872ddff49"
    meta = (("taskuuid",36,"double"),("tasktype",1,"double"),("userid",1,"double"),("sessionid",1,"double"),("blockid",1,"double"),("simtime",1,"double"),("wintime",1,"double"),)

    def __init__(self):
        self.taskuuid = array.array('d', [0.0] * 36)
        self.tasktype = 0.0
        self.userid = 0.0
        self.sessionid = 0.0
        self.blockid = 0.0
        self.simtime = 0.0
        self.wintime = 0.0

    def decode(self, data):
        """Fill fields from packed *data*; False when *data* is truncated."""
        if len(data) < 336:
            return False
        vals = struct.unpack('42d', data[:336])
        self.taskuuid = vals[:36]
        (self.tasktype, self.userid, self.sessionid,
         self.blockid, self.simtime, self.wintime) = vals[36:42]
        return True

    def encode(self, io):
        """Write the packed fields to the file-like *io*; returns True."""
        tail = (self.tasktype, self.userid, self.sessionid,
                self.blockid, self.simtime, self.wintime)
        io.write(struct.pack('42d', *(list(self.taskuuid) + list(tail))))
        return True
| mit |
yaojingwu1992/XlsxWriter | xlsxwriter/test/comparison/test_autofilter06.py | 8 | 2859 | ###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2015, John McNamara, jmcnamara@cpan.org
#
from ..excel_comparsion_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
    """
    Test file created by XlsxWriter against a file created by Excel.
    """

    def setUp(self):
        self.maxDiff = None

        filename = 'autofilter06.xlsx'

        test_dir = 'xlsxwriter/test/comparison/'
        self.got_filename = test_dir + '_test_' + filename
        self.exp_filename = test_dir + 'xlsx_files/' + filename
        self.txt_filename = test_dir + 'xlsx_files/' + 'autofilter_data.txt'

        self.ignore_files = []
        self.ignore_elements = {}

    def test_create_file(self):
        """
        Test the creation of a simple XlsxWriter file with an autofilter.
        This test corresponds to the following examples/autofilter.pl example:
        Example 6. Autofilter with filter for non-blanks.
        """
        workbook = Workbook(self.got_filename)
        worksheet = workbook.add_worksheet()

        # Set the autofilter.
        worksheet.autofilter('A1:D51')

        # Add filter criteria.
        worksheet.filter_column(0, 'x == NonBlanks')

        # Use a context manager so the data file is closed even if an
        # assertion or write raises part-way through (the original left
        # the handle open on failure).
        with open(self.txt_filename) as textfile:
            # Read the headers from the first line of the input file.
            headers = textfile.readline().strip("\n").split()

            # Write out the headers.
            worksheet.write_row('A1', headers)

            # Start writing data after the headers.
            row = 1

            # Read the rest of the text file and write it to the worksheet.
            for line in textfile:
                # Split the input data based on whitespace.
                data = line.strip("\n").split()

                # Convert the number data from the text file.
                for i, item in enumerate(data):
                    try:
                        data[i] = float(item)
                    except ValueError:
                        pass

                # Simulate a blank cell in the data.
                if row == 6:
                    data[0] = ''

                # Get some of the field data.
                region = data[0]

                # Hide rows that don't match the non-blanks filter.
                # (Simplified from the original's inverted if/pass/else.)
                if region == '':
                    worksheet.set_row(row, options={'hidden': True})

                # Write out the row data.
                worksheet.write_row(row, 0, data)

                # Move on to the next worksheet row.
                row += 1

        workbook.close()

        self.assertExcelEqual()
| bsd-2-clause |
kseistrup/qtile | libqtile/pangocffi.py | 2 | 6789 | # Copyright (c) 2014-2015 Sean Vig
# Copyright (c) 2014 roger
# Copyright (c) 2014 Tycho Andersen
# Copyright (c) 2015 Craig Barnes
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# This module is kind of a hack; you've been warned :-). Some upstream work
# needs to happen in order to avoid doing this, though.
#
# The problem is that we want to use pango to draw stuff. We need to create a
# cairo surface, in particular an XCB surface. Since we're using xcffib as the
# XCB binding and there is no portable way to go from cffi's PyObject* cdata
# wrappers to the wrapped type [1], we can't add support to pycairo for XCB
# surfaces via xcffib.
#
# A similar problem exists one layer of indirection down with cairocffi --
# python's pangocairo is almost all C, and only works by including pycairo's
# headers and accessing members of structs only available in C, and not in
# python. Since cairocffi is pure python and also cffi based, we cannot extract
# the raw pointer to pass to the existing pangocairo bindings.
#
# The solution here is to implement a tiny pangocffi for the small set of pango
# functions we call. We're doing it directly here because we can, but it would
# not be difficult to use more upstream libraries (e.g. cairocffi and some
# pangocairocffi when it exists). This also allows us to drop pygtk entirely,
# since we are doing our own pango binding.
#
# [1]: https://groups.google.com/forum/#!topic/python-cffi/SPND0rRmazA
#
# This is not intended to be a complete cffi-based pango binding.
import six
# PyPy < 2.6 compatibility
try:
from libqtile._ffi_pango import ffi
except ImportError:
from libqtile.ffi_build import pango_ffi as ffi
# Load the shared libraries once at import time; the functions called on
# these handles are declared in the cdefs compiled into `ffi` above.
gobject = ffi.dlopen('libgobject-2.0.so')
pango = ffi.dlopen('libpango-1.0.so')
pangocairo = ffi.dlopen('libpangocairo-1.0.so')
def CairoContext(cairo_t):
    """Monkey-patch pango helpers onto a cairocffi context.

    Adds ``create_layout()`` (returns a :class:`PangoLayout` bound to this
    context) and ``show_layout(layout)`` (renders it), then returns the
    same context object.
    """
    def create_layout():
        return PangoLayout(cairo_t._pointer)

    def show_layout(layout):
        pangocairo.pango_cairo_show_layout(cairo_t._pointer, layout._pointer)

    cairo_t.create_layout = create_layout
    cairo_t.show_layout = show_layout
    return cairo_t
# Re-export the pango constants/helpers callers need so they never have to
# touch the dlopen'd library handles directly.
ALIGN_CENTER = pango.PANGO_ALIGN_CENTER
ELLIPSIZE_END = pango.PANGO_ELLIPSIZE_END
units_from_double = pango.pango_units_from_double
def _const_char_to_py_str(cc):
    # Convert a cffi `const char *` return value into a Python string.
    # NOTE(review): len() on a cdata is only defined for array-typed cdata,
    # not plain pointers -- confirm the ffi cdefs make these returns
    # array-like; otherwise ffi.string(cc) would be the safer conversion.
    return ''.join(ffi.buffer(cc, len(cc)))
class PangoLayout(object):
    """Thin wrapper around a ``PangoLayout*`` created for a cairo context.

    Owns the underlying C object: the pointer is wrapped with ``ffi.gc`` so
    it is unreffed when this wrapper is garbage collected.
    """
    def __init__(self, cairo_t):
        self._cairo_t = cairo_t
        self._pointer = pangocairo.pango_cairo_create_layout(cairo_t)
        # Release the layout's GObject reference when the cffi wrapper dies.
        def free(p):
            p = ffi.cast("gpointer", p)
            gobject.g_object_unref(p)
        self._pointer = ffi.gc(self._pointer, free)

    def finalize(self):
        # Drop all references explicitly (triggers the ffi.gc destructor
        # once nothing else holds the pointer).
        self._desc = None
        self._pointer = None
        self._cairo_t = None

    def set_font_description(self, desc):
        # save a pointer so it doesn't get GC'd out from under us
        self._desc = desc
        pango.pango_layout_set_font_description(self._pointer, desc._pointer)

    def get_font_description(self):
        descr = pango.pango_layout_get_font_description(self._pointer)
        return FontDescription(descr)

    def set_alignment(self, alignment):
        pango.pango_layout_set_alignment(self._pointer, alignment)

    def set_attributes(self, attrs):
        pango.pango_layout_set_attributes(self._pointer, attrs)

    def set_text(self, text):
        # Pango expects UTF-8; -1 means NUL-terminated.
        text = text.encode('utf-8')
        pango.pango_layout_set_text(self._pointer, text, -1)

    def get_text(self):
        ret = pango.pango_layout_get_text(self._pointer)
        return _const_char_to_py_str(ret)

    def set_ellipsize(self, ellipzize):
        pango.pango_layout_set_ellipsize(self._pointer, ellipzize)

    def get_ellipsize(self):
        return pango.pango_layout_get_ellipsize(self._pointer)

    def get_pixel_size(self):
        # Out-parameters: pango writes the size into these int[1] buffers.
        width = ffi.new("int[1]")
        height = ffi.new("int[1]")
        pango.pango_layout_get_pixel_size(self._pointer, width, height)
        return width[0], height[0]

    def set_width(self, width):
        pango.pango_layout_set_width(self._pointer, width)
class FontDescription(object):
    """Thin wrapper around a ``PangoFontDescription*``.

    When created without a pointer, the description is allocated here and
    freed via ``ffi.gc`` when the wrapper is collected; an externally
    supplied pointer is adopted as-is.
    """
    def __init__(self, pointer=None):
        if pointer is None:
            self._pointer = pango.pango_font_description_new()
            self._pointer = ffi.gc(self._pointer, pango.pango_font_description_free)
        else:
            self._pointer = pointer

    @classmethod
    def from_string(cls, string):
        """Alternate constructor: parse a pango font string like ``"Sans 12"``."""
        pointer = pango.pango_font_description_from_string(string.encode())
        pointer = ffi.gc(pointer, pango.pango_font_description_free)
        return cls(pointer)

    def set_family(self, family):
        pango.pango_font_description_set_family(self._pointer, family.encode())

    def get_family(self):
        ret = pango.pango_font_description_get_family(self._pointer)
        return _const_char_to_py_str(ret)

    def set_absolute_size(self, size):
        pango.pango_font_description_set_absolute_size(self._pointer, size)

    def set_size(self, size):
        pango.pango_font_description_set_size(self._pointer, size)

    def get_size(self, size=None):
        # Bug fix: pango_font_description_get_size() takes only the
        # description pointer; the old code passed an extra argument, which
        # cffi rejects. *size* is kept (and ignored) for backward
        # compatibility with existing callers.
        return pango.pango_font_description_get_size(self._pointer)
def parse_markup(value, accel_marker=0):
    """Parse pango markup in *value*.

    Returns ``(attr_list, text, accel_char)``; raises ``Exception`` when
    pango rejects the markup.
    """
    attr_list = ffi.new("PangoAttrList**")
    text = ffi.new("char**")
    error = ffi.new("GError**")
    if six.PY3:
        value = value.encode()
    ok = pango.pango_parse_markup(value, -1, accel_marker, attr_list, text, ffi.NULL, error)
    if not ok:
        raise Exception("parse_markup() failed for %s" % value)
    return attr_list[0], ffi.string(text[0]), six.unichr(accel_marker)
def markup_escape_text(text):
    """Escape *text* so it can be embedded in pango markup."""
    escaped = gobject.g_markup_escape_text(text.encode(), -1)
    result = ffi.string(escaped)
    return result.decode() if six.PY3 else result
| mit |
bak1an/django | tests/forms_tests/widget_tests/test_timeinput.py | 47 | 1735 | from datetime import time
from django.forms import TimeInput
from django.test import override_settings
from django.utils import translation
from .base import WidgetTest
class TimeInputTest(WidgetTest):
    # Shared widget instance exercised by every render check below.
    widget = TimeInput()

    def test_render_none(self):
        """A None value renders as an empty text input."""
        self.check_html(self.widget, 'time', None, html='<input type="text" name="time" />')

    def test_render_value(self):
        """
        The microseconds are trimmed on display, by default.
        """
        t = time(12, 51, 34, 482548)
        self.assertEqual(str(t), '12:51:34.482548')
        self.check_html(self.widget, 'time', t, html='<input type="text" name="time" value="12:51:34" />')
        self.check_html(self.widget, 'time', time(12, 51, 34), html=(
            '<input type="text" name="time" value="12:51:34" />'
        ))
        self.check_html(self.widget, 'time', time(12, 51), html=(
            '<input type="text" name="time" value="12:51:00" />'
        ))

    def test_string(self):
        """Initializing from a string value."""
        self.check_html(self.widget, 'time', '13:12:11', html=(
            '<input type="text" name="time" value="13:12:11" />'
        ))

    def test_format(self):
        """
        Use 'format' to change the way a value is displayed.
        """
        t = time(12, 51, 34, 482548)
        widget = TimeInput(format='%H:%M', attrs={'type': 'time'})
        self.check_html(widget, 'time', t, html='<input type="time" name="time" value="12:51" />')

    @override_settings(USE_L10N=True)
    @translation.override('de-at')
    def test_l10n(self):
        """Localized rendering (de-at) still uses the H:i:s time format."""
        t = time(12, 51, 34, 482548)
        self.check_html(self.widget, 'time', t, html='<input type="text" name="time" value="12:51:34" />')
| bsd-3-clause |
knehez/edx-platform | pavelib/servers.py | 43 | 9473 | """
Run and manage servers for local development.
"""
from __future__ import print_function
import argparse
import sys

from paver.easy import *

from .assets import collect_assets
from .utils.cmd import django_cmd
from .utils.process import run_process, run_multi_processes
DEFAULT_PORT = {"lms": 8000, "studio": 8001}
DEFAULT_SETTINGS = 'devstack'
OPTIMIZED_SETTINGS = "devstack_optimized"
OPTIMIZED_ASSETS_SETTINGS = "test_static_optimized"
ASSET_SETTINGS_HELP = (
"Settings file used for updating assets. Defaults to the value of the settings variable if not provided."
)
def run_server(
        system, fast=False, settings=None, asset_settings=None, port=None, contracts=False
):
    """Start the server for LMS or Studio.

    Args:
        system (str): The system to be run (lms or studio).
        fast (bool): If true, then start the server immediately without updating assets (defaults to False).
        settings (str): The Django settings module to use; if not provided, use the default.
        asset_settings (str) The settings to use when generating assets. If not provided, assets are not generated.
        port (str): The port number to run the server on. If not provided, uses the default port for the system.
        contracts (bool) If true then PyContracts is enabled (defaults to False).
    """
    if system not in ['lms', 'studio']:
        print("System must be either lms or studio", file=sys.stderr)
        # Bug fix: use sys.exit() rather than the `exit` builtin, which is
        # an interactive-use helper injected by the site module and is not
        # guaranteed to exist (e.g. under `python -S`).
        sys.exit(1)

    if not settings:
        settings = DEFAULT_SETTINGS

    if not fast and asset_settings:
        args = [system, '--settings={}'.format(asset_settings), '--watch']
        # The default settings use DEBUG mode for running the server which means that
        # the optimized assets are ignored, so we skip collectstatic in that case
        # to save time.
        if settings == DEFAULT_SETTINGS:
            args.append('--skip-collect')
        call_task('pavelib.assets.update_assets', args=args)

    if port is None:
        port = DEFAULT_PORT[system]

    args = [settings, 'runserver', '--traceback', '--pythonpath=.', '0.0.0.0:{}'.format(port)]
    if contracts:
        args.append("--contracts")
    run_process(django_cmd(system, *args))
@task
@needs('pavelib.prereqs.install_prereqs')
@cmdopts([
    ("settings=", "s", "Django settings"),
    ("asset-settings=", "a", ASSET_SETTINGS_HELP),
    ("port=", "p", "Port"),
    ("fast", "f", "Skip updating assets"),
])
def lms(options):
    """
    Run the LMS server.
    """
    settings = getattr(options, 'settings', DEFAULT_SETTINGS)
    # Asset settings fall back to the server settings when not given.
    run_server(
        'lms',
        fast=getattr(options, 'fast', False),
        settings=settings,
        asset_settings=getattr(options, 'asset-settings', settings),
        port=getattr(options, 'port', None),
    )
@task
@needs('pavelib.prereqs.install_prereqs')
@cmdopts([
    ("settings=", "s", "Django settings"),
    ("asset-settings=", "a", ASSET_SETTINGS_HELP),
    ("port=", "p", "Port"),
    ("fast", "f", "Skip updating assets"),
])
def studio(options):
    """
    Run the Studio server.
    """
    settings = getattr(options, 'settings', DEFAULT_SETTINGS)
    # Asset settings fall back to the server settings when not given.
    run_server(
        'studio',
        fast=getattr(options, 'fast', False),
        settings=settings,
        asset_settings=getattr(options, 'asset-settings', settings),
        port=getattr(options, 'port', None),
    )
@task
@needs('pavelib.prereqs.install_prereqs')
@consume_args
def devstack(args):
    """
    Start the devstack lms or studio server
    """
    parser = argparse.ArgumentParser(prog='paver devstack')
    parser.add_argument('system', type=str, nargs=1, help="lms or studio")
    parser.add_argument('--fast', action='store_true', default=False, help="Skip updating assets")
    parser.add_argument('--optimized', action='store_true', default=False, help="Run with optimized assets")
    parser.add_argument('--settings', type=str, default=DEFAULT_SETTINGS, help="Settings file")
    parser.add_argument('--asset-settings', type=str, default=None, help=ASSET_SETTINGS_HELP)
    parser.add_argument(
        '--no-contracts',
        action='store_true',
        default=False,
        help="Disable contracts. By default, they're enabled in devstack."
    )
    parsed = parser.parse_args(args)

    # --optimized overrides both settings choices.
    if parsed.optimized:
        settings = OPTIMIZED_SETTINGS
        asset_settings = OPTIMIZED_ASSETS_SETTINGS
    else:
        settings = parsed.settings
        asset_settings = parsed.asset_settings or settings

    run_server(
        parsed.system[0],
        fast=parsed.fast,
        settings=settings,
        asset_settings=asset_settings,
        contracts=not parsed.no_contracts,
    )
@task
@needs('pavelib.prereqs.install_prereqs')
@cmdopts([
    ("settings=", "s", "Django settings"),
])
def celery(options):
    """
    Runs Celery workers.
    """
    settings = getattr(options, 'settings', 'dev_with_worker')
    worker_cmd = django_cmd(
        'lms', settings, 'celery', 'worker', '--beat', '--loglevel=INFO', '--pythonpath=.'
    )
    run_process(worker_cmd)
@task
@needs('pavelib.prereqs.install_prereqs')
@cmdopts([
    ("settings=", "s", "Django settings for both LMS and Studio"),
    ("asset_settings=", "a", "Django settings for updating assets for both LMS and Studio (defaults to settings)"),
    ("worker_settings=", "w", "Celery worker Django settings"),
    ("fast", "f", "Skip updating assets"),
    ("optimized", "o", "Run with optimized assets"),
    ("settings_lms=", "l", "Set LMS only, overriding the value from --settings (if provided)"),
    ("asset_settings_lms=", None, "Set LMS only, overriding the value from --asset_settings (if provided)"),
    ("settings_cms=", "c", "Set Studio only, overriding the value from --settings (if provided)"),
    ("asset_settings_cms=", None, "Set Studio only, overriding the value from --asset_settings (if provided)"),
])
def run_all_servers(options):
    """
    Runs Celery workers, Studio, and LMS.

    Shared --settings/--asset_settings values can be overridden per system
    with the *_lms / *_cms options; --optimized replaces both shared values
    before the per-system overrides are resolved.
    """
    settings = getattr(options, 'settings', DEFAULT_SETTINGS)
    asset_settings = getattr(options, 'asset_settings', settings)
    worker_settings = getattr(options, 'worker_settings', 'dev_with_worker')
    fast = getattr(options, 'fast', False)
    optimized = getattr(options, 'optimized', False)

    if optimized:
        settings = OPTIMIZED_SETTINGS
        asset_settings = OPTIMIZED_ASSETS_SETTINGS

    # Per-system overrides default to the (possibly optimized) shared values.
    settings_lms = getattr(options, 'settings_lms', settings)
    settings_cms = getattr(options, 'settings_cms', settings)
    asset_settings_lms = getattr(options, 'asset_settings_lms', asset_settings)
    asset_settings_cms = getattr(options, 'asset_settings_cms', asset_settings)

    if not fast:
        # First update assets for both LMS and Studio but don't collect static yet
        args = [
            'lms', 'studio',
            '--settings={}'.format(asset_settings),
            '--skip-collect'
        ]
        call_task('pavelib.assets.update_assets', args=args)

        # Now collect static for each system separately with the appropriate settings.
        # Note that the default settings use DEBUG mode for running the server which
        # means that the optimized assets are ignored, so we skip collectstatic in that
        # case to save time.
        if settings != DEFAULT_SETTINGS:
            collect_assets(['lms'], asset_settings_lms)
            collect_assets(['studio'], asset_settings_cms)

        # Install an asset watcher to regenerate files that change
        call_task('pavelib.assets.watch_assets', options={'background': True})

    # Start up LMS, CMS and Celery
    lms_port = DEFAULT_PORT['lms']
    cms_port = DEFAULT_PORT['studio']
    lms_runserver_args = ["0.0.0.0:{}".format(lms_port)]
    cms_runserver_args = ["0.0.0.0:{}".format(cms_port)]

    # All three processes run until interrupted; run_multi_processes
    # manages their lifetimes together.
    run_multi_processes([
        django_cmd(
            'lms', settings_lms, 'runserver', '--traceback', '--pythonpath=.', *lms_runserver_args
        ),
        django_cmd(
            'studio', settings_cms, 'runserver', '--traceback', '--pythonpath=.', *cms_runserver_args
        ),
        django_cmd(
            'lms', worker_settings, 'celery', 'worker', '--beat', '--loglevel=INFO', '--pythonpath=.'
        )
    ])
@task
@needs('pavelib.prereqs.install_prereqs')
@cmdopts([
    ("settings=", "s", "Django settings"),
])
def update_db():
    """
    Runs syncdb and then migrate.
    """
    # `options` here is the paver global brought in by `from paver.easy import *`.
    settings = getattr(options, 'settings', DEFAULT_SETTINGS)
    for system in ('lms', 'cms'):
        migrate_cmd = django_cmd(
            system, settings, 'syncdb', '--migrate', '--traceback', '--pythonpath=.'
        )
        sh(migrate_cmd)
@task
@needs('pavelib.prereqs.install_prereqs')
@consume_args
def check_settings(args):
    """
    Checks settings files.

    Pipes `import <system>.envs.<settings>` into a plain Django shell and
    reports failure when the settings module cannot be imported.
    """
    parser = argparse.ArgumentParser(prog='paver check_settings')
    parser.add_argument('system', type=str, nargs=1, help="lms or studio")
    parser.add_argument('settings', type=str, nargs=1, help='Django settings')
    args = parser.parse_args(args)

    system = args.system[0]
    settings = args.settings[0]

    try:
        import_cmd = "echo 'import {system}.envs.{settings}'".format(system=system, settings=settings)
        django_shell_cmd = django_cmd(system, settings, 'shell', '--plain', '--pythonpath=.')
        sh("{import_cmd} | {shell_cmd}".format(import_cmd=import_cmd, shell_cmd=django_shell_cmd))
    # Bug fix: was a bare `except:`, which also swallows SystemExit and
    # KeyboardInterrupt; catch only real errors from the shell command.
    except Exception:
        print("Failed to import settings", file=sys.stderr)
| agpl-3.0 |
vipul-sharma20/oh-mainline | vendor/packages/sphinx/sphinx/websupport/__init__.py | 16 | 19132 | # -*- coding: utf-8 -*-
"""
sphinx.websupport
~~~~~~~~~~~~~~~~~
Base Module for web support functions.
:copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import sys
import cPickle as pickle
import posixpath
from os import path
from jinja2 import Environment, FileSystemLoader
from docutils.core import publish_parts
from sphinx.application import Sphinx
from sphinx.locale import _
from sphinx.util.osutil import ensuredir
from sphinx.util.jsonimpl import dumps as dump_json
from sphinx.util.pycompat import htmlescape
from sphinx.websupport import errors
from sphinx.websupport.search import BaseSearch, SEARCH_ADAPTERS
from sphinx.websupport.storage import StorageBackend
class WebSupport(object):
    """The main API class for the web support package. All interactions
    with the web support package should occur through this class.
    """
    def __init__(self,
                 srcdir=None,      # only required for building
                 builddir='',      # the dir with data/static/doctrees subdirs
                 datadir=None,     # defaults to builddir/data
                 staticdir=None,   # defaults to builddir/static
                 doctreedir=None,  # defaults to builddir/doctrees
                 search=None,      # defaults to no search
                 storage=None,     # defaults to SQLite in datadir
                 status=sys.stdout,
                 warning=sys.stderr,
                 moderation_callback=None,
                 allow_anonymous_comments=True,
                 docroot='',
                 staticroot='static',
                 ):
        # directories
        self.srcdir = srcdir
        self.builddir = builddir
        self.outdir = path.join(builddir, 'data')
        self.datadir = datadir or self.outdir
        self.staticdir = staticdir or path.join(self.builddir, 'static')
        # BUG FIX: this previously read ``staticdir or ...``, which silently
        # ignored the ``doctreedir`` argument (and pointed the doctree dir at
        # the static dir whenever a custom ``staticdir`` was passed).
        self.doctreedir = doctreedir or path.join(self.builddir, 'doctrees')
        # web server virtual paths
        self.staticroot = staticroot.strip('/')
        self.docroot = docroot.strip('/')

        self.status = status
        self.warning = warning
        self.moderation_callback = moderation_callback
        self.allow_anonymous_comments = allow_anonymous_comments

        self._init_templating()
        self._init_search(search)
        self._init_storage(storage)

        self._globalcontext = None

        self._make_base_comment_options()

    def _init_storage(self, storage):
        # Accept either a ready-made StorageBackend instance or a database
        # URI string (or nothing, for the default on-disk SQLite database).
        if isinstance(storage, StorageBackend):
            self.storage = storage
        else:
            # If a StorageBackend isn't provided, use the default
            # SQLAlchemy backend.
            from sphinx.websupport.storage.sqlalchemystorage \
                import SQLAlchemyStorage
            if not storage:
                # no explicit DB path given; create default sqlite database
                db_path = path.join(self.datadir, 'db', 'websupport.db')
                ensuredir(path.dirname(db_path))
                storage = 'sqlite:///' + db_path
            self.storage = SQLAlchemyStorage(storage)

    def _init_templating(self):
        # Use the templates shipped with Sphinx's "basic" theme.
        import sphinx
        template_path = path.join(sphinx.package_dir,
                                  'themes', 'basic')
        loader = FileSystemLoader(template_path)
        self.template_env = Environment(loader=loader)

    def _init_search(self, search):
        # Accept either a BaseSearch instance or the name of one of the
        # registered search adapters ('null' disables search).
        if isinstance(search, BaseSearch):
            self.search = search
        else:
            mod, cls = SEARCH_ADAPTERS[search or 'null']
            mod = 'sphinx.websupport.search.' + mod
            SearchClass = getattr(__import__(mod, None, None, [cls]), cls)
            search_path = path.join(self.datadir, 'search')
            self.search = SearchClass(search_path)
        self.results_template = \
            self.template_env.get_template('searchresults.html')

    def build(self):
        """Build the documentation. Places the data into the `outdir`
        directory. Use it like this::

            support = WebSupport(srcdir, builddir, search='xapian')
            support.build()

        This will read reStructured text files from `srcdir`. Then it will
        build the pickles and search index, placing them into `builddir`.
        It will also save node data to the database.
        """
        if not self.srcdir:
            raise RuntimeError('No srcdir associated with WebSupport object')
        app = Sphinx(self.srcdir, self.srcdir, self.outdir, self.doctreedir,
                     'websupport', status=self.status, warning=self.warning)
        app.builder.set_webinfo(self.staticdir, self.staticroot,
                                self.search, self.storage)

        self.storage.pre_build()
        app.build()
        self.storage.post_build()

    def get_globalcontext(self):
        """Load and return the "global context" pickle."""
        if not self._globalcontext:
            infilename = path.join(self.datadir, 'globalcontext.pickle')
            f = open(infilename, 'rb')
            try:
                self._globalcontext = pickle.load(f)
            finally:
                f.close()
        return self._globalcontext

    def get_document(self, docname, username='', moderator=False):
        """Load and return a document from a pickle. The document will
        be a dict object which can be used to render a template::

            support = WebSupport(datadir=datadir)
            support.get_document('index', username, moderator)

        In most cases `docname` will be taken from the request path and
        passed directly to this function. In Flask, that would be something
        like this::

            @app.route('/<path:docname>')
            def index(docname):
                username = g.user.name if g.user else ''
                moderator = g.user.moderator if g.user else False
                try:
                    document = support.get_document(docname, username,
                                                    moderator)
                except DocumentNotFoundError:
                    abort(404)
                render_template('doc.html', document=document)

        The document dict that is returned contains the following items
        to be used during template rendering.

        * **body**: The main body of the document as HTML
        * **sidebar**: The sidebar of the document as HTML
        * **relbar**: A div containing links to related documents
        * **title**: The title of the document
        * **css**: Links to css files used by Sphinx
        * **script**: Javascript containing comment options

        This raises :class:`~sphinx.websupport.errors.DocumentNotFoundError`
        if a document matching `docname` is not found.

        :param docname: the name of the document to load.
        """
        docpath = path.join(self.datadir, 'pickles', docname)
        if path.isdir(docpath):
            infilename = docpath + '/index.fpickle'
            if not docname:
                docname = 'index'
            else:
                docname += '/index'
        else:
            infilename = docpath + '.fpickle'

        try:
            f = open(infilename, 'rb')
        except IOError:
            raise errors.DocumentNotFoundError(
                'The document "%s" could not be found' % docname)

        try:
            document = pickle.load(f)
        finally:
            f.close()

        comment_opts = self._make_comment_options(username, moderator)
        comment_meta = self._make_metadata(
            self.storage.get_metadata(docname, moderator))

        document['script'] = comment_opts + comment_meta + document['script']
        return document

    def get_search_results(self, q):
        """Perform a search for the query `q`, and create a set
        of search results. Then render the search results as html and
        return a context dict like the one created by
        :meth:`get_document`::

            document = support.get_search_results(q)

        :param q: the search query
        """
        results = self.search.query(q)
        ctx = {
            'q': q,
            'search_performed': True,
            'search_results': results,
            'docroot': '../',  # XXX
            '_': _,
        }
        document = {
            'body': self.results_template.render(ctx),
            'title': 'Search Results',
            'sidebar': '',
            'relbar': ''
        }
        return document

    def get_data(self, node_id, username=None, moderator=False):
        """Get the comments and source associated with `node_id`. If
        `username` is given vote information will be included with the
        returned comments. The default CommentBackend returns a dict with
        two keys, *source*, and *comments*. *source* is raw source of the
        node and is used as the starting point for proposals a user can
        add. *comments* is a list of dicts that represent a comment, each
        having the following items:

        ============= ======================================================
        Key           Contents
        ============= ======================================================
        text          The comment text.
        username      The username that was stored with the comment.
        id            The comment's unique identifier.
        rating        The comment's current rating.
        age           The time in seconds since the comment was added.
        time          A dict containing time information. It contains the
                      following keys: year, month, day, hour, minute, second,
                      iso, and delta. `iso` is the time formatted in ISO
                      8601 format. `delta` is a printable form of how old
                      the comment is (e.g. "3 hours ago").
        vote          If `user_id` was given, this will be an integer
                      representing the vote. 1 for an upvote, -1 for a
                      downvote, or 0 if unvoted.
        node          The id of the node that the comment is attached to.
                      If the comment's parent is another comment rather than
                      a node, this will be null.
        parent        The id of the comment that this comment is attached
                      to if it is not attached to a node.
        children      A list of all children, in this format.
        proposal_diff An HTML representation of the differences between the
                      the current source and the user's proposed source.
        ============= ======================================================

        :param node_id: the id of the node to get comments for.
        :param username: the username of the user viewing the comments.
        :param moderator: whether the user is a moderator.
        """
        return self.storage.get_data(node_id, username, moderator)

    def delete_comment(self, comment_id, username='', moderator=False):
        """Delete a comment.

        If `moderator` is True, the comment and all descendants will be deleted
        from the database, and the function returns ``True``.

        If `moderator` is False, the comment will be marked as deleted (but not
        removed from the database so as not to leave any comments orphaned), but
        only if the `username` matches the `username` on the comment. The
        username and text files are replaced with "[deleted]" . In this case,
        the function returns ``False``.

        This raises :class:`~sphinx.websupport.errors.UserNotAuthorizedError`
        if moderator is False and `username` doesn't match username on the
        comment.

        :param comment_id: the id of the comment to delete.
        :param username: the username requesting the deletion.
        :param moderator: whether the requestor is a moderator.
        """
        return self.storage.delete_comment(comment_id, username, moderator)

    def add_comment(self, text, node_id='', parent_id='', displayed=True,
                    username=None, time=None, proposal=None,
                    moderator=False):
        """Add a comment to a node or another comment. Returns the comment
        in the same format as :meth:`get_comments`. If the comment is being
        attached to a node, pass in the node's id (as a string) with the
        node keyword argument::

            comment = support.add_comment(text, node_id=node_id)

        If the comment is the child of another comment, provide the parent's
        id (as a string) with the parent keyword argument::

            comment = support.add_comment(text, parent_id=parent_id)

        If you would like to store a username with the comment, pass
        in the optional `username` keyword argument::

            comment = support.add_comment(text, node=node_id,
                                          username=username)

        :param parent_id: the prefixed id of the comment's parent.
        :param text: the text of the comment.
        :param displayed: for moderation purposes
        :param username: the username of the user making the comment.
        :param time: the time the comment was created, defaults to now.
        """
        if username is None:
            if self.allow_anonymous_comments:
                username = 'Anonymous'
            else:
                raise errors.UserNotAuthorizedError()
        parsed = self._parse_comment_text(text)
        comment = self.storage.add_comment(parsed, displayed, username,
                                           time, proposal, node_id,
                                           parent_id, moderator)
        comment['original_text'] = text
        if not displayed and self.moderation_callback:
            self.moderation_callback(comment)
        return comment

    def process_vote(self, comment_id, username, value):
        """Process a user's vote. The web support package relies
        on the API user to perform authentication. The API user will
        typically receive a comment_id and value from a form, and then
        make sure the user is authenticated. A unique username must be
        passed in, which will also be used to retrieve the user's past
        voting data. An example, once again in Flask::

            @app.route('/docs/process_vote', methods=['POST'])
            def process_vote():
                if g.user is None:
                    abort(401)
                comment_id = request.form.get('comment_id')
                value = request.form.get('value')
                if value is None or comment_id is None:
                    abort(400)
                support.process_vote(comment_id, g.user.name, value)
                return "success"

        :param comment_id: the comment being voted on
        :param username: the unique username of the user voting
        :param value: 1 for an upvote, -1 for a downvote, 0 for an unvote.
        """
        value = int(value)
        if not -1 <= value <= 1:
            raise ValueError('vote value %s out of range (-1, 1)' % value)
        self.storage.process_vote(comment_id, username, value)

    def update_username(self, old_username, new_username):
        """To remain decoupled from a webapp's authentication system, the
        web support package stores a user's username with each of their
        comments and votes. If the authentication system allows a user to
        change their username, this can lead to stagnate data in the web
        support system. To avoid this, each time a username is changed, this
        method should be called.

        :param old_username: The original username.
        :param new_username: The new username.
        """
        self.storage.update_username(old_username, new_username)

    def accept_comment(self, comment_id, moderator=False):
        """Accept a comment that is pending moderation.

        This raises :class:`~sphinx.websupport.errors.UserNotAuthorizedError`
        if moderator is False.

        :param comment_id: The id of the comment that was accepted.
        :param moderator: Whether the user making the request is a moderator.
        """
        if not moderator:
            raise errors.UserNotAuthorizedError()
        self.storage.accept_comment(comment_id)

    def _make_base_comment_options(self):
        """Helper method to create the part of the COMMENT_OPTIONS javascript
        that remains the same throughout the lifetime of the
        :class:`~sphinx.websupport.WebSupport` object.
        """
        self.base_comment_opts = {}

        if self.docroot != '':
            # Comment handler URLs are only overridden when the app is
            # mounted somewhere other than the server root.
            comment_urls = [
                ('addCommentURL', '_add_comment'),
                ('getCommentsURL', '_get_comments'),
                ('processVoteURL', '_process_vote'),
                ('acceptCommentURL', '_accept_comment'),
                ('deleteCommentURL', '_delete_comment')
            ]
            for key, value in comment_urls:
                self.base_comment_opts[key] = \
                    '/' + posixpath.join(self.docroot, value)
        if self.staticroot != 'static':
            # Image URLs only need overriding when static files are served
            # from a non-default virtual path.
            static_urls = [
                ('commentImage', 'comment.png'),
                ('closeCommentImage', 'comment-close.png'),
                ('loadingImage', 'ajax-loader.gif'),
                ('commentBrightImage', 'comment-bright.png'),
                ('upArrow', 'up.png'),
                ('upArrowPressed', 'up-pressed.png'),
                ('downArrow', 'down.png'),
                ('downArrowPressed', 'down-pressed.png')
            ]
            for key, value in static_urls:
                self.base_comment_opts[key] = \
                    '/' + posixpath.join(self.staticroot, '_static', value)

    def _make_comment_options(self, username, moderator):
        """Helper method to create the parts of the COMMENT_OPTIONS
        javascript that are unique to each request.

        :param username: The username of the user making the request.
        :param moderator: Whether the user making the request is a moderator.
        """
        rv = self.base_comment_opts.copy()
        if username:
            rv.update({
                'voting': True,
                'username': username,
                'moderator': moderator,
            })
        return '''\
<script type="text/javascript">
var COMMENT_OPTIONS = %s;
</script>
''' % dump_json(rv)

    def _make_metadata(self, data):
        return '''\
<script type="text/javascript">
var COMMENT_METADATA = %s;
</script>
''' % dump_json(data)

    def _parse_comment_text(self, text):
        settings = {'file_insertion_enabled': False,
                    'raw_enabled': False,
                    'output_encoding': 'unicode'}
        try:
            ret = publish_parts(text, writer_name='html',
                                settings_overrides=settings)['fragment']
        except Exception:
            # Fall back to an escaped literal rendering if the comment is
            # not valid reStructuredText.
            ret = htmlescape(text)
        return ret
| agpl-3.0 |
pinterest/thrift-tools | thrift_tools/tests/test_basic.py | 1 | 6186 | from collections import deque
import unittest
from thrift_tools.sniffer import Sniffer
from thrift_tools.stream_handler import StreamHandler
from thrift_tools.thrift_struct import ThriftField, ThriftStruct
from .util import get_pcap_path
class BasicTestCase(unittest.TestCase):
    """Sniff pre-recorded pcap captures and verify the decoded Thrift
    messages for each wire protocol (binary, compact, json, finagle).

    Note: uses ``assertEqual`` throughout; ``assertEquals`` is a
    deprecated alias (removed in Python 3.12).
    """

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_calculator_service_binary(self):
        self._test_protocol('binary')

    def test_calculator_service_compact(self):
        self._test_protocol('compact')

    def test_calculator_service_json(self):
        self._test_protocol('json')

    def test_finagle(self):
        queue = deque()
        pcap_file = get_pcap_path('finagle-thrift')
        handler = StreamHandler(queue, read_values=True, finagle_thrift=True)
        sniffer = Sniffer(None, 9090, handler, offline=pcap_file)
        sniffer.join()

        self.assertEqual(len(queue), 22)

        # is this finagle-thrift indeed?
        _, src, dst, msg = queue.popleft()
        self.assertEqual(msg.method, '__can__finagle__trace__v3__')
        self.assertEqual(msg.type, 'call')
        self.assertEqual(len(msg.args), 0)

        _, src, dst, msg = queue.popleft()
        self.assertEqual(msg.method, '__can__finagle__trace__v3__')
        self.assertEqual(msg.type, 'reply')
        self.assertEqual(len(msg.args), 0)

        # the search() call
        _, src, dst, msg = queue.popleft()
        self.assertEqual(msg.method, 'search')
        self.assertEqual(msg.type, 'call')

        # inspect the header & the contexts
        self.assertEqual(len(msg.header), 4)
        self.assertEqual(msg.header[0], ThriftField('i64', 1, -8277104800942727271))
        self.assertEqual(msg.header[1], ThriftField('i64', 2, -8277104800942727271))
        self.assertEqual(msg.header[2], ThriftField('i64', 7, 0))
        contexts = msg.header[3].value
        self.assertEqual(contexts[0][0].value,
                         'com.twitter.finagle.tracing.TraceContext')
        self.assertEqual(contexts[1][0].value,
                         'com.twitter.finagle.Deadline')

        self.assertEqual(msg.args, ThriftStruct([ThriftField('string', 1, 'foo')]))

        # the reply
        _, src, dst, msg = queue.popleft()
        self.assertEqual(msg.method, 'search')
        self.assertEqual(msg.type, 'reply')
        self.assertEqual(msg.args, ThriftStruct([ThriftField('list', 0, ['one', 'two', 'three'])]))

    def _test_protocol(self, protoname):
        # Shared driver: replay the capture for the given protocol and
        # verify the decoded call/reply sequence of the calculator service.
        queue = deque()
        pcap_file = get_pcap_path('calc-service-%s' % protoname)
        handler = StreamHandler(queue, read_values=True, debug=True)
        sniffer = Sniffer(None, 9090, handler, offline=pcap_file)
        sniffer.join()

        self.assertEqual(len(queue), 10)

        # the ping call
        _, src, dst, msg = queue.popleft()
        self.assertEqual(msg.method, 'ping')
        self.assertEqual(msg.type, 'call')
        self.assertEqual(len(msg.args), 0)

        _, src, dst, msg = queue.popleft()
        self.assertEqual(msg.method, 'ping')
        self.assertEqual(msg.type, 'reply')
        self.assertEqual(len(msg.args), 0)

        # a successful add
        _, src, dst, msg = queue.popleft()
        self.assertEqual(msg.method, 'add')
        self.assertEqual(msg.type, 'call')
        self.assertEqual(len(msg.args), 2)
        self.assertEqual(msg.args[0], ThriftField('i32', 1, 1))
        self.assertEqual(msg.args[1], ThriftField('i32', 2, 1))

        _, src, dst, msg = queue.popleft()
        self.assertEqual(msg.method, 'add')
        self.assertEqual(msg.type, 'reply')
        self.assertEqual(len(msg.args), 1)
        self.assertEqual(msg.args[0], ThriftField('i32', 0, 2))

        # a failed calculate call
        _, src, dst, msg = queue.popleft()
        self.assertEqual(msg.method, 'calculate')
        self.assertEqual(msg.type, 'call')
        self.assertEqual(len(msg.args), 2)
        self.assertEqual(msg.args[0], ThriftField('i32', 1, 1))
        self.assertEqual(
            msg.args[1],
            ThriftField('struct', 2,
                        ThriftStruct(
                            [ThriftField('i32', 1, 1),
                             ThriftField('i32', 2, 0),
                             ThriftField('i32', 3, 4)])))

        _, src, dst, msg = queue.popleft()
        self.assertEqual(msg.method, 'calculate')
        self.assertEqual(msg.type, 'reply')
        self.assertEqual(len(msg.args), 1)
        self.assertEqual(
            msg.args[0],
            ThriftField('struct', 1,
                        ThriftStruct([ThriftField('i32', 1, 4),
                                      ThriftField('string', 2, 'Cannot divide by 0')])))

        # a successful calculate call
        _, src, dst, msg = queue.popleft()
        self.assertEqual(msg.method, 'calculate')
        self.assertEqual(msg.type, 'call')
        self.assertEqual(len(msg.args), 2)
        self.assertEqual(
            msg.args[1],
            ThriftField('struct', 2, ThriftStruct([ThriftField('i32', 1, 15),
                                                   ThriftField('i32', 2, 10),
                                                   ThriftField('i32', 3, 2)])))

        _, src, dst, msg = queue.popleft()
        self.assertEqual(msg.method, 'calculate')
        self.assertEqual(msg.type, 'reply')
        self.assertEqual(len(msg.args), 1)
        self.assertEqual(msg.args[0], ThriftField('i32', 0, 5))

        # getStruct
        _, src, dst, msg = queue.popleft()
        self.assertEqual(msg.method, 'getStruct')
        self.assertEqual(msg.type, 'call')
        self.assertEqual(len(msg.args), 1)
        self.assertEqual(msg.args[0], ThriftField('i32', 1, 1))

        _, src, dst, msg = queue.popleft()
        self.assertEqual(msg.method, 'getStruct')
        self.assertEqual(msg.type, 'reply')
        self.assertEqual(len(msg.args), 1)
        self.assertEqual(
            msg.args[0],
            ThriftField('struct', 0, ThriftStruct([ThriftField('i32', 1, 1),
                                                   ThriftField('string', 2, '5')])))
waynesun09/virt-test | virttest/remote_commander/remote_runner.py | 14 | 25909 | #!/usr/bin/env python
'''
Created on Dec 6, 2013
:author: jzupka
'''
import os
import sys
import select
import time
import stat
import gc
import logging
import traceback
import subprocess
import string
import random
import shutil
import signal
import remote_interface
import messenger as ms
def daemonize(pipe_root_path="/tmp"):
    """
    Init daemon.

    Performs the classic double-fork daemonization and sets up five named
    FIFOs under ``pipe_root_path`` (stdin/stdout/stderr plus a results and
    an inputs channel).  If a daemon already holds the stdout FIFO open for
    writing, no new daemon is started; any other stale openers are killed.

    :param pipe_root_path: path to directory for pipe.
    :return: [True if child, stdin_path, stdou_path, stderr_path]
    """
    def is_file_open(path):
        """
        Determine process which open file.

        Scans /proc/<pid>/fd symlinks of every process.

        :param path: Path to file.
        :return: [[pid,mode], ... ].
        """
        opens = []
        pids = os.listdir('/proc')
        for pid in sorted(pids):
            try:
                int(pid)
            except ValueError:
                # Not a process directory (e.g. /proc/cpuinfo); skip it.
                continue
            fd_dir = os.path.join('/proc', pid, 'fd')
            try:
                for filepath in os.listdir(fd_dir):
                    try:
                        p = os.path.join(fd_dir, filepath)
                        link = os.readlink(os.path.join(fd_dir, filepath))
                        if link == path:
                            mode = os.lstat(p).st_mode
                            opens.append([pid, mode])
                    except OSError:
                        # fd vanished while we were scanning; ignore.
                        continue
            except OSError, e:
                if e.errno == 2:
                    # errno 2 == ENOENT: process exited mid-scan.
                    continue
                raise
        return opens

    def daemonize():
        """
        Run guest as a daemon.

        Double-fork so the daemon is reparented to init and detached from
        the controlling terminal.  Returns 0 in the original parent and 1
        in the daemonized grandchild.
        """
        gc_was_enabled = gc.isenabled()
        # Disable gc to avoid bug where gc -> file_dealloc ->
        # write to stderr -> hang. http://bugs.python.org/issue1336
        gc.disable()
        try:
            pid = os.fork()
            if gc_was_enabled:
                gc.enable()
            if pid > 0:  # If parent return False
                os.waitpid(pid, 0)
                return 0
        except OSError, e:
            sys.stderr.write("Daemonize failed: %s\n" % (e))
            sys.exit(1)

        os.chdir("/")
        os.setsid()
        os.umask(0)

        try:
            pid = os.fork()
            if gc_was_enabled:
                gc.enable()
            if pid > 0:  # If parent Exit
                sys.exit(0)
        except OSError, e:
            sys.stderr.write("Daemonize failed: %s\n" % (e))
            sys.exit(1)

        if gc_was_enabled:
            gc.enable()
        sys.stdout.flush()
        sys.stderr.flush()
        return 1

    stdin_path = os.path.join(pipe_root_path, "stdin")
    stdout_path = os.path.join(pipe_root_path, "stdout")
    stderr_path = os.path.join(pipe_root_path, "stderr")
    results_path = os.path.join(pipe_root_path, "results")
    inputs_path = os.path.join(pipe_root_path, "inputs")

    for f in [stdin_path, stdout_path, stderr_path, results_path, inputs_path]:
        try:
            os.mkfifo(f)
        except OSError, e:
            if e.errno == 17:
                # errno 17 == EEXIST: FIFO already created by a prior run.
                pass

    # Check for a pidfile to see if the daemon already runs
    openers = is_file_open(stdout_path)
    rundaemon = False
    if len(openers) > 0:
        for i in openers:
            if i[1] & stat.S_IWUSR:
                # A writer on the stdout FIFO means a daemon is alive.
                rundaemon = True
                openers.remove(i)
        if len(openers) > 0:
            # Kill remaining (stale reader) processes holding the FIFO.
            for i in openers:
                os.kill(int(i[0]), 9)
        time.sleep(0.3)

    # Start the daemon
    child = False
    if not rundaemon:
        child = daemonize()

    # NOTE(review): results/inputs paths are swapped between the two return
    # tuples on purpose — each side reads the other side's write channel.
    if child == 0:
        return (child,
                inputs_path,
                results_path,
                stdin_path,
                stdout_path,
                stderr_path)
    else:
        signal.signal(signal.SIGIO, signal.SIG_DFL)
        return (child,
                results_path,
                inputs_path,
                stdin_path,
                stdout_path,
                stderr_path)
def create_process_cmd():
    """
    Create child process without clean process data thanks that it is possible
    call function and classes from child process.

    Forks without exec, so the child shares the parent's loaded modules and
    objects.  Five pipe pairs are created: a bidirectional message channel
    plus redirected stdin/stdout/stderr for the child.

    Returns ``(0, read_fd, write_fd, None, None, None)`` in the child and
    ``(child_pid, read_fd, write_fd, stdin_fd, stdout_fd, stderr_fd)`` in
    the parent.
    """
    # Naming: r_*/w_* = read/write end; *_c = child side, *_p = parent side,
    # *_si/_so/_se = child's stdin/stdout/stderr.
    r_c, w_p = os.pipe()
    r_p, w_c = os.pipe()
    r_si, w_si = os.pipe()
    r_so, w_so = os.pipe()
    r_se, w_se = os.pipe()

    gc_was_enabled = gc.isenabled()
    # Disable gc to avoid bug where gc -> file_dealloc ->
    # write to stderr -> hang. http://bugs.python.org/issue1336
    gc.disable()
    pid = os.fork()
    if pid == 0:  # Child process
        # Close the parent's ends, then rebind std streams (unbuffered)
        # to the child's ends of the dedicated pipes.
        os.close(r_p)
        os.close(w_p)
        os.close(w_si)
        os.close(r_so)
        os.close(r_se)
        sys.stdin.close()
        sys.stdout.close()
        sys.stderr.close()
        sys.stdin = os.fdopen(r_si, 'r', 0)
        sys.stdout = os.fdopen(w_so, 'w', 0)
        sys.stderr = os.fdopen(w_se, 'w', 0)
        if gc_was_enabled:
            gc.enable()
        return (0, r_c, w_c, None, None, None)
    else:
        # Parent: close the child's ends so EOF propagates correctly.
        os.close(r_c)
        os.close(w_c)
        os.close(r_si)
        os.close(w_so)
        os.close(w_se)
        if gc_was_enabled:
            gc.enable()
        return (pid, r_p, w_p, w_si, r_so, r_se)
def gen_tmp_dir(root_path):
    """
    Try to create tmp dir with special name.

    Generates "runnerXXXX" names (4 random letters) until one can be
    created under ``root_path``, retrying on collisions.

    :param root_path: Directory under which the new directory is created.
    :return: Path of the newly created directory.
    """
    path = None
    while (path is None or os.path.exists(path)):
        # string.ascii_letters exists on both Python 2 and 3
        # (string.letters was Python-2-only).
        rname = "runner" + "".join(random.sample(string.ascii_letters, 4))
        path = os.path.join(root_path, rname)
        try:
            if not os.path.exists(path):
                os.mkdir(path)
                return path
        except OSError:
            # Lost a race with another process creating the same name,
            # or root_path is momentarily unavailable; pick a new name.
            # (Was a bare ``except:``, which hid real errors like
            # KeyboardInterrupt.)
            continue
def clean_tmp_dir(path):
    """
    Recursively remove a temporary directory.

    Errors (e.g. the directory no longer existing) are silently ignored.

    :param path: Directory tree to remove.
    """
    shutil.rmtree(path, ignore_errors=True)
def sort_fds_event(fds):
    """
    Split poll() results into hung-up, readable and writable descriptors.

    A descriptor may appear in more than one list if several event bits
    are set in its mask.

    :param fds: sequence of (fd, eventmask) pairs as returned by poll().
    :return: tuple (hup, read, write) of fd lists, input order preserved.
    """
    hup = []
    read = []
    write = []
    for fd, event in fds:
        if event & select.POLLHUP:
            hup.append(fd)
        if event & select.POLLIN:
            read.append(fd)
        if event & select.POLLOUT:
            write.append(fd)
    return hup, read, write
def close_unused_fds(fds):
    """
    Close all file descriptors which are not necessary anymore.

    :param fds: file descriptors
    :type fds: list []
    """
    for descriptor in fds:
        os.close(descriptor)
class CmdFinish(object):
    """
    Class used for communication with child process. This class

    Sentinel message written over the command pipe when a process
    finishes; carries the pid the receiving side should wait for.
    """
    __slots__ = ["pid"]

    def __init__(self, parent=False):
        """
        :param parent: store the parent's pid instead of this process's
                       pid (used when the child reports on behalf of its
                       parent).
        """
        # The original code had a redundant ``self.pid = self.pid``
        # no-op assignment here; it has been removed.
        if not parent:
            self.pid = os.getpid()
        else:
            self.pid = os.getppid()
class CmdSlave(object):
"""
Representation of BaseCmd on slave side.
"""
def __init__(self, baseCmd):
"""
:param baseCmd: basecmd for encapsulation.
"""
self.basecmd = baseCmd
self.cmd_id = baseCmd.cmd_id
self.obj = None
self.pid = None
self.r_pipe = None
self.w_pipe = None
self.stdin_pipe = None
self.stdout_pipe = None
self.stderr_pipe = None
self.async = False
self.nohup = False
self.manage = False
self.msg = None
def close_pipes(self):
"""
Close command communication pipe.
"""
if self.r_pipe is not None:
os.close(self.r_pipe)
if self.w_pipe is not None:
os.close(self.w_pipe)
if self.stdin_pipe is not None:
os.close(self.stdin_pipe)
if self.stdout_pipe is not None:
os.close(self.stdout_pipe)
if self.stderr_pipe is not None:
os.close(self.stderr_pipe)
def parse_func_name(self, func_name, commander):
"""
Parse name sended from master.
format: ``["manage|async|nohup| ", "fnname1", "fnname2", ...]``
:param func_name: Function name
:param commander: Where to execute the command (remote or local)
"""
if func_name[0] == "manage": # start command in main process.
self.manage = True
func_name = func_name[1:]
if func_name[0] == "async": # start command in new process.
self.async = True
func_name = func_name[1:]
if func_name[0] == "nohup": # start command in new daemon process.
self.nohup = True
func_name = func_name[1:]
if hasattr(commander, func_name[0]):
obj = getattr(commander, func_name[0])
elif func_name[0] in commander.globals:
obj = commander.globals[func_name[0]]
elif func_name[0] in commander.locals:
obj = commander.locals[func_name[0]]
else:
obj = globals()[func_name[0]]
if len(func_name) > 1:
for name in func_name[1:]:
obj = getattr(obj, name)
return obj
def __call__(self, commander):
"""
Call command cmd(*args, **kargs)
"""
self.obj = self.parse_func_name(self.basecmd.func, commander)
if self.manage: # start command in main process
self.basecmd.results = self.obj(*self.basecmd.args,
**self.basecmd.kargs)
self.basecmd._finished = True
self.finish(commander)
elif self.async: # start command in new process
self.basecmd.results = self.__call_async__(commander)
self.basecmd._async = True
elif self.nohup: # start command in new daemon process
if self.basecmd.cmd_hash is None:
self.basecmd.cmd_hash = gen_tmp_dir("/tmp")
self.basecmd.results = self.__call_nohup__(commander)
self.basecmd._async = True
else: # start command in new process but wait for input.
self.basecmd.results = self.__call_async__(commander)
def __call_async__(self, commander):
(self.pid, self.r_pipe, self.w_pipe, self.stdin_pipe,
self.stdout_pipe, self.stderr_pipe) = create_process_cmd()
if self.pid == 0: # Child process make commands
commander._close_cmds_stdios(self)
self.msg = ms.Messenger(ms.StdIOWrapperIn(self.r_pipe),
ms.StdIOWrapperOut(self.w_pipe))
try:
self.basecmd.results = self.obj(*self.basecmd.args,
**self.basecmd.kargs)
except Exception:
err_msg = traceback.format_exc()
self.msg.write_msg(remote_interface.CmdTraceBack(err_msg))
sys.exit(-1)
finally:
self.msg.write_msg(self.basecmd.results)
self.msg.write_msg(CmdFinish())
sys.exit(0)
else: # Parent process create communication interface to child process
self.msg = ms.Messenger(ms.StdIOWrapperIn(self.r_pipe),
ms.StdIOWrapperOut(self.w_pipe))
def __call_nohup__(self, commander):
(pid, self.r_path, self.w_path, self.stdin_path, self.stdout_path,
self.stderr_path) = daemonize(self.basecmd.cmd_hash)
if pid == 1: # Child process make commands
commander._close_cmds_stdios(self)
(self.pid, r_pipe, w_pipe, stdin_pipe,
stdout_pipe, stderr_pipe) = create_process_cmd()
if self.pid == 0: # Child process make commands
self.msg = ms.Messenger(ms.StdIOWrapperIn(r_pipe),
ms.StdIOWrapperOut(w_pipe))
try:
self.basecmd.results = self.obj(*self.basecmd.args,
**self.basecmd.kargs)
except Exception:
err_msg = traceback.format_exc()
self.msg.write_msg(remote_interface.CmdTraceBack(err_msg))
sys.exit(-1)
finally:
self.msg.write_msg(self.basecmd.results)
sys.exit(0)
else:
# helper child process open communication pipes.
# This process is able to manage problem with connection width
# main parent process. It allows start unchanged child process.
self.r_pipe = os.open(self.r_path, os.O_RDONLY)
self.w_pipe = os.open(self.w_path, os.O_WRONLY)
sys.stdout = os.fdopen(os.open(self.stdout_path, os.O_WRONLY),
"w",
0)
sys.stderr = os.fdopen(os.open(self.stderr_path, os.O_WRONLY),
"w",
0)
sys.stdin = os.fdopen(os.open(self.stdin_path, os.O_RDONLY),
"r",
0)
w_fds = [r_pipe, w_pipe, stdin_pipe, stdout_pipe, stderr_pipe]
m_fds = [self.r_pipe,
self.w_pipe,
sys.stdin.fileno(),
sys.stdout.fileno(),
sys.stderr.fileno()]
p = select.poll()
p.register(r_pipe)
p.register(w_pipe)
# p.register(stdin_pipe)
p.register(stdout_pipe)
p.register(stderr_pipe)
p.register(self.r_pipe)
# p.register(self.w_pipe)
p.register(sys.stdin.fileno())
# p.register(sys.stdout.fileno())
# p.register(sys.stderr.fileno())
io_map = {r_pipe: self.w_pipe,
self.r_pipe: w_pipe,
sys.stdin.fileno(): stdin_pipe,
stdout_pipe: sys.stdout.fileno(),
stderr_pipe: sys.stderr.fileno()}
while 1:
d = p.poll()
w_ev = [x for x in d if x[0] in w_fds]
m_ev = [x for x in d if x[0] in m_fds]
w_hup, w_read, _ = sort_fds_event(w_ev)
m_hup, m_read, _ = sort_fds_event(m_ev)
if m_hup:
time.sleep(0.1)
if w_hup: # child process finished
for r in w_read:
data = os.read(r, 16384)
os.write(io_map[r], data)
break
for r in w_read:
data = os.read(r, 16384)
os.write(io_map[r], data)
for r in m_read:
data = os.read(r, 16384)
os.write(io_map[r], data)
self.msg = ms.Messenger(ms.StdIOWrapperIn(self.r_pipe),
ms.StdIOWrapperOut(self.w_pipe))
self.msg.write_msg(CmdFinish())
exit(0)
else: # main process open communication named pipes.
self.w_pipe = os.open(self.w_path, os.O_WRONLY)
self.r_pipe = os.open(self.r_path, os.O_RDONLY)
self.stdout_pipe = os.open(self.stdout_path, os.O_RDONLY)
self.stderr_pipe = os.open(self.stderr_path, os.O_RDONLY)
self.stdin_pipe = os.open(self.stdin_path, os.O_WRONLY)
self.msg = ms.Messenger(ms.StdIOWrapperIn(self.r_pipe),
ms.StdIOWrapperOut(self.w_pipe))
def work(self):
"""
Wait for message from running child process
"""
succ, msg = self.msg.read_msg()
if isinstance(msg, CmdFinish):
try:
pid, _ = os.waitpid(msg.pid, 0)
except OSError:
pid = msg.pid
if (succ is False or pid == msg.pid):
self.basecmd._finished = True
return True
else:
return False
else:
self.basecmd.results = msg
def recover_paths(self):
"""
Helper function for reconnect to daemon/nohup process.
"""
self.stdin_path = os.path.join(self.basecmd.cmd_hash, "stdin")
self.stdout_path = os.path.join(self.basecmd.cmd_hash, "stdout")
self.stderr_path = os.path.join(self.basecmd.cmd_hash, "stderr")
self.w_path = os.path.join(self.basecmd.cmd_hash, "results")
self.r_path = os.path.join(self.basecmd.cmd_hash, "inputs")
    def recover_fds(self):
        """
        Reopen the named pipes and the messenger when reattaching to a
        daemon/nohup process. Paths are recomputed first if the read
        pipe was never opened in this process.
        """
        if self.r_pipe is None:
            self.recover_paths()
        self.w_pipe = os.open(self.w_path, os.O_WRONLY)
        self.r_pipe = os.open(self.r_path, os.O_RDONLY)
        self.stdin_pipe = os.open(self.stdin_path, os.O_WRONLY)
        self.stdout_pipe = os.open(self.stdout_path, os.O_RDONLY)
        self.stderr_pipe = os.open(self.stderr_path, os.O_RDONLY)
        # Messenger reads results from r_pipe and writes inputs to w_pipe.
        self.msg = ms.Messenger(ms.StdIOWrapperIn(self.r_pipe),
                                ms.StdIOWrapperOut(self.w_pipe))
    def finish(self, commander):
        """
        Clean up after the wrapped process has finished: close the pipes,
        remove the temporary pipe directory and unregister this command
        from the commander.

        :param commander: the CommanderSlave that owns this command.
        """
        self.close_pipes()
        if self.basecmd.cmd_hash:
            clean_tmp_dir(self.basecmd.cmd_hash)
            self.basecmd.cmd_hash = None
        del commander.cmds[self.cmd_id]
class CommanderSlave(ms.Messenger):
    """
    Commander slave: communicates with the commander master, invokes
    commands on the slave side and forwards their results and captured
    stdout/stderr back. Only the stdin and stdout streams of the slave
    process are used for the master channel.
    """
    def __init__(self, stdin, stdout, o_stdout, o_stderr):
        super(CommanderSlave, self).__init__(stdin, stdout)
        self._exit = False      # set True (see exit()) to leave cmd_loop
        self.cmds = {}          # cmd_id -> CmdSlave of running commands
        self.globals = {}
        self.locals = {}
        self.o_stdout = o_stdout    # read end of captured process stdout
        self.o_stderr = o_stderr    # read end of captured process stderr
    def cmd_loop(self):
        """
        Wait for commands from master and receive results and outputs from
        commands. Multiplexes the master channel, the captured stdio of
        this process and the pipes of every running command via select().
        """
        try:
            while (not self._exit):
                # Gather every fd worth polling this round; pipes closed
                # by finished commands are filtered out (None).
                stdios = [self.stdin, self.o_stdout, self.o_stderr]
                r_pipes = [cmd.r_pipe for cmd in self.cmds.values()
                           if cmd.r_pipe is not None]
                stdouts = [cmd.stdout_pipe for cmd in self.cmds.values()
                           if cmd.stdout_pipe is not None]
                stderrs = [cmd.stderr_pipe for cmd in self.cmds.values()
                           if cmd.stderr_pipe is not None]
                r, _, _ = select.select(stdios + r_pipes + stdouts + stderrs, [], [])
                if self.stdin in r:  # command from controller
                    cmd = CmdSlave(self.read_msg()[1])
                    self.cmds[cmd.cmd_id] = cmd
                    try:
                        # Start the command and echo its basecmd back.
                        cmd(self)
                        self.write_msg(cmd.basecmd)
                    except Exception:
                        err_msg = traceback.format_exc()
                        self.write_msg(remote_interface.CommanderError(err_msg))
                if self.o_stdout in r:  # Send message from captured stdout
                    msg = os.read(self.o_stdout, 16384)
                    self.write_msg(remote_interface.StdOut(msg))
                if self.o_stderr in r:  # Send message from captured stderr
                    msg = os.read(self.o_stderr, 16384)
                    self.write_msg(remote_interface.StdErr(msg))
                # test all commands for io
                for cmd in self.cmds.values():
                    if cmd.stdout_pipe in r:  # command stdout
                        data = os.read(cmd.stdout_pipe, 16384)
                        if data != "":  # pipe is not closed on another side.
                            self.write_msg(remote_interface.StdOut(data,
                                                                   cmd.cmd_id))
                        else:
                            # Empty read == EOF: other side closed the pipe.
                            os.close(cmd.stdout_pipe)
                            cmd.stdout_pipe = None
                    if cmd.stderr_pipe in r:  # command stderr
                        data = os.read(cmd.stderr_pipe, 16384)
                        if data != "":  # pipe is not closed on another side.
                            self.write_msg(remote_interface.StdErr(data,
                                                                   cmd.cmd_id))
                        else:
                            os.close(cmd.stderr_pipe)
                            cmd.stderr_pipe = None
                    if cmd.r_pipe in r:  # command results
                        if cmd.work():
                            # Child finished: clean up and report back.
                            cmd.finish(self)
                            self.write_msg(cmd.basecmd)
        except Exception:
            err_msg = traceback.format_exc()
            self.write_msg(remote_interface.CommanderError(err_msg))
    def _close_cmds_stdios(self, exclude_cmd):
        # Close the pipes of every command except `exclude_cmd`; used so a
        # forked child does not inherit fds of unrelated commands.
        for cmd in self.cmds.values():
            if cmd is not exclude_cmd:
                cmd.close_pipes()
class CommanderSlaveCmds(CommanderSlave):
    """
    Extends CommanderSlave with built-in commands such as running a shell
    process, an interactive python loop, sending a message to a running
    command, registering an already-running command, and injecting code.

    NOTE(review): this module uses Python 2 syntax (print statements,
    raw_input, `exec out`) and cannot run unmodified on Python 3.
    """
    def __init__(self, stdin, stdout, o_stdout, o_stderr):
        super(CommanderSlaveCmds, self).__init__(stdin, stdout,
                                                 o_stdout, o_stderr)
        # Handshake: block until the master sends "start", then confirm.
        while (1):
            succ, data = self.read_msg()
            if succ and data == "start":
                break
        self.write_msg("Started")
    def shell(self, cmd):
        """
        Start a shell process. Output goes to the (captured) stdio of
        this process and is therefore forwarded to the master.

        :param cmd: Command which should be started.
        :return: integer return code of the finished command.
        """
        process = subprocess.Popen(cmd,
                                   shell=True,
                                   stdin=sys.stdin,
                                   stdout=sys.stdout,
                                   stderr=sys.stderr)
        return process.wait()
    def interactive(self):
        """
        Start an interactive python loop: read a line, exec it, report
        exceptions. An empty line ends the loop.
        """
        while 1:
            out = raw_input()
            if out == "":
                return
            try:
                exec out
            except Exception:
                exc_type, exc_value, exc_traceback = sys.exc_info()
                print "On Guest exception from: \n" + "".join(
                    traceback.format_exception(exc_type,
                                               exc_value,
                                               exc_traceback))
                print "FAIL: Guest command exception."
    def send_msg(self, msg, cmd_id):
        """
        Send msg to cmd with id == cmd_id over the command's stdin pipe.

        :param msg: message passed to cmd over the stdin
        :type msg: str
        :param cmd_id: id of cmd.
        """
        os.write(self.cmds[cmd_id].stdin_pipe, msg)
    def register_cmd(self, basecmd, basecmd_cmd_id):
        """
        Slave side of set_commander: register an already-existing cmd in
        the CommanderSlave dict and, when the command has a pipe
        directory, reattach its file descriptors.

        :param basecmd: cmd which should be added to CommandSlave dict
        :type basecmd: BaseCmd
        :param basecmd_cmd_id: number under which it should be stored
        :type basecmd_cmd_id: int
        """
        remote_interface.BaseCmd.single_cmd_id = basecmd_cmd_id
        cmd = CmdSlave(basecmd)
        self.cmds[basecmd.cmd_id] = cmd
        if cmd.basecmd.cmd_hash is not None:
            cmd.recover_fds()
        return basecmd
    def add_function(self, f_code):
        """
        Add a function definition to the running interpreter's globals.

        :param f_code: Code of function.
        :type f_code: str.
        """
        exec(f_code, globals(), globals())
    def copy_file(self, name, path, content):
        """
        Very naive implementation of copying files: the whole content is
        transferred in memory. Should be used only for short files.
        """
        f = open(os.path.join(path, name), "w")
        f.write(content)
        f.close()
    def import_src(self, name, path=None):
        """
        Import a module into the running python session, optionally
        extending sys.path first.
        """
        if path:
            if path not in sys.path:
                sys.path.append(path)
        mod = __import__(name, globals(), locals())
        globals()[name] = mod
        sys.modules[name] = mod
    def exit(self):
        """
        Request termination of the command slave loop.
        """
        self._exit = True
        return "bye"
def remote_agent(in_stream_cls, out_stream_cls):
    """
    Connect file descriptors to the right pipes and start the slave
    command loop. Real stdout/stderr are replaced by pipes so that all
    output produced by commands is captured and forwarded to the master;
    the original stdout fd is kept for the master channel. Any failure
    is written to the (captured) stderr so the master can see it.

    :params in_stream_cls: Class encapsulating the input stream.
    :params out_stream_cls: Class encapsulating the output stream.
    """
    try:
        # Keep the real stdio fds for the master channel before
        # redirecting the python-level streams.
        fd_stdout = sys.stdout.fileno()
        fd_stderr = sys.stderr.fileno()
        fd_stdin = sys.stdin.fileno()
        soutr, soutw = os.pipe()
        serrr, serrw = os.pipe()
        # Unbuffered (0) so captured output is forwarded promptly.
        sys.stdout = os.fdopen(soutw, 'w', 0)
        sys.stderr = os.fdopen(serrw, 'w', 0)
        # "#" marks a successfully started agent on the raw channel.
        os.write(fd_stdout, "#")
        logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
        w_stdin = None
        w_stdout = out_stream_cls(fd_stdout)
        w_stdin = in_stream_cls(fd_stdin)
        cmd = CommanderSlaveCmds(w_stdin,
                                 w_stdout,
                                 soutr,
                                 serrr)
        cmd.cmd_loop()
    except SystemExit:
        pass
    except:
        # Bare except is deliberate: report *any* failure to the master
        # via the captured stderr pipe instead of dying silently.
        e = traceback.format_exc()
        sys.stderr.write(e)
        # traceback.print_exc()
# Entry point: "agent" speaks plain stdio, "agent_base64" wraps the
# streams in base64 (safe for transports that mangle binary data).
if __name__ == '__main__':
    if len(sys.argv) > 1:
        if sys.argv[1] == "agent":
            remote_agent(ms.StdIOWrapperIn, ms.StdIOWrapperOut)
        elif sys.argv[1] == "agent_base64":
            remote_agent(ms.StdIOWrapperInBase64, ms.StdIOWrapperOutBase64)
| gpl-2.0 |
h3biomed/ansible | lib/ansible/modules/identity/keycloak/keycloak_clienttemplate.py | 32 | 15139 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2017, Eike Frost <ei@kefro.st>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: keycloak_clienttemplate
short_description: Allows administration of Keycloak client templates via Keycloak API
version_added: "2.5"
description:
- This module allows the administration of Keycloak client templates via the Keycloak REST API. It
requires access to the REST API via OpenID Connect; the user connecting and the client being
used must have the requisite access rights. In a default Keycloak installation, admin-cli
and an admin user would work, as would a separate client definition with the scope tailored
to your needs and a user having the expected roles.
- The names of module options are snake_cased versions of the camelCase ones found in the
Keycloak API and its documentation at U(http://www.keycloak.org/docs-api/3.3/rest-api/)
- The Keycloak API does not always enforce for only sensible settings to be used -- you can set
SAML-specific settings on an OpenID Connect client for instance and vice versa. Be careful.
If you do not specify a setting, usually a sensible default is chosen.
options:
state:
description:
- State of the client template
- On C(present), the client template will be created (or updated if it exists already).
- On C(absent), the client template will be removed if it exists
choices: ['present', 'absent']
default: 'present'
id:
description:
- Id of client template to be worked on. This is usually a UUID.
realm:
description:
- Realm this client template is found in.
name:
description:
- Name of the client template
description:
description:
- Description of the client template in Keycloak
protocol:
description:
- Type of client template (either C(openid-connect) or C(saml).
choices: ['openid-connect', 'saml']
full_scope_allowed:
description:
- Is the "Full Scope Allowed" feature set for this client template or not.
This is 'fullScopeAllowed' in the Keycloak REST API.
type: bool
protocol_mappers:
description:
- a list of dicts defining protocol mappers for this client template.
This is 'protocolMappers' in the Keycloak REST API.
suboptions:
consentRequired:
description:
- Specifies whether a user needs to provide consent to a client for this mapper to be active.
consentText:
description:
- The human-readable name of the consent the user is presented to accept.
id:
description:
- Usually a UUID specifying the internal ID of this protocol mapper instance.
name:
description:
- The name of this protocol mapper.
protocol:
description:
- is either 'openid-connect' or 'saml', this specifies for which protocol this protocol mapper
is active.
choices: ['openid-connect', 'saml']
protocolMapper:
description:
- The Keycloak-internal name of the type of this protocol-mapper. While an exhaustive list is
impossible to provide since this may be extended through SPIs by the user of Keycloak,
by default Keycloak as of 3.4 ships with at least
- C(docker-v2-allow-all-mapper)
- C(oidc-address-mapper)
- C(oidc-full-name-mapper)
- C(oidc-group-membership-mapper)
- C(oidc-hardcoded-claim-mapper)
- C(oidc-hardcoded-role-mapper)
- C(oidc-role-name-mapper)
- C(oidc-script-based-protocol-mapper)
- C(oidc-sha256-pairwise-sub-mapper)
- C(oidc-usermodel-attribute-mapper)
- C(oidc-usermodel-client-role-mapper)
- C(oidc-usermodel-property-mapper)
- C(oidc-usermodel-realm-role-mapper)
- C(oidc-usersessionmodel-note-mapper)
- C(saml-group-membership-mapper)
- C(saml-hardcode-attribute-mapper)
- C(saml-hardcode-role-mapper)
- C(saml-role-list-mapper)
- C(saml-role-name-mapper)
- C(saml-user-attribute-mapper)
- C(saml-user-property-mapper)
- C(saml-user-session-note-mapper)
- An exhaustive list of available mappers on your installation can be obtained on
the admin console by going to Server Info -> Providers and looking under
'protocol-mapper'.
config:
description:
- Dict specifying the configuration options for the protocol mapper; the
contents differ depending on the value of I(protocolMapper) and are not documented
other than by the source of the mappers and its parent class(es). An example is given
below. It is easiest to obtain valid config values by dumping an already-existing
protocol mapper configuration through check-mode in the "existing" field.
attributes:
description:
- A dict of further attributes for this client template. This can contain various
configuration settings, though in the default installation of Keycloak as of 3.4, none
are documented or known, so this is usually empty.
notes:
- The Keycloak REST API defines further fields (namely I(bearerOnly), I(consentRequired), I(standardFlowEnabled),
I(implicitFlowEnabled), I(directAccessGrantsEnabled), I(serviceAccountsEnabled), I(publicClient), and
I(frontchannelLogout)) which, while available with keycloak_client, do not have any effect on
Keycloak client-templates and are discarded if supplied with an API request changing client-templates. As such,
they are not available through this module.
extends_documentation_fragment:
- keycloak
author:
- Eike Frost (@eikef)
'''
EXAMPLES = '''
- name: Create or update Keycloak client template (minimal)
local_action:
module: keycloak_clienttemplate
auth_client_id: admin-cli
auth_keycloak_url: https://auth.example.com/auth
auth_realm: master
auth_username: USERNAME
auth_password: PASSWORD
realm: master
name: this_is_a_test
- name: delete Keycloak client template
local_action:
module: keycloak_clienttemplate
auth_client_id: admin-cli
auth_keycloak_url: https://auth.example.com/auth
auth_realm: master
auth_username: USERNAME
auth_password: PASSWORD
realm: master
state: absent
name: test01
- name: Create or update Keycloak client template (with a protocol mapper)
local_action:
module: keycloak_clienttemplate
auth_client_id: admin-cli
auth_keycloak_url: https://auth.example.com/auth
auth_realm: master
auth_username: USERNAME
auth_password: PASSWORD
realm: master
name: this_is_a_test
protocol_mappers:
- config:
access.token.claim: True
claim.name: "family_name"
id.token.claim: True
jsonType.label: String
user.attribute: lastName
userinfo.token.claim: True
consentRequired: True
consentText: "${familyName}"
name: family name
protocol: openid-connect
protocolMapper: oidc-usermodel-property-mapper
full_scope_allowed: false
id: bce6f5e9-d7d3-4955-817e-c5b7f8d65b3f
'''
RETURN = '''
msg:
description: Message as to what action was taken
returned: always
type: str
sample: "Client template testclient has been updated"
proposed:
description: client template representation of proposed changes to client template
returned: always
type: dict
sample: {
name: "test01"
}
existing:
description: client template representation of existing client template (sample is truncated)
returned: always
type: dict
sample: {
"description": "test01",
"fullScopeAllowed": false,
"id": "9c3712ab-decd-481e-954f-76da7b006e5f",
"name": "test01",
"protocol": "saml"
}
end_state:
description: client template representation of client template after module execution (sample is truncated)
returned: always
type: dict
sample: {
"description": "test01",
"fullScopeAllowed": false,
"id": "9c3712ab-decd-481e-954f-76da7b006e5f",
"name": "test01",
"protocol": "saml"
}
'''
from ansible.module_utils.keycloak import KeycloakAPI, camel, keycloak_argument_spec
from ansible.module_utils.basic import AnsibleModule
def main():
    """
    Module execution.

    Creates, updates or deletes a Keycloak client template according to
    the module parameters, reporting proposed/existing/end states and
    supporting check mode and --diff.
    """
    argument_spec = keycloak_argument_spec()

    # Sub-spec for entries of the protocol_mappers list option.
    protmapper_spec = dict(
        consentRequired=dict(type='bool'),
        consentText=dict(type='str'),
        id=dict(type='str'),
        name=dict(type='str'),
        protocol=dict(type='str', choices=['openid-connect', 'saml']),
        protocolMapper=dict(type='str'),
        config=dict(type='dict'),
    )

    meta_args = dict(
        realm=dict(type='str', default='master'),
        state=dict(default='present', choices=['present', 'absent']),
        id=dict(type='str'),
        name=dict(type='str'),
        description=dict(type='str'),
        protocol=dict(type='str', choices=['openid-connect', 'saml']),
        attributes=dict(type='dict'),
        full_scope_allowed=dict(type='bool'),
        protocol_mappers=dict(type='list', elements='dict', options=protmapper_spec),
    )
    argument_spec.update(meta_args)

    module = AnsibleModule(argument_spec=argument_spec,
                           supports_check_mode=True,
                           required_one_of=([['id', 'name']]))

    result = dict(changed=False, msg='', diff={}, proposed={}, existing={}, end_state={})

    # Obtain access token, initialize API
    kc = KeycloakAPI(module)

    realm = module.params.get('realm')
    state = module.params.get('state')
    cid = module.params.get('id')

    # Convert module parameters to client template representation
    # parameters: filter out connection/auth options and unset values.
    clientt_params = [x for x in module.params
                      if x not in ['state', 'auth_keycloak_url', 'auth_client_id', 'auth_realm',
                                   'auth_client_secret', 'auth_username', 'auth_password',
                                   'validate_certs', 'realm'] and module.params.get(x) is not None]

    # See whether the client template already exists in Keycloak,
    # looking it up by name when no id was given.
    if cid is None:
        before_clientt = kc.get_client_template_by_name(module.params.get('name'), realm=realm)
        if before_clientt is not None:
            cid = before_clientt['id']
    else:
        before_clientt = kc.get_client_template_by_id(cid, realm=realm)

    if before_clientt is None:
        before_clientt = dict()

    result['existing'] = before_clientt

    # Build a proposed changeset from parameters given to this module
    changeset = dict()

    for clientt_param in clientt_params:
        # lists in the Keycloak API are sorted, so sort ours for comparison
        new_param_value = module.params.get(clientt_param)
        if isinstance(new_param_value, list):
            try:
                new_param_value = sorted(new_param_value)
            except TypeError:
                pass
        changeset[camel(clientt_param)] = new_param_value

    # Whether creating or updating, take the before-state and merge the
    # changeset into it; this is what would be sent to the server.
    updated_clientt = before_clientt.copy()
    updated_clientt.update(changeset)

    result['proposed'] = changeset

    # If the client template does not exist yet, before_clientt is still empty
    if before_clientt == dict():
        if state == 'absent':
            # Nothing to delete; do nothing and exit
            if module._diff:
                result['diff'] = dict(before='', after='')
            result['msg'] = 'Client template does not exist, doing nothing.'
            module.exit_json(**result)

        # create new client template
        result['changed'] = True
        if 'name' not in updated_clientt:
            # Fixed message: this module manages client *templates*.
            module.fail_json(msg='name needs to be specified when creating a new client template')

        if module._diff:
            result['diff'] = dict(before='', after=updated_clientt)

        if module.check_mode:
            module.exit_json(**result)

        kc.create_client_template(updated_clientt, realm=realm)
        after_clientt = kc.get_client_template_by_name(updated_clientt['name'], realm=realm)

        result['end_state'] = after_clientt

        result['msg'] = 'Client template %s has been created.' % updated_clientt['name']
        module.exit_json(**result)
    else:
        if state == 'present':
            # update existing client template
            result['changed'] = True
            if module.check_mode:
                # We can only compare the current client template with the proposed updates we have
                if module._diff:
                    result['diff'] = dict(before=before_clientt,
                                          after=updated_clientt)
                module.exit_json(**result)

            kc.update_client_template(cid, updated_clientt, realm=realm)

            # Re-read to detect whether the server actually changed anything.
            after_clientt = kc.get_client_template_by_id(cid, realm=realm)
            if before_clientt == after_clientt:
                result['changed'] = False
            if module._diff:
                result['diff'] = dict(before=before_clientt,
                                      after=after_clientt)
            result['end_state'] = after_clientt

            result['msg'] = 'Client template %s has been updated.' % updated_clientt['name']
            module.exit_json(**result)
        else:
            # Delete existing client template
            result['changed'] = True
            if module._diff:
                result['diff']['before'] = before_clientt
                result['diff']['after'] = ''

            if module.check_mode:
                module.exit_json(**result)

            kc.delete_client_template(cid, realm=realm)
            result['proposed'] = dict()
            result['end_state'] = dict()
            result['msg'] = 'Client template %s has been deleted.' % before_clientt['name']
            module.exit_json(**result)

    # Safety net: every branch above exits, but keep a final exit anyway.
    module.exit_json(**result)
# Standard entry point when executed as an Ansible module.
if __name__ == '__main__':
    main()
| gpl-3.0 |
brakhane/python-mode | pymode/libs3/rope/refactor/suites.py | 16 | 4794 | from rope.base import ast
def find_visible(node, lines):
    """Return the single line that is visible from every line in `lines`."""
    return find_visible_for_suite(ast_suite_tree(node), lines)
def find_visible_for_suite(root, lines):
    """Return the line visible from all `lines` inside the suite tree `root`.

    Reduces the list pairwise: the common visible line of the tail is
    combined with the head by climbing both lines' suites towards the
    root until they meet in a common suite or in sibling suites.
    """
    if len(lines) == 1:
        return lines[0]
    line1 = lines[0]
    line2 = find_visible_for_suite(root, lines[1:])
    suite1 = root.find_suite(line1)
    suite2 = root.find_suite(line2)
    def valid(suite):
        # Suites marked `ignored` (function/class bodies) are not
        # visible from outside.
        return suite is not None and not suite.ignored
    if valid(suite1) and not valid(suite2):
        return line1
    if not valid(suite1) and valid(suite2):
        return line2
    if not valid(suite1) and not valid(suite2):
        return None
    # Climb the deeper suite first so both end up at the same depth,
    # replacing each line by its suite's start line along the way.
    while suite1 != suite2 and suite1.parent != suite2.parent:
        if suite1._get_level() < suite2._get_level():
            line2 = suite2.get_start()
            suite2 = suite2.parent
        elif suite1._get_level() > suite2._get_level():
            line1 = suite1.get_start()
            suite1 = suite1.parent
        else:
            line1 = suite1.get_start()
            line2 = suite2.get_start()
            suite1 = suite1.parent
            suite2 = suite2.parent
    if suite1 == suite2:
        return min(line1, line2)
    return min(suite1.get_start(), suite2.get_start())
def ast_suite_tree(node):
    """Build the root `Suite` for an AST `node`, defaulting to line 1."""
    start_line = getattr(node, 'lineno', 1)
    return Suite(node.body, start_line)
class Suite(object):
    """A node in the tree of statement suites of a module.

    Wraps a list of AST child nodes together with the line the suite
    starts on and a link to its parent suite. `ignored` suites are
    function/class bodies, whose lines are not visible from outside.
    """

    def __init__(self, child_nodes, lineno, parent=None, ignored=False):
        self.parent = parent
        self.lineno = lineno
        self.child_nodes = child_nodes
        self._children = None
        self.ignored = ignored

    def get_start(self):
        """Start line of this suite; the root uses its first child's line."""
        if self.parent is not None:
            return self.lineno
        return self.local_start() if self.child_nodes else 1

    def get_children(self):
        """Child suites, computed lazily by walking the AST nodes."""
        if self._children is None:
            walker = _SuiteWalker(self)
            for node in self.child_nodes:
                ast.walk(node, walker)
            self._children = walker.suites
        return self._children

    def local_start(self):
        """Line number of the first statement in this suite."""
        return self.child_nodes[0].lineno

    def local_end(self):
        """Last line covered by this suite or any of its sub-suites."""
        last_line = self.child_nodes[-1].lineno
        children = self.get_children()
        if children:
            last_line = max(last_line, children[-1].local_end())
        return last_line

    def find_suite(self, line):
        """Return the innermost suite containing `line` (None for None)."""
        if line is None:
            return None
        for child in self.get_children():
            if child.local_start() <= line <= child.local_end():
                return child.find_suite(line)
        return self

    def _get_level(self):
        """Depth of this suite in the tree (the root is level 0)."""
        return 0 if self.parent is None else self.parent._get_level() + 1
class _SuiteWalker(object):
    """AST visitor collecting the immediate child `Suite`s of one suite."""
    def __init__(self, suite):
        self.suite = suite
        self.suites = []
    def _If(self, node):
        self._add_if_like_node(node)
    def _For(self, node):
        self._add_if_like_node(node)
    def _While(self, node):
        self._add_if_like_node(node)
    def _With(self, node):
        self.suites.append(Suite(node.body, node.lineno, self.suite))
    def _TryFinally(self, node):
        # Old (pre-3.3) AST shape: a try/finally whose body is a single
        # try/except is unwrapped and handled as the except form.
        if len(node.finalbody) == 1 and \
            isinstance(node.body[0], ast.TryExcept):
            self._TryExcept(node.body[0])
        else:
            self.suites.append(Suite(node.body, node.lineno, self.suite))
        self.suites.append(Suite(node.finalbody, node.lineno, self.suite))
    def _TryExcept(self, node):
        self.suites.append(Suite(node.body, node.lineno, self.suite))
        for handler in node.handlers:
            self.suites.append(Suite(handler.body, node.lineno, self.suite))
        if node.orelse:
            self.suites.append(Suite(node.orelse, node.lineno, self.suite))
    def _Try(self, node):
        # Python 3.3+ unified Try node: body, each handler, else and
        # finally all become separate suites.
        self.suites.append(Suite(node.body, node.lineno, self.suite))
        for handler in node.handlers:
            self.suites.append(Suite(handler.body, node.lineno, self.suite))
        if node.orelse:
            self.suites.append(Suite(node.orelse, node.lineno, self.suite))
        if node.finalbody:
            self.suites.append(Suite(node.finalbody, node.lineno, self.suite))
    def _add_if_like_node(self, node):
        # if/for/while: the body and the else-branch are separate suites.
        self.suites.append(Suite(node.body, node.lineno, self.suite))
        if node.orelse:
            self.suites.append(Suite(node.orelse, node.lineno, self.suite))
    def _FunctionDef(self, node):
        # Function bodies are recorded but "ignored": their lines are
        # not visible from the enclosing scope.
        self.suites.append(Suite(node.body, node.lineno,
                                 self.suite, ignored=True))
    def _ClassDef(self, node):
        self.suites.append(Suite(node.body, node.lineno,
                                 self.suite, ignored=True))
| lgpl-3.0 |
manaris/jythonMusic | library/jython2.5.3/Lib/encodings/iso2022_jp_2.py | 816 | 1061 | #
# iso2022_jp_2.py: Python Unicode Codec for ISO2022_JP_2
#
# Written by Hye-Shik Chang <perky@FreeBSD.org>
#
import _codecs_iso2022, codecs
import _multibytecodec as mbc
codec = _codecs_iso2022.getcodec('iso2022_jp_2')
class Codec(codecs.Codec):
    # Stateless one-shot encode/decode, delegated to the C-level codec.
    encode = codec.encode
    decode = codec.decode
class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
                         codecs.IncrementalEncoder):
    # Incremental machinery comes from _multibytecodec; only the codec
    # implementation is supplied here.
    codec = codec
class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
                         codecs.IncrementalDecoder):
    # Incremental machinery comes from _multibytecodec.
    codec = codec
class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
    # Stream reading machinery from _multibytecodec.
    codec = codec
class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
    # Stream writing machinery from _multibytecodec.
    codec = codec
def getregentry():
    """Return the CodecInfo used to register this codec with `codecs`."""
    return codecs.CodecInfo(
        name='iso2022_jp_2',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
| gpl-3.0 |
danialbehzadi/Nokia-RM-1013-2.0.0.11 | chromium/chrome/common/extensions/docs/build/build.py | 65 | 8905 | #!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Docbuilder for extension docs."""
import os
import os.path
import shutil
import sys
import time
import urllib
from subprocess import Popen, PIPE
from optparse import OptionParser
_script_path = os.path.realpath(__file__)
_build_dir = os.path.dirname(_script_path)
_base_dir = os.path.normpath(_build_dir + "/..")
_webkit_dir = _base_dir + "/../../../../third_party/WebKit"
_devtools_dir = _webkit_dir + "/Source/WebCore/inspector/front-end"
_static_dir = _base_dir + "/static"
_js_dir = _base_dir + "/js"
_template_dir = _base_dir + "/template"
_samples_dir = _base_dir + "/examples"
_extension_api_dir = os.path.normpath(_base_dir + "/../api")
_extension_api_json = _extension_api_dir + "/extension_api.json"
_devtools_api_json = _devtools_dir + "/ExtensionAPISchema.json"
_api_template_html = _template_dir + "/api_template.html"
_page_shell_html = _template_dir + "/page_shell.html"
_generator_html = _build_dir + "/generator.html"
_samples_json = _base_dir + "/samples.json"
_expected_output_preamble = "#BEGIN"
_expected_output_postamble = "#END"
# HACK! This is required because we can only depend on python 2.4 and
# the calling environment may not be setup to set the PYTHONPATH
sys.path.append(os.path.normpath(_base_dir +
"/../../../../third_party"))
import simplejson as json
from directory import Sample
from directory import ApiManifest
from directory import SamplesManifest
def RenderPages(names, dump_render_tree):
  """
  Calls DumpRenderTree .../generator.html?<names> and writes the
  results to .../docs/<name>.html

  Returns the list of output files whose content changed compared to
  what was previously on disk.

  NOTE(review): Python 2 code (`except ValueError, msg` syntax).
  """
  if not names:
    raise Exception("RenderPage called with empty names param")
  generator_url = "file:" + urllib.pathname2url(_generator_html)
  generator_url += "?" + ",".join(names)
  # Start with a fresh copy of page shell for each file.
  # Save the current contents so that we can look for changes later.
  originals = {}
  for name in names:
    input_file = _base_dir + "/" + name + ".html"
    if (os.path.isfile(input_file)):
      originals[name] = open(input_file, 'rb').read()
      os.remove(input_file)
    else:
      originals[name] = ""
    shutil.copy(_page_shell_html, input_file)
  # Run DumpRenderTree and capture result
  dump_render_tree_timeout = 1000 * 60 * 5  # five minutes, in milliseconds
  p = Popen(
      [dump_render_tree, "--test-shell",
       "%s %s" % (generator_url, dump_render_tree_timeout)],
      stdout=PIPE)
  # The remaining output will be the content of the generated pages.
  output = p.stdout.read()
  # Parse out just the JSON part between the preamble/postamble markers.
  begin = output.find(_expected_output_preamble)
  end = output.rfind(_expected_output_postamble)
  if (begin < 0 or end < 0):
    raise Exception("%s returned invalid output:\n\n%s" %
                    (dump_render_tree, output))
  begin += len(_expected_output_preamble)
  try:
    output_parsed = json.loads(output[begin:end])
  except ValueError, msg:
    raise Exception("Could not parse DumpRenderTree output as JSON. Error: " +
                    msg + "\n\nOutput was:\n" + output)
  changed_files = []
  for name in names:
    result = output_parsed[name].encode("utf8") + '\n'
    # Remove CRs that are appearing from captured DumpRenderTree output.
    result = result.replace('\r', '')
    # Remove page_shell
    input_file = _base_dir + "/" + name + ".html"
    os.remove(input_file)
    # Write output
    open(input_file, 'wb').write(result)
    # Only report as changed if the file previously existed with
    # different content; brand-new files are not reported.
    if (originals[name] and result != originals[name]):
      changed_files.append(input_file)
  return changed_files
def FindDumpRenderTree():
  """Guess the DumpRenderTree executable location for the host platform.

  Searches platform-specific build output and home directories in order
  and returns the first existing file; raises Exception when none exists.
  """
  # This is hacky. It is used to guess the location of the DumpRenderTree
  chrome_dir = os.path.normpath(_base_dir + "/../../../")
  src_dir = os.path.normpath(chrome_dir + "/../")
  search_locations = []
  if (sys.platform in ('cygwin', 'win32')):
    home_dir = os.path.normpath(os.getenv("HOMEDRIVE") + os.getenv("HOMEPATH"))
    search_locations.append(chrome_dir + "/Release/DumpRenderTree.exe")
    search_locations.append(chrome_dir + "/Debug/DumpRenderTree.exe")
    search_locations.append(home_dir + "/bin/DumpRenderTree/"
                            "DumpRenderTree.exe")
  if (sys.platform in ('linux', 'linux2')):
    search_locations.append(src_dir + "/sconsbuild/Release/DumpRenderTree")
    search_locations.append(src_dir + "/out/Release/DumpRenderTree")
    search_locations.append(src_dir + "/sconsbuild/Debug/DumpRenderTree")
    search_locations.append(src_dir + "/out/Debug/DumpRenderTree")
    search_locations.append(os.getenv("HOME") + "/bin/DumpRenderTree/"
                            "DumpRenderTree")
  if (sys.platform == 'darwin'):
    search_locations.append(src_dir +
        "/xcodebuild/Release/DumpRenderTree.app/Contents/MacOS/DumpRenderTree")
    search_locations.append(src_dir +
        "/xcodebuild/Debug/DumpRenderTree.app/Contents/MacOS/DumpRenderTree")
    search_locations.append(os.getenv("HOME") + "/bin/DumpRenderTree/" +
                            "DumpRenderTree.app/Contents/MacOS/DumpRenderTree")
  for loc in search_locations:
    if os.path.isfile(loc):
      return loc
  raise Exception("Could not find DumpRenderTree executable\n"
                  "**DumpRenderTree may need to be built**\n"
                  "Searched: \n" + "\n".join(search_locations) + "\n"
                  "To specify a path to DumpRenderTree use "
                  "--dump-render-tree-path")
def GetStaticFileNames():
  """Returns the set of page names for static .html files in the static dir."""
  names = set()
  for file_name in os.listdir(_static_dir):
    if file_name.startswith(".") or not file_name.endswith(".html"):
      continue
    names.add(os.path.splitext(file_name)[0])
  return names
def main():
  """Build the extension docs and return a process exit status.

  NOTE(review): Python 2 code (print statements); do not run with py3.
  """
  # Prevent windows from using cygwin python.
  if (sys.platform == "cygwin"):
    sys.exit("Building docs not supported for cygwin python. Please run the "
             "build.sh script instead, which uses depot_tools python.")
  parser = OptionParser()
  parser.add_option("--dump-render-tree-path", dest="dump_render_tree_path",
                    metavar="PATH",
                    help="path to DumpRenderTree executable")
  parser.add_option("--page-name", dest="page_name", metavar="PAGE",
                    help="only generate docs for PAGE.html")
  parser.add_option("--nozip", dest="zips", action="store_false",
                    help="do not generate zip files for samples",
                    default=True)
  options, args = parser.parse_args()
  # Prefer an explicitly provided executable; fall back to searching.
  if (options.dump_render_tree_path and
      os.path.isfile(options.dump_render_tree_path)):
    dump_render_tree = options.dump_render_tree_path
  else:
    dump_render_tree = FindDumpRenderTree()
  # Load the manifest of existing API Methods
  api_manifest = ApiManifest(_extension_api_json)
  # DevTools API is maintained separately, in WebCore land
  devtools_api_manifest = ApiManifest(_devtools_api_json)
  # Read static file names
  static_names = GetStaticFileNames()
  # Read module names
  module_names = (api_manifest.getModuleNames() |
                  devtools_api_manifest.getModuleNames())
  # All pages to generate
  page_names = static_names | module_names
  # Allow the user to render a single page if they want
  if options.page_name:
    if options.page_name in page_names:
      page_names = [options.page_name]
    else:
      raise Exception("--page-name argument must be one of %s." %
                      ', '.join(sorted(page_names)))
  # Render a manifest file containing metadata about all the extension samples
  samples_manifest = SamplesManifest(_samples_dir, _base_dir, api_manifest)
  samples_manifest.writeToFile(_samples_json)
  # Write zipped versions of the samples listed in the manifest to the
  # filesystem, unless the user has disabled it
  if options.zips:
    modified_zips = samples_manifest.writeZippedSamples()
  else:
    modified_zips = []
  modified_files = RenderPages(page_names, dump_render_tree)
  modified_files.extend(modified_zips)
  if len(modified_files) == 0:
    print "Output files match existing files. No changes made."
  else:
    print ("ATTENTION: EXTENSION DOCS HAVE CHANGED\n" +
           "The following files have been modified and should be checked\n" +
           "into source control (ideally in the same changelist as the\n" +
           "underlying files that resulting in their changing).")
    for f in modified_files:
      print " * %s" % f
  # Hack. Sleep here, otherwise windows doesn't properly close the debug.log
  # and the os.remove will fail with a "Permission denied".
  time.sleep(1)
  debug_log = os.path.normpath(_build_dir + "/" + "debug.log")
  if (os.path.isfile(debug_log)):
    os.remove(debug_log)
  # os.EX_OK is unavailable on some platforms (e.g. Windows); fall back to 0.
  if 'EX_OK' in dir(os):
    return os.EX_OK
  else:
    return 0
# Script entry point; main() returns the process exit status.
if __name__ == '__main__':
  sys.exit(main())
| gpl-3.0 |
wangyum/mxnet | example/ssd/dataset/imdb.py | 56 | 3859 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import numpy as np
import os.path as osp
class Imdb(object):
    """
    Base class for dataset loading
    Parameters:
    ----------
    name : str
        name of dataset
    """
    def __init__(self, name):
        self.name = name
        # category names; populated by subclasses (e.g. from a names file)
        self.classes = []
        self.num_classes = 0
        # per-image identifiers understood by image_path_from_index()
        self.image_set_index = None
        self.num_images = 0
        # ground-truth annotations; loaded by subclasses
        self.labels = None
        self.padding = 0
    def image_path_from_index(self, index):
        """
        load image full path given specified index
        Parameters:
        ----------
        index : int
            index of image requested in dataset
        Returns:
        ----------
        full path of specified image
        """
        # Abstract: subclasses must implement.
        raise NotImplementedError
    def label_from_index(self, index):
        """
        load ground-truth of image given specified index
        Parameters:
        ----------
        index : int
            index of image requested in dataset
        Returns:
        ----------
        object ground-truths, in format
        numpy.array([id, xmin, ymin, xmax, ymax]...)
        """
        # Abstract: subclasses must implement.
        raise NotImplementedError
    def save_imglist(self, fname=None, root=None, shuffle=False):
        """
        save imglist to disk
        Parameters:
        ----------
        fname : str
            saved filename

        root : str, optional
            if given, image paths are written relative to this directory
        shuffle : bool, optional
            if True, lines are written in random order
        """
        def progress_bar(count, total, suffix=''):
            # Draws an in-place text progress bar on stdout ('\r' rewinds).
            import sys
            bar_len = 24
            filled_len = int(round(bar_len * count / float(total)))
            percents = round(100.0 * count / float(total), 1)
            bar = '=' * filled_len + '-' * (bar_len - filled_len)
            sys.stdout.write('[%s] %s%s ...%s\r' % (bar, percents, '%', suffix))
            sys.stdout.flush()
        str_list = []
        for index in range(self.num_images):
            progress_bar(index, self.num_images)
            label = self.label_from_index(index)
            # Images with no ground-truth objects are skipped entirely.
            if label.size < 1:
                continue
            path = self.image_path_from_index(index)
            if root:
                path = osp.relpath(path, root)
            # One line per image: index, 2, label width, the flattened label
            # values (4 decimals), then the image path — the "2" is presumably
            # the list-file header width expected by the consumer; TODO confirm.
            str_list.append('\t'.join([str(index), str(2), str(label.shape[1])] \
                + ["{0:.4f}".format(x) for x in label.ravel()] + [path,]) + '\n')
        if str_list:
            if shuffle:
                import random
                random.shuffle(str_list)
            if not fname:
                fname = self.name + '.lst'
            with open(fname, 'w') as f:
                for line in str_list:
                    f.write(line)
        else:
            # Nothing collected: every label was empty or num_images == 0.
            raise RuntimeError("No image in imdb")
    def _load_class_names(self, filename, dirname):
        """
        load class names from text file
        Parameters:
        ----------
        filename: str
            file stores class names
        dirname: str
            file directory

        Returns:
        ----------
        list of str, one class name per non-blank-stripped line
        """
        full_path = osp.join(dirname, filename)
        classes = []
        with open(full_path, 'r') as f:
            classes = [l.strip() for l in f.readlines()]
        return classes
| apache-2.0 |
andresailer/DIRAC | Core/Base/API.py | 1 | 5421 | """ DIRAC API Base Class """
__RCSID__ = '$Id$'
import pprint
import sys
from DIRAC import gLogger, gConfig, S_OK, S_ERROR
from DIRAC.Core.Security.ProxyInfo import getProxyInfo, formatProxyInfoAsString
from DIRAC.ConfigurationSystem.Client.Helpers.Registry import getDNForUsername
from DIRAC.Core.Utilities.Version import getCurrentVersion
COMPONENT_NAME = 'API'
def _printFormattedDictList(dictList, fields, uniqueField, orderBy):
  """ Will print ordered the supplied field of a list of dictionaries

      :param dictList: list of dicts, each holding every key in `fields`,
                       plus `uniqueField` and `orderBy`
      :param fields: column keys, printed in this order
      :param uniqueField: key whose value identifies a row (used as a
                          secondary sort key)
      :param orderBy: key whose value gives the primary sort order
  """
  orderDict = {}
  fieldWidths = {}
  dictFields = {}
  # First pass: compute the widest value seen per column, group the row
  # identifiers by their orderBy value, and index the rows by uniqueField.
  for myDict in dictList:
    for field in fields:
      fieldValue = myDict[field]
      if field not in fieldWidths:
        fieldWidths[field] = len(str(field))
      if len(str(fieldValue)) > fieldWidths[field]:
        fieldWidths[field] = len(str(fieldValue))
    orderValue = myDict[orderBy]
    if orderValue not in orderDict:
      orderDict[orderValue] = []
    orderDict[orderValue].append(myDict[uniqueField])
    dictFields[myDict[uniqueField]] = myDict
  # Header row: each column is left-justified to its width plus 5 spaces.
  headString = "%s" % fields[0].ljust(fieldWidths[fields[0]] + 5)
  for field in fields[1:]:
    headString = "%s %s" % (headString, field.ljust(fieldWidths[field] + 5))
  print headString
  # Body rows: primary sort by orderBy value, secondary by uniqueField.
  for orderValue in sorted(orderDict.keys()):
    uniqueFields = orderDict[orderValue]
    for uniqueField in sorted(uniqueFields):
      myDict = dictFields[uniqueField]
      outStr = "%s" % str(myDict[fields[0]]).ljust(fieldWidths[fields[0]] + 5)
      for field in fields[1:]:
        outStr = "%s %s" % (outStr, str(myDict[field]).ljust(fieldWidths[field] + 5))
      print outStr
# TODO: some of these can just be functions, and moved out of here
class API(object):
  """ An utilities class for APIs
  """
  #############################################################################
  def __init__(self):
    """ c'tor
    """
    # Expose the module-level formatter as a bound attribute for callers.
    self._printFormattedDictList = _printFormattedDictList
    self.log = gLogger.getSubLogger(COMPONENT_NAME)
    self.section = COMPONENT_NAME
    self.pPrint = pprint.PrettyPrinter()
    # Global error dictionary
    self.errorDict = {}
    self.setup = gConfig.getValue('/DIRAC/Setup', 'Unknown')
    self.diracInfo = getCurrentVersion()['Value']
  #############################################################################
  def __getstate__(self):
    """ Return a copied dictionary containing all the attributes of the API.
        Called when pickling the object. Also used in copy.deepcopy.
        :return: dictionary of attributes
    """
    from DIRAC.FrameworkSystem.private.standardLogging.Logging import Logging
    state = dict(self.__dict__)
    # Replace the Logging instance by its name because it is not copyable
    # because of the thread locks
    if isinstance(state['log'], Logging):
      state['log'] = state['log'].getSubName()
    return state
  def __setstate__(self, state):
    """ Parameter the Job with an attributes dictionary.
        Called when un-pickling the object.
        :params state: attributes dictionary
    """
    self.__dict__.update(state)
    # Build the Logging instance again because it can not be in the dictionary
    # due to the thread locks
    if isinstance(state['log'], basestring):
      self.log = gLogger.getSubLogger(state['log'])
  #############################################################################
  def _errorReport(self, error, message=None):
    """Internal function to return errors and exit with an S_ERROR()

       :param error: text logged as a warning
       :param message: text wrapped in the returned S_ERROR (defaults to error)
    """
    if not message:
      message = error
    self.log.warn(error)
    return S_ERROR(message)
  #############################################################################
  def _prettyPrint(self, myObject):
    """Helper function to pretty print an object. """
    print self.pPrint.pformat(myObject)
  #############################################################################
  def _getCurrentUser(self):
    # Resolve the username from the local proxy; returns S_OK(username) or
    # an S_ERROR produced via _errorReport() at each failure point.
    res = getProxyInfo(False, False)
    if not res['OK']:
      return self._errorReport('No proxy found in local environment', res['Message'])
    proxyInfo = res['Value']
    gLogger.debug(formatProxyInfoAsString(proxyInfo))
    if 'group' not in proxyInfo:
      # NOTE(review): res['OK'] is True here, so res['Message'] may be absent
      # -- confirm S_OK results always carry a 'Message' key before relying on it.
      return self._errorReport('Proxy information does not contain the group', res['Message'])
    res = getDNForUsername(proxyInfo['username'])
    if not res['OK']:
      return self._errorReport('Failed to get proxies for user', res['Message'])
    return S_OK(proxyInfo['username'])
  #############################################################################
  def _reportError(self, message, name='', **kwargs):
    """Internal Function. Gets caller method name and arguments, formats the
       information and adds an error to the global error dictionary to be
       returned to the user.

       :param message: error description
       :param name: class name to report; defaults to this module's __name__
       :param kwargs: caller arguments; truthy values are included in the report
    """
    className = name
    if not name:
      className = __name__
    # The caller's function name, one frame up the stack.
    methodName = sys._getframe(1).f_code.co_name
    arguments = []
    for key in kwargs:
      if kwargs[key]:
        arguments.append('%s = %s ( %s )' % (key, kwargs[key], type(kwargs[key])))
    finalReport = """Problem with %s.%s() call:
    Arguments: %s
    Message: %s
    """ % (className, methodName, '/'.join(arguments), message)
    # Accumulate reports per calling method in the instance-wide errorDict.
    if methodName in self.errorDict:
      tmp = self.errorDict[methodName]
      tmp.append(finalReport)
      self.errorDict[methodName] = tmp
    else:
      self.errorDict[methodName] = [finalReport]
    self.log.verbose(finalReport)
    return S_ERROR(finalReport)
| gpl-3.0 |
svogl/mbed-os | tools/host_tests/host_tests_plugins/module_copy_firefox.py | 128 | 2907 | """
mbed SDK
Copyright (c) 2011-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from os.path import join, basename
from host_test_plugins import HostTestPluginBase
class HostTestPluginCopyMethod_Firefox(HostTestPluginBase):
    """ Copy plugin that stores a firmware image on the target disk by
        driving a Firefox instance (via Selenium) whose download directory
        is pointed at the destination disk.
    """

    def file_store_firefox(self, file_path, dest_disk):
        """ Download file_path with Firefox, saving it into dest_disk.

        :param file_path: URL/path of the image to fetch
        :param dest_disk: directory Firefox saves downloads into
        :return: True on success, False on any Selenium/browser failure
        """
        try:
            from selenium import webdriver
            profile = webdriver.FirefoxProfile()
            profile.set_preference('browser.download.folderList', 2)    # 2 == use custom download dir
            profile.set_preference('browser.download.manager.showWhenStarting', False)
            profile.set_preference('browser.download.dir', dest_disk)
            # Save binary payloads without prompting the user.
            profile.set_preference('browser.helperApps.neverAsk.saveToDisk', 'application/octet-stream')
            # Launch browser with profile and get file
            browser = webdriver.Firefox(profile)
            browser.get(file_path)
            browser.close()
        except Exception:
            # Narrowed from a bare except: still best-effort, but no longer
            # swallows SystemExit/KeyboardInterrupt.
            return False
        return True

    # Plugin interface
    name = 'HostTestPluginCopyMethod_Firefox'
    type = 'CopyMethod'
    capabilities = ['firefox']
    required_parameters = ['image_path', 'destination_disk']

    def setup(self, *args, **kwargs):
        """ Configure plugin, this function should be called before plugin execute() method is used.

        :return: True if selenium is importable, False otherwise
        """
        try:
            from selenium import webdriver    # availability check only
        except ImportError as e:
            # Python 3 compatible form (was the Py2-only "except ImportError, e").
            self.print_plugin_error("Error: firefox copy method requires selenium library. %s"% e)
            return False
        return True

    def execute(self, capabilitity, *args, **kwargs):
        """ Executes capability by name.
            Each capability may directly just call some command line
            program or execute building pythonic function

        :param capabilitity: capability name (spelling kept for
                             backward compatibility with existing callers)
        :return: True if the copy succeeded, False otherwise
        """
        result = False
        if self.check_parameters(capabilitity, *args, **kwargs) is True:
            image_path = kwargs['image_path']
            destination_disk = kwargs['destination_disk']
            # Prepare correct command line parameter values
            image_base_name = basename(image_path)
            destination_path = join(destination_disk, image_base_name)
            if capabilitity == 'firefox':
                # Bug fix: the return value used to be discarded, so the
                # plugin always reported failure even after a successful copy.
                result = self.file_store_firefox(image_path, destination_path)
        return result
def load_plugin():
    """Plugin-registry entry point.

    Returns a fresh instance of the Firefox copy-method plugin defined in
    this module.
    """
    return HostTestPluginCopyMethod_Firefox()
| apache-2.0 |
kevinmel2000/brython | www/src/Lib/test/unittests/test_sys_setprofile.py | 177 | 11355 | import gc
import pprint
import sys
import unittest
from test import support
class TestGetProfile(unittest.TestCase):
    """sys.getprofile() must report exactly what sys.setprofile() installed."""

    def setUp(self):
        # Start each test with no profiler installed.
        sys.setprofile(None)

    def tearDown(self):
        # Never leak a profile hook into other tests.
        sys.setprofile(None)

    def test_empty(self):
        # Nothing installed -> getprofile() reports None.
        self.assertIsNone(sys.getprofile())

    def test_setget(self):
        def probe(*unused_args):
            pass
        sys.setprofile(probe)
        # The very same object must be handed back, not a copy/wrapper.
        self.assertIs(sys.getprofile(), probe)
class HookWatcher:
    """Profile hook that logs (frame-number, event, ident) tuples.

    Frame numbers are assigned in order of first appearance, so for a
    linear call chain they coincide with call depth.
    """

    def __init__(self):
        self.frames = []   # frames in order of first appearance
        self.events = []   # recorded (frameno, event, ident(frame)) tuples

    def callback(self, frame, event, arg):
        # Registered via sys.setprofile(); only Python-level events are
        # recorded — c_call/c_return/c_exception are ignored.
        if event in ("call", "return", "exception"):
            self.add_event(event, frame)

    def add_event(self, event, frame=None):
        """Add an event to the log."""
        if frame is None:
            frame = sys._getframe(1)
        if frame in self.frames:
            frameno = self.frames.index(frame)
        else:
            frameno = len(self.frames)
            self.frames.append(frame)
        self.events.append((frameno, event, ident(frame)))

    def get_events(self):
        """Return the log, minus events generated by add_event() itself."""
        uninteresting = (ident(self.add_event.__func__), ident(ident))
        self.frames = None
        return [evt for evt in self.events if evt[2] not in uninteresting]
class ProfileSimulator(HookWatcher):
    # Stricter watcher: routes each profiler event through the `dispatch`
    # table below, keeps a simulated call stack, and fails the owning test
    # case if an 'exception' event is ever delivered (profilers should only
    # see call/return plus the C-level events).
    def __init__(self, testcase):
        self.testcase = testcase
        self.stack = []
        HookWatcher.__init__(self)
    def callback(self, frame, event, arg):
        # Callback registered with sys.setprofile()/sys.settrace()
        self.dispatch[event](self, frame)
    def trace_call(self, frame):
        # Log the call and push the frame on the simulated stack.
        self.add_event('call', frame)
        self.stack.append(frame)
    def trace_return(self, frame):
        # Log the return and pop the matching frame.
        self.add_event('return', frame)
        self.stack.pop()
    def trace_exception(self, frame):
        self.testcase.fail(
            "the profiler should never receive exception events")
    def trace_pass(self, frame):
        # C-level events are deliberately ignored.
        pass
    # Maps profiler event names to unbound handler methods; callback()
    # passes `self` explicitly, so plain functions work here.
    dispatch = {
        'call': trace_call,
        'exception': trace_exception,
        'return': trace_return,
        'c_call': trace_pass,
        'c_return': trace_pass,
        'c_exception': trace_pass,
        }
class TestCaseBase(unittest.TestCase):
    """Shared harness: run a callable under a watcher (built by the
    subclass's new_watcher()) and compare the captured event log against
    an expected list."""

    def check_events(self, callable, expected):
        observed = capture_events(callable, self.new_watcher())
        if observed == expected:
            return
        self.fail("Expected events:\n%s\nReceived events:\n%s"
                  % (pprint.pformat(expected), pprint.pformat(observed)))
class ProfileHookTestCase(TestCaseBase):
    """Checks the raw event stream delivered to a sys.setprofile() hook.

    Expected logs are lists of (frame-number, event, ident) tuples, where
    the frame number is the order of first appearance in HookWatcher.frames
    (equal to call depth for these linear call chains).
    """
    def new_watcher(self):
        return HookWatcher()
    def test_simple(self):
        def f(p):
            pass
        f_ident = ident(f)
        self.check_events(f, [(1, 'call', f_ident),
                              (1, 'return', f_ident),
                              ])
    def test_exception(self):
        # An uncaught exception still produces a 'return' event, not an
        # 'exception' event, at the profiler level.
        def f(p):
            1/0
        f_ident = ident(f)
        self.check_events(f, [(1, 'call', f_ident),
                              (1, 'return', f_ident),
                              ])
    def test_caught_exception(self):
        def f(p):
            try: 1/0
            except: pass
        f_ident = ident(f)
        self.check_events(f, [(1, 'call', f_ident),
                              (1, 'return', f_ident),
                              ])
    def test_caught_nested_exception(self):
        # NOTE(review): body is identical to test_caught_exception above;
        # the intended "nested" variant appears to have been lost.
        def f(p):
            try: 1/0
            except: pass
        f_ident = ident(f)
        self.check_events(f, [(1, 'call', f_ident),
                              (1, 'return', f_ident),
                              ])
    def test_nested_exception(self):
        def f(p):
            1/0
        f_ident = ident(f)
        self.check_events(f, [(1, 'call', f_ident),
                              # This isn't what I expected:
                              # (0, 'exception', protect_ident),
                              # I expected this again:
                              (1, 'return', f_ident),
                              ])
    def test_exception_in_except_clause(self):
        def f(p):
            1/0
        def g(p):
            try:
                f(p)
            except:
                try: f(p)
                except: pass
        f_ident = ident(f)
        g_ident = ident(g)
        self.check_events(g, [(1, 'call', g_ident),
                              (2, 'call', f_ident),
                              (2, 'return', f_ident),
                              (3, 'call', f_ident),
                              (3, 'return', f_ident),
                              (1, 'return', g_ident),
                              ])
    def test_exception_propogation(self):
        # A finally clause runs (and may emit events) before the frame's
        # 'return' event is delivered.
        def f(p):
            1/0
        def g(p):
            try: f(p)
            finally: p.add_event("falling through")
        f_ident = ident(f)
        g_ident = ident(g)
        self.check_events(g, [(1, 'call', g_ident),
                              (2, 'call', f_ident),
                              (2, 'return', f_ident),
                              (1, 'falling through', g_ident),
                              (1, 'return', g_ident),
                              ])
    def test_raise_twice(self):
        def f(p):
            try: 1/0
            except: 1/0
        f_ident = ident(f)
        self.check_events(f, [(1, 'call', f_ident),
                              (1, 'return', f_ident),
                              ])
    def test_raise_reraise(self):
        def f(p):
            try: 1/0
            except: raise
        f_ident = ident(f)
        self.check_events(f, [(1, 'call', f_ident),
                              (1, 'return', f_ident),
                              ])
    def test_raise(self):
        def f(p):
            raise Exception()
        f_ident = ident(f)
        self.check_events(f, [(1, 'call', f_ident),
                              (1, 'return', f_ident),
                              ])
    def test_distant_exception(self):
        # An exception unwinding through a deep call chain yields a 'return'
        # event for every frame, innermost first.
        def f():
            1/0
        def g():
            f()
        def h():
            g()
        def i():
            h()
        def j(p):
            i()
        f_ident = ident(f)
        g_ident = ident(g)
        h_ident = ident(h)
        i_ident = ident(i)
        j_ident = ident(j)
        self.check_events(j, [(1, 'call', j_ident),
                              (2, 'call', i_ident),
                              (3, 'call', h_ident),
                              (4, 'call', g_ident),
                              (5, 'call', f_ident),
                              (5, 'return', f_ident),
                              (4, 'return', g_ident),
                              (3, 'return', h_ident),
                              (2, 'return', i_ident),
                              (1, 'return', j_ident),
                              ])
    def test_generator(self):
        def f():
            for i in range(2):
                yield i
        def g(p):
            for i in f():
                pass
        f_ident = ident(f)
        g_ident = ident(g)
        self.check_events(g, [(1, 'call', g_ident),
                              # call the iterator twice to generate values
                              (2, 'call', f_ident),
                              (2, 'return', f_ident),
                              (2, 'call', f_ident),
                              (2, 'return', f_ident),
                              # once more; returns end-of-iteration with
                              # actually raising an exception
                              (2, 'call', f_ident),
                              (2, 'return', f_ident),
                              (1, 'return', g_ident),
                              ])
    def test_stop_iteration(self):
        def f():
            for i in range(2):
                yield i
            raise StopIteration
        def g(p):
            for i in f():
                pass
        f_ident = ident(f)
        g_ident = ident(g)
        self.check_events(g, [(1, 'call', g_ident),
                              # call the iterator twice to generate values
                              (2, 'call', f_ident),
                              (2, 'return', f_ident),
                              (2, 'call', f_ident),
                              (2, 'return', f_ident),
                              # once more to hit the raise:
                              (2, 'call', f_ident),
                              (2, 'return', f_ident),
                              (1, 'return', g_ident),
                              ])
class ProfileSimulatorTestCase(TestCaseBase):
    """Same event-stream checks as ProfileHookTestCase, but run through
    ProfileSimulator, whose trace_exception handler fails the test outright
    if the profiler ever delivers an 'exception' event.
    """
    def new_watcher(self):
        return ProfileSimulator(self)
    def test_simple(self):
        def f(p):
            pass
        f_ident = ident(f)
        self.check_events(f, [(1, 'call', f_ident),
                              (1, 'return', f_ident),
                              ])
    def test_basic_exception(self):
        # Even an uncaught ZeroDivisionError yields only call/return events.
        def f(p):
            1/0
        f_ident = ident(f)
        self.check_events(f, [(1, 'call', f_ident),
                              (1, 'return', f_ident),
                              ])
    def test_caught_exception(self):
        def f(p):
            try: 1/0
            except: pass
        f_ident = ident(f)
        self.check_events(f, [(1, 'call', f_ident),
                              (1, 'return', f_ident),
                              ])
    def test_distant_exception(self):
        # Unwinding through a deep chain: one 'return' per frame, innermost first.
        def f():
            1/0
        def g():
            f()
        def h():
            g()
        def i():
            h()
        def j(p):
            i()
        f_ident = ident(f)
        g_ident = ident(g)
        h_ident = ident(h)
        i_ident = ident(i)
        j_ident = ident(j)
        self.check_events(j, [(1, 'call', j_ident),
                              (2, 'call', i_ident),
                              (3, 'call', h_ident),
                              (4, 'call', g_ident),
                              (5, 'call', f_ident),
                              (5, 'return', f_ident),
                              (4, 'return', g_ident),
                              (3, 'return', h_ident),
                              (2, 'return', i_ident),
                              (1, 'return', j_ident),
                              ])
def ident(function):
    """Return a (first-line-number, name) pair identifying *function*.

    Accepts either a frame object (which exposes ``f_code``) or a plain
    function object (which exposes ``__code__``).
    """
    code = function.f_code if hasattr(function, "f_code") else function.__code__
    return code.co_firstlineno, code.co_name


def protect(f, p):
    """Invoke ``f(p)``, swallowing any exception it raises."""
    try:
        f(p)
    except:
        pass


# Identity of protect() itself, so its events can be filtered from logs.
protect_ident = ident(protect)
def capture_events(callable, p=None):
    # Run *callable* under watcher *p*'s profile hook and return its trimmed
    # event log: the slice [1:-1] drops the call/return events of the
    # surrounding protect() wrapper itself.
    if p is None:
        p = HookWatcher()
    # Disable the garbage collector. This prevents __del__s from showing up in
    # traces.
    old_gc = gc.isenabled()
    gc.disable()
    try:
        sys.setprofile(p.callback)
        protect(callable, p)
        sys.setprofile(None)
    finally:
        # Restore GC only if it was enabled when we started.
        if old_gc:
            gc.enable()
    return p.get_events()[1:-1]
def show_events(callable):
    """Debug helper: pretty-print the events captured while running *callable*."""
    pprint.pprint(capture_events(callable))
def test_main():
    # Regression-test entry point: run all three test case classes through
    # test.support's unittest runner.
    support.run_unittest(
        TestGetProfile,
        ProfileHookTestCase,
        ProfileSimulatorTestCase
    )
if __name__ == "__main__":
    test_main()
| bsd-3-clause |
sarakha63/persomov | couchpotato/core/media/movie/providers/trailer/youtube_dl/extractor/radiofrance.py | 163 | 2089 | # coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
class RadioFranceIE(InfoExtractor):
    """Extractor for "radiovisions" audio pages on maison.radiofrance.fr.

    Scrapes title/description/uploader from the page HTML and the audio
    format list from the embedded jPlayer configuration.
    """
    _VALID_URL = r'^https?://maison\.radiofrance\.fr/radiovisions/(?P<id>[^?#]+)'
    IE_NAME = 'radiofrance'
    _TEST = {
        'url': 'http://maison.radiofrance.fr/radiovisions/one-one',
        'md5': 'bdbb28ace95ed0e04faab32ba3160daf',
        'info_dict': {
            'id': 'one-one',
            'ext': 'ogg',
            "title": "One to one",
            "description": "Plutôt que d'imaginer la radio de demain comme technologie ou comme création de contenu, je veux montrer que quelles que soient ses évolutions, j'ai l'intime conviction que la radio continuera d'être un grand média de proximité pour les auditeurs.",
            "uploader": "Thomas Hercouët",
        },
    }
    def _real_extract(self, url):
        # The URL is re-matched here to pull out the named 'id' group.
        m = re.match(self._VALID_URL, url)
        video_id = m.group('id')
        webpage = self._download_webpage(url, video_id)
        # Title is mandatory; description/uploader are best-effort (fatal=False).
        title = self._html_search_regex(r'<h1>(.*?)</h1>', webpage, 'title')
        description = self._html_search_regex(
            r'<div class="bloc_page_wrapper"><div class="text">(.*?)</div>',
            webpage, 'description', fatal=False)
        uploader = self._html_search_regex(
            r'<div class="credit">&nbsp;&copy; (.*?)</div>',
            webpage, 'uploader', fatal=False)
        # jPlayer data-source attribute holds "quality: 'url'" pairs.
        formats_str = self._html_search_regex(
            r'class="jp-jplayer[^"]*" data-source="([^"]+)">',
            webpage, 'audio URLs')
        formats = [
            {
                'format_id': fm[0],
                'url': fm[1],
                'vcodec': 'none',   # audio-only content
                'preference': i,    # later entries in the list are preferred
            }
            for i, fm in
            enumerate(re.findall(r"([a-z0-9]+)\s*:\s*'([^']+)'", formats_str))
        ]
        self._sort_formats(formats)
        return {
            'id': video_id,
            'title': title,
            'formats': formats,
            'description': description,
            'uploader': uploader,
        }
| gpl-3.0 |
RuudBurger/CouchPotatoServer | libs/guessit/transfo/guess_idnumber.py | 102 | 2224 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# GuessIt - A library for guessing information from filenames
# Copyright (c) 2013 Nicolas Wack <wackou@gmail.com>
#
# GuessIt is free software; you can redistribute it and/or modify it under
# the terms of the Lesser GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# GuessIt is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Lesser GNU General Public License for more details.
#
# You should have received a copy of the Lesser GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import unicode_literals
from guessit.transfo import SingleNodeGuesser
from guessit.patterns import find_properties
import re
import logging
log = logging.getLogger(__name__)
def guess_properties(string):
    """Guess the first known property occurring in *string*.

    Returns ``({property_name: value}, (start, end))`` for the first match
    reported by find_properties(), or ``(None, None)`` when there is none.
    """
    found = find_properties(string)
    if not found:
        return None, None
    prop, value, pos, end = found[0]
    return {prop: value}, (pos, end)
# A run of 10+ alphanumeric/dash characters is a candidate id number.
_idnum = re.compile(r'(?P<idNumber>[a-zA-Z0-9-]{10,})') # 1.0, (0, 0))


def guess_idnumber(string):
    """Guess whether *string* contains a hash-like id number.

    Returns ``({'idNumber': token}, span)`` when a candidate alternates
    frequently between character classes, otherwise ``(None, None)``.
    """
    match = _idnum.search(string)
    if match is None:
        return None, None

    result = match.groupdict()
    token = result['idNumber']

    DIGIT, LETTER, OTHER = 0, 1, 2

    def classify(char):
        if char in '0123456789':
            return DIGIT
        if char in 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ':
            return LETTER
        return OTHER

    # Count transitions between character classes along the token.
    previous = LETTER
    switches = 0
    for char in token:
        current = classify(char)
        if current != previous:
            switches += 1
        previous = current

    # only return the result as probable if we alternate often between
    # char type (more likely for hash values than for common words)
    if float(switches) / len(token) > 0.4:
        return result, match.span()
    return None, None
def process(mtree):
    # Transformation entry point: apply guess_idnumber across the match tree
    # via SingleNodeGuesser with a confidence weight of 0.4.
    SingleNodeGuesser(guess_idnumber, 0.4, log).process(mtree)
| gpl-3.0 |
mitocw/edx-platform | pavelib/paver_tests/test_assets.py | 4 | 15231 | """Unit tests for the Paver asset tasks."""
import os
from unittest import TestCase
import ddt
import paver.tasks
import six
from mock import patch
from paver.easy import call_task, path
from watchdog.observers import Observer
from pavelib.assets import COLLECTSTATIC_LOG_DIR_ARG, collect_assets
from ..utils.envs import Env
from .utils import PaverTestCase
ROOT_PATH = path(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
TEST_THEME_DIR = ROOT_PATH / "common/test/test-theme"
@ddt.ddt
class TestPaverAssetTasks(PaverTestCase):
    """
    Test the Paver asset tasks.
    """
    @ddt.data(
        [""],
        ["--force"],
        ["--debug"],
        ["--system=lms"],
        ["--system=lms --force"],
        ["--system=studio"],
        ["--system=studio --force"],
        ["--system=lms,studio"],
        ["--system=lms,studio --force"],
    )
    @ddt.unpack
    def test_compile_sass(self, options):
        """
        Test the "compile_sass" task.

        Runs the task with each combination of --system/--force/--debug and
        checks the exact shell commands recorded by PaverTestCase.
        """
        parameters = options.split(" ")
        # A system is compiled unless the other one was requested explicitly,
        # so "--system=lms,studio" (not a recognised single flag) builds both.
        system = []
        if '--system=studio' not in parameters:
            system += ['lms']
        if '--system=lms' not in parameters:
            system += ['studio']
        debug = '--debug' in parameters
        force = '--force' in parameters
        self.reset_task_messages()
        call_task('pavelib.assets.compile_sass', options={'system': system, 'debug': debug, 'force': force})
        # Build the expected command list: common assets first, then per-system.
        expected_messages = []
        if force:
            expected_messages.append('rm -rf common/static/css/*.css')
        expected_messages.append('libsass common/static/sass')
        if "lms" in system:
            if force:
                expected_messages.append(u'rm -rf lms/static/css/*.css')
            expected_messages.append(u'libsass lms/static/sass')
            expected_messages.append(
                u'rtlcss lms/static/css/bootstrap/lms-main.css lms/static/css/bootstrap/lms-main-rtl.css'
            )
            expected_messages.append(
                u'rtlcss lms/static/css/discussion/lms-discussion-bootstrap.css'
                u' lms/static/css/discussion/lms-discussion-bootstrap-rtl.css'
            )
            if force:
                expected_messages.append(u'rm -rf lms/static/certificates/css/*.css')
            expected_messages.append(u'libsass lms/static/certificates/sass')
        if "studio" in system:
            if force:
                expected_messages.append(u'rm -rf cms/static/css/*.css')
            expected_messages.append(u'libsass cms/static/sass')
            expected_messages.append(
                u'rtlcss cms/static/css/bootstrap/studio-main.css cms/static/css/bootstrap/studio-main-rtl.css'
            )
        # Order-insensitive comparison of recorded vs expected commands.
        six.assertCountEqual(self, self.task_messages, expected_messages)
@ddt.ddt
class TestPaverThemeAssetTasks(PaverTestCase):
    """
    Test the Paver asset tasks.

    Same matrix as TestPaverAssetTasks, but with a test theme configured, so
    theme-specific sass/rtlcss commands are expected in addition to the
    default ones.
    """
    @ddt.data(
        [""],
        ["--force"],
        ["--debug"],
        ["--system=lms"],
        ["--system=lms --force"],
        ["--system=studio"],
        ["--system=studio --force"],
        ["--system=lms,studio"],
        ["--system=lms,studio --force"],
    )
    @ddt.unpack
    def test_compile_theme_sass(self, options):
        """
        Test the "compile_sass" task.
        """
        parameters = options.split(" ")
        # A system is compiled unless the other one was requested explicitly.
        system = []
        if '--system=studio' not in parameters:
            system += ['lms']
        if "--system=lms" not in parameters:
            system += ['studio']
        debug = '--debug' in parameters
        force = '--force' in parameters
        self.reset_task_messages()
        call_task(
            'pavelib.assets.compile_sass',
            options=dict(
                system=system,
                debug=debug,
                force=force,
                theme_dirs=[TEST_THEME_DIR.dirname()],
                themes=[TEST_THEME_DIR.basename()]
            ),
        )
        expected_messages = []
        if force:
            expected_messages.append(u'rm -rf common/static/css/*.css')
        expected_messages.append(u'libsass common/static/sass')
        if 'lms' in system:
            # Theme output dir is created, theme sass is compiled first,
            # then the default (non-theme) lms sass.
            expected_messages.append(u'mkdir_p ' + repr(TEST_THEME_DIR / 'lms/static/css'))
            if force:
                expected_messages.append(
                    u'rm -rf {test_theme_dir}/lms/static/css/*.css'.format(test_theme_dir=str(TEST_THEME_DIR))
                )
            expected_messages.append("libsass lms/static/sass")
            expected_messages.append(
                u'rtlcss {test_theme_dir}/lms/static/css/bootstrap/lms-main.css'
                u' {test_theme_dir}/lms/static/css/bootstrap/lms-main-rtl.css'.format(
                    test_theme_dir=str(TEST_THEME_DIR),
                )
            )
            expected_messages.append(
                u'rtlcss {test_theme_dir}/lms/static/css/discussion/lms-discussion-bootstrap.css'
                u' {test_theme_dir}/lms/static/css/discussion/lms-discussion-bootstrap-rtl.css'.format(
                    test_theme_dir=str(TEST_THEME_DIR),
                )
            )
            if force:
                expected_messages.append(
                    u'rm -rf {test_theme_dir}/lms/static/css/*.css'.format(test_theme_dir=str(TEST_THEME_DIR))
                )
            expected_messages.append(
                u'libsass {test_theme_dir}/lms/static/sass'.format(test_theme_dir=str(TEST_THEME_DIR))
            )
            if force:
                expected_messages.append(u'rm -rf lms/static/css/*.css')
            expected_messages.append(u'libsass lms/static/sass')
            expected_messages.append(
                u'rtlcss lms/static/css/bootstrap/lms-main.css lms/static/css/bootstrap/lms-main-rtl.css'
            )
            expected_messages.append(
                u'rtlcss lms/static/css/discussion/lms-discussion-bootstrap.css'
                u' lms/static/css/discussion/lms-discussion-bootstrap-rtl.css'
            )
            if force:
                expected_messages.append(u'rm -rf lms/static/certificates/css/*.css')
            expected_messages.append(u'libsass lms/static/certificates/sass')
        if "studio" in system:
            expected_messages.append(u'mkdir_p ' + repr(TEST_THEME_DIR / 'cms/static/css'))
            if force:
                expected_messages.append(
                    u'rm -rf {test_theme_dir}/cms/static/css/*.css'.format(test_theme_dir=str(TEST_THEME_DIR))
                )
            expected_messages.append(u'libsass cms/static/sass')
            expected_messages.append(
                u'rtlcss {test_theme_dir}/cms/static/css/bootstrap/studio-main.css'
                u' {test_theme_dir}/cms/static/css/bootstrap/studio-main-rtl.css'.format(
                    test_theme_dir=str(TEST_THEME_DIR),
                )
            )
            if force:
                expected_messages.append(
                    u'rm -rf {test_theme_dir}/cms/static/css/*.css'.format(test_theme_dir=str(TEST_THEME_DIR))
                )
            expected_messages.append(
                u'libsass {test_theme_dir}/cms/static/sass'.format(test_theme_dir=str(TEST_THEME_DIR))
            )
            if force:
                expected_messages.append(u'rm -rf cms/static/css/*.css')
            expected_messages.append(u'libsass cms/static/sass')
            expected_messages.append(
                u'rtlcss cms/static/css/bootstrap/studio-main.css cms/static/css/bootstrap/studio-main-rtl.css'
            )
        # Order-insensitive comparison of recorded vs expected commands.
        six.assertCountEqual(self, self.task_messages, expected_messages)
class TestPaverWatchAssetTasks(TestCase):
    """
    Test the Paver watch asset tasks.

    SassWatcher.register, Observer.start and execute_webpack_watch are
    patched, so only the registration arguments are checked — no real
    filesystem watching or webpack process is started.
    """
    def setUp(self):
        # Directories the SassWatcher is expected to register when no theme
        # is configured.
        self.expected_sass_directories = [
            path('common/static/sass'),
            path('common/static'),
            path('node_modules/@edx'),
            path('node_modules'),
            path('node_modules/edx-pattern-library/node_modules'),
            path('lms/static/sass/partials'),
            path('lms/static/sass'),
            path('lms/static/certificates/sass'),
            path('cms/static/sass'),
            path('cms/static/sass/partials'),
        ]
        # Reset the options that paver stores in a global variable (thus polluting tests)
        if 'pavelib.assets.watch_assets' in paver.tasks.environment.options:
            del paver.tasks.environment.options['pavelib.assets.watch_assets']
        super(TestPaverWatchAssetTasks, self).setUp()
    def tearDown(self):
        self.expected_sass_directories = []
        super(TestPaverWatchAssetTasks, self).tearDown()
    def test_watch_assets(self):
        """
        Test the "compile_sass" task.
        """
        with patch('pavelib.assets.SassWatcher.register') as mock_register:
            with patch('pavelib.assets.Observer.start'):
                with patch('pavelib.assets.execute_webpack_watch') as mock_webpack:
                    call_task(
                        'pavelib.assets.watch_assets',
                        options={"background": True},
                    )
                    # Two watcher registrations and one webpack watch launch.
                    self.assertEqual(mock_register.call_count, 2)
                    self.assertEqual(mock_webpack.call_count, 1)
                    # First registration: (observer, [sass dirs, ...]).
                    sass_watcher_args = mock_register.call_args_list[0][0]
                    self.assertIsInstance(sass_watcher_args[0], Observer)
                    self.assertIsInstance(sass_watcher_args[1], list)
                    six.assertCountEqual(self, sass_watcher_args[1], self.expected_sass_directories)
    def test_watch_theme_assets(self):
        """
        Test the Paver watch asset tasks with theming enabled.
        """
        # With a theme configured, its lms/cms sass dirs are watched as well.
        self.expected_sass_directories.extend([
            path(TEST_THEME_DIR) / 'lms/static/sass',
            path(TEST_THEME_DIR) / 'lms/static/sass/partials',
            path(TEST_THEME_DIR) / 'cms/static/sass',
            path(TEST_THEME_DIR) / 'cms/static/sass/partials',
        ])
        with patch('pavelib.assets.SassWatcher.register') as mock_register:
            with patch('pavelib.assets.Observer.start'):
                with patch('pavelib.assets.execute_webpack_watch') as mock_webpack:
                    call_task(
                        'pavelib.assets.watch_assets',
                        options={
                            "background": True,
                            "theme_dirs": [TEST_THEME_DIR.dirname()],
                            "themes": [TEST_THEME_DIR.basename()]
                        },
                    )
                    self.assertEqual(mock_register.call_count, 2)
                    self.assertEqual(mock_webpack.call_count, 1)
                    sass_watcher_args = mock_register.call_args_list[0][0]
                    self.assertIsInstance(sass_watcher_args[0], Observer)
                    self.assertIsInstance(sass_watcher_args[1], list)
                    six.assertCountEqual(self, sass_watcher_args[1], self.expected_sass_directories)
@ddt.ddt
class TestCollectAssets(PaverTestCase):
    """
    Test the collectstatic process call.
    ddt data is organized thusly:
      * debug: whether or not collect_assets is called with the debug flag
      * specified_log_location: used when collect_assets is called with a specific
        log location for collectstatic output
      * expected_log_location: the expected string to be used for piping collectstatic logs
    """
    @ddt.data(
        [{
            "collect_log_args": {},  # Test for default behavior
            "expected_log_location": "> /dev/null"
        }],
        [{
            "collect_log_args": {COLLECTSTATIC_LOG_DIR_ARG: "/foo/bar"},
            "expected_log_location": "> /foo/bar/lms-collectstatic.log"
        }],  # can use specified log location
        [{
            "systems": ["lms", "cms"],
            "collect_log_args": {},
            "expected_log_location": "> /dev/null"
        }],  # multiple systems can be called
    )
    @ddt.unpack
    def test_collect_assets(self, options):
        """
        Ensure commands sent to the environment for collect_assets are as expected
        """
        specified_log_loc = options.get("collect_log_args", {})
        specified_log_dict = specified_log_loc
        log_loc = options.get("expected_log_location", "> /dev/null")
        systems = options.get("systems", ["lms"])
        # NOTE(review): specified_log_loc defaults to {} above, so this branch
        # is only reachable if a ddt case explicitly maps "collect_log_args"
        # to None — none currently does; looks like dead code, confirm.
        if specified_log_loc is None:
            collect_assets(
                systems,
                Env.DEVSTACK_SETTINGS
            )
        else:
            collect_assets(
                systems,
                Env.DEVSTACK_SETTINGS,
                **specified_log_dict
            )
        self._assert_correct_messages(log_location=log_loc, systems=systems)
    def test_collect_assets_debug(self):
        """
        When the method is called specifically with None for the collectstatic log dir, then
        it should run in debug mode and pipe to console.
        """
        expected_log_loc = ""
        systems = ["lms"]
        kwargs = {COLLECTSTATIC_LOG_DIR_ARG: None}
        collect_assets(systems, Env.DEVSTACK_SETTINGS, **kwargs)
        self._assert_correct_messages(log_location=expected_log_loc, systems=systems)
    def _assert_correct_messages(self, log_location, systems):
        """
        Asserts that the expected commands were run.
        We just extract the pieces we care about here instead of specifying an
        exact command, so that small arg changes don't break this test.
        """
        # One recorded command per system, in order.
        for i, sys in enumerate(systems):
            msg = self.task_messages[i]
            self.assertTrue(msg.startswith(u'python manage.py {}'.format(sys)))
            self.assertIn(' collectstatic ', msg)
            self.assertIn('--settings={}'.format(Env.DEVSTACK_SETTINGS), msg)
            self.assertTrue(msg.endswith(' {}'.format(log_location)))
@ddt.ddt
class TestUpdateAssetsTask(PaverTestCase):
    """
    These are nearly end-to-end tests, because they observe output from the commandline request,
    but do not actually execute the commandline on the terminal/process
    """
    @ddt.data(
        [{"expected_substring": "> /dev/null"}],  # go to /dev/null by default
        [{"cmd_args": ["--debug"], "expected_substring": "collectstatic"}]  # TODO: make this regex
    )
    @ddt.unpack
    def test_update_assets_task_collectstatic_log_arg(self, options):
        """
        Scoped test that only looks at what is passed to the collecstatic options
        """
        cmd_args = options.get("cmd_args", [""])
        expected_substring = options.get("expected_substring", None)
        call_task('pavelib.assets.update_assets', args=cmd_args)
        # Only check that the expected fragment appears somewhere in the
        # recorded commands, not the exact command line.
        self.assertTrue(
            self._is_substring_in_list(self.task_messages, expected_substring),
            msg=u"{substring} not found in messages".format(substring=expected_substring)
        )
    def _is_substring_in_list(self, messages_list, expected_substring):
        """
        Return true a given string is somewhere in a list of strings
        """
        for message in messages_list:
            if expected_substring in message:
                return True
        return False
| agpl-3.0 |
cjhak/b2share | invenio/b2share/modules/docs/views.py | 2 | 2431 | """SimpleStore Flask Blueprint"""
import codecs
import markdown, os

from flask import Blueprint, render_template
from flask import redirect
from flask.ext.breadcrumbs import register_breadcrumb

from invenio.base.i18n import _
# Absolute directory of this module; used to resolve the bundled markdown files.
CURRENT_DIR = os.path.dirname(os.path.realpath(__file__))

# Blueprint serving the static documentation pages under /docs.
blueprint = Blueprint('docs', __name__, url_prefix="/docs", static_url_path='/docs',
                      template_folder='templates', static_folder='static')
from invenio.modules.search.models import Collection
def _read_markdown_as_html(target):
    """Read a markdown file relative to this module and render it to HTML.

    :param target: path of the markdown file relative to this module's
        directory; must start with a slash (e.g. ``"/templates/about.md"``).
    :return: the rendered HTML as a string.
    """
    # Use the codecs module directly (the previous code reached it via
    # ``markdown.codecs``, i.e. through a third-party module's internal
    # import) and a ``with`` block so the file handle is always closed.
    with codecs.open(CURRENT_DIR + target, mode="r",
                     encoding="utf-8") as input_file:
        return markdown.markdown(input_file.read())
@blueprint.route('/b2share-about', methods=['GET'])
@register_breadcrumb(blueprint, 'breadcrumbs.about', _('About'))
def b2share_about():
    """Serve the About page, rendered from its markdown source."""
    page_html = _read_markdown_as_html("/templates/about.md")
    return render_template('docs.html',
                           markdown_render=page_html,
                           collection=Collection.query.get_or_404(1))
@blueprint.route('/b2share-tou', methods=['GET'])
@register_breadcrumb(blueprint, 'breadcrumbs.tou', _('Terms of Use'))
def b2share_tou():
    """Serve the Terms of Use page, rendered from its markdown source."""
    page_html = _read_markdown_as_html("/templates/tou.md")
    return render_template('docs.html',
                           markdown_render=page_html,
                           collection=Collection.query.get_or_404(1))
@blueprint.route('/b2share-faq', methods=['GET'])
@register_breadcrumb(blueprint, 'breadcrumbs.faq', _('FAQ'))
def b2share_faq():
    """Serve the FAQ page, rendered from its markdown source."""
    page_html = _read_markdown_as_html("/templates/faq.md")
    return render_template('docs.html',
                           markdown_render=page_html,
                           collection=Collection.query.get_or_404(1))
@blueprint.route('/b2share-guide', methods=['GET'])
@register_breadcrumb(blueprint, 'breadcrumbs.guide', _('Guide'))
def b2share_guide():
    """Serve the user guide page, rendered from its markdown source."""
    page_html = _read_markdown_as_html("/templates/user-docs.md")
    return render_template('docs.html',
                           markdown_render=page_html,
                           collection=Collection.query.get_or_404(1))
@blueprint.route('/b2share-rest-api', methods=['GET'])
@register_breadcrumb(blueprint, 'breadcrumbs.rest-api', _('REST-API'))
def b2share_rest_api():
    """Serve the REST API documentation page, rendered from markdown."""
    page_html = _read_markdown_as_html("/templates/rest-api.md")
    return render_template('docs.html',
                           markdown_render=page_html,
                           collection=Collection.query.get_or_404(1))
@blueprint.route('/', methods=['GET'])
def index():
    # /docs/ has no landing page of its own; send visitors to the site root.
    return redirect("/")
| gpl-2.0 |
NINAnor/QGIS | python/plugins/db_manager/db_plugins/postgis/info_model.py | 3 | 11691 | # -*- coding: utf-8 -*-
"""
/***************************************************************************
Name : DB Manager
Description : Database manager plugin for QGIS
Date : May 23, 2011
copyright : (C) 2011 by Giuseppe Sucameli
email : brush.tyler@gmail.com
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from PyQt4.QtGui import QApplication
from ..info_model import TableInfo, VectorTableInfo, RasterTableInfo
from ..html_elems import HtmlSection, HtmlParagraph, HtmlTable, HtmlTableHeader, HtmlTableCol
class PGTableInfo(TableInfo):
    """Builds the HTML info-report fragments DB Manager shows for a PostgreSQL table."""

    def __init__(self, table):
        # Wrapped table object providing metadata (row counts, privileges, fields, ...).
        self.table = table

    def generalInfo(self):
        """Return a list of HTML elements with general info: relation type,
        owner, comment, page/row counts, privileges and related warnings."""
        ret = []

        # if the estimation is less than 100 rows, try to count them - it shouldn't take long time
        if self.table.rowCount is None and self.table.estimatedRowCount < 100:
            # row count information is not displayed yet, so just block
            # table signals to avoid double refreshing (infoViewer->refreshRowCount->tableChanged->infoViewer)
            self.table.blockSignals(True)
            self.table.refreshRowCount()
            self.table.blockSignals(False)

        tbl = [
            (QApplication.translate("DBManagerPlugin", "Relation type:"),
             QApplication.translate("DBManagerPlugin", "View") if self.table._relationType == 'v' else
             QApplication.translate("DBManagerPlugin", "Materialized view") if self.table._relationType == 'm' else
             QApplication.translate("DBManagerPlugin", "Table")),
            (QApplication.translate("DBManagerPlugin", "Owner:"), self.table.owner)
        ]
        if self.table.comment:
            tbl.append((QApplication.translate("DBManagerPlugin", "Comment:"), self.table.comment))

        tbl.extend([
            (QApplication.translate("DBManagerPlugin", "Pages:"), self.table.pages),
            (QApplication.translate("DBManagerPlugin", "Rows (estimation):"), self.table.estimatedRowCount)
        ])

        # privileges
        # has the user access to this schema?
        schema_priv = self.table.database().connector.getSchemaPrivileges(
            self.table.schemaName()) if self.table.schema() else None
        if schema_priv is None:
            pass
        elif not schema_priv[1]:  # no usage privileges on the schema
            tbl.append((QApplication.translate("DBManagerPlugin", "Privileges:"),
                        QApplication.translate("DBManagerPlugin",
                                               "<warning> This user doesn't have usage privileges for this schema!")))
        else:
            table_priv = self.table.database().connector.getTablePrivileges((self.table.schemaName(), self.table.name))
            privileges = []
            if table_priv[0]:
                privileges.append("select")

                # NOTE(review): "is not None or ... >= 0" looks redundant -- under
                # Python 2, `None >= 0` is False, so the whole test reduces to
                # `rowCount is not None`; possibly one operand was meant to check
                # estimatedRowCount. Confirm against upstream before changing.
                if self.table.rowCount is not None or self.table.rowCount >= 0:
                    tbl.append((QApplication.translate("DBManagerPlugin", "Rows (counted):"),
                                self.table.rowCount if self.table.rowCount is not None else QApplication.translate(
                                    "DBManagerPlugin", 'Unknown (<a href="action:rows/count">find out</a>)')))

            if table_priv[1]:
                privileges.append("insert")
            if table_priv[2]:
                privileges.append("update")
            if table_priv[3]:
                privileges.append("delete")
            priv_string = u", ".join(privileges) if len(privileges) > 0 else QApplication.translate("DBManagerPlugin",
                                                                                                    '<warning> This user has no privileges!')
            tbl.append((QApplication.translate("DBManagerPlugin", "Privileges:"), priv_string))

        ret.append(HtmlTable(tbl))

        if schema_priv is not None and schema_priv[1]:
            # table_priv is only bound in the `else` branch above, but that is the
            # only path on which schema_priv[1] is truthy, so it is defined here.
            if table_priv[0] and not table_priv[1] and not table_priv[2] and not table_priv[3]:
                ret.append(HtmlParagraph(
                    QApplication.translate("DBManagerPlugin", "<warning> This user has read-only privileges.")))

        # warn when the planner's estimate diverges badly from the real count
        if not self.table.isView:
            if self.table.rowCount is not None:
                if abs(self.table.estimatedRowCount - self.table.rowCount) > 1 and \
                        (self.table.estimatedRowCount > 2 * self.table.rowCount or
                         self.table.rowCount > 2 * self.table.estimatedRowCount):
                    ret.append(HtmlParagraph(QApplication.translate("DBManagerPlugin",
                                                                    "<warning> There's a significant difference between estimated and real row count. "
                                                                    'Consider running <a href="action:vacuumanalyze/run">VACUUM ANALYZE</a>.')))

        # primary key defined?
        if not self.table.isView:
            # Python 2: filter() returns a list, so len() works here.
            if len(filter(lambda fld: fld.primaryKey, self.table.fields())) <= 0:
                ret.append(HtmlParagraph(
                    QApplication.translate("DBManagerPlugin", "<warning> No primary key defined for this table!")))

        return ret

    def getSpatialInfo(self):
        """Return HTML elements describing the PostGIS installation
        (library/script versions, geometry_columns availability).

        NOTE(review): this method reads ``self.db``, which this class never
        sets (``__init__`` stores ``self.table``). Presumably ``db`` is
        provided by a mixin/subclass -- confirm before relying on it.
        """
        ret = []

        info = self.db.connector.getSpatialInfo()
        if info is None:
            return

        tbl = [
            (QApplication.translate("DBManagerPlugin", "Library:"), info[0]),
            (QApplication.translate("DBManagerPlugin", "Scripts:"), info[3]),
            ("GEOS:", info[1]),
            ("Proj:", info[2])
        ]
        ret.append(HtmlTable(tbl))

        if info[1] is not None and info[1] != info[2]:
            ret.append(HtmlParagraph(QApplication.translate("DBManagerPlugin",
                                                            "<warning> Version of installed scripts doesn't match version of released scripts!\n"
                                                            "This is probably a result of incorrect PostGIS upgrade.")))

        if not self.db.connector.has_geometry_columns:
            ret.append(HtmlParagraph(
                QApplication.translate("DBManagerPlugin", "<warning> geometry_columns table doesn't exist!\n"
                                                          "This table is essential for many GIS applications for enumeration of tables.")))
        elif not self.db.connector.has_geometry_columns_access:
            ret.append(HtmlParagraph(QApplication.translate("DBManagerPlugin",
                                                            "<warning> This user doesn't have privileges to read contents of geometry_columns table!\n"
                                                            "This table is essential for many GIS applications for enumeration of tables.")))

        return ret

    def fieldsDetails(self):
        """Return an HtmlTable listing the table's columns; primary-key
        columns are rendered underlined."""
        tbl = []

        # define the table header
        header = (
            "#", QApplication.translate("DBManagerPlugin", "Name"), QApplication.translate("DBManagerPlugin", "Type"),
            QApplication.translate("DBManagerPlugin", "Length"), QApplication.translate("DBManagerPlugin", "Null"),
            QApplication.translate("DBManagerPlugin", "Default"))
        tbl.append(HtmlTableHeader(header))

        # add table contents
        for fld in self.table.fields():
            # -1 marks "no limit"; show an empty cell for it
            char_max_len = fld.charMaxLen if fld.charMaxLen is not None and fld.charMaxLen != -1 else ""
            is_null_txt = "N" if fld.notNull else "Y"

            # make primary key field underlined
            attrs = {"class": "underline"} if fld.primaryKey else None
            name = HtmlTableCol(fld.name, attrs)

            tbl.append((fld.num, name, fld.type2String(), char_max_len, is_null_txt, fld.default2String()))

        return HtmlTable(tbl, {"class": "header"})

    def triggersDetails(self):
        """Return HTML elements listing the table's triggers with per-trigger
        action links (delete / enable / disable), or None when there are none."""
        if self.table.triggers() is None or len(self.table.triggers()) <= 0:
            return None

        ret = []
        tbl = []

        # define the table header
        header = (
            QApplication.translate("DBManagerPlugin", "Name"), QApplication.translate("DBManagerPlugin", "Function"),
            QApplication.translate("DBManagerPlugin", "Type"), QApplication.translate("DBManagerPlugin", "Enabled"))
        tbl.append(HtmlTableHeader(header))

        # add table contents
        for trig in self.table.triggers():
            name = u'%(name)s (<a href="action:trigger/%(name)s/%(action)s">%(action)s</a>)' % {"name": trig.name,
                                                                                                "action": "delete"}
            # offer the opposite of the current state as the action link
            (enabled, action) = (QApplication.translate("DBManagerPlugin", "Yes"), "disable") if trig.enabled else (
                QApplication.translate("DBManagerPlugin", "No"), "enable")
            txt_enabled = u'%(enabled)s (<a href="action:trigger/%(name)s/%(action)s">%(action)s</a>)' % {
                "name": trig.name, "action": action, "enabled": enabled}

            tbl.append((name, trig.function, trig.type2String(), txt_enabled))

        ret.append(HtmlTable(tbl, {"class": "header"}))

        ret.append(HtmlParagraph(QApplication.translate("DBManagerPlugin",
                                                        '<a href="action:triggers/enable">Enable all triggers</a> / <a href="action:triggers/disable">Disable all triggers</a>')))

        return ret

    def rulesDetails(self):
        """Return an HtmlTable listing the table's rewrite rules with delete
        links, or None when there are none."""
        if self.table.rules() is None or len(self.table.rules()) <= 0:
            return None

        tbl = []

        # define the table header
        header = (
            QApplication.translate("DBManagerPlugin", "Name"), QApplication.translate("DBManagerPlugin", "Definition"))
        tbl.append(HtmlTableHeader(header))

        # add table contents
        for rule in self.table.rules():
            name = u'%(name)s (<a href="action:rule/%(name)s/%(action)s">%(action)s</a>)' % {"name": rule.name,
                                                                                             "action": "delete"}
            tbl.append((name, rule.definition))

        return HtmlTable(tbl, {"class": "header"})

    def getTableInfo(self):
        """Extend the base report with a PostgreSQL-specific 'Rules' section."""
        ret = TableInfo.getTableInfo(self)

        # rules
        rules_details = self.rulesDetails()
        if rules_details is None:
            pass
        else:
            ret.append(HtmlSection(QApplication.translate("DBManagerPlugin", 'Rules'), rules_details))

        return ret
class PGVectorTableInfo(PGTableInfo, VectorTableInfo):
    """Info report for a PostGIS vector table."""

    def __init__(self, table):
        # Both bases are initialized explicitly (not via super()) so the
        # call order is fixed: VectorTableInfo first, then PGTableInfo.
        VectorTableInfo.__init__(self, table)
        PGTableInfo.__init__(self, table)

    def spatialInfo(self):
        # Explicitly delegate to the vector implementation, bypassing MRO.
        return VectorTableInfo.spatialInfo(self)
class PGRasterTableInfo(PGTableInfo, RasterTableInfo):
    """Info report for a PostGIS raster table."""

    def __init__(self, table):
        # Both bases are initialized explicitly (not via super()) so the
        # call order is fixed: RasterTableInfo first, then PGTableInfo.
        RasterTableInfo.__init__(self, table)
        PGTableInfo.__init__(self, table)

    def spatialInfo(self):
        # Explicitly delegate to the raster implementation, bypassing MRO.
        return RasterTableInfo.spatialInfo(self)
| gpl-2.0 |
shajrawi/swift | utils/gyb_syntax_support/AttributeNodes.py | 4 | 8942 | from Child import Child
from Node import Node # noqa: I201
ATTRIBUTE_NODES = [
# token-list -> token? token-list?
Node('TokenList', kind='SyntaxCollection',
element='Token'),
# token-list -> token token-list?
Node('NonEmptyTokenList', kind='SyntaxCollection',
element='Token', omit_when_empty=True),
Node('CustomAttribute', kind='Syntax',
description='''
A custom `@` attribute.
''',
children=[
Child('AtSignToken', kind='AtSignToken',
description='The `@` sign.'),
Child('AttributeName', kind='Type', classification='Attribute',
description='The name of the attribute.'),
Child('LeftParen', kind='LeftParenToken',
is_optional=True),
Child('ArgumentList', kind='FunctionCallArgumentList',
collection_element_name='Argument', is_optional=True),
Child('RightParen', kind='RightParenToken',
is_optional=True),
]),
# attribute -> '@' identifier '('?
# ( identifier
# | string-literal
# | integer-literal
# | availability-spec-list
# | specialize-attr-spec-list
# | implements-attr-arguments
# | named-attribute-string-argument
# )? ')'?
Node('Attribute', kind='Syntax',
description='''
An `@` attribute.
''',
children=[
Child('AtSignToken', kind='AtSignToken',
description='The `@` sign.'),
Child('AttributeName', kind='Token', classification='Attribute',
description='The name of the attribute.'),
Child('LeftParen', kind='LeftParenToken', is_optional=True,
description='''
If the attribute takes arguments, the opening parenthesis.
'''),
Child('Argument', kind='Syntax', is_optional=True,
node_choices=[
Child('Identifier', kind='IdentifierToken'),
Child('String', kind='StringLiteralToken'),
Child('Integer', kind='IntegerLiteralToken'),
Child('Availability', kind='AvailabilitySpecList'),
Child('SpecializeArguments',
kind='SpecializeAttributeSpecList'),
Child('ObjCName', kind='ObjCSelector'),
Child('ImplementsArguments',
kind='ImplementsAttributeArguments'),
Child('NamedAttributeString',
kind='NamedAttributeStringArgument'),
], description='''
The arguments of the attribute. In case the attribute \
takes multiple arguments, they are gather in the \
appropriate takes first.
'''),
Child('RightParen', kind='RightParenToken', is_optional=True,
description='''
If the attribute takes arguments, the closing parenthesis.
'''),
# TokenList to gather remaining tokens of invalid attributes
# FIXME: Remove this recovery option entirely
Child('TokenList', kind='TokenList',
collection_element_name='Token', is_optional=True),
]),
# attribute-list -> attribute attribute-list?
Node('AttributeList', kind='SyntaxCollection',
element='Syntax', element_name='Attribute',
element_choices=[
'Attribute',
'CustomAttribute',
]),
# The argument of '@_specialize(...)'
# specialize-attr-spec-list -> labeled-specialize-entry
# specialize-spec-attr-list?
# | generic-where-clause
# specialize-spec-attr-list?
Node('SpecializeAttributeSpecList', kind='SyntaxCollection',
description='''
A collection of arguments for the `@_specialize` attribute
''',
element='Syntax', element_name='SpecializeAttribute',
element_choices=[
'LabeledSpecializeEntry',
'GenericWhereClause',
]),
# Representation of e.g. 'exported: true,'
# labeled-specialize-entry -> identifier ':' token ','?
Node('LabeledSpecializeEntry', kind='Syntax',
description='''
A labeled argument for the `@_specialize` attribute like \
`exported: true`
''',
traits=['WithTrailingComma'],
children=[
Child('Label', kind='IdentifierToken',
description='The label of the argument'),
Child('Colon', kind='ColonToken',
description='The colon separating the label and the value'),
Child('Value', kind='Token',
description='The value for this argument'),
Child('TrailingComma', kind='CommaToken',
is_optional=True, description='''
A trailing comma if this argument is followed by another one
'''),
]),
# The argument of '@_dynamic_replacement(for:)' or '@_private(sourceFile:)'
# named-attribute-string-arg -> 'name': string-literal
Node('NamedAttributeStringArgument', kind='Syntax',
description='''
The argument for the `@_dynamic_replacement` or `@_private` \
attribute of the form `for: "function()"` or `sourceFile: \
"Src.swift"`
''',
children=[
Child('NameTok', kind='Token',
description='The label of the argument'),
Child('Colon', kind='ColonToken',
description='The colon separating the label and the value'),
Child('StringOrDeclname', kind='Syntax', node_choices=[
Child('String', kind='StringLiteralToken'),
Child('Declname', kind='DeclName'),
]),
]),
Node('DeclName', kind='Syntax', children=[
Child('DeclBaseName', kind='Syntax', description='''
The base name of the protocol\'s requirement.
''',
node_choices=[
Child('Identifier', kind='IdentifierToken'),
Child('Operator', kind='PrefixOperatorToken'),
]),
Child('DeclNameArguments', kind='DeclNameArguments',
is_optional=True, description='''
The argument labels of the protocol\'s requirement if it \
is a function requirement.
'''),
]),
# The argument of '@_implements(...)'
# implements-attr-arguments -> simple-type-identifier ','
# (identifier | operator) decl-name-arguments
Node('ImplementsAttributeArguments', kind='Syntax',
description='''
The arguments for the `@_implements` attribute of the form \
`Type, methodName(arg1Label:arg2Label:)`
''',
children=[
Child('Type', kind='SimpleTypeIdentifier', description='''
The type for which the method with this attribute \
implements a requirement.
'''),
Child('Comma', kind='CommaToken',
description='''
The comma separating the type and method name
'''),
Child('DeclBaseName', kind='Syntax', description='''
The base name of the protocol\'s requirement.
''',
node_choices=[
Child('Identifier', kind='IdentifierToken'),
Child('Operator', kind='PrefixOperatorToken'),
]),
Child('DeclNameArguments', kind='DeclNameArguments',
is_optional=True, description='''
The argument labels of the protocol\'s requirement if it \
is a function requirement.
'''),
]),
# objc-selector-piece -> identifier? ':'?
Node('ObjCSelectorPiece', kind='Syntax',
description='''
A piece of an Objective-C selector. Either consisiting of just an \
identifier for a nullary selector, an identifier and a colon for a \
labeled argument or just a colon for an unlabeled argument
''',
children=[
Child('Name', kind='IdentifierToken', is_optional=True),
Child('Colon', kind='ColonToken', is_optional=True),
]),
# objc-selector -> objc-selector-piece objc-selector?
Node('ObjCSelector', kind='SyntaxCollection', element='ObjCSelectorPiece')
]
| apache-2.0 |
tdyas/pants | src/python/pants/engine/collection_test.py | 1 | 3689 | # Copyright 2019 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from pants.engine.collection import Collection, DeduplicatedCollection
class Examples(Collection[int]):
    """A new type to ensure that subclassing works properly.

    Kept distinct from Examples2 so the equality tests can verify that the
    concrete subclass participates in comparisons.
    """
class Examples2(Collection[int]):
    """A second int Collection subclass; instances of different subclasses
    with the same contents must compare unequal (see the equality tests)."""
    pass
def test_collection_contains() -> None:
    """`in` finds stored elements and rejects absent or differently-typed values."""
    coll = Collection([1, 2])
    for member in (1, 2):
        assert member in coll
    assert 200 not in coll
    assert "bad" not in coll  # type: ignore[comparison-overlap]
def test_collection_iteration() -> None:
    """Iterating yields elements in insertion order, via iter() or a loop."""
    coll = Collection([1, 2])
    assert list(iter(coll)) == [1, 2]
    assert [elem for elem in coll] == [1, 2]
def test_collection_length() -> None:
    """len() reports the number of stored elements."""
    empty = Collection([])
    pair = Collection([1, 2])
    assert len(empty) == 0
    assert len(pair) == 2
def test_collection_index() -> None:
    """Integer indexing (including negative) and slicing behave like a sequence."""
    coll = Collection([0, 1, 2])
    # Integer indexing, including from the end.
    assert coll[0] == 0
    assert coll[-1] == 2
    # Slices compare equal to the corresponding Collection.
    assert coll[:] == coll
    assert coll[1:] == Collection([1, 2])
    assert coll[:1] == Collection([0])
def test_collection_reversed() -> None:
    """reversed() yields the elements back-to-front."""
    backwards = list(reversed(Collection([1, 2, 3])))
    assert backwards == [3, 2, 1]
def test_collection_equality() -> None:
    """Equality compares contents, order, and the concrete subclass."""
    assert Collection([]) == Collection([])

    coll = Collection([1, 2, 3])
    assert coll == Collection([1, 2, 3])
    # Order and contents both matter.
    for other in (Collection([3, 2, 1]), Collection([]), Collection([1, 2])):
        assert coll != other

    # The concrete subclass matters too.
    sub = Examples([1, 2, 3])
    assert sub == Examples([1, 2, 3])
    assert sub != Examples2([1, 2, 3])
def test_collection_hash() -> None:
    """Hashing depends on the contents but not on the subclass."""
    assert hash(Collection([])) == hash(Collection([]))

    coll = Collection([1, 2, 3])
    assert hash(coll) == hash(Collection([1, 2, 3]))
    # Same contents in a subclass hash identically.
    assert hash(coll) == hash(Examples([1, 2, 3]))
def test_collection_bool() -> None:
    """Truthiness follows emptiness: non-empty is True, empty is False."""
    assert bool(Collection([0]))
    assert not bool(Collection([]))
def test_collection_repr() -> None:
    """repr() uses the runtime class name, including for subclasses."""
    cases = [
        (Collection([]), "Collection([])"),
        (Examples([]), "Examples([])"),
        (Collection([1, 2, 3]), "Collection([1, 2, 3])"),
        (Examples([1, 2, 3]), "Examples([1, 2, 3])"),
    ]
    for obj, expected in cases:
        assert repr(obj) == expected
def test_deduplicated_collection() -> None:
    """Smoke tests for DeduplicatedCollection.

    Most functionality (e.g. .union() and .intersection()) is covered
    exhaustively by the FrozenOrderedSet tests; this only sanity-checks
    the wrapper behavior.
    """

    class DedupedExamples(DeduplicatedCollection[int]):
        sort_input = True

    class DedupedExamples2(DeduplicatedCollection[int]):
        sort_input = False

    # Deduplication.
    assert len(DeduplicatedCollection([1, 1, 1, 2])) == 2

    # Equality, especially that object identity (subclass) matters.
    assert DedupedExamples([0]) == DedupedExamples([0])
    assert DedupedExamples([0]) != DedupedExamples2([0])

    # Hashing.
    base = DeduplicatedCollection([0, 1, 2])
    assert hash(base) == hash(DeduplicatedCollection([0, 1, 2]))
    assert hash(base) != hash(DeduplicatedCollection([0, 1]))

    # Membership.
    assert 2 in DeduplicatedCollection([0, 1, 2])
    assert 20 not in DeduplicatedCollection([0, 1, 2])

    # Input sorting is controlled by sort_input.
    assert list(DedupedExamples([2, 1])) == [1, 2]
    assert list(DedupedExamples2([2, 1])) == [2, 1]

    # Interaction of sorting with equality.
    assert DedupedExamples([2, 1]) == DedupedExamples([1, 2])
    assert DedupedExamples2([2, 1]) != DedupedExamples2([1, 2])

    # Truthiness.
    assert not bool(DeduplicatedCollection([]))
    assert bool(DeduplicatedCollection([1]))

    # Repr.
    assert repr(DedupedExamples()) == "DedupedExamples([])"
    assert repr(DedupedExamples([0, 1])) == "DedupedExamples([0, 1])"
| apache-2.0 |
fedosov/django-smuggler | smuggler/forms.py | 1 | 4049 | # Copyright (c) 2009 Guilherme Gondim and contributors
#
# This file is part of Django Smuggler.
#
# Django Smuggler is free software under terms of the GNU Lesser
# General Public License version 3 (LGPLv3) as published by the Free
# Software Foundation. See the file README for copying conditions.
import os.path
from django import forms
from django.contrib.admin.widgets import FilteredSelectMultiple
from django.core.serializers import get_serializer_formats
from django.utils.translation import ugettext_lazy as _
from smuggler import settings
class MultiFileInput(forms.FileInput):
def render(self, name, value, attrs=None):
attrs = attrs or {}
attrs['multiple'] = 'multiple'
return super(MultiFileInput, self).render(name, None, attrs=attrs)
def value_from_datadict(self, data, files, name):
if hasattr(files, 'getlist'):
return files.getlist(name)
if name in files:
return [files.get(name)]
return []
class MultiFixtureField(forms.FileField):
widget = MultiFileInput
def to_python(self, data):
files = []
for item in data:
files.append(super(MultiFixtureField, self).to_python(item))
return files
def validate(self, data):
super(MultiFixtureField, self).validate(data)
for upload in data:
file_format = os.path.splitext(upload.name)[1][1:].lower()
if file_format not in get_serializer_formats():
raise forms.ValidationError(
_('Invalid file extension: .%(extension)s.') % {
'extension': file_format
})
return data
class FixturePathField(forms.MultipleChoiceField, forms.FilePathField):
widget = FilteredSelectMultiple(_('files'), False)
def __init__(self, path, match=None, **kwargs):
match = match or (
'(?i)^.+(%s)$' % '|'.join(
['\.%s' % ext for ext in get_serializer_formats()])
) # Generate a regex string like: (?i)^.+(\.xml|\.json)$
super(FixturePathField, self).__init__(path, match=match, **kwargs)
if not self.required:
del self.choices[0] # Remove the empty option
class ImportForm(forms.Form):
uploads = MultiFixtureField(
label=_('Upload'),
required=False
)
def __init__(self, *args, **kwargs):
super(ImportForm, self).__init__(*args, **kwargs)
if settings.SMUGGLER_FIXTURE_DIR:
self.fields['store'] = forms.BooleanField(
label=_('Save in fixture directory'),
required=False,
help_text=(
_('Uploads will be saved to "%(fixture_dir)s".') % {
'fixture_dir': settings.SMUGGLER_FIXTURE_DIR
})
)
self.fields['picked_files'] = FixturePathField(
settings.SMUGGLER_FIXTURE_DIR,
label=_('From fixture directory'),
required=False,
help_text=(
_('Data files from "%(fixture_dir)s".') % {
'fixture_dir': settings.SMUGGLER_FIXTURE_DIR
})
)
else:
self.fields['uploads'].required = True
def clean(self):
super(ImportForm, self).clean()
if settings.SMUGGLER_FIXTURE_DIR:
uploads = self.cleaned_data['uploads']
picked_files = self.cleaned_data['picked_files']
if not uploads and not picked_files:
raise forms.ValidationError(
_('At least one fixture file needs to be'
' uploaded or selected.'))
return self.cleaned_data
class Media:
css = {
'all': ['admin/css/forms.css']
}
js = [
'admin/js/core.js',
'admin/js/jquery.min.js',
'admin/js/jquery.init.js',
'admin/js/SelectBox.js',
'admin/js/SelectFilter2.js'
]
| lgpl-3.0 |
orekyuu/intellij-community | python/helpers/pydev/tests/test_check_pydevconsole.py | 41 | 4796 | import threading
import unittest
import pydevconsole
from pydev_imports import xmlrpclib, SimpleXMLRPCServer
import sys
from pydev_localhost import get_localhost
from pydev_ipython_console_011 import get_pydev_frontend
try:
raw_input
raw_input_name = 'raw_input'
except NameError:
raw_input_name = 'input'
#=======================================================================================================================
# Test
#=======================================================================================================================
class Test(unittest.TestCase):
def startClientThread(self, client_port):
class ClientThread(threading.Thread):
def __init__(self, client_port):
threading.Thread.__init__(self)
self.client_port = client_port
def run(self):
class HandleRequestInput:
def RequestInput(self):
client_thread.requested_input = True
return 'RequestInput: OK'
def NotifyFinished(self, *args, **kwargs):
client_thread.notified_finished += 1
return 1
handle_request_input = HandleRequestInput()
import pydev_localhost
self.client_server = client_server = SimpleXMLRPCServer((pydev_localhost.get_localhost(), self.client_port), logRequests=False)
client_server.register_function(handle_request_input.RequestInput)
client_server.register_function(handle_request_input.NotifyFinished)
client_server.serve_forever()
def shutdown(self):
return
self.client_server.shutdown()
client_thread = ClientThread(client_port)
client_thread.requested_input = False
client_thread.notified_finished = 0
client_thread.setDaemon(True)
client_thread.start()
return client_thread
def getFreeAddresses(self):
import socket
s = socket.socket()
s.bind(('', 0))
port0 = s.getsockname()[1]
s1 = socket.socket()
s1.bind(('', 0))
port1 = s1.getsockname()[1]
s.close()
s1.close()
return port0, port1
def testServer(self):
# Just making sure that the singleton is created in this thread.
get_pydev_frontend(get_localhost(), 0)
client_port, server_port = self.getFreeAddresses()
class ServerThread(threading.Thread):
def __init__(self, client_port, server_port):
threading.Thread.__init__(self)
self.client_port = client_port
self.server_port = server_port
def run(self):
import pydev_localhost
print('Starting server with:', pydev_localhost.get_localhost(), self.server_port, self.client_port)
pydevconsole.StartServer(pydev_localhost.get_localhost(), self.server_port, self.client_port)
server_thread = ServerThread(client_port, server_port)
server_thread.setDaemon(True)
server_thread.start()
client_thread = self.startClientThread(client_port) #@UnusedVariable
try:
import time
time.sleep(.3) #let's give it some time to start the threads
import pydev_localhost
server = xmlrpclib.Server('http://%s:%s' % (pydev_localhost.get_localhost(), server_port))
server.execLine("import sys; print('Running with: %s %s' % (sys.executable or sys.platform, sys.version))")
server.execLine('class Foo:')
server.execLine(' pass')
server.execLine('')
server.execLine('foo = Foo()')
server.execLine('a = %s()' % raw_input_name)
initial = time.time()
while not client_thread.requested_input:
if time.time() - initial > 2:
raise AssertionError('Did not get the return asked before the timeout.')
time.sleep(.1)
frame_xml = server.getFrame()
self.assert_('RequestInput' in frame_xml, 'Did not fid RequestInput in:\n%s' % (frame_xml,))
finally:
client_thread.shutdown()
#=======================================================================================================================
# main
#=======================================================================================================================
if __name__ == '__main__':
unittest.main()
| apache-2.0 |
wbc2010/django1.2.5 | tests/regressiontests/m2m_regress/tests.py | 39 | 3145 | from django.core.exceptions import FieldError
from django.test import TestCase
from models import (SelfRefer, Tag, TagCollection, Entry, SelfReferChild,
SelfReferChildSibling, Worksheet)
class M2MRegressionTests(TestCase):
def assertRaisesErrorWithMessage(self, error, message, callable, *args, **kwargs):
self.assertRaises(error, callable, *args, **kwargs)
try:
callable(*args, **kwargs)
except error, e:
self.assertEqual(message, str(e))
def test_multiple_m2m(self):
# Multiple m2m references to model must be distinguished when
# accessing the relations through an instance attribute.
s1 = SelfRefer.objects.create(name='s1')
s2 = SelfRefer.objects.create(name='s2')
s3 = SelfRefer.objects.create(name='s3')
s1.references.add(s2)
s1.related.add(s3)
e1 = Entry.objects.create(name='e1')
t1 = Tag.objects.create(name='t1')
t2 = Tag.objects.create(name='t2')
e1.topics.add(t1)
e1.related.add(t2)
self.assertQuerysetEqual(s1.references.all(), ["<SelfRefer: s2>"])
self.assertQuerysetEqual(s1.related.all(), ["<SelfRefer: s3>"])
self.assertQuerysetEqual(e1.topics.all(), ["<Tag: t1>"])
self.assertQuerysetEqual(e1.related.all(), ["<Tag: t2>"])
def test_internal_related_name_not_in_error_msg(self):
# The secret internal related names for self-referential many-to-many
# fields shouldn't appear in the list when an error is made.
self.assertRaisesErrorWithMessage(FieldError,
"Cannot resolve keyword 'porcupine' into field. Choices are: id, name, references, related, selfreferchild, selfreferchildsibling",
lambda: SelfRefer.objects.filter(porcupine='fred')
)
def test_m2m_inheritance_symmetry(self):
# Test to ensure that the relationship between two inherited models
# with a self-referential m2m field maintains symmetry
sr_child = SelfReferChild(name="Hanna")
sr_child.save()
sr_sibling = SelfReferChildSibling(name="Beth")
sr_sibling.save()
sr_child.related.add(sr_sibling)
self.assertQuerysetEqual(sr_child.related.all(), ["<SelfRefer: Beth>"])
self.assertQuerysetEqual(sr_sibling.related.all(), ["<SelfRefer: Hanna>"])
def test_m2m_pk_field_type(self):
# Regression for #11311 - The primary key for models in a m2m relation
# doesn't have to be an AutoField
w = Worksheet(id='abc')
w.save()
w.delete()
def test_add_m2m_with_base_class(self):
# Regression for #11956 -- You can add an object to a m2m with the
# base class without causing integrity errors
t1 = Tag.objects.create(name='t1')
t2 = Tag.objects.create(name='t2')
c1 = TagCollection.objects.create(name='c1')
c1.tags = [t1,t2]
c1 = TagCollection.objects.get(name='c1')
self.assertQuerysetEqual(c1.tags.all(), ["<Tag: t1>", "<Tag: t2>"])
self.assertQuerysetEqual(t1.tag_collections.all(), ["<TagCollection: c1>"])
| bsd-3-clause |
gautam1858/tensorflow | tensorflow/contrib/learn/python/learn/estimators/estimator.py | 14 | 62926 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Base Estimator class (deprecated).
This module and all its submodules are deprecated. See
[contrib/learn/README.md](https://www.tensorflow.org/code/tensorflow/contrib/learn/README.md)
for migration instructions.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import collections
import copy
import os
import tempfile
import numpy as np
import six
from google.protobuf import message
from tensorflow.contrib import layers
from tensorflow.contrib.framework import deprecated
from tensorflow.contrib.framework import deprecated_args
from tensorflow.contrib.framework import list_variables
from tensorflow.contrib.framework import load_variable
from tensorflow.contrib.learn.python.learn import evaluable
from tensorflow.contrib.learn.python.learn import metric_spec
from tensorflow.contrib.learn.python.learn import monitors as monitor_lib
from tensorflow.contrib.learn.python.learn import trainable
from tensorflow.contrib.learn.python.learn.estimators import _sklearn as sklearn
from tensorflow.contrib.learn.python.learn.estimators import constants
from tensorflow.contrib.learn.python.learn.estimators import metric_key
from tensorflow.contrib.learn.python.learn.estimators import model_fn as model_fn_lib
from tensorflow.contrib.learn.python.learn.estimators import run_config
from tensorflow.contrib.learn.python.learn.estimators import tensor_signature
from tensorflow.contrib.learn.python.learn.estimators._sklearn import NotFittedError
from tensorflow.contrib.learn.python.learn.learn_io import data_feeder
from tensorflow.contrib.learn.python.learn.utils import export
from tensorflow.contrib.learn.python.learn.utils import saved_model_export_utils
from tensorflow.contrib.meta_graph_transform import meta_graph_transform
from tensorflow.contrib.training.python.training import evaluation
from tensorflow.core.framework import summary_pb2
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.client import session as tf_session
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import lookup_ops
from tensorflow.python.ops import metrics as metrics_lib
from tensorflow.python.ops import resources
from tensorflow.python.ops import variables
from tensorflow.python.platform import gfile
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.saved_model import builder as saved_model_builder
from tensorflow.python.saved_model import tag_constants
from tensorflow.python.summary import summary as core_summary
from tensorflow.python.training import basic_session_run_hooks
from tensorflow.python.training import checkpoint_management
from tensorflow.python.training import device_setter
from tensorflow.python.training import monitored_session
from tensorflow.python.training import saver
from tensorflow.python.training import training_util
from tensorflow.python.util import compat
from tensorflow.python.util import tf_decorator
from tensorflow.python.util import tf_inspect
# Deprecation date and message for predict()'s `as_iterable` argument,
# consumed by the `deprecated_args` decorators on the methods below.
AS_ITERABLE_DATE = '2016-09-15'
AS_ITERABLE_INSTRUCTIONS = (
    'The default behavior of predict() is changing. The default value for\n'
    'as_iterable will change to True, and then the flag will be removed\n'
    'altogether. The behavior of this flag is described below.')
# Deprecation date and message for the scikit-learn style x/y/batch_size
# arguments, which moved to the SKCompat wrapper class.
SCIKIT_DECOUPLE_DATE = '2016-12-01'
SCIKIT_DECOUPLE_INSTRUCTIONS = (
    'Estimator is decoupled from Scikit Learn interface by moving into\n'
    'separate class SKCompat. Arguments x, y and batch_size are only\n'
    'available in the SKCompat class, Estimator will only accept input_fn.\n'
    'Example conversion:\n'
    ' est = Estimator(...) -> est = SKCompat(Estimator(...))')
def _verify_input_args(x, y, input_fn, feed_fn, batch_size):
  """Checks that the x/y/feed_fn and input_fn argument styles are not mixed."""
  if input_fn is not None:
    # input_fn style: none of the scikit-learn style arguments may be set.
    if x is not None or y is not None:
      raise ValueError('Can not provide both input_fn and x or y.')
    if batch_size is not None:
      raise ValueError('Can not provide both input_fn and batch_size.')
    return
  # scikit-learn style: x is mandatory and the inputs must not be tensors.
  if x is None:
    raise ValueError('Either x or input_fn must be provided.')
  y_is_tensor = y is not None and tensor_util.is_tensor(y)
  if tensor_util.is_tensor(x) or y_is_tensor:
    raise ValueError('Inputs cannot be tensors. Please provide input_fn.')
  if feed_fn is not None:
    raise ValueError('Can not provide both feed_fn and x or y.')
def _get_input_fn(x, y, input_fn, feed_fn, batch_size, shuffle=False, epochs=1):
  """Builds input and feed functions from raw data or a given input_fn.

  Args:
    x: Numpy, Pandas or Dask matrix or iterable of features.
    y: Numpy, Pandas or Dask matrix or iterable of labels.
    input_fn: Pre-defined input function for training data.
    feed_fn: Pre-defined data feeder function.
    batch_size: Size to split data into parts. Must be >= 1.
    shuffle: Whether to shuffle the inputs.
    epochs: Number of epochs to run.

  Returns:
    An `(input_builder, feed_dict_fn)` pair derived from the training data.

  Raises:
    ValueError: Only one of `(x & y)` or `input_fn` must be provided.
  """
  _verify_input_args(x, y, input_fn, feed_fn, batch_size)
  if input_fn is not None:
    # Caller supplied an input_fn directly: pass it (and feed_fn) through.
    return input_fn, feed_fn

  feeder = data_feeder.setup_train_data_feeder(
      x,
      y,
      n_classes=None,
      batch_size=batch_size,
      shuffle=shuffle,
      epochs=epochs)
  return feeder.input_builder, feeder.get_feed_dict_fn()
@deprecated(None, 'Please specify feature columns explicitly.')
def infer_real_valued_columns_from_input_fn(input_fn):
  """Creates `FeatureColumn` objects for inputs defined by `input_fn`.

  All inputs are interpreted as dense, fixed-length float values. A throwaway
  local graph is built just to call `input_fn` and inspect the tensors it
  produces; the graph is then discarded.

  Args:
    input_fn: Input function returning a tuple of:
        features - Dictionary of string feature name to `Tensor` or `Tensor`.
        labels - `Tensor` of label values.

  Returns:
    List of `FeatureColumn` objects.
  """
  scratch_graph = ops.Graph()
  with scratch_graph.as_default():
    features, _ = input_fn()
    return layers.infer_real_valued_columns(features)
@deprecated(None, 'Please specify feature columns explicitly.')
def infer_real_valued_columns_from_input(x):
  """Creates `FeatureColumn` objects for inputs defined by input `x`.

  All inputs are interpreted as dense, fixed-length float values.

  Args:
    x: Real-valued matrix of shape [n_samples, n_features...]. Can be
      iterator that returns arrays of features.

  Returns:
    List of `FeatureColumn` objects.
  """
  # Wrap the raw data in an input_fn, then reuse the input_fn-based inference.
  wrapped_input_fn, _ = _get_input_fn(
      x=x, y=None, input_fn=None, feed_fn=None, batch_size=None)
  return infer_real_valued_columns_from_input_fn(wrapped_input_fn)
def _model_fn_args(fn):
  """Get argument names for function-like object.

  Args:
    fn: Function, or function-like object (e.g., result of `functools.partial`).

  Returns:
    `tuple` of string argument names.

  Raises:
    ValueError: if partial function has positionally bound arguments
  """
  _, unwrapped = tf_decorator.unwrap(fn)
  is_partial_like = (hasattr(unwrapped, 'func') and
                     hasattr(unwrapped, 'keywords') and
                     hasattr(unwrapped, 'args'))
  if not is_partial_like:
    # Plain function: every declared argument counts.
    return tuple(tf_inspect.getargspec(unwrapped).args)
  # functools.partial and similar objects: drop arguments already bound
  # positionally or by keyword from the underlying callable's signature.
  bound_keywords = set(unwrapped.keywords.keys())
  remaining = tf_inspect.getargspec(unwrapped.func).args[len(unwrapped.args):]
  return tuple(arg for arg in remaining if arg not in bound_keywords)
def _get_replica_device_setter(config):
  """Creates a replica device setter if required.

  Args:
    config: A RunConfig instance.

  Returns:
    A replica device setter, or None when no parameter servers are configured.
  """
  if config.num_ps_replicas <= 0:
    # Without parameter servers no placement policy is needed.
    return None

  # Op types that should be pinned to the parameter servers.
  ps_ops = [
      'Variable', 'VariableV2', 'AutoReloadVariable', 'MutableHashTable',
      'MutableHashTableV2', 'MutableHashTableOfTensors',
      'MutableHashTableOfTensorsV2', 'MutableDenseHashTable',
      'MutableDenseHashTableV2', 'VarHandleOp'
  ]

  if config.task_type:
    worker_device = '/job:%s/task:%d' % (config.task_type, config.task_id)
  else:
    worker_device = '/job:worker'

  return device_setter.replica_device_setter(
      ps_tasks=config.num_ps_replicas,
      worker_device=worker_device,
      merge_devices=True,
      ps_ops=ps_ops,
      cluster=config.cluster_spec)
def _make_metrics_ops(metrics, features, labels, predictions):
  """Add metrics based on `features`, `labels`, and `predictions`.

  `metrics` contains a specification for how to run metrics. It is a dict
  mapping friendly names to either `MetricSpec` objects, or directly to a metric
  function (assuming that `predictions` and `labels` are single tensors), or to
  `(pred_name, metric)` `tuple`, which passes `predictions[pred_name]` and
  `labels` to `metric` (assuming `labels` is a single tensor).

  Users are encouraged to use `MetricSpec` objects, which are more flexible and
  cleaner. They also lead to clearer errors.

  Args:
    metrics: A dict mapping names to metrics specification, for example
      `MetricSpec` objects.
    features: A dict of tensors returned from an input_fn as features/inputs.
    labels: A single tensor or a dict of tensors returned from an input_fn as
      labels.
    predictions: A single tensor or a dict of tensors output from a model as
      predictions.

  Returns:
    A dict mapping the friendly given in `metrics` to the result of calling the
    given metric function.

  Raises:
    ValueError: If metrics specifications do not work with the type of
      `features`, `labels`, or `predictions` provided. Mostly, a dict is given
      but no pred_name specified.
  """
  metrics = metrics or {}

  # If labels is a dict with a single key, unpack into a single tensor.
  labels_tensor_or_dict = labels
  if isinstance(labels, dict) and len(labels) == 1:
    labels_tensor_or_dict = labels[list(labels.keys())[0]]

  result = {}
  # Iterate in lexicographic order, so the graph is identical among runs.
  for name, metric in sorted(six.iteritems(metrics)):
    if isinstance(metric, metric_spec.MetricSpec):
      # Preferred path: MetricSpec knows how to wire itself to the tensors.
      result[name] = metric.create_metric_ops(features, labels, predictions)
      continue

    # TODO(b/31229024): Remove the rest of this loop
    logging.warning('Please specify metrics using MetricSpec. Using bare '
                    'functions or (key, fn) tuples is deprecated and support '
                    'for it will be removed on Oct 1, 2016.')

    if isinstance(name, tuple):
      # Multi-head metrics: `name` is (friendly_name, prediction_key).
      if len(name) != 2:
        raise ValueError('Invalid metric for {}. It returned a tuple with '
                         'len {}, expected 2.'.format(name, len(name)))
      if not isinstance(predictions, dict):
        raise ValueError('Metrics passed provide (name, prediction), '
                         'but predictions are not dict. '
                         'Metrics: %s, Predictions: %s.' % (metrics,
                                                            predictions))
      # Here are two options: labels are single Tensor or a dict.
      if isinstance(labels, dict) and name[1] in labels:
        # If labels are dict and the prediction name is in it, apply metric.
        result[name[0]] = metric(predictions[name[1]], labels[name[1]])
      else:
        # Otherwise pass the labels to the metric.
        result[name[0]] = metric(predictions[name[1]], labels_tensor_or_dict)
    else:
      # Single head metrics: the metric receives the sole prediction tensor.
      if isinstance(predictions, dict):
        raise ValueError('Metrics passed provide only name, no prediction, '
                         'but predictions are dict. '
                         'Metrics: %s, Labels: %s.' % (metrics,
                                                       labels_tensor_or_dict))
      result[name] = metric(predictions, labels_tensor_or_dict)
  return result
def _dict_to_str(dictionary):
"""Get a `str` representation of a `dict`.
Args:
dictionary: The `dict` to be represented as `str`.
Returns:
A `str` representing the `dictionary`.
"""
results = []
for k, v in sorted(dictionary.items()):
if isinstance(v, float) or isinstance(v, np.float32) or isinstance(
v, int) or isinstance(v, np.int64) or isinstance(v, np.int32):
results.append('%s = %s' % (k, v))
else:
results.append('Type of %s = %s' % (k, type(v)))
return ', '.join(results)
def _write_dict_to_summary(output_dir, dictionary, current_global_step):
  """Writes a `dict` into summary file in given output directory.

  Supported value types: floats/ints (written as simple values), strings
  holding serialized `Summary` protos (merged in and re-tagged with the dict
  key), and np.ndarray (written as a tensor value). `None` values and the
  'global_step' key are skipped; unsupported types are skipped with a warning.

  Args:
    output_dir: `str`, directory to write the summary file in.
    dictionary: the `dict` to be written to summary file.
    current_global_step: `int`, the current global step.
  """
  logging.info('Saving dict for global step %d: %s', current_global_step,
               _dict_to_str(dictionary))
  summary_writer = core_summary.FileWriterCache.get(output_dir)
  summary_proto = summary_pb2.Summary()
  for key in dictionary:
    if dictionary[key] is None:
      continue
    if key == 'global_step':
      # The global step is recorded as the summary's step, not as a value.
      continue
    if (isinstance(dictionary[key], np.float32) or
        isinstance(dictionary[key], float)):
      summary_proto.value.add(tag=key, simple_value=float(dictionary[key]))
    elif (isinstance(dictionary[key], np.int64) or
          isinstance(dictionary[key], np.int32) or
          isinstance(dictionary[key], int)):
      summary_proto.value.add(tag=key, simple_value=int(dictionary[key]))
    elif isinstance(dictionary[key], six.string_types):
      # Strings are expected to be serialized Summary protos; merge their
      # values in, re-tagging each value with this dict key.
      try:
        summ = summary_pb2.Summary.FromString(dictionary[key])
        for i, _ in enumerate(summ.value):
          summ.value[i].tag = key
        summary_proto.value.extend(summ.value)
      except message.DecodeError:
        logging.warn('Skipping summary for %s, cannot parse string to Summary.',
                     key)
        continue
    elif isinstance(dictionary[key], np.ndarray):
      value = summary_proto.value.add()
      value.tag = key
      value.node_name = key
      tensor_proto = tensor_util.make_tensor_proto(dictionary[key])
      value.tensor.CopyFrom(tensor_proto)
      logging.info(
          'Summary for np.ndarray is not visible in Tensorboard by default. '
          'Consider using a Tensorboard plugin for visualization (see '
          'https://github.com/tensorflow/tensorboard-plugin-example/blob/master/README.md'
          ' for more information).')
    else:
      logging.warn(
          'Skipping summary for %s, must be a float, np.float32, np.int64, '
          'np.int32 or int or np.ndarray or a serialized string of Summary.',
          key)
  summary_writer.add_summary(summary_proto, current_global_step)
  summary_writer.flush()
# Specification of a SavedModel graph rewrite: the `tags` to attach to the
# rewritten MetaGraphDef and the meta-graph `transforms` to apply to it.
GraphRewriteSpec = collections.namedtuple('GraphRewriteSpec',
                                          ['tags', 'transforms'])
class BaseEstimator(sklearn.BaseEstimator, evaluable.Evaluable,
trainable.Trainable):
"""Abstract BaseEstimator class to train and evaluate TensorFlow models.
THIS CLASS IS DEPRECATED. See
[contrib/learn/README.md](https://www.tensorflow.org/code/tensorflow/contrib/learn/README.md)
for general migration instructions.
Users should not instantiate or subclass this class. Instead, use an
`Estimator`.
"""
# Note that for Google users, this is overridden with
# learn_runner.EstimatorConfig.
# TODO(wicke): Remove this once launcher takes over config functionality
_Config = run_config.RunConfig # pylint: disable=invalid-name
@deprecated(None, 'Please replace uses of any Estimator from tf.contrib.learn'
            ' with an Estimator from tf.estimator.*')
def __init__(self, model_dir=None, config=None):
  """Initializes a BaseEstimator instance.

  Args:
    model_dir: Directory to save model parameters, graph and etc. This can
      also be used to load checkpoints from the directory into a estimator to
      continue training a previously saved model. If `None`, the model_dir in
      `config` will be used if set. If both are set, they must be same.
    config: A RunConfig instance.

  Raises:
    ValueError: If `model_dir` and `config.model_dir` are both set but differ.
  """
  # Create a run configuration.
  if config is None:
    self._config = BaseEstimator._Config()
    logging.info('Using default config.')
  else:
    self._config = config

  if self._config.session_config is None:
    # Default to soft placement so ops fall back to available devices.
    self._session_config = config_pb2.ConfigProto(allow_soft_placement=True)
  else:
    self._session_config = self._config.session_config

  # Model directory: reconcile the constructor argument with the RunConfig.
  if (model_dir is not None) and (self._config.model_dir is not None):
    if model_dir != self._config.model_dir:
      # TODO(b/9965722): remove this suppression after it is no longer
      # necessary.
      # pylint: disable=g-doc-exception
      raise ValueError(
          'model_dir are set both in constructor and RunConfig, but with '
          "different values. In constructor: '{}', in RunConfig: "
          "'{}' ".format(model_dir, self._config.model_dir))
      # pylint: enable=g-doc-exception

  self._model_dir = model_dir or self._config.model_dir
  if self._model_dir is None:
    # Neither was provided: fall back to a temporary directory.
    self._model_dir = tempfile.mkdtemp()
    logging.warning('Using temporary folder as model directory: %s',
                    self._model_dir)
  if self._config.model_dir is None:
    # Propagate the resolved directory back into the config.
    self._config = self._config.replace(model_dir=self._model_dir)
  logging.info('Using config: %s', str(vars(self._config)))

  # Set device function depending if there are replicas or not.
  self._device_fn = _get_replica_device_setter(self._config)

  # Features and labels TensorSignature objects.
  # TODO(wicke): Rename these to something more descriptive
  self._features_info = None
  self._labels_info = None

  self._graph = None
@property
def config(self):
  """Returns a deep copy of this estimator's `RunConfig`."""
  # TODO(wicke): make RunConfig immutable, and then return it without a copy.
  # Copy defensively so callers cannot mutate the estimator's configuration.
  current_config = self._config
  return copy.deepcopy(current_config)
@property
def model_fn(self):
  """Returns the model_fn which is bound to self.params.

  Returns:
    The model_fn with the following signature:
      `def model_fn(features, labels, mode, metrics)`
  """
  estimator = self

  def public_model_fn(features, labels, mode, config):
    # Delegate to the estimator's internal model_fn invocation.
    return estimator._call_model_fn(features, labels, mode, config=config)

  return public_model_fn
@deprecated_args(SCIKIT_DECOUPLE_DATE, SCIKIT_DECOUPLE_INSTRUCTIONS,
                 ('x', None), ('y', None), ('batch_size', None))
def fit(self,
        x=None,
        y=None,
        input_fn=None,
        steps=None,
        batch_size=None,
        monitors=None,
        max_steps=None):
  # pylint: disable=g-doc-args,g-doc-return-or-yield
  """See `Trainable`.

  Raises:
    ValueError: If `x` or `y` are not `None` while `input_fn` is not `None`.
    ValueError: If both `steps` and `max_steps` are not `None`.
  """
  if (steps is not None) and (max_steps is not None):
    raise ValueError('Can not provide both steps and max_steps.')
  _verify_input_args(x, y, input_fn, None, batch_size)
  if x is not None:
    # scikit-learn style arguments: delegate training to the SKCompat wrapper.
    SKCompat(self).fit(x, y, batch_size, steps, max_steps, monitors)
    return self

  if max_steps is not None:
    try:
      # If an existing checkpoint already reached max_steps, skip training.
      start_step = load_variable(self._model_dir, ops.GraphKeys.GLOBAL_STEP)
      if max_steps <= start_step:
        logging.info('Skipping training since max_steps has already saved.')
        return self
    except:  # pylint: disable=bare-except
      # No readable checkpoint yet: proceed with training from scratch.
      pass

  hooks = monitor_lib.replace_monitors_with_hooks(monitors, self)
  if steps is not None or max_steps is not None:
    hooks.append(basic_session_run_hooks.StopAtStepHook(steps, max_steps))

  loss = self._train_model(input_fn=input_fn, hooks=hooks)
  logging.info('Loss for final step: %s.', loss)
  return self
@deprecated_args(SCIKIT_DECOUPLE_DATE, SCIKIT_DECOUPLE_INSTRUCTIONS,
                 ('x', None), ('y', None), ('batch_size', None))
def partial_fit(self,
                x=None,
                y=None,
                input_fn=None,
                steps=1,
                batch_size=None,
                monitors=None):
  """Incremental fit on a batch of samples.

  Designed to be called repeatedly on successive (or repeated) chunks of a
  dataset, enabling iterative or out-of-core/online training. This is useful
  when the whole dataset does not fit in memory, or when a slow-converging
  model should be trained in sub-parts.

  Args:
    x: Matrix of shape [n_samples, n_features...], or an iterator yielding
      feature arrays. The training input samples. Must be `None` when
      `input_fn` is set.
    y: Vector or matrix [n_samples] or [n_samples, n_outputs], or an iterator
      yielding label arrays (class labels in classification, real numbers in
      regression). Must be `None` when `input_fn` is set.
    input_fn: Input function. If set, `x`, `y`, and `batch_size` must be
      `None`.
    steps: Number of steps for which to train model. If `None`, train forever.
    batch_size: minibatch size to use on the input, defaults to first
      dimension of `x`. Must be `None` if `input_fn` is provided.
    monitors: List of `BaseMonitor` subclass instances. Used for callbacks
      inside the training loop.

  Returns:
    `self`, for chaining.

  Raises:
    ValueError: If at least one of `x` and `y` is provided, and `input_fn` is
      provided.
  """
  logging.warning('The current implementation of partial_fit is not optimized'
                  ' for use in a loop. Consider using fit() instead.')
  # partial_fit is simply fit() with a default of a single step.
  return self.fit(
      x=x,
      y=y,
      input_fn=input_fn,
      steps=steps,
      batch_size=batch_size,
      monitors=monitors)
@deprecated_args(SCIKIT_DECOUPLE_DATE, SCIKIT_DECOUPLE_INSTRUCTIONS,
                 ('x', None), ('y', None), ('batch_size', None))
def evaluate(self,
             x=None,
             y=None,
             input_fn=None,
             feed_fn=None,
             batch_size=None,
             steps=None,
             metrics=None,
             name=None,
             checkpoint_path=None,
             hooks=None,
             log_progress=True):
  # pylint: disable=g-doc-args,g-doc-return-or-yield
  """See `Evaluable`.

  Raises:
    ValueError: If at least one of `x` or `y` is provided, and at least one of
      `input_fn` or `feed_fn` is provided.
      Or if `metrics` is not `None` or `dict`.
  """
  _verify_input_args(x, y, input_fn, feed_fn, batch_size)
  if x is not None:
    # scikit-learn style arguments: delegate scoring to the SKCompat wrapper.
    return SKCompat(self).score(x, y, batch_size, steps, metrics, name)
  if metrics is not None and not isinstance(metrics, dict):
    raise ValueError('Metrics argument should be None or dict. '
                     'Got %s.' % metrics)

  eval_results, global_step = self._evaluate_model(
      input_fn=input_fn,
      feed_fn=feed_fn,
      steps=steps,
      metrics=metrics,
      name=name,
      checkpoint_path=checkpoint_path,
      hooks=hooks,
      log_progress=log_progress)

  if eval_results is not None:
    # Record the step the metrics were computed at alongside the metrics.
    eval_results['global_step'] = global_step
  return eval_results
@deprecated_args(SCIKIT_DECOUPLE_DATE, SCIKIT_DECOUPLE_INSTRUCTIONS,
                 ('x', None), ('batch_size', None), ('as_iterable', True))
def predict(self,
            x=None,
            input_fn=None,
            batch_size=None,
            outputs=None,
            as_iterable=True,
            iterate_batches=False):
  """Returns predictions for given features.

  Args:
    x: Matrix of shape [n_samples, n_features...], or an iterator yielding
      feature arrays. If set, `input_fn` must be `None`.
    input_fn: Input function. If set, `x` and 'batch_size' must be `None`.
    batch_size: Override default batch size. If set, 'input_fn' must be
      'None'.
    outputs: list of `str`, name of the output to predict.
      If `None`, returns all.
    as_iterable: If True, return an iterable which keeps yielding predictions
      for each example until inputs are exhausted. Note: The inputs must
      terminate if you want the iterable to terminate (e.g. be sure to pass
      num_epochs=1 if you are using something like read_batch_features).
    iterate_batches: If True, yield the whole batch at once instead of
      decomposing the batch into individual samples. Only relevant when
      as_iterable is True.

  Returns:
    A numpy array of predicted classes or regression values if the
    constructor's `model_fn` returns a `Tensor` for `predictions` or a `dict`
    of numpy arrays if `model_fn` returns a `dict`. Returns an iterable of
    predictions if as_iterable is True.

  Raises:
    ValueError: If x and input_fn are both provided or both `None`.
  """
  _verify_input_args(x, None, input_fn, None, batch_size)
  if x is not None and not as_iterable:
    # Materialized scikit-learn style prediction goes through SKCompat.
    return SKCompat(self).predict(x, batch_size)

  resolved_input_fn, resolved_feed_fn = _get_input_fn(
      x, None, input_fn, None, batch_size)
  return self._infer_model(
      input_fn=resolved_input_fn,
      feed_fn=resolved_feed_fn,
      outputs=outputs,
      as_iterable=as_iterable,
      iterate_batches=iterate_batches)
def get_variable_value(self, name):
  """Returns value of the variable given by name.

  Args:
    name: string, name of the tensor.

  Returns:
    Numpy array - value of the tensor.
  """
  # Reads the value from the latest checkpoint under model_dir.
  return load_variable(self.model_dir, name)
def get_variable_names(self):
  """Returns list of all variable names in this model.

  Returns:
    List of names.
  """
  # list_variables yields (name, shape) pairs; only the names are kept.
  return [name for name, _ in list_variables(self.model_dir)]
@property
def model_dir(self):
  """Directory where model parameters, graph, etc. are saved."""
  return self._model_dir
@deprecated('2017-03-25', 'Please use Estimator.export_savedmodel() instead.')
def export(
    self,
    export_dir,
    input_fn=export._default_input_fn,  # pylint: disable=protected-access
    input_feature_key=None,
    use_deprecated_input_fn=True,
    signature_fn=None,
    prediction_key=None,
    default_batch_size=1,
    exports_to_keep=None,
    checkpoint_path=None):
  """Exports inference graph into given dir.

  Args:
    export_dir: A string containing a directory to write the exported graph
      and checkpoints.
    input_fn: If `use_deprecated_input_fn` is true, then a function that given
      `Tensor` of `Example` strings, parses it into features that are then
      passed to the model. Otherwise, a function that takes no argument and
      returns a tuple of (features, labels), where features is a dict of
      string key to `Tensor` and labels is a `Tensor` that's currently not
      used (and so can be `None`).
    input_feature_key: Only used if `use_deprecated_input_fn` is false. String
      key into the features dict returned by `input_fn` that corresponds to a
      the raw `Example` strings `Tensor` that the exported model will take as
      input. Can only be `None` if you're using a custom `signature_fn` that
      does not use the first arg (examples).
    use_deprecated_input_fn: Determines the signature format of `input_fn`.
    signature_fn: Function that returns a default signature and a named
      signature map, given `Tensor` of `Example` strings, `dict` of `Tensor`s
      for features and `Tensor` or `dict` of `Tensor`s for predictions.
    prediction_key: The key for a tensor in the `predictions` dict (output
      from the `model_fn`) to use as the `predictions` input to the
      `signature_fn`. Optional. If `None`, predictions will pass to
      `signature_fn` without filtering.
    default_batch_size: Default batch size of the `Example` placeholder.
    exports_to_keep: Number of exports to keep.
    checkpoint_path: the checkpoint path of the model to be exported. If it is
      `None` (which is default), will use the latest checkpoint in
      export_dir.

  Returns:
    The string path to the exported directory. NB: this functionality was
    added ca. 2016/09/25; clients that depend on the return value may need
    to handle the case where this function returns None because subclasses
    are not returning a value.
  """
  # Thin wrapper: all of the work is delegated to the export utility module.
  # pylint: disable=protected-access
  return export._export_estimator(
      estimator=self,
      export_dir=export_dir,
      signature_fn=signature_fn,
      prediction_key=prediction_key,
      input_fn=input_fn,
      input_feature_key=input_feature_key,
      use_deprecated_input_fn=use_deprecated_input_fn,
      default_batch_size=default_batch_size,
      exports_to_keep=exports_to_keep,
      checkpoint_path=checkpoint_path)
@abc.abstractproperty
def _get_train_ops(self, features, labels):
  """Method that builds model graph and returns trainer ops.

  NOTE(review): decorated with `abc.abstractproperty` although it takes
  arguments and is invoked as a method; subclasses appear to override it as a
  regular method — confirm before changing the decorator.

  Expected to be overridden by sub-classes that require custom support.

  Args:
    features: `Tensor` or `dict` of `Tensor` objects.
    labels: `Tensor` or `dict` of `Tensor` objects.

  Returns:
    A `ModelFnOps` object.
  """
  pass
@abc.abstractproperty
def _get_predict_ops(self, features):
  """Method that builds model graph and returns prediction ops.

  NOTE(review): decorated with `abc.abstractproperty` although it takes
  arguments and is invoked as a method; subclasses appear to override it as a
  regular method — confirm before changing the decorator.

  Args:
    features: `Tensor` or `dict` of `Tensor` objects.

  Returns:
    A `ModelFnOps` object.
  """
  pass
def _get_eval_ops(self, features, labels, metrics):
  """Method that builds model graph and returns evaluation ops.

  Expected to be overridden by sub-classes that require custom support.

  Args:
    features: `Tensor` or `dict` of `Tensor` objects.
    labels: `Tensor` or `dict` of `Tensor` objects.
    metrics: Dict of metrics to run. If None, the default metric functions
      are used; if {}, no metrics are used. Otherwise, `metrics` should map
      friendly names for the metric to a `MetricSpec` object defining which
      model outputs to evaluate against which labels with which metric
      function. Metric ops should support streaming, e.g., returning
      update_op and value tensors. See more details in
      `../../../../metrics/python/metrics/ops/streaming_metrics.py` and
      `../metric_spec.py`.

  Returns:
    A `ModelFnOps` object.

  Raises:
    NotImplementedError: Always, unless overridden by a subclass.
  """
  raise NotImplementedError('_get_eval_ops not implemented in BaseEstimator')
@deprecated(
    '2016-09-23',
    'The signature of the input_fn accepted by export is changing to be '
    'consistent with what\'s used by tf.Learn Estimator\'s train/evaluate, '
    'which makes this function useless. This will be removed after the '
    'deprecation date.')
def _get_feature_ops_from_example(self, examples_batch):
  """Returns feature parser for given example batch using features info.

  Requires that `fit()` has already recorded the feature signatures.

  Args:
    examples_batch: batch of tf.Example

  Returns:
    features: `Tensor` or `dict` of `Tensor` objects.

  Raises:
    ValueError: If `_features_info` attribute is not available (usually
      because `fit()` has not been called).
  """
  signatures = self._features_info
  if signatures is None:
    raise ValueError('Features information missing, was fit() ever called?')
  return tensor_signature.create_example_parser_from_signatures(
      signatures, examples_batch)
def _check_inputs(self, features, labels):
  """Validates features/labels against the signatures seen on the first call.

  On the first call this records `TensorSignature`s for the features (and for
  the labels, when given); on later calls it checks compatibility against the
  recorded signatures and raises on mismatch.
  """
  if self._features_info is None:
    # First call: remember the feature signatures for later checks.
    self._features_info = tensor_signature.create_signatures(features)
    logging.debug('Setting feature info to %s.', str(self._features_info))
  else:
    logging.debug('Given features: %s, required signatures: %s.',
                  str(features), str(self._features_info))
    if not tensor_signature.tensors_compatible(features, self._features_info):
      raise ValueError('Features are incompatible with given information. '
                       'Given features: %s, required signatures: %s.' %
                       (str(features), str(self._features_info)))

  if labels is None:
    return
  if self._labels_info is None:
    self._labels_info = tensor_signature.create_signatures(labels)
    logging.debug('Setting labels info to %s', str(self._labels_info))
  else:
    logging.debug('Given labels: %s, required signatures: %s.', str(labels),
                  str(self._labels_info))
    if not tensor_signature.tensors_compatible(labels, self._labels_info):
      raise ValueError('Labels are incompatible with given information. '
                       'Given labels: %s, required signatures: %s.' %
                       (str(labels), str(self._labels_info)))
def _extract_metric_update_ops(self, eval_dict):
  """Separate update operations from metric value operations."""
  update_ops = []
  value_ops = {}
  for name, metric_ops in six.iteritems(eval_dict):
    is_pair = isinstance(metric_ops, (list, tuple)) and len(metric_ops) == 2
    if is_pair:
      # Well-formed (value_op, update_op) pair: split it.
      value_ops[name] = metric_ops[0]
      update_ops.append(metric_ops[1])
      continue
    if isinstance(metric_ops, (list, tuple)):
      logging.warning(
          'Ignoring metric {}. It returned a list|tuple with '
          'len {}, expected 2'.format(name, len(metric_ops)))
    # A bare op (or a malformed sequence) is treated as a value op only.
    value_ops[name] = metric_ops

  if not update_ops:
    return None, value_ops
  return control_flow_ops.group(*update_ops), value_ops
def _evaluate_model(self,
                    input_fn,
                    steps,
                    feed_fn=None,
                    metrics=None,
                    name='',
                    checkpoint_path=None,
                    hooks=None,
                    log_progress=True):
  """Evaluates the model on data produced by `input_fn`.

  Args:
    input_fn: Input function returning `(features, labels)`.
    steps: Number of evaluation batches to run, or `None` to run until
      `input_fn` signals end of input.
    feed_fn: Optional function returning a feed dict per step.
    metrics: Optional dict of metric specifications (see `_get_eval_ops`).
    name: Optional evaluation name; selects the eval output subdirectory.
    checkpoint_path: Checkpoint to evaluate; defaults to the latest one
      found under `model_dir`.
    hooks: Optional list of session run hooks.
    log_progress: Whether to log evaluation progress.

  Returns:
    A `(eval_results, current_global_step)` tuple, or `(None, None)` when the
    config's execution mode excludes evaluation.

  Raises:
    NotFittedError: If no trained model checkpoint is found.
  """
  # TODO(wicke): Remove this once Model and associated code are gone.
  if (hasattr(self._config, 'execution_mode') and
      self._config.execution_mode not in ('all', 'evaluate', 'eval_evalset')):
    return None, None

  # Check that model has been trained (if nothing has been set explicitly).
  if not checkpoint_path:
    latest_path = checkpoint_management.latest_checkpoint(self._model_dir)
    if not latest_path:
      raise NotFittedError(
          "Couldn't find trained model at %s." % self._model_dir)
    checkpoint_path = latest_path

  # Setup output directory ('eval', or 'eval_<name>' for named evaluations).
  eval_dir = os.path.join(self._model_dir, 'eval'
                          if not name else 'eval_' + name)

  with ops.Graph().as_default() as g:
    random_seed.set_random_seed(self._config.tf_random_seed)
    global_step = training_util.create_global_step(g)
    features, labels = input_fn()
    self._check_inputs(features, labels)

    model_fn_results = self._get_eval_ops(features, labels, metrics)
    eval_dict = model_fn_results.eval_metric_ops

    update_op, eval_dict = self._extract_metric_update_ops(eval_dict)

    # We need to copy the hook array as we modify it, thus [:].
    hooks = hooks[:] if hooks else []
    if feed_fn:
      hooks.append(basic_session_run_hooks.FeedFnHook(feed_fn))
    if steps == 0:
      logging.warning('evaluation steps are 0. If `input_fn` does not raise '
                      '`OutOfRangeError`, the evaluation will never stop. '
                      'Use steps=None if intended.')
    if steps:
      hooks.append(
          evaluation.StopAfterNEvalsHook(steps, log_progress=log_progress))

    # Pick a key for the global step that does not collide with any
    # user-provided metric name.
    global_step_key = 'global_step'
    while global_step_key in eval_dict:
      global_step_key = '_' + global_step_key
    eval_dict[global_step_key] = global_step

    eval_results = evaluation.evaluate_once(
        checkpoint_path=checkpoint_path,
        master=self._config.evaluation_master,
        scaffold=model_fn_results.scaffold,
        eval_ops=update_op,
        final_ops=eval_dict,
        hooks=hooks,
        config=self._session_config)
    current_global_step = eval_results[global_step_key]

    _write_dict_to_summary(eval_dir, eval_results, current_global_step)

  return eval_results, current_global_step
def _get_features_from_input_fn(self, input_fn):
  """Calls `input_fn` and returns only the features part of its result."""
  result = input_fn()
  # input_fn may return either (features, labels) or bare features.
  if not isinstance(result, (list, tuple)):
    return result
  return result[0]
  def _infer_model(self,
                   input_fn,
                   feed_fn=None,
                   outputs=None,
                   as_iterable=True,
                   iterate_batches=False):
    """Builds the inference graph and runs prediction from the latest checkpoint.

    Args:
      input_fn: Input function returning features (or a (features, labels)
        pair, in which case only the features are used).
      feed_fn: Optional function producing a feed dict for each run call.
      outputs: Optional iterable of prediction keys to keep; others dropped.
      as_iterable: If True, return a generator of predictions; otherwise run
        a single session step and return its result directly.
      iterate_batches: If True (with as_iterable), yield whole batches rather
        than individual examples.

    Returns:
      Predictions, either directly or as a generator.

    Raises:
      NotFittedError: if no checkpoint exists in the model directory.
    """
    # Check that model has been trained.
    checkpoint_path = checkpoint_management.latest_checkpoint(self._model_dir)
    if not checkpoint_path:
      raise NotFittedError(
          "Couldn't find trained model at %s." % self._model_dir)

    with ops.Graph().as_default() as g:
      random_seed.set_random_seed(self._config.tf_random_seed)
      training_util.create_global_step(g)
      features = self._get_features_from_input_fn(input_fn)
      infer_ops = self._get_predict_ops(features)
      # Restrict predictions to the requested output keys (if any).
      predictions = self._filter_predictions(infer_ops.predictions, outputs)
      mon_sess = monitored_session.MonitoredSession(
          session_creator=monitored_session.ChiefSessionCreator(
              checkpoint_filename_with_path=checkpoint_path,
              scaffold=infer_ops.scaffold,
              config=self._session_config))
      if not as_iterable:
        with mon_sess:
          if not mon_sess.should_stop():
            return mon_sess.run(predictions, feed_fn() if feed_fn else None)
      else:
        # The generator takes ownership of mon_sess and closes it on exit.
        return self._predict_generator(mon_sess, predictions, feed_fn,
                                       iterate_batches)
  def _predict_generator(self, mon_sess, predictions, feed_fn, iterate_batches):
    """Yields predictions from `mon_sess` until the input pipeline is exhausted.

    Args:
      mon_sess: A `MonitoredSession`; closed when the generator finishes.
      predictions: Prediction tensor(s) to run; a single `Tensor` or a dict.
      feed_fn: Optional function producing a feed dict per run call.
      iterate_batches: If True, yield whole batches; otherwise yield one
        example at a time.

    Yields:
      Batches or single examples of predictions.
    """
    with mon_sess:
      while not mon_sess.should_stop():
        preds = mon_sess.run(predictions, feed_fn() if feed_fn else None)
        if iterate_batches:
          yield preds
        elif not isinstance(predictions, dict):
          # Single tensor: iterate over its leading (batch) dimension.
          for pred in preds:
            yield pred
        else:
          # Dict of tensors: derive the batch size from the first value.
          first_tensor = list(preds.values())[0]
          if isinstance(first_tensor, sparse_tensor.SparseTensorValue):
            batch_length = first_tensor.dense_shape[0]
          else:
            batch_length = first_tensor.shape[0]
          for i in range(batch_length):
            yield {key: value[i] for key, value in six.iteritems(preds)}
        # A constant input never raises OutOfRangeError, so without this
        # check the loop would repeat the same batch forever.
        if self._is_input_constant(feed_fn, mon_sess.graph):
          return
def _is_input_constant(self, feed_fn, graph):
# If there are no queue_runners, the input `predictions` is a
# constant, and we should stop after the first epoch. If,
# instead, there are queue_runners, eventually they should throw
# an `OutOfRangeError`.
if graph.get_collection(ops.GraphKeys.QUEUE_RUNNERS):
return False
# data_feeder uses feed_fn to generate `OutOfRangeError`.
if feed_fn is not None:
return False
return True
def _filter_predictions(self, predictions, outputs):
if not outputs:
return predictions
if not isinstance(predictions, dict):
raise ValueError(
'outputs argument is not valid in case of non-dict predictions.')
existing_keys = predictions.keys()
predictions = {
key: value
for key, value in six.iteritems(predictions)
if key in outputs
}
if not predictions:
raise ValueError('Expected to run at least one output from %s, '
'provided %s.' % (existing_keys, outputs))
return predictions
  def _train_model(self, input_fn, hooks):
    """Builds the training graph and runs the training loop.

    Args:
      input_fn: Input function returning a (features, labels) pair.
      hooks: List of `SessionRunHook`s to attach to the training session.

    Returns:
      The last loss value observed, or None if the session stopped before
      any step ran.
    """
    all_hooks = []
    self._graph = ops.Graph()
    with self._graph.as_default() as g, g.device(self._device_fn):
      random_seed.set_random_seed(self._config.tf_random_seed)
      global_step = training_util.create_global_step(g)
      features, labels = input_fn()
      self._check_inputs(features, labels)
      training_util._get_or_create_global_step_read()  # pylint: disable=protected-access
      model_fn_ops = self._get_train_ops(features, labels)
      # Expose the training loss through the standard LOSSES collection.
      ops.add_to_collection(ops.GraphKeys.LOSSES, model_fn_ops.loss)
      all_hooks.extend(hooks)
      # Always guard against NaN losses and log progress every 100 steps.
      all_hooks.extend([
          basic_session_run_hooks.NanTensorHook(model_fn_ops.loss),
          basic_session_run_hooks.LoggingTensorHook(
              {
                  'loss': model_fn_ops.loss,
                  'step': global_step
              },
              every_n_iter=100)
      ])

      scaffold = model_fn_ops.scaffold or monitored_session.Scaffold()
      # Install a default sharded saver unless the model already provides one.
      if not (scaffold.saver or ops.get_collection(ops.GraphKeys.SAVERS)):
        ops.add_to_collection(
            ops.GraphKeys.SAVERS,
            saver.Saver(
                sharded=True,
                max_to_keep=self._config.keep_checkpoint_max,
                keep_checkpoint_every_n_hours=(
                    self._config.keep_checkpoint_every_n_hours),
                defer_build=True,
                save_relative_paths=True))

      # Only add a checkpoint-saver hook if checkpointing is configured and
      # no hook from any source already handles it.
      chief_hooks = []
      if (self._config.save_checkpoints_secs or
          self._config.save_checkpoints_steps):
        saver_hook_exists = any(
            isinstance(h, basic_session_run_hooks.CheckpointSaverHook)
            for h in (all_hooks + model_fn_ops.training_hooks + chief_hooks +
                      model_fn_ops.training_chief_hooks)
        )
        if not saver_hook_exists:
          chief_hooks = [
              basic_session_run_hooks.CheckpointSaverHook(
                  self._model_dir,
                  save_secs=self._config.save_checkpoints_secs,
                  save_steps=self._config.save_checkpoints_steps,
                  scaffold=scaffold)
          ]
      with monitored_session.MonitoredTrainingSession(
          master=self._config.master,
          is_chief=self._config.is_chief,
          checkpoint_dir=self._model_dir,
          scaffold=scaffold,
          hooks=all_hooks + model_fn_ops.training_hooks,
          chief_only_hooks=chief_hooks + model_fn_ops.training_chief_hooks,
          save_checkpoint_secs=0,  # Saving is handled by a hook.
          save_summaries_steps=self._config.save_summary_steps,
          config=self._session_config) as mon_sess:
        loss = None
        while not mon_sess.should_stop():
          _, loss = mon_sess.run([model_fn_ops.train_op, model_fn_ops.loss])
      return loss
def _identity_feature_engineering_fn(features, labels):
return features, labels
class Estimator(BaseEstimator):
  """Estimator class is the basic TensorFlow model trainer/evaluator.

  THIS CLASS IS DEPRECATED. See
  [contrib/learn/README.md](https://www.tensorflow.org/code/tensorflow/contrib/learn/README.md)
  for general migration instructions.
  """

  def __init__(self,
               model_fn=None,
               model_dir=None,
               config=None,
               params=None,
               feature_engineering_fn=None):
    """Constructs an `Estimator` instance.

    Args:
      model_fn: Model function. Follows the signature:
        * Args:
          * `features`: single `Tensor` or `dict` of `Tensor`s
               (depending on data passed to `fit`),
          * `labels`: `Tensor` or `dict` of `Tensor`s (for multi-head
               models). If mode is `ModeKeys.INFER`, `labels=None` will be
               passed. If the `model_fn`'s signature does not accept
               `mode`, the `model_fn` must still be able to handle
               `labels=None`.
          * `mode`: Optional. Specifies if this training, evaluation or
               prediction. See `ModeKeys`.
          * `params`: Optional `dict` of hyperparameters. Will receive what
               is passed to Estimator in `params` parameter. This allows
               to configure Estimators from hyper parameter tuning.
          * `config`: Optional configuration object. Will receive what is passed
               to Estimator in `config` parameter, or the default `config`.
               Allows updating things in your model_fn based on configuration
               such as `num_ps_replicas`.
          * `model_dir`: Optional directory where model parameters, graph etc
               are saved. Will receive what is passed to Estimator in
               `model_dir` parameter, or the default `model_dir`. Allows
               updating things in your model_fn that expect model_dir, such as
               training hooks.
        * Returns:
          `ModelFnOps`

        Also supports a legacy signature which returns tuple of:

          * predictions: `Tensor`, `SparseTensor` or dictionary of same.
              Can also be any type that is convertible to a `Tensor` or
              `SparseTensor`, or dictionary of same.
          * loss: Scalar loss `Tensor`.
          * train_op: Training update `Tensor` or `Operation`.

        Supports next three signatures for the function:

          * `(features, labels) -> (predictions, loss, train_op)`
          * `(features, labels, mode) -> (predictions, loss, train_op)`
          * `(features, labels, mode, params) -> (predictions, loss, train_op)`
          * `(features, labels, mode, params, config) ->
             (predictions, loss, train_op)`
          * `(features, labels, mode, params, config, model_dir) ->
             (predictions, loss, train_op)`

      model_dir: Directory to save model parameters, graph and etc. This can
        also be used to load checkpoints from the directory into a estimator to
        continue training a previously saved model.
      config: Configuration object.
      params: `dict` of hyper parameters that will be passed into `model_fn`.
        Keys are names of parameters, values are basic python types.
      feature_engineering_fn: Feature engineering function. Takes features and
        labels which are the output of `input_fn` and
        returns features and labels which will be fed
        into `model_fn`. Please check `model_fn` for
        a definition of features and labels.

    Raises:
      ValueError: parameters of `model_fn` don't match `params`.
    """
    super(Estimator, self).__init__(model_dir=model_dir, config=config)
    if model_fn is not None:
      # Check number of arguments of the given function matches requirements.
      model_fn_args = _model_fn_args(model_fn)
      if params is not None and 'params' not in model_fn_args:
        raise ValueError('Estimator\'s model_fn (%s) does not have a params '
                         'argument, but params (%s) were passed to the '
                         'Estimator\'s constructor.' % (model_fn, params))
      if params is None and 'params' in model_fn_args:
        logging.warning('Estimator\'s model_fn (%s) includes params '
                        'argument, but params are not passed to Estimator.',
                        model_fn)
    self._model_fn = model_fn
    self.params = params
    # Fall back to an identity pass-through when no feature engineering
    # function is supplied.
    self._feature_engineering_fn = (
        feature_engineering_fn or _identity_feature_engineering_fn)

  def _call_model_fn(self, features, labels, mode, metrics=None, config=None):
    """Calls model function with support of 2, 3 or 4 arguments.

    Args:
      features: features dict.
      labels: labels dict.
      mode: ModeKeys
      metrics: Dict of metrics.
      config: RunConfig.

    Returns:
      A `ModelFnOps` object. If model_fn returns a tuple, wraps them up in a
      `ModelFnOps` object.

    Raises:
      ValueError: if model_fn returns invalid objects.
    """
    features, labels = self._feature_engineering_fn(features, labels)
    model_fn_args = _model_fn_args(self._model_fn)
    kwargs = {}
    # Forward only the optional keyword arguments that the user's model_fn
    # actually declares in its signature.
    if 'mode' in model_fn_args:
      kwargs['mode'] = mode
    if 'params' in model_fn_args:
      kwargs['params'] = self.params
    if 'config' in model_fn_args:
      if config:
        kwargs['config'] = config
      else:
        kwargs['config'] = self.config
    if 'model_dir' in model_fn_args:
      kwargs['model_dir'] = self.model_dir
    model_fn_results = self._model_fn(features, labels, **kwargs)

    if isinstance(model_fn_results, model_fn_lib.ModelFnOps):
      model_fn_ops = model_fn_results
    else:
      # Here model_fn_results should be a tuple with 3 elements.
      if len(model_fn_results) != 3:
        raise ValueError('Unrecognized value returned by model_fn, '
                         'please return ModelFnOps.')
      model_fn_ops = model_fn_lib.ModelFnOps(
          mode=mode,
          predictions=model_fn_results[0],
          loss=model_fn_results[1],
          train_op=model_fn_results[2])

    # Custom metrics should overwrite defaults.
    if metrics:
      model_fn_ops.eval_metric_ops.update(
          _make_metrics_ops(metrics, features, labels,
                            model_fn_ops.predictions))

    return model_fn_ops

  def _get_train_ops(self, features, labels):
    """Method that builds model graph and returns trainer ops.

    Expected to be overridden by sub-classes that require custom support.
    This implementation uses `model_fn` passed as parameter to constructor to
    build model.

    Args:
      features: `Tensor` or `dict` of `Tensor` objects.
      labels: `Tensor` or `dict` of `Tensor` objects.

    Returns:
      `ModelFnOps` object.
    """
    return self._call_model_fn(features, labels, model_fn_lib.ModeKeys.TRAIN)

  def _get_eval_ops(self, features, labels, metrics):
    """Method that builds model graph and returns evaluation ops.

    Expected to be overridden by sub-classes that require custom support.
    This implementation uses `model_fn` passed as parameter to constructor to
    build model.

    Args:
      features: `Tensor` or `dict` of `Tensor` objects.
      labels: `Tensor` or `dict` of `Tensor` objects.
      metrics: Dict of metrics to run. If None, the default metric functions
        are used; if {}, no metrics are used. Otherwise, `metrics` should map
        friendly names for the metric to a `MetricSpec` object defining which
        model outputs to evaluate against which labels with which metric
        function. Metric ops should support streaming, e.g., returning
        update_op and value tensors. See more details in
        `../../../../metrics/python/metrics/ops/streaming_metrics.py` and
        `../metric_spec.py`.

    Returns:
      `ModelFnOps` object.

    Raises:
      ValueError: if `metrics` don't match `labels`.
    """
    model_fn_ops = self._call_model_fn(features, labels,
                                       model_fn_lib.ModeKeys.EVAL, metrics)

    # Ensure the loss is always reported alongside any user metrics.
    if metric_key.MetricKey.LOSS not in model_fn_ops.eval_metric_ops:
      model_fn_ops.eval_metric_ops[metric_key.MetricKey.LOSS] = (
          metrics_lib.mean(model_fn_ops.loss))
    return model_fn_ops

  def _get_predict_ops(self, features):
    """Method that builds model graph and returns prediction ops.

    Expected to be overridden by sub-classes that require custom support.
    This implementation uses `model_fn` passed as parameter to constructor to
    build model.

    Args:
      features: `Tensor` or `dict` of `Tensor` objects.

    Returns:
      `ModelFnOps` object.
    """
    # Labels are recreated as placeholders matching the shapes seen in `fit`
    # so the model_fn can be called with the INFER mode.
    labels = tensor_signature.create_placeholders_from_signatures(
        self._labels_info)
    return self._call_model_fn(features, labels, model_fn_lib.ModeKeys.INFER)

  def export_savedmodel(self,
                        export_dir_base,
                        serving_input_fn,
                        default_output_alternative_key=None,
                        assets_extra=None,
                        as_text=False,
                        checkpoint_path=None,
                        graph_rewrite_specs=(GraphRewriteSpec(
                            (tag_constants.SERVING,), ()),),
                        strip_default_attrs=False):
    # pylint: disable=line-too-long
    """Exports inference graph as a SavedModel into given dir.

    Args:
      export_dir_base: A string containing a directory to write the exported
        graph and checkpoints.
      serving_input_fn: A function that takes no argument and
        returns an `InputFnOps`.
      default_output_alternative_key: the name of the head to serve when none is
        specified. Not needed for single-headed models.
      assets_extra: A dict specifying how to populate the assets.extra directory
        within the exported SavedModel. Each key should give the destination
        path (including the filename) relative to the assets.extra directory.
        The corresponding value gives the full path of the source file to be
        copied. For example, the simple case of copying a single file without
        renaming it is specified as
        `{'my_asset_file.txt': '/path/to/my_asset_file.txt'}`.
      as_text: whether to write the SavedModel proto in text format.
      checkpoint_path: The checkpoint path to export. If None (the default),
        the most recent checkpoint found within the model directory is chosen.
      graph_rewrite_specs: an iterable of `GraphRewriteSpec`. Each element will
        produce a separate MetaGraphDef within the exported SavedModel, tagged
        and rewritten as specified. Defaults to a single entry using the
        default serving tag ("serve") and no rewriting.
      strip_default_attrs: Boolean. If `True`, default-valued attributes will be
        removed from the NodeDefs. For a detailed guide, see
        [Stripping Default-Valued
        Attributes](https://github.com/tensorflow/tensorflow/blob/master/tensorflow/python/saved_model/README.md#stripping-default-valued-attributes).

    Returns:
      The string path to the exported directory.

    Raises:
      ValueError: if an unrecognized export_type is requested.
    """
    # pylint: enable=line-too-long
    if serving_input_fn is None:
      raise ValueError('serving_input_fn must be defined.')

    if not checkpoint_path:
      # Locate the latest checkpoint
      checkpoint_path = checkpoint_management.latest_checkpoint(self._model_dir)
    if not checkpoint_path:
      raise NotFittedError(
          "Couldn't find trained model at %s." % self._model_dir)

    export_dir = saved_model_export_utils.get_timestamped_export_dir(
        export_dir_base)
    # We'll write the SavedModel to a temporary directory and then atomically
    # rename it at the end. This helps to avoid corrupt / incomplete outputs,
    # which could otherwise occur if the job is preempted or otherwise fails
    # in the middle of SavedModel creation.
    temp_export_dir = saved_model_export_utils.get_temp_export_dir(export_dir)
    builder = saved_model_builder.SavedModelBuilder(temp_export_dir)

    # Build the base graph
    with ops.Graph().as_default() as g:
      training_util.create_global_step(g)

      # Call the serving_input_fn and collect the input alternatives.
      input_ops = serving_input_fn()
      input_alternatives, features = (
          saved_model_export_utils.get_input_alternatives(input_ops))

      # TODO(b/34388557) This is a stopgap, pending recording model provenance.
      # Record which features are expected at serving time. It is assumed that
      # these are the features that were used in training.
      for feature_key in input_ops.features.keys():
        ops.add_to_collection(
            constants.COLLECTION_DEF_KEY_FOR_INPUT_FEATURE_KEYS, feature_key)

      # Call the model_fn and collect the output alternatives.
      model_fn_ops = self._call_model_fn(features, None,
                                         model_fn_lib.ModeKeys.INFER)
      output_alternatives, actual_default_output_alternative_key = (
          saved_model_export_utils.get_output_alternatives(
              model_fn_ops, default_output_alternative_key))

      init_op = control_flow_ops.group(variables.local_variables_initializer(),
                                      resources.initialize_resources(
                                          resources.shared_resources()),
                                      lookup_ops.tables_initializer())

      # Build the SignatureDefs from all pairs of input and output alternatives
      signature_def_map = saved_model_export_utils.build_all_signature_defs(
          input_alternatives, output_alternatives,
          actual_default_output_alternative_key)

      # Export the first MetaGraphDef with variables, assets etc.
      with tf_session.Session('') as session:
        # pylint: disable=protected-access
        saveables = variables._all_saveable_objects()
        # pylint: enable=protected-access

        if (model_fn_ops.scaffold is not None and
            model_fn_ops.scaffold.saver is not None):
          saver_for_restore = model_fn_ops.scaffold.saver
        elif saveables:
          saver_for_restore = saver.Saver(saveables, sharded=True)
        # NOTE(review): if the scaffold has no saver AND `saveables` is empty,
        # `saver_for_restore` is unbound here and the next line raises
        # NameError -- confirm whether a variable-free model is expected.
        saver_for_restore.restore(session, checkpoint_path)

        # Perform the export
        if not graph_rewrite_specs or graph_rewrite_specs[0].transforms:
          raise ValueError('The first element of graph_rewrite_specs '
                           'must specify no transforms.')
        untransformed_tags = graph_rewrite_specs[0].tags

        builder.add_meta_graph_and_variables(
            session,
            untransformed_tags,
            signature_def_map=signature_def_map,
            assets_collection=ops.get_collection(ops.GraphKeys.ASSET_FILEPATHS),
            main_op=init_op,
            strip_default_attrs=strip_default_attrs)

    # pylint: disable=protected-access
    base_meta_graph_def = builder._saved_model.meta_graphs[0]
    # pylint: enable=protected-access

    if graph_rewrite_specs[1:]:
      # Prepare the input_names and output_names needed for the
      # meta_graph_transform call below.
      input_names = [
          tensor.name
          for input_dict in input_alternatives.values()
          for tensor in input_dict.values()
      ]
      output_names = [
          tensor.name
          for output_alternative in output_alternatives.values()
          for tensor in output_alternative[1].values()
      ]

    # Write the additional MetaGraphDefs
    for graph_rewrite_spec in graph_rewrite_specs[1:]:
      # TODO(soergel) consider moving most of this to saved_model.builder_impl
      # as e.g. builder.add_rewritten_meta_graph(rewritten_graph_def, tags)
      transformed_meta_graph_def = meta_graph_transform.meta_graph_transform(
          base_meta_graph_def, input_names, output_names,
          graph_rewrite_spec.transforms, graph_rewrite_spec.tags)
      # pylint: disable=protected-access
      meta_graph_def = builder._saved_model.meta_graphs.add()
      # pylint: enable=protected-access
      meta_graph_def.CopyFrom(transformed_meta_graph_def)

    # Add the extra assets
    if assets_extra:
      assets_extra_path = os.path.join(
          compat.as_bytes(temp_export_dir), compat.as_bytes('assets.extra'))
      for dest_relative, source in assets_extra.items():
        dest_absolute = os.path.join(
            compat.as_bytes(assets_extra_path), compat.as_bytes(dest_relative))
        dest_path = os.path.dirname(dest_absolute)
        gfile.MakeDirs(dest_path)
        gfile.Copy(source, dest_absolute)

    builder.save(as_text)
    # Atomic publish: the export only becomes visible under its final name
    # once it is complete.
    gfile.Rename(temp_export_dir, export_dir)
    return export_dir
# For time of deprecation x,y from Estimator allow direct access.
# pylint: disable=protected-access
class SKCompat(sklearn.BaseEstimator):
  """Scikit learn wrapper for TensorFlow Learn Estimator.

  THIS CLASS IS DEPRECATED. See
  [contrib/learn/README.md](https://www.tensorflow.org/code/tensorflow/contrib/learn/README.md)
  for general migration instructions.
  """

  @deprecated(None, 'Please switch to the Estimator interface.')
  def __init__(self, estimator):
    """Wraps `estimator` with a scikit-learn style fit/score/predict API."""
    self._estimator = estimator

  def fit(self, x, y, batch_size=128, steps=None, max_steps=None,
          monitors=None):
    """Trains the wrapped estimator on in-memory arrays `x` and `y`."""
    input_fn, feed_fn = _get_input_fn(
        x, y,
        input_fn=None, feed_fn=None,
        batch_size=batch_size, shuffle=True, epochs=None)
    # A feed-based input pipeline needs a hook to push the feeds each step.
    hooks = [basic_session_run_hooks.FeedFnHook(feed_fn)] if feed_fn else []
    if monitors:
      hooks.extend(monitors)
    self._estimator.fit(
        input_fn=input_fn, steps=steps, max_steps=max_steps, monitors=hooks)
    return self

  def score(self, x, y, batch_size=128, steps=None, metrics=None, name=None):
    """Evaluates the wrapped estimator; returns the eval metrics dict."""
    input_fn, feed_fn = _get_input_fn(
        x, y,
        input_fn=None, feed_fn=None,
        batch_size=batch_size, shuffle=False, epochs=1)
    if metrics is not None and not isinstance(metrics, dict):
      raise ValueError('Metrics argument should be None or dict. '
                       'Got %s.' % metrics)
    eval_results, global_step = self._estimator._evaluate_model(
        input_fn=input_fn,
        feed_fn=feed_fn,
        steps=steps,
        metrics=metrics,
        name=name)
    if eval_results is not None:
      # Report which global step the evaluation corresponds to.
      eval_results['global_step'] = global_step
    return eval_results

  def predict(self, x, batch_size=128, outputs=None):
    """Runs prediction over `x`, concatenating all batches into arrays."""
    input_fn, feed_fn = _get_input_fn(
        x, None,
        input_fn=None, feed_fn=None,
        batch_size=batch_size, shuffle=False, epochs=1)
    batches = list(
        self._estimator._infer_model(
            input_fn=input_fn,
            feed_fn=feed_fn,
            outputs=outputs,
            as_iterable=True,
            iterate_batches=True))
    first = batches[0]
    if isinstance(first, dict):
      # Multi-output model: concatenate per key.
      return {
          key: np.concatenate([batch[key] for batch in batches], axis=0)
          for key in first
      }
    return np.concatenate(batches, axis=0)
| apache-2.0 |
hongguangguo/shogun | examples/undocumented/python_modular/mathematics_logdet.py | 29 | 2923 | #!/usr/bin/env python
from numpy import *
from scipy.io import mmread
# Loading an example sparse matrix of dimension 479x479, real, unsymmetric
mtx=mmread('../../../data/logdet/west0479.mtx')
parameter_list=[[mtx,100,60,1]]
def mathematics_logdet (matrix=mtx,max_iter_eig=1000,max_iter_lin=1000,num_samples=1):
	"""Estimate log(det(A)) of a sparse positive-definite matrix with Shogun.

	A is built as matrix^T * matrix + I, which makes it symmetric positive
	definite.  The estimate (probing-sampler trace of a rational approximation
	of log(A)) is printed next to the exact value from Statistics.log_det.
	Returns the per-sample estimates, or None if Shogun's optional
	dependencies (Eigen3/LaPack/ColPack) are unavailable.
	"""
	from scipy.sparse import eye

	# Create a Hermitian sparse matrix
	rows=matrix.shape[0]
	cols=matrix.shape[1]
	A=matrix.transpose()*matrix+eye(rows, cols)

	from scipy.sparse import csc_matrix

	try:
		from shogun.Mathematics import RealSparseMatrixOperator
		from shogun.Mathematics import LanczosEigenSolver
		from shogun.Mathematics import CGMShiftedFamilySolver
		from shogun.Mathematics import LogRationalApproximationCGM
		from shogun.Mathematics import ProbingSampler
		from shogun.Mathematics import LogDetEstimator
		from shogun.Mathematics import Statistics
		from shogun.Library import SerialComputationEngine

		# creating the linear operator, eigen-solver
		op=RealSparseMatrixOperator(A.tocsc())

		eig_solver=LanczosEigenSolver(op)
		# we can set the iteration limit high for poorly conditioned matrices
		eig_solver.set_max_iteration_limit(max_iter_eig)

		# alternatively, if the matrix is small, we can compute eigenvalues externally
		# and set min/max eigenvalues into the eigensolver
		# from scipy.sparse.linalg import eigsh
		# eigenvalues=eigsh(A, rows-1)
		# eig_solver.set_min_eigenvalue(eigenvalues[0][0])
		# eig_solver.set_max_eigenvalue(eigenvalues[0][-1])

		# create the shifted-family linear solver which solves for all the shifts
		# using as many matrix-vector products as one shift in CG iterations
		lin_solver=CGMShiftedFamilySolver()
		lin_solver.set_iteration_limit(max_iter_lin)

		# computation engine
		engine=SerialComputationEngine()

		# set the desired accuracy tighter to obtain better results
		# this determines the number of contour points in conformal mapping of
		# the rational approximation of the Cauchy's integral of f(A)*s, f=log
		desired_accuracy=1E-5

		# creating the log-linear-operator function
		op_func=LogRationalApproximationCGM(op, engine, eig_solver, lin_solver,\
			desired_accuracy)

		# set the trace sampler to be probing sampler, in which samples are obtained
		# by greedy graph coloring of the power of sparse matrix (default is power=1,
		# 2-distance coloring)
		trace_sampler=ProbingSampler(op)

		# estimating log-det
		log_det_estimator=LogDetEstimator(trace_sampler, op_func, engine)

		# set the number of samples as required
		estimates=log_det_estimator.sample(num_samples)
		estimated_logdet=sum(estimates)/len(estimates)
		actual_logdet=Statistics.log_det(A)

		print(actual_logdet, estimated_logdet)

		return estimates

	except ImportError:
		print('One or many of the dependencies (Eigen3/LaPack/ColPack) not found!')
# Run the example with the default parameter set when executed as a script.
if __name__=='__main__':
	print('LogDetEstimator')
	mathematics_logdet (*parameter_list[0])
| gpl-3.0 |
40223141/lego | static/Brython3.1.1-20150328-091302/Lib/importlib/__init__.py | 610 | 3472 | """A pure Python implementation of import."""
__all__ = ['__import__', 'import_module', 'invalidate_caches']
# Bootstrap help #####################################################
# Until bootstrapping is complete, DO NOT import any modules that attempt
# to import importlib._bootstrap (directly or indirectly). Since this
# partially initialised package would be present in sys.modules, those
# modules would get an uninitialised copy of the source version, instead
# of a fully initialised version (either the frozen one or the one
# initialised below if the frozen one is not available).
import _imp # Just the builtin component, NOT the full Python module
import sys
from . import machinery #fix me brython
# Prefer the interpreter's frozen bootstrap when available; otherwise fall
# back to the pure-Python implementation and wire it up by hand.
try:
    import _frozen_importlib as _bootstrap
except ImportError:
    from . import _bootstrap
    # The pure-Python bootstrap needs explicit references to sys and _imp.
    _bootstrap._setup(sys, _imp)
else:
    # importlib._bootstrap is the built-in import, ensure we don't create
    # a second copy of the module.
    _bootstrap.__name__ = 'importlib._bootstrap'
    _bootstrap.__package__ = 'importlib'
    _bootstrap.__file__ = __file__.replace('__init__.py', '_bootstrap.py')
    sys.modules['importlib._bootstrap'] = _bootstrap

# To simplify imports in test code
_w_long = _bootstrap._w_long
_r_long = _bootstrap._r_long
# Fully bootstrapped at this point, import whatever you like, circular
# dependencies and startup overhead minimisation permitting :)
# Public API #########################################################
from ._bootstrap import __import__
def invalidate_caches():
    """Invalidate the caches of every sys.meta_path finder that supports it.

    Finders without an invalidate_caches() method are silently skipped.
    """
    for finder in sys.meta_path:
        invalidate = getattr(finder, 'invalidate_caches', None)
        if invalidate is not None:
            invalidate()
def find_loader(name, path=None):
    """Find the loader for the specified module.

    First, sys.modules is checked to see if the module was already imported;
    if so, sys.modules[name].__loader__ is returned (raising ValueError when
    that attribute happens to be None).  Otherwise sys.meta_path is searched
    with the given 'path', returning None if no loader could be found.

    Dotted names do not have their parent packages implicitly imported. You
    will most likely need to explicitly import all parent packages in the
    proper order for a submodule to get the correct loader.
    """
    try:
        module = sys.modules[name]
    except KeyError:
        # Not imported yet: defer to the meta-path finders.
        return _bootstrap._find_module(name, path)
    loader = module.__loader__
    if loader is None:
        raise ValueError('{}.__loader__ is None'.format(name))
    return loader
def import_module(name, package=None):
    """Import a module.

    The 'package' argument is required when performing a relative import. It
    specifies the package to use as the anchor point from which to resolve the
    relative import to an absolute import.
    """
    level = 0
    if name.startswith('.'):
        if not package:
            raise TypeError("relative imports require the 'package' argument")
        # Count the leading dots to determine the relative-import level.
        while level < len(name) and name[level] == '.':
            level += 1
    return _bootstrap._gcd_import(name[level:], package, level)
#need at least one import hook for importlib stuff to work.
# NOTE(review): basehook appears to be Brython-specific -- it installs a
# meta-path finder so imports resolve in the browser environment; confirm
# against the Brython runtime.
import basehook
sys.meta_path.append(basehook.BaseHook())
| agpl-3.0 |
losnikitos/googleads-python-lib | examples/dfp/v201505/line_item_service/get_recently_updated_line_items.py | 4 | 2426 | #!/usr/bin/python
#
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example shows how to get recently updated line items.
To create line items, run create_line_items.py."""
import datetime
# Import appropriate modules from the client library.
from googleads import dfp
ORDER_ID = 'INSERT_ORDER_ID_HERE'
def main(client, order_id):
  """Prints line items of `order_id` modified within the last 3 days.

  Args:
    client: an initialized dfp.DfpClient.
    order_id: the ID of the order whose line items are listed.
  """
  # Initialize appropriate service.
  line_item_service = client.GetService('LineItemService', version='v201505')

  # Create statement object to only select line items belonging to the order and
  # have been modified in the last 3 days.
  three_days_ago = datetime.date.today() - datetime.timedelta(days=3)
  values = [{
      'key': 'dateTimeString',
      'value': {
          'xsi_type': 'TextValue',
          'value': three_days_ago.strftime('%Y-%m-%dT%H:%M:%S')
      }
  }, {
      'key': 'orderId',
      'value': {
          'xsi_type': 'NumberValue',
          'value': order_id
      }
  }]
  query = 'WHERE lastModifiedDateTime >= :dateTimeString AND orderId = :orderId'
  statement = dfp.FilterStatement(query, values)

  # Page through all matching line items.
  while True:
    # Get line items by statement.
    response = line_item_service.getLineItemsByStatement(
        statement.ToStatement())
    if 'results' in response:
      # Display results.
      for line_item in response['results']:
        print ('Line item with id \'%s\', belonging to order id \'%s\', and '
               'named \'%s\' was found.' % (line_item['id'],
                                            line_item['orderId'],
                                            line_item['name']))
      # Advance to the next page of results.
      statement.offset += dfp.SUGGESTED_PAGE_LIMIT
    else:
      break

  print '\nNumber of results found: %s' % response['totalResultSetSize']
# Script entry point: load credentials from storage and run the example
# against the hard-coded ORDER_ID.
if __name__ == '__main__':
  # Initialize client object.
  dfp_client = dfp.DfpClient.LoadFromStorage()
  main(dfp_client, ORDER_ID)
| apache-2.0 |
alu0100207385/dsi_3Django | django/contrib/admin/sites.py | 91 | 18706 | from functools import update_wrapper
from django.http import Http404, HttpResponseRedirect
from django.contrib.admin import ModelAdmin, actions
from django.contrib.admin.forms import AdminAuthenticationForm
from django.contrib.auth import logout as auth_logout, REDIRECT_FIELD_NAME
from django.contrib.contenttypes import views as contenttype_views
from django.views.decorators.csrf import csrf_protect
from django.db.models.base import ModelBase
from django.core.exceptions import ImproperlyConfigured
from django.core.urlresolvers import reverse, NoReverseMatch
from django.template.response import TemplateResponse
from django.utils import six
from django.utils.text import capfirst
from django.utils.translation import ugettext as _
from django.views.decorators.cache import never_cache
from django.conf import settings
LOGIN_FORM_KEY = 'this_is_the_login_form'
class AlreadyRegistered(Exception):
    """Raised by AdminSite.register() when a model is registered twice."""
    pass
class NotRegistered(Exception):
    """Raised by AdminSite.unregister() when a model was never registered."""
    pass
class AdminSite(object):
"""
An AdminSite object encapsulates an instance of the Django admin application, ready
to be hooked in to your URLconf. Models are registered with the AdminSite using the
register() method, and the get_urls() method can then be used to access Django view
functions that present a full admin interface for the collection of registered
models.
"""
login_form = None
index_template = None
app_index_template = None
login_template = None
logout_template = None
password_change_template = None
password_change_done_template = None
    def __init__(self, name='admin', app_name='admin'):
        """Initialize an empty admin site.

        ``name`` and ``app_name`` namespace this site's URLs so several
        AdminSite instances can be deployed side by side.
        """
        self._registry = {}  # model_class class -> admin_class instance
        self.name = name
        self.app_name = app_name
        # 'delete_selected' is enabled by default; _global_actions keeps a
        # copy of every action so it can still be fetched after disabling.
        self._actions = {'delete_selected': actions.delete_selected}
        self._global_actions = self._actions.copy()
    def register(self, model_or_iterable, admin_class=None, **options):
        """
        Registers the given model(s) with the given admin class.

        The model(s) should be Model classes, not instances.

        If an admin class isn't given, it will use ModelAdmin (the default
        admin options). If keyword arguments are given -- e.g., list_display --
        they'll be applied as options to the admin class.

        If a model is already registered, this will raise AlreadyRegistered.

        If a model is abstract, this will raise ImproperlyConfigured.
        """
        if not admin_class:
            admin_class = ModelAdmin

        # Normalize a single model class to a one-element list.
        if isinstance(model_or_iterable, ModelBase):
            model_or_iterable = [model_or_iterable]
        for model in model_or_iterable:
            if model._meta.abstract:
                raise ImproperlyConfigured('The model %s is abstract, so it '
                      'cannot be registered with admin.' % model.__name__)

            if model in self._registry:
                raise AlreadyRegistered('The model %s is already registered' % model.__name__)

            # Ignore the registration if the model has been
            # swapped out.
            if not model._meta.swapped:
                # If we got **options then dynamically construct a subclass of
                # admin_class with those **options.
                if options:
                    # For reasons I don't quite understand, without a __module__
                    # the created class appears to "live" in the wrong place,
                    # which causes issues later on.
                    options['__module__'] = __name__
                    admin_class = type("%sAdmin" % model.__name__, (admin_class,), options)

                if admin_class is not ModelAdmin and settings.DEBUG:
                    admin_class.validate(model)

                # Instantiate the admin class to save in the registry
                self._registry[model] = admin_class(model, self)
def unregister(self, model_or_iterable):
    """
    Unregisters the given model(s).

    If a model isn't already registered, this will raise NotRegistered.
    """
    if isinstance(model_or_iterable, ModelBase):
        model_or_iterable = [model_or_iterable]
    for model in model_or_iterable:
        try:
            del self._registry[model]
        except KeyError:
            raise NotRegistered('The model %s is not registered' % model.__name__)
def add_action(self, action, name=None):
    """
    Register an action to be available globally.

    ``name`` defaults to the callable's ``__name__``.
    """
    key = name if name is not None else action.__name__
    self._actions[key] = action
    self._global_actions[key] = action
def disable_action(self, name):
    """
    Disable a globally-registered action. Raises KeyError for invalid names.
    """
    # pop() without a default raises KeyError exactly like ``del`` did;
    # the action stays in _global_actions so get_action() still finds it.
    self._actions.pop(name)
def get_action(self, name):
    """
    Explicitly get a registered global action whether it's enabled or
    not. Raises KeyError for invalid names.
    """
    action = self._global_actions[name]
    return action
@property
def actions(self):
    """
    Get all the enabled actions as an iterable of (name, func).

    Actions removed with ``disable_action`` are excluded; use
    ``get_action`` to reach those.
    """
    # six.iteritems keeps this a lazy iterator on both Python 2 and 3.
    return six.iteritems(self._actions)
def has_permission(self, request):
    """
    Returns True if the given HttpRequest has permission to view
    *at least one* page in the admin site (i.e. the request user is an
    active staff member).
    """
    user = request.user
    return user.is_active and user.is_staff
def check_dependencies(self):
    """
    Check that all things needed to run the admin have been correctly installed.

    The default implementation checks that LogEntry, ContentType and the
    auth context processor are installed.
    """
    from django.contrib.admin.models import LogEntry
    from django.contrib.contenttypes.models import ContentType

    if not LogEntry._meta.installed:
        raise ImproperlyConfigured("Put 'django.contrib.admin' in your "
            "INSTALLED_APPS setting in order to use the admin application.")
    if not ContentType._meta.installed:
        raise ImproperlyConfigured("Put 'django.contrib.contenttypes' in "
            "your INSTALLED_APPS setting in order to use the admin application.")
    # Either the new-style or the legacy auth context processor is OK.
    auth_processors = ('django.contrib.auth.context_processors.auth',
                       'django.core.context_processors.auth')
    if not any(p in settings.TEMPLATE_CONTEXT_PROCESSORS for p in auth_processors):
        raise ImproperlyConfigured("Put 'django.contrib.auth.context_processors.auth' "
            "in your TEMPLATE_CONTEXT_PROCESSORS setting in order to use the admin application.")
def admin_view(self, view, cacheable=False):
    """
    Decorator to create an admin view attached to this ``AdminSite``. This
    wraps the view and provides permission checking by calling
    ``self.has_permission``.

    You'll want to use this from within ``AdminSite.get_urls()``:

        class MyAdminSite(AdminSite):

            def get_urls(self):
                from django.conf.urls import patterns, url

                urls = super(MyAdminSite, self).get_urls()
                urls += patterns('',
                    url(r'^my_view/$', self.admin_view(some_view))
                )
                return urls

    By default, admin_views are marked non-cacheable using the
    ``never_cache`` decorator. If the view can be safely cached, set
    cacheable=True.
    """
    def inner(request, *args, **kwargs):
        # A login-form POST while already authenticated means the user is
        # re-authenticating (e.g. as a different user): log them out first.
        if LOGIN_FORM_KEY in request.POST and request.user.is_authenticated():
            auth_logout(request)
        if not self.has_permission(request):
            # Allow the logout page itself even without permission, by
            # redirecting to the index (which will then show the login form).
            if request.path == reverse('admin:logout',
                                       current_app=self.name):
                index_path = reverse('admin:index', current_app=self.name)
                return HttpResponseRedirect(index_path)
            return self.login(request)
        return view(request, *args, **kwargs)
    # Wrapping order matters: never_cache must be innermost so cache
    # headers are set on every response, including login redirects.
    if not cacheable:
        inner = never_cache(inner)
    # We add csrf_protect here so this function can be used as a utility
    # function for any view, without having to repeat 'csrf_protect'.
    if not getattr(view, 'csrf_exempt', False):
        inner = csrf_protect(inner)
    return update_wrapper(inner, view)
def get_urls(self):
    """Return the urlpatterns for every view served by this admin site."""
    from django.conf.urls import patterns, url, include

    # Dependency checking is done only under DEBUG to avoid the import
    # overhead in production.
    if settings.DEBUG:
        self.check_dependencies()

    def wrap(view, cacheable=False):
        # Route every view through admin_view() so it gets permission
        # checking and the cache/CSRF decorators.
        def wrapper(*args, **kwargs):
            return self.admin_view(view, cacheable)(*args, **kwargs)
        return update_wrapper(wrapper, view)

    # Admin-site-wide views.
    urlpatterns = patterns('',
        url(r'^$',
            wrap(self.index),
            name='index'),
        url(r'^logout/$',
            wrap(self.logout),
            name='logout'),
        url(r'^password_change/$',
            wrap(self.password_change, cacheable=True),
            name='password_change'),
        url(r'^password_change/done/$',
            wrap(self.password_change_done, cacheable=True),
            name='password_change_done'),
        url(r'^jsi18n/$',
            wrap(self.i18n_javascript, cacheable=True),
            name='jsi18n'),
        url(r'^r/(?P<content_type_id>\d+)/(?P<object_id>.+)/$',
            wrap(contenttype_views.shortcut),
            name='view_on_site'),
        # NOTE: app_list must come after the fixed prefixes above since
        # r'^(?P<app_label>\w+)/$' would otherwise shadow them.
        url(r'^(?P<app_label>\w+)/$',
            wrap(self.app_index),
            name='app_list')
    )

    # Add in each model's views.
    for model, model_admin in six.iteritems(self._registry):
        urlpatterns += patterns('',
            url(r'^%s/%s/' % (model._meta.app_label, model._meta.model_name),
                include(model_admin.urls))
        )
    return urlpatterns
@property
def urls(self):
    """(urlpatterns, app namespace, instance namespace) triple for include()."""
    url_patterns = self.get_urls()
    return url_patterns, self.app_name, self.name
def password_change(self, request):
    """
    Handles the "change password" task -- both form display and validation.
    """
    from django.contrib.auth.views import password_change
    defaults = {
        'current_app': self.name,
        # On success, redirect to this site's "password changed" page.
        'post_change_redirect': reverse('admin:password_change_done',
                                        current_app=self.name),
    }
    if self.password_change_template is not None:
        defaults['template_name'] = self.password_change_template
    return password_change(request, **defaults)
def password_change_done(self, request, extra_context=None):
    """
    Displays the "success" page after a password change.
    """
    from django.contrib.auth.views import password_change_done
    view_kwargs = {
        'current_app': self.name,
        'extra_context': extra_context or {},
    }
    if self.password_change_done_template is not None:
        view_kwargs['template_name'] = self.password_change_done_template
    return password_change_done(request, **view_kwargs)
def i18n_javascript(self, request):
    """
    Displays the i18n JavaScript that the Django admin requires.

    This takes into account the USE_I18N setting. If it's set to False, the
    generated JavaScript will be leaner and faster.
    """
    if settings.USE_I18N:
        from django.views.i18n import javascript_catalog
    else:
        from django.views.i18n import null_javascript_catalog as javascript_catalog
    packages = ['django.conf', 'django.contrib.admin']
    return javascript_catalog(request, packages=packages)
@never_cache
def logout(self, request, extra_context=None):
    """
    Logs out the user for the given HttpRequest.

    This should *not* assume the user is already logged in.
    """
    from django.contrib.auth.views import logout
    view_kwargs = {
        'current_app': self.name,
        'extra_context': extra_context or {},
    }
    if self.logout_template is not None:
        view_kwargs['template_name'] = self.logout_template
    return logout(request, **view_kwargs)
@never_cache
def login(self, request, extra_context=None):
    """
    Displays the login form for the given HttpRequest.
    """
    from django.contrib.auth.views import login
    context = {
        'title': _('Log in'),
        'app_path': request.get_full_path(),
        # So the login view redirects back to the page that required auth.
        REDIRECT_FIELD_NAME: request.get_full_path(),
    }
    if extra_context:
        context.update(extra_context)
    return login(request,
                 extra_context=context,
                 current_app=self.name,
                 authentication_form=self.login_form or AdminAuthenticationForm,
                 template_name=self.login_template or 'admin/login.html')
@never_cache
def index(self, request, extra_context=None):
    """
    Displays the main admin index page, which lists all of the installed
    apps that have been registered in this site.

    Only apps/models the user has at least one permission on are shown;
    change/add links are included only when the matching perm is granted.
    """
    app_dict = {}
    user = request.user
    for model, model_admin in self._registry.items():
        app_label = model._meta.app_label
        has_module_perms = user.has_module_perms(app_label)

        if has_module_perms:
            perms = model_admin.get_model_perms(request)

            # Check whether user has any perm for this module.
            # If so, add the module to the model_list.
            if True in perms.values():
                info = (app_label, model._meta.model_name)
                model_dict = {
                    'name': capfirst(model._meta.verbose_name_plural),
                    'object_name': model._meta.object_name,
                    'perms': perms,
                }
                if perms.get('change', False):
                    try:
                        model_dict['admin_url'] = reverse('admin:%s_%s_changelist' % info, current_app=self.name)
                    except NoReverseMatch:
                        # The changelist may be unregistered; omit the link.
                        pass
                if perms.get('add', False):
                    try:
                        model_dict['add_url'] = reverse('admin:%s_%s_add' % info, current_app=self.name)
                    except NoReverseMatch:
                        pass
                if app_label in app_dict:
                    app_dict[app_label]['models'].append(model_dict)
                else:
                    app_dict[app_label] = {
                        'name': app_label.title(),
                        'app_label': app_label,
                        'app_url': reverse('admin:app_list', kwargs={'app_label': app_label}, current_app=self.name),
                        'has_module_perms': has_module_perms,
                        'models': [model_dict],
                    }

    # Sort the apps alphabetically.
    app_list = list(six.itervalues(app_dict))
    app_list.sort(key=lambda x: x['name'])

    # Sort the models alphabetically within each app.
    for app in app_list:
        app['models'].sort(key=lambda x: x['name'])

    context = {
        'title': _('Site administration'),
        'app_list': app_list,
    }
    context.update(extra_context or {})
    return TemplateResponse(request, self.index_template or
                            'admin/index.html', context,
                            current_app=self.name)
def app_index(self, request, app_label, extra_context=None):
    """
    Displays the index page for a single app, listing the models of that
    app the user has at least one permission on.

    Raises Http404 when the app has nothing visible to the user.
    """
    user = request.user
    has_module_perms = user.has_module_perms(app_label)
    app_dict = {}
    for model, model_admin in self._registry.items():
        if app_label == model._meta.app_label:
            if has_module_perms:
                perms = model_admin.get_model_perms(request)

                # Check whether user has any perm for this module.
                # If so, add the module to the model_list.
                if True in perms.values():
                    info = (app_label, model._meta.model_name)
                    model_dict = {
                        'name': capfirst(model._meta.verbose_name_plural),
                        'object_name': model._meta.object_name,
                        'perms': perms,
                    }
                    if perms.get('change', False):
                        try:
                            model_dict['admin_url'] = reverse('admin:%s_%s_changelist' % info, current_app=self.name)
                        except NoReverseMatch:
                            pass
                    if perms.get('add', False):
                        try:
                            model_dict['add_url'] = reverse('admin:%s_%s_add' % info, current_app=self.name)
                        except NoReverseMatch:
                            pass
                    if app_dict:
                        # Fixed: the original statement ended with a stray
                        # trailing comma, creating and discarding a 1-tuple.
                        app_dict['models'].append(model_dict)
                    else:
                        # First time around, now that we know there's
                        # something to display, add in the necessary meta
                        # information.
                        app_dict = {
                            'name': app_label.title(),
                            'app_label': app_label,
                            'app_url': '',
                            'has_module_perms': has_module_perms,
                            'models': [model_dict],
                        }
    if not app_dict:
        raise Http404('The requested admin page does not exist.')
    # Sort the models alphabetically within each app.
    app_dict['models'].sort(key=lambda x: x['name'])
    context = {
        'title': _('%s administration') % capfirst(app_label),
        'app_list': [app_dict],
    }
    context.update(extra_context or {})

    return TemplateResponse(request, self.app_index_template or [
        'admin/%s/app_index.html' % app_label,
        'admin/app_index.html'
    ], context, current_app=self.name)
# This global object represents the default admin site, for the common case.
# You can instantiate AdminSite in your own code to create a custom admin site.
# Callers typically register their ModelAdmin classes against this instance.
site = AdminSite()
| bsd-3-clause |
sodafree/backend | build/ipython/scripts/ipython_win_post_install.py | 1 | 5291 | #!python
"""Windows-specific part of the installation"""
from __future__ import print_function
import os, sys, shutil
pjoin = os.path.join
# import setuptools if we can
try:
import setuptools
except ImportError:
pass
def mkshortcut(target,description,link_file,*args,**kw):
    """make a shortcut if it doesn't exist, and register its creation"""
    # create_shortcut() and file_created() are injected into the module
    # namespace by the bdist_wininst installer runtime; registering the
    # file lets the uninstaller remove it later.
    create_shortcut(target, description, link_file,*args,**kw)
    file_created(link_file)
def suffix(s):
    """add '3' suffix to programs for Python 3"""
    # Program names get a '3' suffix when installed for Python 3
    # (e.g. 'ipython' -> 'ipython3'); unchanged on Python 2.
    return s + '3' if sys.version_info[0] == 3 else s
def install():
    """Routine to be run by the win32 installer with the -install switch.

    Creates the Start Menu folder and shortcuts for the IPython programs,
    plus .bat/.py command-line wrappers when setuptools is unavailable.
    """
    # Get some system constants
    prefix = sys.prefix
    python = pjoin(prefix, 'python.exe')
    pythonw = pjoin(prefix, 'pythonw.exe')
    have_setuptools = 'setuptools' in sys.modules

    if not have_setuptools:
        # This currently doesn't work without setuptools,
        # so don't bother making broken links
        print("Distribute (setuptools) is required to create Start Menu items.", file=sys.stderr)
        print("Re-run this installer after installing distribute to get Start Menu items.", file=sys.stderr)
        return

    # Lookup path to common startmenu ...
    ip_start_menu = pjoin(get_special_folder_path('CSIDL_COMMON_PROGRAMS'),
                          'IPython (Py%i.%i %i bit)' % (sys.version_info[0],
                                                        sys.version_info[1],
                                                        8*tuple.__itemsize__))
    # Create IPython entry ...
    if not os.path.isdir(ip_start_menu):
        os.mkdir(ip_start_menu)
        directory_created(ip_start_menu)

    # Create .py and .bat files to make things available from
    # the Windows command line. Thanks to the Twisted project
    # for this logic!
    programs = [
        'ipython',
        'iptest',
        'ipcontroller',
        'ipengine',
        'ipcluster',
        'irunner'
    ]
    programs = [suffix(p) for p in programs]
    scripts = pjoin(prefix, 'scripts')
    if not have_setuptools:
        # only create .bat files if we don't have setuptools
        for program in programs:
            raw = pjoin(scripts, program)
            bat = raw + '.bat'
            py = raw + '.py'
            # Create .py versions of the scripts
            shutil.copy(raw, py)
            # Create .bat files for each of the scripts.  Use open() in a
            # ``with`` block: the Python 2-only ``file()`` builtin used
            # previously breaks on Python 3 and leaked the handle.
            with open(bat, 'w') as bat_file:
                bat_file.write("@%s %s %%*" % (python, py))

    # Now move onto setting the Start Menu up
    ipybase = suffix(pjoin(scripts, 'ipython'))
    if have_setuptools:
        # let setuptools take care of the scripts:
        ipybase = ipybase + '-script.py'
    workdir = "%HOMEDRIVE%%HOMEPATH%"

    link = pjoin(ip_start_menu, 'IPython.lnk')
    cmd = '"%s"' % ipybase
    mkshortcut(python, 'IPython', link, cmd, workdir)

    # Disable pysh Start item until the profile restores functionality
    # Most of this code is in IPython/deathrow, and needs to be updated
    # to 0.11 APIs
    # link = pjoin(ip_start_menu, 'pysh.lnk')
    # cmd = '"%s" profile=pysh --init' % ipybase
    # mkshortcut(python, 'IPython (command prompt mode)', link, cmd, workdir)

    link = pjoin(ip_start_menu, 'pylab.lnk')
    cmd = '"%s" --pylab' % ipybase
    mkshortcut(python, 'IPython (pylab mode)', link, cmd, workdir)

    link = pjoin(ip_start_menu, 'ipcontroller.lnk')
    cmdbase = suffix(pjoin(scripts, 'ipcontroller'))
    if have_setuptools:
        cmdbase += '-script.py'
    cmd = '"%s"' % cmdbase
    mkshortcut(python, 'IPython controller', link, cmd, workdir)

    link = pjoin(ip_start_menu, 'ipengine.lnk')
    cmdbase = suffix(pjoin(scripts, 'ipengine'))
    if have_setuptools:
        cmdbase += '-script.py'
    cmd = '"%s"' % cmdbase
    mkshortcut(python, 'IPython engine', link, cmd, workdir)

    link = pjoin(ip_start_menu, 'ipythonqt.lnk')
    cmdbase = suffix(pjoin(scripts, 'ipython'))
    if have_setuptools:
        cmdbase += '-script.py'
    cmd = '"%s" qtconsole' % cmdbase
    mkshortcut(pythonw, 'IPython Qt Console', link, cmd, workdir)

    # FIXME: These below are commented out because we don't ship the html built
    # docs anymore.  We should make the shortcut to continue existing, but as a
    # URL to the online the docs for the right version of IPython. The stable
    # URLs have the pattern:
    # http://ipython.org/ipython-doc/rel-X.Y.Z/html
    # For IPython version X.Y.Z.
    ## # Create documentation shortcuts ...
    ## t = prefix + r'\share\doc\ipython\manual\index.html'
    ## f = ip_start_menu + r'\Manual in HTML.lnk'
    ## mkshortcut(t,'IPython Manual - HTML-Format',f)
def remove():
    """Routine to be run by the win32 installer with the -remove switch."""
    # Nothing to do here: shortcuts registered via file_created() /
    # directory_created() are presumably removed by the bdist_wininst
    # uninstaller itself -- TODO confirm against the installer docs.
    pass
# main()
# The bdist_wininst installer invokes this script with '-install' after
# installation and '-remove' before uninstallation.
if len(sys.argv) > 1:
    if sys.argv[1] == '-install':
        try:
            install()
        except OSError:
            # Typically a permissions problem writing to the common
            # Start Menu folder.
            print("Failed to create Start Menu items, try running installer as administrator.", file=sys.stderr)
    elif sys.argv[1] == '-remove':
        remove()
    else:
        print("Script was called with option %s" % sys.argv[1], file=sys.stderr)
| bsd-3-clause |
DamCB/tyssue | tyssue/utils/utils.py | 2 | 12124 | import warnings
import numpy as np
import logging
import pandas as pd
logger = logging.getLogger(name=__name__)
def _to_2d(df):
    """Shortcut for :func:`to_nd` with ``ndim == 2``."""
    return to_nd(df, 2)
def _to_3d(df):
    """Shortcut for :func:`to_nd` with ``ndim == 3``."""
    return to_nd(df, 3)
def to_nd(df, ndim):
    """
    Give a new shape to an input data by duplicating its column.

    Parameters
    ----------
    df : input data that will be reshaped
    ndim : int
        dimension (number of columns) of the reshaped data.

    Returns
    -------
    df_nd : ndarray of shape (df.size, ndim) with each input value
        repeated along the second axis.
    """
    # The previous implementation ignored ``ndim`` and always returned a
    # (size, 1) array; repeat each value ndim times as documented.
    df_nd = np.asarray(df).repeat(ndim).reshape((df.size, ndim))
    return df_nd
def combine_specs(*specs):
    """Merge any number of nested spec dicts into a new dict.

    Later specs override earlier ones key by key.  The inner dicts are
    copied, so the callers' spec dicts are never mutated (the previous
    implementation aliased the first occurrence of each sub-dict and then
    updated it in place).
    """
    combined = {}
    for spec in specs:
        for key, sub_spec in spec.items():
            if key in combined:
                combined[key].update(sub_spec)
            else:
                # Copy so later .update() calls don't touch the input.
                combined[key] = dict(sub_spec)
    return combined
def spec_updater(specs, new):
    """
    Add the elements of the ``new`` dictionary to the ``specs`` dictionary,
    in place.  Values whose key already exists are updated.

    Parameters
    ----------
    specs : dict of dicts, specification that will be modified
    new : dict of dicts, new specification entries
    """
    for key, sub_spec in new.items():
        current = specs.get(key)
        if current is not None:
            current.update(sub_spec)
        else:
            specs[key] = sub_spec
def set_data_columns(datasets, specs, reset=False):
    """Sets the columns of the dataframes in the datasets dictionnary to
    the uniform values in the specs sub-dictionnaries.

    Parameters
    ----------
    datasets : dict of dataframes
    specs : dict of dicts
    reset : bool, default False

    For each key in specs, the value is a dictionnary whose
    keys are column names for the corresponding dataframe in
    datasets. If there is no such column in the dataframe,
    it is created. If the columns allready exists and reset is `True`,
    the new value is used.
    """
    for name, spec in specs.items():
        # Skip empty specs and the 'settings' pseudo-element, which does
        # not correspond to a dataframe.
        if not len(spec):
            continue
        if "setting" in name:
            continue
        df = datasets.get(name)
        if df is None:
            # Unknown element name: warn and move on instead of failing.
            warnings.warn(
                f"There is no {name} dataset, so the {name}" " spec have no effect."
            )
            continue
        for col, default in spec.items():
            if col in df.columns and reset:
                logger.warning(
                    "Reseting column %s of the %s" " dataset with new specs", col, name
                )
            # Create missing columns; overwrite existing ones only on reset.
            if col not in df.columns or reset:
                df[col] = default
def data_at_opposite(sheet, edge_data, free_value=None):
    """
    Returns a pd.Series or pd.DataFrame with the values of the input
    ``edge_data`` taken at the opposite edges.  For free edges (no
    opposite edge), optionally replaces NaN values with ``free_value``.

    Parameters
    ----------
    sheet : a :class:`Sheet` instance
    edge_data : pd.Series, pd.DataFrame or array-like indexed like the edges
    free_value : scalar or None
        replacement value for free edges

    Returns
    -------
    opposite : pandas object with the values at the opposite edges
    """
    opposite_idx = sheet.edge_df["opposite"]
    if isinstance(edge_data, pd.Series):
        values = edge_data.reindex(opposite_idx).to_numpy()
        opposite = pd.Series(values, index=edge_data.index)
    elif isinstance(edge_data, pd.DataFrame):
        values = edge_data.reindex(opposite_idx).to_numpy()
        opposite = pd.DataFrame(
            values, index=edge_data.index, columns=edge_data.columns
        )
    else:
        # Plain arrays are gathered positionally with take().
        values = np.asarray(edge_data).take(opposite_idx.to_numpy(), axis=0)
        opposite = pd.DataFrame(values, index=sheet.edge_df.index)
    if free_value is not None:
        opposite = opposite.replace(np.nan, free_value)
    return opposite
def get_sub_eptm(eptm, edges, copy=False):
    """
    Define sub-epithelium corresponding to the edges.

    Parameters
    ----------
    eptm: a :class:`Epithelium` instance
    edges: list of edges included in the sub-epithelium
    copy: bool, default False
        if True, the sub-epithelium datasets are copies of the parent's

    Returns
    -------
    sub_eptm: a :class:`Epithelium` instance, or None if `edges` is empty
    """
    from ..core.objects import Epithelium

    datasets = {}
    edge_df = eptm.edge_df.loc[edges]
    if edge_df.empty:
        warnings.warn("Sub epithelium appears to be empty")
        return None
    datasets["edge"] = edge_df
    # Only keep the vertices, faces (and cells) actually used by `edges`.
    datasets["vert"] = eptm.vert_df.loc[set(edge_df["srce"])]
    datasets["face"] = eptm.face_df.loc[set(edge_df["face"])]
    if "cell" in eptm.datasets:
        datasets["cell"] = eptm.cell_df.loc[set(edge_df["cell"])]
    if copy:
        for elem, df in datasets.items():
            datasets[elem] = df.copy()

    sub_eptm = Epithelium("sub", datasets, eptm.specs)
    # Record the original ('_o') indices so results computed on the
    # sub-epithelium can be mapped back onto the parent epithelium.
    sub_eptm.datasets["edge"]["edge_o"] = edges
    sub_eptm.datasets["edge"]["srce_o"] = edge_df["srce"]
    sub_eptm.datasets["edge"]["trgt_o"] = edge_df["trgt"]
    sub_eptm.datasets["edge"]["face_o"] = edge_df["face"]
    if "cell" in eptm.datasets:
        sub_eptm.datasets["edge"]["cell_o"] = edge_df["cell"]
    sub_eptm.datasets["vert"]["srce_o"] = set(edge_df["srce"])
    sub_eptm.datasets["face"]["face_o"] = set(edge_df["face"])
    if "cell" in eptm.datasets:
        sub_eptm.datasets["cell"]["cell_o"] = set(edge_df["cell"])

    # Re-number elements and rebuild topology for the restricted datasets.
    sub_eptm.reset_index()
    sub_eptm.reset_topo()
    return sub_eptm
def single_cell(eptm, cell, copy=False):
    """
    Define an epithelium instance restricted to a single cell.

    Parameters
    ----------
    eptm : a :class:`Epithelium` instance
    cell : identifier of a cell
    copy : bool, default `False`

    Returns
    -------
    sub_etpm: class:'Epithelium' instance corresponding to the cell
    """
    cell_edges = eptm.edge_df.index[eptm.edge_df["cell"] == cell]
    return get_sub_eptm(eptm, cell_edges, copy)
def scaled_unscaled(func, scale, eptm, geom, args=(), kwargs=None, coords=None):
    """Scales the epithelium by an homothetic factor `scale`, applies
    the function `func`, and scales back to the original size.

    Parameters
    ----------
    func: the function to apply to the scaled epithelium
    scale: float, the scale to apply
    eptm: a :class:`Epithelium` instance
    geom: a :class:`Geometry` class
    args: sequence, the arguments to pass to func
    kwargs: dict or None, the keyword arguments to pass to func
    coords: the coordinates on which the scaling applies

    If the execution of `func` raises, the scaling is still reverted.

    Returns
    -------
    res: the result of the function func
    """
    # Avoid the mutable-default-argument pitfall of the original signature.
    if kwargs is None:
        kwargs = {}
    if coords is None:
        coords = eptm.coords
    geom.scale(eptm, scale, coords)
    geom.update_all(eptm)
    # The original had a redundant ``except: raise`` clause; a plain
    # try/finally already guarantees the scaling is reverted on error.
    try:
        res = func(*args, **kwargs)
    finally:
        geom.scale(eptm, 1 / scale, coords)
        geom.update_all(eptm)
    return res
def modify_segments(eptm, modifiers):
    """Modifies the datasets of a segmented epithelium
    according to the passed modifiers.

    Parameters
    ----------
    eptm : :class:`tyssue.Epithelium`
    modifiers : nested dictionnary mapping
        segment -> element ('vert'/'face'/'edge') -> {column: value}

    Note
    ----
    This functions assumes that the epithelium has a `segment_index`
    method as implemented in the :class:`tyssue.Monolayer`.

    Example
    -------
    >>> modifiers = {
    >>>     'apical' : {
    >>>         'edge': {'line_tension': 1.},
    >>>         'face': {'prefered_area': 0.2},
    >>>     },
    >>>     'basal' : {
    >>>         'edge': {'line_tension': 3.},
    >>>         'face': {'prefered_area': 0.1},
    >>>     }
    >>> modify_segments(monolayer, modifiers)
    """
    for segment, elem_specs in modifiers.items():
        for element, parameters in elem_specs.items():
            # Rows of this element belonging to the segment.
            idx = eptm.segment_index(segment, element)
            df = eptm.datasets[element]
            for param_name, param_value in parameters.items():
                df.loc[idx, param_name] = param_value
def _compute_ar(df, coords):
u, v = coords
major = np.ptp(df[u].values)
minor = np.ptp(df[v].values)
if major < minor:
minor, major = major, minor
return 0 if minor == 0 else major / minor
def ar_calculation(sheet, coords=["x", "y"]):
    """Calculates the aspect ratio of each face of the sheet

    Parameters
    ----------
    sheet : a :class:`Sheet` object
    coords : list of str, optional, default ['x', 'y']
        the coordinates on which to compute the aspect ratio

    Returns
    -------
    AR: pandas series of aspect ratio for all faces.

    Note
    ----
    As is the case in ImageJ, the returned aspect ratio is always >= 1
    """
    # Vertex positions upcast to the edges, tagged with their face id.
    srce_pos = sheet.upcast_srce(sheet.vert_df[sheet.coords])
    srce_pos["face"] = sheet.edge_df["face"]
    per_face = srce_pos.groupby("face")
    return per_face.apply(_compute_ar, coords)
def get_next(eptm):
    """
    Returns the indices of the next edge for each edge

    The "next" of an edge (face, srce -> trgt) is the edge of the same
    face whose source vertex is this edge's target vertex.
    """
    # Index the edges by (face, srce) so each edge's successor can be
    # looked up by its (face, trgt) pair.
    # NOTE(review): assumes edge_df's index is named "edge" (tyssue
    # convention) so reset_index() yields an "edge" column -- confirm.
    fs_indexed = (
        eptm.edge_df[["face", "srce"]]
        .reset_index()
        .set_index(["face", "srce"], drop=False)
    )
    # (face, trgt) keys relabelled as (face, srce) for the lookup above.
    ft_index = pd.MultiIndex.from_frame(
        eptm.edge_df[["face", "trgt"]], names=["face", "srce"]
    )
    next_ = fs_indexed.loc[ft_index, "edge"].values
    return next_
## small utility to swap apical and basal segments
def swap_apico_basal(organo):
    """Swap apical and basal segments of an organoid."""
    for elem in ("vert", "face", "edge"):
        # Work on a copy so the two relabellings don't interfere.
        segments = organo.datasets[elem]["segment"].copy()
        segments.loc[organo.segment_index("apical", elem)] = "basal"
        segments.loc[organo.segment_index("basal", elem)] = "apical"
        organo.datasets[elem]["segment"] = segments
def elem_centered_patch(eptm, elem_idx, neighbour_order, elem):
    """
    Return a sub-epithelium centered on the element (cell or face) with
    index elem_idx, with neighbour_order neighbours around it.

    Parameters
    ----------
    eptm : a :class:`Epithelium` instance
    elem_idx : int, id of the center element
    neighbour_order: int,
        neighbourhood 'degree' around the center element
    elem : str, either 'face' or 'cell'

    Returns
    -------
    patch: an object with the same class as eptm

    Raises
    ------
    ValueError if ``elem`` is neither 'face' nor 'cell'
    """
    if elem not in ("face", "cell"):
        raise ValueError("elem must be 'face' or 'cell', got %r" % (elem,))
    neighbours = eptm.get_neighborhood(elem_idx, neighbour_order, elem)[elem]
    # ``Series.append`` is deprecated (removed in pandas 2.0); use concat.
    elems = pd.concat([pd.Series([elem_idx]), neighbours])
    # (removed a leftover debug ``print(elems, elem)`` here)
    edges = eptm.edge_df[eptm.edge_df[elem].isin(elems)].copy()
    vertices = eptm.vert_df.loc[set(edges["srce"])].copy()
    if elem == "cell":
        faces = eptm.face_df.loc[set(edges["face"])].copy()
        cells = eptm.cell_df.loc[elems].copy()
    elif "cell" in edges.columns:
        faces = eptm.face_df.loc[elems].copy()
        cells = eptm.cell_df.loc[set(edges["cell"])].copy()
    else:
        faces = eptm.face_df.loc[elems].copy()
        cells = None
    # Center the patch on the vertices' barycenter and align it with its
    # principal axes via SVD.
    pos = (
        vertices[eptm.coords].values
        - vertices[eptm.coords].mean(axis=0).values[None, :]
    )
    u, v, rotation = np.linalg.svd(pos, full_matrices=False)
    vertices[eptm.coords] = np.dot(pos, rotation.T)
    patch_dset = {"vert": vertices, "face": faces, "edge": edges}
    if cells is not None:
        patch_dset["cell"] = cells
    patch = eptm.__class__("patch", patch_dset, eptm.specs)
    patch.reset_index()
    return patch
def face_centered_patch(sheet, face, neighbour_order):
    """
    Return a subsheet centered on ``face``, including faces up to
    ``neighbour_order`` neighbours away from it.

    Parameters
    ----------
    sheet : a :class:`Sheet` object
    face : int, id of the center face
    neighbour_order : int, number of neighbours around the center face

    Returns
    -------
    patch : an object of the same class as the input object
    """
    return elem_centered_patch(sheet, face, neighbour_order, "face")
def cell_centered_patch(eptm, cell, neighbour_order):
    """
    Return a sub-epithelium centered on ``cell``, including cells up to
    ``neighbour_order`` neighbours away from it.

    Parameters
    ----------
    eptm : a :class:`Epithelium` instance
    cell : int, id of the center cell
    neighbour_order : int, number of neighbours around the center cell

    Returns
    -------
    patch : an object of the same class as the input object
    """
    return elem_centered_patch(eptm, cell, neighbour_order, "cell")
| gpl-3.0 |
MediaSapiens/autonormix | django/contrib/gis/tests/distapp/tests.py | 35 | 20147 | import os, unittest
from decimal import Decimal
from django.db import connection
from django.db.models import Q
from django.contrib.gis.gdal import DataSource
from django.contrib.gis.geos import GEOSGeometry, Point, LineString
from django.contrib.gis.measure import D # alias for Distance
from django.contrib.gis.tests.utils import oracle, postgis, spatialite, no_oracle, no_spatialite
from models import AustraliaCity, Interstate, SouthTexasInterstate, \
SouthTexasCity, SouthTexasCityFt, CensusZipcode, SouthTexasZipcode
from data import au_cities, interstates, stx_interstates, stx_cities, stx_zips
class DistanceTest(unittest.TestCase):
# A point we are testing distances with -- using a WGS84
# coordinate that'll be implicitly transormed to that to
# the coordinate system of the field, EPSG:32140 (Texas South Central
# w/units in meters)
stx_pnt = GEOSGeometry('POINT (-95.370401017314293 29.704867409475465)', 4326)
# Another one for Australia
au_pnt = GEOSGeometry('POINT (150.791 -34.4919)', 4326)
def get_names(self, qs):
cities = [c.name for c in qs]
cities.sort()
return cities
def test01_init(self):
"Initialization of distance models."
# Loading up the cities.
def load_cities(city_model, data_tup):
for name, x, y in data_tup:
city_model(name=name, point=Point(x, y, srid=4326)).save()
def load_interstates(imodel, data_tup):
for name, wkt in data_tup:
imodel(name=name, path=wkt).save()
load_cities(SouthTexasCity, stx_cities)
load_cities(SouthTexasCityFt, stx_cities)
load_cities(AustraliaCity, au_cities)
self.assertEqual(9, SouthTexasCity.objects.count())
self.assertEqual(9, SouthTexasCityFt.objects.count())
self.assertEqual(11, AustraliaCity.objects.count())
# Loading up the South Texas Zip Codes.
for name, wkt in stx_zips:
poly = GEOSGeometry(wkt, srid=4269)
SouthTexasZipcode(name=name, poly=poly).save()
CensusZipcode(name=name, poly=poly).save()
self.assertEqual(4, SouthTexasZipcode.objects.count())
self.assertEqual(4, CensusZipcode.objects.count())
# Loading up the Interstates.
load_interstates(Interstate, interstates)
load_interstates(SouthTexasInterstate, stx_interstates)
self.assertEqual(1, Interstate.objects.count())
self.assertEqual(1, SouthTexasInterstate.objects.count())
@no_spatialite
def test02_dwithin(self):
"Testing the `dwithin` lookup type."
# Distances -- all should be equal (except for the
# degree/meter pair in au_cities, that's somewhat
# approximate).
tx_dists = [(7000, 22965.83), D(km=7), D(mi=4.349)]
au_dists = [(0.5, 32000), D(km=32), D(mi=19.884)]
# Expected cities for Australia and Texas.
tx_cities = ['Downtown Houston', 'Southside Place']
au_cities = ['Mittagong', 'Shellharbour', 'Thirroul', 'Wollongong']
# Performing distance queries on two projected coordinate systems one
# with units in meters and the other in units of U.S. survey feet.
for dist in tx_dists:
if isinstance(dist, tuple): dist1, dist2 = dist
else: dist1 = dist2 = dist
qs1 = SouthTexasCity.objects.filter(point__dwithin=(self.stx_pnt, dist1))
qs2 = SouthTexasCityFt.objects.filter(point__dwithin=(self.stx_pnt, dist2))
for qs in qs1, qs2:
self.assertEqual(tx_cities, self.get_names(qs))
# Now performing the `dwithin` queries on a geodetic coordinate system.
for dist in au_dists:
if isinstance(dist, D) and not oracle: type_error = True
else: type_error = False
if isinstance(dist, tuple):
if oracle: dist = dist[1]
else: dist = dist[0]
# Creating the query set.
qs = AustraliaCity.objects.order_by('name')
if type_error:
# A ValueError should be raised on PostGIS when trying to pass
# Distance objects into a DWithin query using a geodetic field.
self.assertRaises(ValueError, AustraliaCity.objects.filter(point__dwithin=(self.au_pnt, dist)).count)
else:
self.assertEqual(au_cities, self.get_names(qs.filter(point__dwithin=(self.au_pnt, dist))))
def test03a_distance_method(self):
"Testing the `distance` GeoQuerySet method on projected coordinate systems."
# The point for La Grange, TX
lagrange = GEOSGeometry('POINT(-96.876369 29.905320)', 4326)
# Reference distances in feet and in meters. Got these values from
# using the provided raw SQL statements.
# SELECT ST_Distance(point, ST_Transform(ST_GeomFromText('POINT(-96.876369 29.905320)', 4326), 32140)) FROM distapp_southtexascity;
m_distances = [147075.069813, 139630.198056, 140888.552826,
138809.684197, 158309.246259, 212183.594374,
70870.188967, 165337.758878, 139196.085105]
# SELECT ST_Distance(point, ST_Transform(ST_GeomFromText('POINT(-96.876369 29.905320)', 4326), 2278)) FROM distapp_southtexascityft;
# Oracle 11 thinks this is not a projected coordinate system, so it's s
# not tested.
ft_distances = [482528.79154625, 458103.408123001, 462231.860397575,
455411.438904354, 519386.252102563, 696139.009211594,
232513.278304279, 542445.630586414, 456679.155883207]
# Testing using different variations of parameters and using models
# with different projected coordinate systems.
dist1 = SouthTexasCity.objects.distance(lagrange, field_name='point')
dist2 = SouthTexasCity.objects.distance(lagrange) # Using GEOSGeometry parameter
if spatialite or oracle:
dist_qs = [dist1, dist2]
else:
dist3 = SouthTexasCityFt.objects.distance(lagrange.ewkt) # Using EWKT string parameter.
dist4 = SouthTexasCityFt.objects.distance(lagrange)
dist_qs = [dist1, dist2, dist3, dist4]
# Original query done on PostGIS, have to adjust AlmostEqual tolerance
# for Oracle.
if oracle: tol = 2
else: tol = 5
# Ensuring expected distances are returned for each distance queryset.
for qs in dist_qs:
for i, c in enumerate(qs):
self.assertAlmostEqual(m_distances[i], c.distance.m, tol)
self.assertAlmostEqual(ft_distances[i], c.distance.survey_ft, tol)
@no_spatialite
def test03b_distance_method(self):
    "Testing the `distance` GeoQuerySet method on geodetic coordinate systems."
    # Oracle's geodetic math differs enough to need a looser tolerance.
    if oracle: tol = 2
    else: tol = 5
    # Testing geodetic distance calculation with a non-point geometry
    # (a LineString of Wollongong and Shellharbour coords).
    ls = LineString( ( (150.902, -34.4245), (150.87, -34.5789) ) )
    if oracle or connection.ops.geography:
        # Reference query:
        # SELECT ST_distance_sphere(point, ST_GeomFromText('LINESTRING(150.9020 -34.4245,150.8700 -34.5789)', 4326)) FROM distapp_australiacity ORDER BY name;
        distances = [1120954.92533513, 140575.720018241, 640396.662906304,
                     60580.9693849269, 972807.955955075, 568451.8357838,
                     40435.4335201384, 0, 68272.3896586844, 12375.0643697706, 0]
        qs = AustraliaCity.objects.distance(ls).order_by('name')
        for city, distance in zip(qs, distances):
            # Testing equivalence to within a meter.
            self.assertAlmostEqual(distance, city.distance.m, 0)
    else:
        # PostGIS 1.4 and below is limited to distance queries only
        # to/from point geometries, check for raising of ValueError.
        self.assertRaises(ValueError, AustraliaCity.objects.distance, ls)
        self.assertRaises(ValueError, AustraliaCity.objects.distance, ls.wkt)
    # Got the reference distances using the raw SQL statements:
    #  SELECT ST_distance_spheroid(point, ST_GeomFromText('POINT(151.231341 -33.952685)', 4326), 'SPHEROID["WGS 84",6378137.0,298.257223563]') FROM distapp_australiacity WHERE (NOT (id = 11));
    #  SELECT ST_distance_sphere(point, ST_GeomFromText('POINT(151.231341 -33.952685)', 4326)) FROM distapp_australiacity WHERE (NOT (id = 11));  st_distance_sphere
    if connection.ops.postgis and connection.ops.proj_version_tuple() >= (4, 7, 0):
        # PROJ.4 versions 4.7+ have updated datums, and thus different
        # distance values.
        spheroid_distances = [60504.0628957201, 77023.9489850262, 49154.8867574404,
                              90847.4358768573, 217402.811919332, 709599.234564757,
                              640011.483550888, 7772.00667991925, 1047861.78619339,
                              1165126.55236034]
        sphere_distances = [60580.9693849267, 77144.0435286473, 49199.4415344719,
                            90804.7533823494, 217713.384600405, 709134.127242793,
                            639828.157159169, 7786.82949717788, 1049204.06569028,
                            1162623.7238134]
    else:
        spheroid_distances = [60504.0628825298, 77023.948962654, 49154.8867507115,
                              90847.435881812, 217402.811862568, 709599.234619957,
                              640011.483583758, 7772.00667666425, 1047861.7859506,
                              1165126.55237647]
        sphere_distances = [60580.7612632291, 77143.7785056615, 49199.2725132184,
                            90804.4414289463, 217712.63666124, 709131.691061906,
                            639825.959074112, 7786.80274606706, 1049200.46122281,
                            1162619.7297006]
    # Testing with spheroid distances first.
    hillsdale = AustraliaCity.objects.get(name='Hillsdale')
    qs = AustraliaCity.objects.exclude(id=hillsdale.id).distance(hillsdale.point, spheroid=True)
    for i, c in enumerate(qs):
        self.assertAlmostEqual(spheroid_distances[i], c.distance.m, tol)
    if postgis:
        # PostGIS uses sphere-only distances by default, testing these as well.
        qs = AustraliaCity.objects.exclude(id=hillsdale.id).distance(hillsdale.point)
        for i, c in enumerate(qs):
            self.assertAlmostEqual(sphere_distances[i], c.distance.m, tol)
@no_oracle # Oracle already handles geographic distance calculation.
def test03c_distance_method(self):
    "Testing the `distance` GeoQuerySet method used with `transform` on a geographic field."
    # Normally you can't compute distances from a geometry field
    # that is not a PointField (on PostGIS 1.4 and below).
    if not connection.ops.geography:
        self.assertRaises(ValueError, CensusZipcode.objects.distance, self.stx_pnt)
    # We'll be using a Polygon (created by buffering the centroid
    # of 77005 to 100m) -- which aren't allowed in geographic distance
    # queries normally, however our field has been transformed to
    # a non-geographic system.
    z = SouthTexasZipcode.objects.get(name='77005')
    # Reference query:
    # SELECT ST_Distance(ST_Transform("distapp_censuszipcode"."poly", 32140), ST_GeomFromText('<buffer_wkt>', 32140)) FROM "distapp_censuszipcode";
    dists_m = [3553.30384972258, 1243.18391525602, 2186.15439472242]
    # Having our buffer in the SRID of the transformation and of the field
    # -- should get the same results. The first buffer has no need for
    # transformation SQL because it is the same SRID as what was given
    # to `transform()`.  The second buffer will need to be transformed,
    # however.
    buf1 = z.poly.centroid.buffer(100)
    buf2 = buf1.transform(4269, clone=True)
    ref_zips = ['77002', '77025', '77401']
    for buf in [buf1, buf2]:
        qs = CensusZipcode.objects.exclude(name='77005').transform(32140).distance(buf)
        self.assertEqual(ref_zips, self.get_names(qs))
        for i, z in enumerate(qs):
            self.assertAlmostEqual(z.distance.m, dists_m[i], 5)
def test04_distance_lookups(self):
    "Testing the `distance_lt`, `distance_gt`, `distance_lte`, and `distance_gte` lookup types."
    # Retrieving the cities within a 20km 'donut' w/a 7km radius 'hole'
    # (thus, Houston and Southside place will be excluded as tested in
    # the `test02_dwithin` above).
    qs1 = SouthTexasCity.objects.filter(point__distance_gte=(self.stx_pnt, D(km=7))).filter(point__distance_lte=(self.stx_pnt, D(km=20)))
    # Can't determine the units on SpatiaLite from PROJ.4 string, and
    # Oracle 11 incorrectly thinks it is not projected.
    if spatialite or oracle:
        dist_qs = (qs1,)
    else:
        # Same donut query against the feet-based (SRID 2278) model.
        qs2 = SouthTexasCityFt.objects.filter(point__distance_gte=(self.stx_pnt, D(km=7))).filter(point__distance_lte=(self.stx_pnt, D(km=20)))
        dist_qs = (qs1, qs2)
    for qs in dist_qs:
        cities = self.get_names(qs)
        self.assertEqual(cities, ['Bellaire', 'Pearland', 'West University Place'])
    # Doing a distance query using Polygons instead of a Point.
    z = SouthTexasZipcode.objects.get(name='77005')
    qs = SouthTexasZipcode.objects.exclude(name='77005').filter(poly__distance_lte=(z.poly, D(m=275)))
    self.assertEqual(['77025', '77401'], self.get_names(qs))
    # If we add a little more distance 77002 should be included.
    qs = SouthTexasZipcode.objects.exclude(name='77005').filter(poly__distance_lte=(z.poly, D(m=300)))
    self.assertEqual(['77002', '77025', '77401'], self.get_names(qs))
def test05_geodetic_distance_lookups(self):
    "Testing distance lookups on geodetic coordinate systems."
    # Line is from Canberra to Sydney.  Query is for all other cities within
    # a 100km of that line (which should exclude only Hobart & Adelaide).
    line = GEOSGeometry('LINESTRING(144.9630 -37.8143,151.2607 -33.8870)', 4326)
    dist_qs = AustraliaCity.objects.filter(point__distance_lte=(line, D(km=100)))
    if oracle or connection.ops.geography:
        # Oracle and PostGIS 1.5 can do distance lookups on arbitrary geometries.
        self.assertEqual(9, dist_qs.count())
        self.assertEqual(['Batemans Bay', 'Canberra', 'Hillsdale',
                          'Melbourne', 'Mittagong', 'Shellharbour',
                          'Sydney', 'Thirroul', 'Wollongong'],
                         self.get_names(dist_qs))
    else:
        # PostGIS 1.4 and below only allows geodetic distance queries (utilizing
        # ST_Distance_Sphere/ST_Distance_Spheroid) from Points to PointFields
        # on geometry columns.
        self.assertRaises(ValueError, dist_qs.count)
        # Ensured that a ValueError was raised, none of the rest of the test is
        # supported on this backend, so bail now.
        if spatialite: return
    # Too many params (4 in this case) should raise a ValueError.
    self.assertRaises(ValueError, len,
                      AustraliaCity.objects.filter(point__distance_lte=('POINT(5 23)', D(km=100), 'spheroid', '4')))
    # Not enough params should raise a ValueError.
    self.assertRaises(ValueError, len,
                      AustraliaCity.objects.filter(point__distance_lte=('POINT(5 23)',)))
    # Getting all cities w/in 550 miles of Hobart.
    hobart = AustraliaCity.objects.get(name='Hobart')
    qs = AustraliaCity.objects.exclude(name='Hobart').filter(point__distance_lte=(hobart.point, D(mi=550)))
    cities = self.get_names(qs)
    self.assertEqual(cities, ['Batemans Bay', 'Canberra', 'Melbourne'])
    # Cities that are either really close or really far from Wollongong --
    # and using different units of distance.
    wollongong = AustraliaCity.objects.get(name='Wollongong')
    d1, d2 = D(yd=19500), D(nm=400) # Yards (~17km) & Nautical miles.
    # Normal geodetic distance lookup (uses `distance_sphere` on PostGIS).
    gq1 = Q(point__distance_lte=(wollongong.point, d1))
    gq2 = Q(point__distance_gte=(wollongong.point, d2))
    qs1 = AustraliaCity.objects.exclude(name='Wollongong').filter(gq1 | gq2)
    # Geodetic distance lookup but telling GeoDjango to use `distance_spheroid`
    # instead (we should get the same results b/c accuracy variance won't matter
    # in this test case).
    if postgis:
        gq3 = Q(point__distance_lte=(wollongong.point, d1, 'spheroid'))
        gq4 = Q(point__distance_gte=(wollongong.point, d2, 'spheroid'))
        qs2 = AustraliaCity.objects.exclude(name='Wollongong').filter(gq3 | gq4)
        querysets = [qs1, qs2]
    else:
        querysets = [qs1]
    for qs in querysets:
        cities = self.get_names(qs)
        self.assertEqual(cities, ['Adelaide', 'Hobart', 'Shellharbour', 'Thirroul'])
def test06_area(self):
    "Testing the `area` GeoQuerySet method."
    # Expected areas, obtained with the reference query:
    #   SELECT ST_Area(poly) FROM distapp_southtexaszipcode;
    expected_sq_m = [5437908.90234375, 10183031.4389648, 11254471.0073242, 9881708.91772461]
    # Tolerance has to be lower for Oracle and differences
    # with GEOS 3.0.0RC4.
    tolerance = 2
    annotated = SouthTexasZipcode.objects.area()
    for expected, zipcode in zip(expected_sq_m, annotated):
        self.assertAlmostEqual(expected, zipcode.area.sq_m, tolerance)
def test07_length(self):
    "Testing the `length` GeoQuerySet method."
    # Expected geodetic length; reference query (should use `length_spheroid`):
    #   SELECT ST_length_spheroid(ST_GeomFromText('<wkt>', 4326) 'SPHEROID["WGS 84",6378137,298.257223563, AUTHORITY["EPSG","7030"]]');
    geodetic_len_m = 473504.769553813
    projected_len_m = 4617.668
    if spatialite:
        # SpatiaLite does not support geodetic coordinate systems.
        self.assertRaises(ValueError, Interstate.objects.length)
    else:
        annotated = Interstate.objects.length()
        tol = 2 if oracle else 5
        self.assertAlmostEqual(geodetic_len_m, annotated[0].length.m, tol)
    # Now doing length on a projected coordinate system.
    i10 = SouthTexasInterstate.objects.length().get(name='I-10')
    self.assertAlmostEqual(projected_len_m, i10.length.m, 2)
@no_spatialite
def test08_perimeter(self):
    "Testing the `perimeter` GeoQuerySet method."
    # Expected perimeters, obtained with the reference query:
    #   SELECT ST_Perimeter(distapp_southtexaszipcode.poly) FROM distapp_southtexaszipcode;
    expected_m = [18404.3550889361, 15627.2108551001, 20632.5588368978, 17094.5996143697]
    tol = 2 if oracle else 7
    for expected, zipcode in zip(expected_m, SouthTexasZipcode.objects.perimeter()):
        self.assertAlmostEqual(expected, zipcode.perimeter.m, tol)
    # The perimeter of a point geometry must be zero.
    for city in SouthTexasCity.objects.perimeter(model_att='perim'):
        self.assertEqual(0, city.perim.m)
def test09_measurement_null_fields(self):
    "Testing the measurement GeoQuerySet methods on fields with NULL values."
    # Create a zipcode whose PolygonField is left NULL.
    SouthTexasZipcode.objects.create(name='78212')
    houston = SouthTexasCity.objects.get(name='Downtown Houston')
    # Distance/area computed against the NULL geometry must come back
    # as None rather than raising or returning a bogus measure.
    annotated = SouthTexasZipcode.objects.distance(houston.point).area()
    null_zip = annotated.get(name='78212')
    self.assertEqual(None, null_zip.distance)
    self.assertEqual(None, null_zip.area)
def suite():
    """Return a TestSuite containing every DistanceTest case."""
    tests = unittest.TestSuite()
    tests.addTest(unittest.makeSuite(DistanceTest))
    return tests
| bsd-3-clause |
raulshepherd/ardupilot-solo | Tools/scripts/magfit_flashlog.py | 278 | 4744 | #!/usr/bin/env python
''' fit best estimate of magnetometer offsets from ArduCopter flashlog
using the algorithm from Bill Premerlani
'''
import sys, time, os, math
# command line option handling
from optparse import OptionParser
parser = OptionParser("magfit_flashlog.py [options]")
parser.add_option("--verbose", action='store_true', default=False, help="verbose offset output")
parser.add_option("--gain", type='float', default=0.01, help="algorithm gain")
parser.add_option("--noise", type='float', default=0, help="noise to add")
parser.add_option("--max-change", type='float', default=10, help="max step change")
parser.add_option("--min-diff", type='float', default=50, help="min mag vector delta")
parser.add_option("--history", type='int', default=20, help="how many points to keep")
parser.add_option("--repeat", type='int', default=1, help="number of repeats through the data")
(opts, args) = parser.parse_args()
from rotmat import Vector3, Matrix3
if len(args) < 1:
print("Usage: magfit_flashlog.py [options] <LOGFILE...>")
sys.exit(1)
def noise():
    '''Return a random direction vector scaled by the --noise option.'''
    from random import gauss
    direction = Vector3(gauss(0, 1), gauss(0, 1), gauss(0, 1))
    direction.normalize()
    return direction * opts.noise
def find_offsets(data, ofs):
    '''find mag offsets by applying Bills "offsets revisited" algorithm
       on the data

       This is an implementation of the algorithm from:
          http://gentlenav.googlecode.com/files/MagnetometerOffsetNullingRevisited.pdf

       data is a sequence of Vector3 magnetometer samples; ofs is the
       starting offset estimate. Returns the refined offset Vector3.
    '''

    # a limit on the maximum change in each step
    max_change = opts.max_change

    # the gain factor for the algorithm
    gain = opts.gain

    # Perturb each sample with the optional noise vector and round to
    # whole units (flash logs store integer field strengths).
    data2 = []
    for d in data:
        d = d.copy() + noise()
        d.x = float(int(d.x + 0.5))
        d.y = float(int(d.y + 0.5))
        d.z = float(int(d.z + 0.5))
        data2.append(d)
    data = data2

    history_idx = 0
    mag_history = data[0:opts.history]

    for i in range(opts.history, len(data)):
        B1 = mag_history[history_idx] + ofs
        B2 = data[i] + ofs

        diff = B2 - B1
        diff_length = diff.length()
        if diff_length <= opts.min_diff:
            # the mag vector hasn't changed enough - we don't get any
            # information from this
            history_idx = (history_idx+1) % opts.history
            continue

        mag_history[history_idx] = data[i]
        history_idx = (history_idx+1) % opts.history

        # equation 6 of Bills paper
        delta = diff * (gain * (B2.length() - B1.length()) / diff_length)

        # limit the change from any one reading. This is to prevent
        # single crazy readings from throwing off the offsets for a long
        # time
        delta_length = delta.length()
        if max_change != 0 and delta_length > max_change:
            delta *= max_change / delta_length

        # set the new offsets
        ofs = ofs - delta

        if opts.verbose:
            # BUG FIX: was the Python-2-only statement `print ofs`; the
            # rest of the script uses function-style print(), so use it
            # here too (works on both Python 2 and 3 for a single arg).
            print(ofs)

    return ofs
def plot_corrected_field(filename, data, offsets):
    '''Write the corrected field magnitude of each sample to filename,
       one "%.1f" value per line, for external plotting.

       data is a sequence of vectors supporting + and .length();
       offsets is added to each sample before taking the magnitude.
    '''
    # Use a context manager so the file is closed even if a write fails
    # (the original leaked the handle on exceptions).
    with open(filename, mode='w') as f:
        for d in data:
            corrected = d + offsets
            f.write("%.1f\n" % corrected.length())
def magfit(logfile):
    '''find best magnetometer offset fit to a log file'''
    # BUG FIX: the original ignored the `logfile` parameter and read the
    # global `filename` set by the caller's loop; use the parameter.
    print("Processing log %s" % logfile)

    data = []
    data_no_motors = []

    mag = None
    offsets = None
    # Guard against logs with no COMPASS lines (was a NameError before).
    initial_offsets = None

    # Gather all the COMPASS data, closing the log even on errors.
    with open(logfile, mode='r') as flog:
        for line in flog:
            if not line.startswith('COMPASS,'):
                continue
            line = line.rstrip()
            line = line.replace(' ', '')
            a = line.split(',')
            # Columns: raw mag (1-3), logged offsets (4-6), motor
            # compensation offsets (7-9).
            ofs = Vector3(float(a[4]), float(a[5]), float(a[6]))
            if offsets is None:
                initial_offsets = ofs
            offsets = ofs
            motor_ofs = Vector3(float(a[7]), float(a[8]), float(a[9]))
            mag = Vector3(float(a[1]), float(a[2]), float(a[3]))
            # Remove the logged offsets to recover the raw field.
            mag = mag - offsets
            data.append(mag)
            data_no_motors.append(mag - motor_ofs)

    print("Extracted %u data points" % len(data))
    print("Current offsets: %s" % initial_offsets)

    # run the fitting algorithm
    ofs = initial_offsets
    for r in range(opts.repeat):
        ofs = find_offsets(data, ofs)
        plot_corrected_field('plot.dat', data, ofs)
        plot_corrected_field('initial.dat', data, initial_offsets)
        plot_corrected_field('zero.dat', data, Vector3(0,0,0))
        plot_corrected_field('hand.dat', data, Vector3(-25,-8,-2))
        plot_corrected_field('zero-no-motors.dat', data_no_motors, Vector3(0,0,0))
        print('Loop %u offsets %s' % (r, ofs))
        sys.stdout.flush()
    print("New offsets: %s" % ofs)
# Process every log file given on the command line.
# (The unused `total = 0.0` accumulator was dead code and has been
# removed.)  NOTE: the loop variable must stay named `filename` because
# magfit() historically read it as a global.
for filename in args:
    magfit(filename)
| gpl-3.0 |
rven/odoo | addons/payment_ingenico/controllers/main.py | 3 | 5278 | # -*- coding: utf-8 -*-
import logging
import pprint
import werkzeug
from werkzeug.urls import url_unquote_plus
from odoo import http
from odoo.http import request
from odoo.addons.payment.models.payment_acquirer import ValidationError
from odoo.addons.payment.controllers.portal import PaymentProcessing
_logger = logging.getLogger(__name__)
class OgoneController(http.Controller):
    """HTTP endpoints for the Ingenico/Ogone payment acquirer.

    Covers the redirect/notification callbacks from Ogone as well as the
    server-to-server (s2s) card registration and 3-D Secure validation
    flows.
    """

    # Callback URLs used by the acquirer in test mode.
    _accept_url = '/payment/ogone/test/accept'
    _decline_url = '/payment/ogone/test/decline'
    _exception_url = '/payment/ogone/test/exception'
    _cancel_url = '/payment/ogone/test/cancel'

    @http.route([
        '/payment/ogone/accept', '/payment/ogone/test/accept',
        '/payment/ogone/decline', '/payment/ogone/test/decline',
        '/payment/ogone/exception', '/payment/ogone/test/exception',
        '/payment/ogone/cancel', '/payment/ogone/test/cancel',
    ], type='http', auth='public', csrf=False)
    def ogone_form_feedback(self, **post):
        """ Handle both redirection from Ingenico (GET) and s2s notification (POST/GET) """
        _logger.info('Ogone: entering form_feedback with post data %s', pprint.pformat(post))  # debug
        # Delegate validation of the acquirer data to the transaction model.
        request.env['payment.transaction'].sudo().form_feedback(post, 'ogone')
        return werkzeug.utils.redirect("/payment/process")

    @http.route(['/payment/ogone/s2s/create_json'], type='json', auth='public', csrf=False)
    def ogone_s2s_create_json(self, **kwargs):
        """Register a payment token server-to-server; returns the new token id."""
        if not kwargs.get('partner_id'):
            # Default to the partner of the currently logged-in user.
            kwargs = dict(kwargs, partner_id=request.env.user.partner_id.id)
        new_id = request.env['payment.acquirer'].browse(int(kwargs.get('acquirer_id'))).s2s_process(kwargs)
        return new_id.id

    @http.route(['/payment/ogone/s2s/create_json_3ds'], type='json', auth='public', csrf=False)
    def ogone_s2s_create_json_3ds(self, verify_validity=False, **kwargs):
        """Register a payment token and optionally run a 3-D Secure
        validation transaction on it.

        Returns a dict with the token id/short name, whether the token
        was verified, and the 3DS HTML to inject when a 3DS challenge is
        required.
        """
        if not kwargs.get('partner_id'):
            kwargs = dict(kwargs, partner_id=request.env.user.partner_id.id)
        token = False
        error = None

        try:
            token = request.env['payment.acquirer'].browse(int(kwargs.get('acquirer_id'))).s2s_process(kwargs)
        except Exception as e:
            # Surface the acquirer error message to the caller.
            error = str(e)

        if not token:
            res = {
                'result': False,
                'error': error,
            }
            return res

        res = {
            'result': True,
            'id': token.id,
            'short_name': token.short_name,
            '3d_secure': False,
            'verified': False,
        }

        # NOTE(review): `!= False` (not simple truthiness) appears
        # deliberate, as JSON-RPC may pass various truthy values here;
        # confirm before simplifying.
        if verify_validity != False:
            baseurl = request.env['ir.config_parameter'].sudo().get_param('web.base.url')
            params = {
                'accept_url': baseurl + '/payment/ogone/validate/accept',
                'decline_url': baseurl + '/payment/ogone/validate/decline',
                'exception_url': baseurl + '/payment/ogone/validate/exception',
                'return_url': kwargs.get('return_url', baseurl)
            }
            tx = token.validate(**params)
            res['verified'] = token.verified

            if tx and tx.html_3ds:
                res['3d_secure'] = tx.html_3ds

        return res

    @http.route(['/payment/ogone/s2s/create'], type='http', auth='public', methods=["POST"], csrf=False)
    def ogone_s2s_create(self, **post):
        """Form-post variant of the s2s token registration."""
        error = ''
        acq = request.env['payment.acquirer'].browse(int(post.get('acquirer_id')))
        try:
            token = acq.s2s_process(post)
        except Exception as e:
            # Example error string: 'CHECK ERROR: |Not a valid date\n\n50001111: None'
            # -> keep only the human-readable part after the '|'.
            token = False
            error = str(e).splitlines()[0].split('|')[-1] or ''

        if token and post.get('verify_validity'):
            baseurl = request.env['ir.config_parameter'].sudo().get_param('web.base.url')
            params = {
                'accept_url': baseurl + '/payment/ogone/validate/accept',
                'decline_url': baseurl + '/payment/ogone/validate/decline',
                'exception_url': baseurl + '/payment/ogone/validate/exception',
                'return_url': post.get('return_url', baseurl)
            }
            tx = token.validate(**params)
            if tx and tx.html_3ds:
                # A 3DS challenge is required: return its HTML directly.
                return tx.html_3ds
            # add the payment transaction into the session to let the page /payment/process to handle it
            PaymentProcessing.add_payment_transaction(tx)
        return werkzeug.utils.redirect("/payment/process")

    @http.route([
        '/payment/ogone/validate/accept',
        '/payment/ogone/validate/decline',
        '/payment/ogone/validate/exception',
    ], type='http', auth='public')
    def ogone_validation_form_feedback(self, **post):
        """ Feedback from 3d secure for a bank card validation """
        request.env['payment.transaction'].sudo().form_feedback(post, 'ogone')
        return werkzeug.utils.redirect("/payment/process")

    @http.route(['/payment/ogone/s2s/feedback'], auth='public', csrf=False)
    def feedback(self, **kwargs):
        """Server-to-server notification endpoint; returns 'ok'/'ko'."""
        try:
            tx = request.env['payment.transaction'].sudo()._ogone_form_get_tx_from_data(kwargs)
            tx._ogone_s2s_validate_tree(kwargs)
        except ValidationError:
            return 'ko'
        return 'ok'
| agpl-3.0 |
ravindrapanda/tensorflow | tensorflow/contrib/distributions/python/kernel_tests/bijectors/affine_linear_operator_test.py | 26 | 4018 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""AffineLinearOperator Tests."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib import linalg
from tensorflow.contrib.distributions.python.ops.bijectors.affine_linear_operator import AffineLinearOperator
from tensorflow.python.platform import test
class AffineLinearOperatorTest(test.TestCase):
  """Unit tests for the `AffineLinearOperator` bijector.

  The bijector computes the forward transform y = scale @ x + shift;
  each test checks `forward`, `inverse` and the log-det-Jacobian pair
  against NumPy references for a particular kind of `scale` operator.
  """

  def testIdentity(self):
    # No shift and no scale: the bijector must act as the identity,
    # with a zero inverse log-det-Jacobian.
    with self.test_session():
      affine = AffineLinearOperator(
          validate_args=True)
      x = np.array([[1, 0, -1], [2, 3, 4]], dtype=np.float32)
      y = x
      ildj = 0.

      self.assertEqual(affine.name, "affine_linear_operator")
      self.assertAllClose(y, affine.forward(x).eval())
      self.assertAllClose(x, affine.inverse(y).eval())
      self.assertAllClose(ildj, affine.inverse_log_det_jacobian(y).eval())
      self.assertAllClose(-affine.inverse_log_det_jacobian(y).eval(),
                          affine.forward_log_det_jacobian(x).eval())

  def testDiag(self):
    # Diagonal scale: y = diag * x + shift elementwise, and the inverse
    # log-det-Jacobian is -sum(log|diag|) over the event dimension.
    with self.test_session():
      shift = np.array([-1, 0, 1], dtype=np.float32)
      diag = np.array([[1, 2, 3],
                       [2, 5, 6]], dtype=np.float32)
      scale = linalg.LinearOperatorDiag(diag, is_non_singular=True)
      affine = AffineLinearOperator(
          shift=shift, scale=scale, validate_args=True)

      x = np.array([[1, 0, -1], [2, 3, 4]], dtype=np.float32)
      y = diag * x + shift
      ildj = -np.sum(np.log(np.abs(diag)), axis=-1)

      self.assertEqual(affine.name, "affine_linear_operator")
      self.assertAllClose(y, affine.forward(x).eval())
      self.assertAllClose(x, affine.inverse(y).eval())
      self.assertAllClose(ildj, affine.inverse_log_det_jacobian(y).eval())
      self.assertAllClose(-affine.inverse_log_det_jacobian(y).eval(),
                          affine.forward_log_det_jacobian(x).eval())

  def testTriL(self):
    # Batched lower-triangular scale: y = tril @ x + shift, and the
    # inverse log-det-Jacobian is -sum(log|diag(tril)|).
    with self.test_session():
      shift = np.array([-1, 0, 1], dtype=np.float32)
      tril = np.array([[[1, 0, 0],
                        [2, -1, 0],
                        [3, 2, 1]],
                       [[2, 0, 0],
                        [3, -2, 0],
                        [4, 3, 2]]],
                      dtype=np.float32)
      scale = linalg.LinearOperatorLowerTriangular(tril, is_non_singular=True)
      affine = AffineLinearOperator(
          shift=shift, scale=scale, validate_args=True)

      x = np.array([[[1, 0, -1],
                     [2, 3, 4]],
                    [[4, 1, -7],
                     [6, 9, 8]]],
                   dtype=np.float32)
      # If we made the bijector do x*A+b then this would be simplified to:
      # y = np.matmul(x, tril) + shift.
      y = np.squeeze(np.matmul(tril, np.expand_dims(x, -1)), -1) + shift
      ildj = -np.sum(np.log(np.abs(np.diagonal(
          tril, axis1=-2, axis2=-1))),
                     axis=-1)

      self.assertEqual(affine.name, "affine_linear_operator")
      self.assertAllClose(y, affine.forward(x).eval())
      self.assertAllClose(x, affine.inverse(y).eval())
      self.assertAllClose(ildj, affine.inverse_log_det_jacobian(y).eval())
      self.assertAllClose(-affine.inverse_log_det_jacobian(y).eval(),
                          affine.forward_log_det_jacobian(x).eval())
if __name__ == "__main__":
test.main()
| apache-2.0 |
patwat/python-unitex | unitex/tools.py | 1 | 44038 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# NOTE: The documentation is somehow a copy/paste from the Unitex
# manual.
from __future__ import unicode_literals
import logging
import _unitex
from unitex import *
from unitex.config import CheckDicOptions,\
CompressOptions,\
ConcordOptions,\
DicoOptions,\
ExtractOptions,\
Fst2TxtOptions,\
Grf2Fst2Options,\
LocateOptions,\
NormalizeOptions,\
SortTxtOptions,\
TokenizeOptions,\
Txt2TFstOptions
from unitex.io import exists
_LOGGER = logging.getLogger(__name__)
def check_dic(dictionary, dtype, alphabet, **kwargs):
    """
    This function checks the format of <dela> and produces a file named
    CHECK_DIC.TXT that contains check result information. This file is
    stored in the <dela> directory.

    *Arguments:*

    - **dictionary [str]** -- the dictionary file path.

    - **dtype [str]** -- the dictionary type:
      - UnitexConstants.DELAF (inflected);
      - UnitexConstants.DELAS (non inflected).

    - **alphabet [str]** -- the alphabet file path.

    *Keyword arguments:*

    - **strict [bool]** -- strict syntax checking against unprotected
      dot and comma (default: False).

    - **no_space_warning [bool]** -- tolerates spaces in grammatical,
      semantic and inflectional codes (default: True).

    *Raises:* UnitexException if the dictionary file does not exist or
    if dtype is not a known dictionary type.

    *Return [bool]:*

    **True** if it succeeds, **False** otherwise.
    """
    options = CheckDicOptions()
    options.load(kwargs)

    if exists(dictionary) is False:
        raise UnitexException("[CHECKDIC] Dictionary file '%s' doesn't exists" % dictionary)

    command = ["UnitexTool", "CheckDic"]

    # BUG FIX: an unknown dtype used to be silently ignored, running
    # CheckDic with neither --delaf nor --delas; fail early instead.
    if dtype == UnitexConstants.DELAF:
        command.append("--delaf")
    elif dtype == UnitexConstants.DELAS:
        command.append("--delas")
    else:
        raise UnitexException("[CHECKDIC] Unknown dictionary type '%s'" % dtype)

    if options["strict"] is True:
        command.append("--strict")
    if options["no_space_warning"] is True:
        command.append("--no_space_warning")

    command.append("--alphabet=%s" % alphabet)

    command.append(dictionary)
    command.append("-qutf8-no-bom")

    command = " ".join(command)

    _LOGGER.info("Checking dic '%s'" % dictionary)
    _LOGGER.debug("Command: %s", command)
    ret = _unitex.unitex_tool(command)

    return ret
def compress(dictionary, **kwargs):
    """
    Compress a DELAF dictionary. Compressing a dictionary dico.dic
    produces two files:

    - dico.bin: a binary file containing the minimum automaton of the
      inflected forms of the dictionary;
    - dico.inf: a text file containing the compressed forms required to
      rebuild the dictionary lines from the inflected forms stored in
      the automaton.

    *Arguments:*

    - **dictionary [str]** -- the dictionary file path.

    *Keyword arguments:*

    - **output [str]** -- sets the output file. By default, a file
      xxx.dic will produce a file xxx.bin.

    - **flip [bool]** -- swap the inflected and canonical forms in the
      compressed dictionary, to build the inverse dictionary needed by
      the 'Reconstrucao' program (default: False).

    - **semitic [bool]** -- use the semitic compression algorithm,
      which significantly reduces the output size for semitic
      languages such as Arabic (default: False).

    - **version [str]** -- UnitexConstants.DICTIONARY_VERSION_1 for an
      old style .bin; UnitexConstants.DICTIONARY_VERSION_2 (default)
      for a new style .bin with no 16 Mb size limitation and a smaller
      footprint.

    *Return [bool]:*

    **True** if it succeeds, **False** otherwise.
    """
    options = CompressOptions()
    options.load(kwargs)

    if exists(dictionary) is False:
        raise UnitexException("[COMPRESS] Dictionary file '%s' doesn't exists" % dictionary)

    args = ["UnitexTool", "Compress"]

    if options["output"] is not None:
        args.append("--output=%s" % options["output"])

    # Boolean switches share the same flag-emission pattern.
    for flag in ("flip", "semitic"):
        if options[flag] is True:
            args.append("--%s" % flag)

    version = options["version"]
    if version == UnitexConstants.DICTIONARY_VERSION_1:
        args.append("--v1")
    elif version == UnitexConstants.DICTIONARY_VERSION_2:
        args.append("--v2")

    args.append(dictionary)
    args.append("-qutf8-no-bom")

    command = " ".join(args)

    _LOGGER.info("Compressing dic '%s'" % dictionary)
    _LOGGER.debug("Command: %s", command)

    return _unitex.unitex_tool(command)
def concord(index, alphabet, **kwargs):
    """
    This function takes a concordance index file produced by the
    function 'locate' and produces a concordance. It is also possible
    to produce a modified text version taking into account the
    transducer outputs associated to the occurrences.

    The result is a file called concord.txt if the concordance was
    constructed in text mode, a file called concord.html if
    'format' is UnitexConstants.FORMAT_HTML,
    UnitexConstants.FORMAT_GLOSSANET or UnitexConstants.FORMAT_SCRIPT,
    and a text file with the name given by the 'output' argument if the
    function has constructed a modified version of the text.

    In html mode, the occurrence is coded as a hypertext link of the
    form <a href="X Y Z">: X and Y are the beginning and ending
    positions of the occurrence (in characters) in the .snt file and Z
    is the number of the sentence in which the occurrence was found.

    *Arguments:*

    - **index [str]** -- the index file path (produced by the 'locate'
      function).
    - **alphabet [str]** -- alphabet file used for sorting.

    *Keyword arguments:*

    - *Generic options:*

      - **font [str]** -- font name to use for HTML output.
      - **fontsize [int]** -- font size to use for HTML output.
      - **only_ambiguous [bool]** -- only display identical occurrences
        with ambiguous outputs, in text order (default: False).
      - **only_matches [bool]** -- force empty right and left contexts;
        with UnitexConstants.FORMAT_TEXT, matches are not surrounded
        with tabulations (default: False).
      - **left [str]** -- number of characters on the left of the
        occurrences (default: 0; non-diacritic characters in Thai
        mode). Append 's' (e.g. '40s') to stop at the first {S} tag.
      - **right [str]** -- number of characters on the right of the
        occurrences (default: 0); same conventions as 'left'. If the
        occurrence is longer than this value it is nevertheless saved
        as a whole.

    - *Sort options:*

      - **sort [str]** -- one of the UnitexConstants.SORT_* constants:
        SORT_TEXT_ORDER (default), SORT_LEFT_CENTER, SORT_LEFT_RIGHT,
        SORT_CENTER_LEFT, SORT_CENTER_RIGHT, SORT_RIGHT_LEFT,
        SORT_RIGHT_CENTER.

    - *Output options:*

      - **format [str]** -- one of the UnitexConstants.FORMAT_*
        constants: FORMAT_HTML (default), FORMAT_TEXT,
        FORMAT_GLOSSANET, FORMAT_SCRIPT, FORMAT_INDEX, FORMAT_UIMA
        (requires 'offsets'), FORMAT_PRLG (requires 'offsets' and
        'unxmlize'), FORMAT_XML, FORMAT_XML_WITH_HEADERS, FORMAT_AXIS,
        FORMAT_XALIGN, FORMAT_MERGE (requires 'output').
      - **script [str]** -- links format for 'glossanet' and 'script'
        outputs, e.g. 'http://www.google.com/search?q='.
      - **offsets [str]** -- file produced by Tokenize's
        output_offsets option (needed by 'uima' and 'prlg' formats).
      - **unxmlize [str]** -- file produced by Unxmlize's 'prlg'
        option (needed by the 'prlg' format).
      - **output [str]** -- the output filename (needed by the 'merge'
        format).

    - *Other options:*

      - **directory [str]** -- work in 'directory' instead of the
        directory of <index>.
      - **thai [bool]** -- option to use for Thai concordances
        (default: False).

    *Return [bool]:*

      **True** if it succeeds, **False** otherwise.
    """
    options = ConcordOptions()
    options.load(kwargs)

    if exists(index) is False:
        raise UnitexException("[CONCORD] Index file '%s' doesn't exists" % index)
    if exists(alphabet) is False:
        raise UnitexException("[CONCORD] Alphabet file '%s' doesn't exists" % alphabet)

    command = ["UnitexTool", "Concord"]

    if options["font"] is not None:
        command.append("--font=%s" % options["font"])
    if options["fontsize"] is not None:
        command.append("--fontsize=%s" % options["fontsize"])
    if options["only_ambiguous"] is True:
        command.append("--only_ambiguous")
    if options["only_matches"] is True:
        command.append("--only_matches")

    command.append("--left=%s" % options["left"])
    command.append("--right=%s" % options["right"])

    if options["sort"] == UnitexConstants.SORT_TEXT_ORDER:
        command.append("--TO")
    elif options["sort"] == UnitexConstants.SORT_LEFT_CENTER:
        command.append("--LC")
    elif options["sort"] == UnitexConstants.SORT_LEFT_RIGHT:
        command.append("--LR")
    elif options["sort"] == UnitexConstants.SORT_CENTER_LEFT:
        command.append("--CL")
    elif options["sort"] == UnitexConstants.SORT_CENTER_RIGHT:
        command.append("--CR")
    elif options["sort"] == UnitexConstants.SORT_RIGHT_LEFT:
        command.append("--RL")
    elif options["sort"] == UnitexConstants.SORT_RIGHT_CENTER:
        command.append("--RC")

    if options["format"] == UnitexConstants.FORMAT_HTML:
        command.append("--html")
    elif options["format"] == UnitexConstants.FORMAT_TEXT:
        command.append("--text")
    elif options["format"] == UnitexConstants.FORMAT_GLOSSANET:
        command.append("--glossanet=%s" % options["script"])
    elif options["format"] == UnitexConstants.FORMAT_SCRIPT:
        command.append("--script=%s" % options["script"])
    elif options["format"] == UnitexConstants.FORMAT_INDEX:
        command.append("--index")
    elif options["format"] == UnitexConstants.FORMAT_UIMA:
        command.append("--uima=%s" % options["offsets"])
    elif options["format"] == UnitexConstants.FORMAT_PRLG:
        # BUG FIX: the two values must be wrapped in a tuple; without
        # the parentheses '%' only received 'unxmlize' and append() was
        # called with two arguments (TypeError at runtime).
        command.append("--PRLG=%s,%s" % (options["unxmlize"], options["offsets"]))
    elif options["format"] == UnitexConstants.FORMAT_XML:
        command.append("--xml")
    elif options["format"] == UnitexConstants.FORMAT_XML_WITH_HEADERS:
        command.append("--xml-with-header")
    elif options["format"] == UnitexConstants.FORMAT_AXIS:
        command.append("--axis")
    elif options["format"] == UnitexConstants.FORMAT_XALIGN:
        command.append("--xalign")
    elif options["format"] == UnitexConstants.FORMAT_MERGE:
        command.append("--merge=%s" % options["output"])

    if options["directory"] is not None:
        command.append("--directory=%s" % options["directory"])

    command.append("--alphabet=%s" % alphabet)

    # BUG FIX: the test was inverted ('is not True'), which emitted
    # '--thai' precisely when the Thai option was NOT requested.
    if options["thai"] is True:
        command.append("--thai")

    command.append(index)
    command.append("-qutf8-no-bom")

    command = " ".join(command)

    _LOGGER.info("Create concordance for '%s'" % index)
    _LOGGER.debug("Command: %s", command)
    ret = _unitex.unitex_tool(command)

    return ret
def dico(dictionaries, text, alphabet, **kwargs):
    """
    This function applies dictionaries to a text. The text must have
    been cut up into lexical units by the 'tokenize' function.

    The function produces the following files, saved in the directory
    of the text:

    - dlf: dictionary of simple words in the text;
    - dlc: dictionary of compound words in the text;
    - err: list of unknown words in the text;
    - tags_err: unrecognized simple words that are not matched by the
      tags.ind file;
    - tags.ind: sequences to be inserted in the text automaton;
    - stat_dic.n: file containing the number of simple words, the
      number of compound words, and the number of unknown words in the
      text.

    **NOTE:** Files dlf, dlc, err and tags_err are not sorted. Use the
    function 'sort_txt' to sort them.

    *Arguments:*

    - **dictionaries [list(str)]** -- list of dictionary pathes ('bin'
      or 'fst2' formats).
    - **text [str]** -- text (snt format) file path.
    - **alphabet [str]** -- alphabet file path.

    *Keyword arguments:*

    - **morpho [list(str)]** -- morphological mode dictionaries (bin
      format) to be used, if needed by some .fst2 dictionaries.
    - **korean [bool]** -- specify the dictionary is in korean
      (default: False).
    - **semitic [bool]** -- specify the dictionary is in a semitic
      language (default: False).
    - **arabic_rules [str]** -- Arabic typographic rule configuration
      file path.
    - **raw [str]** -- alternative output file path containing both
      simple and compound words, without requiring a text directory.

    *Return [bool]:*

      **True** if it succeeds, **False** otherwise.
    """
    options = DicoOptions()
    options.load(kwargs)

    for dictionary in dictionaries:
        if exists(dictionary) is False:
            raise UnitexException("[DICO] Dictionary file '%s' doesn't exists" % dictionary)
    if exists(text) is False:
        raise UnitexException("[DICO] Text file '%s' doesn't exists" % text)
    if exists(alphabet) is False:
        raise UnitexException("[DICO] Alphabet file '%s' doesn't exists" % alphabet)

    command = ["UnitexTool", "Dico"]
    command.append("--text=%s" % text)
    command.append("--alphabet=%s" % alphabet)

    if options["morpho"] is not None:
        command.append("--morpho=%s" % ",".join(options["morpho"]))
    if options["korean"] is True:
        command.append("--korean")
    if options["semitic"] is True:
        command.append("--semitic")
    if options["arabic_rules"] is not None:
        command.append("--arabic_rules=%s" % options["arabic_rules"])
    if options["raw"] is not None:
        # BUG FIX: 'raw' was referenced as a bare (undefined) name,
        # raising NameError whenever the option was set; the value
        # lives in the options mapping.
        command.append("--raw=%s" % options["raw"])

    command += dictionaries
    command.append("-qutf8-no-bom")

    command = " ".join(command)

    _LOGGER.info("Applying dictionaries")
    _LOGGER.debug("Command: %s", command)
    ret = _unitex.unitex_tool(command)

    return ret
def extract(text, output, index, **kwargs):
    """
    Extract from <text> every sentence that contains at least one
    occurrence listed in the concordance index.

    The <text> parameter is the complete path of the text file,
    without omitting the .snt extension.

    *Arguments:*

    - **text [str]** -- the text file (.snt format).
    - **output [str]** -- the output text file.
    - **index [str]** -- the index file path (produced by the 'locate'
      function).

    *Keyword arguments:*

    - **non_matching_sentences [bool]** -- extract instead all
      sentences that do not contain matching units (default: False).

    *Return [bool]:*

      **True** if it succeeds, **False** otherwise.
    """
    options = ExtractOptions()
    options.load(kwargs)

    if not exists(text):
        raise UnitexException("[EXTRACT] Text file '%s' doesn't exists" % text)
    if not exists(index):
        raise UnitexException("[EXTRACT] Index file '%s' doesn't exists" % index)

    # '--yes' keeps the matching sentences, '--no' keeps the others.
    mode = "--yes" if options["non_matching_sentences"] is False else "--no"

    command_line = " ".join([
        "UnitexTool", "Extract",
        mode,
        "--output=%s" % output,
        "--index=%s" % index,
        text,
        "-qutf8-no-bom",
    ])

    _LOGGER.info("Extracting sentences")
    _LOGGER.debug("Command: %s", command_line)
    return _unitex.unitex_tool(command_line)
def fst2txt(grammar, text, alphabet, **kwargs):
    """
    Apply a transducer to a text in longest-match mode at the
    preprocessing stage, i.e. before the text has been cut into
    lexical units.

    **NOTE:** the input text file is modified in place.

    *Arguments:*

    - **grammar [str]** -- the fst2 to apply on the text.
    - **text [str]** -- the (.snt) text file to be modified.
    - **alphabet [str]** -- the alphabet file of the language of the
      text.

    *Keyword arguments:*

    - **start_on_space [bool]** -- start the search at any position in
      the text, even before a space; should only be used for
      morphological searches (default: False).
    - **char_by_char [bool]** -- character by character tokenization
      mode, useful for languages like Thai (default: False).
    - **merge [bool]** -- merge (instead of replace) transducer
      outputs with text inputs (default: True).

    *Return [bool]:*

      **True** if it succeeds, **False** otherwise.
    """
    options = Fst2TxtOptions()
    options.load(kwargs)

    if not exists(grammar):
        raise UnitexException("[FST2TXT] Grammar file '%s' doesn't exists" % grammar)
    if not exists(text):
        raise UnitexException("[FST2TXT] Text file '%s' doesn't exists" % text)
    if not exists(alphabet):
        raise UnitexException("[FST2TXT] Alphabet file '%s' doesn't exists" % alphabet)

    # Each kwarg toggles exactly one of two mutually exclusive flags.
    flags = [
        "--dont_start_on_space" if options["start_on_space"] is False else "--start_on_space",
        "--word_by_word" if options["char_by_char"] is False else "--char_by_char",
        "--merge" if options["merge"] is True else "--replace",
    ]

    command_line = " ".join(
        ["UnitexTool", "Fst2Txt",
         "--text=%s" % text,
         "--alphabet=%s" % alphabet]
        + flags
        + [grammar, "-qutf8-no-bom"]
    )

    _LOGGER.info("Applying grammar '%s'..." % grammar)
    _LOGGER.debug("Command: %s", command_line)
    return _unitex.unitex_tool(command_line)
def grf2fst2(grammar, alphabet, **kwargs):
    """
    Compile a grammar into a .fst2 file.

    The <grammar> parameter denotes the complete path of the main
    graph of the grammar, without omitting the .grf extension. The
    result is a file with the same name but with the .fst2 extension,
    saved in the same directory as <grammar>.

    *Arguments:*

    - **grammar [str]** -- the grf to compile.
    - **alphabet [str]** -- alphabet file used for tokenizing the
      content of the grammar boxes into lexical units.

    *Keyword arguments:*

    - **loop_check [bool]** -- enable error (loop) checking
      (default: False).
    - **char_by_char [bool]** -- tokenize character by character
      (default: False).
    - **pkgdir [str]** -- repository directory to use.
    - **no_empty_graph_warning [bool]** -- do not warn when a graph
      matches the empty word (default: False).
    - **tfst_check [bool]** -- check whether the graph can be
      considered as a valid sentence automaton (default: False).
    - **silent_grf_name [bool]** -- do not print the graph names
      (default: True).
    - **named_repositories [list(str)]** -- declaration of named
      repositories, as 'X=Y' sequences where X names the repository
      denoted by pathname Y.
    - **debug [bool]** -- compile graphs in debug mode
      (default: False).
    - **check_variables [bool]** -- check output validity to avoid
      malformed variable expressions (default: True).

    *Return [bool]:*

      **True** if it succeeds, **False** otherwise.
    """
    options = Grf2Fst2Options()
    options.load(kwargs)

    if not exists(grammar):
        raise UnitexException("[GRF2FST2] Grammar file '%s' doesn't exists" % grammar)
    if not exists(alphabet):
        raise UnitexException("[GRF2FST2] Alphabet file '%s' doesn't exists" % alphabet)

    args = ["UnitexTool", "Grf2Fst2"]
    args.append("--no_loop_check" if options["loop_check"] is False else "--loop_check")
    args.append("--alphabet=%s" % alphabet)

    if options["char_by_char"] is True:
        args.append("--char_by_char")
    if options["pkgdir"] is not None:
        args.append("--pkgdir=%s" % options["pkgdir"])
    if options["no_empty_graph_warning"] is True:
        args.append("--no_empty_graph_warning")
    if options["tfst_check"] is True:
        args.append("--tfst_check")
    if options["silent_grf_name"] is True:
        args.append("--silent_grf_name")
    if options["named_repositories"] is not None:
        # The tool expects the X=Y declarations joined with ';'.
        args.append("--named_repositories=%s" % ";".join(options["named_repositories"]))
    if options["debug"] is True:
        args.append("--debug")
    if options["check_variables"] is True:
        args.append("--check_variables")

    args.extend([grammar, "-qutf8-no-bom"])
    command_line = " ".join(args)

    _LOGGER.info("Compiling grammar '%s'..." % grammar)
    _LOGGER.debug("Command: %s", command_line)
    return _unitex.unitex_tool(command_line)
def locate(grammar, text, alphabet, **kwargs):
    """
    This function applies a grammar to a text and constructs an index
    of the occurrences found.

    The references to the found occurrences are saved in a file called
    concord.ind. The number of occurrences, the number of units
    belonging to those occurrences, and the percentage of recognized
    units within the text are saved in a file called concord.n. Both
    files are stored in the directory of the text.

    *Arguments:*

    - **grammar [str]** -- the fst2 to apply on the text.
    - **text [str]** -- the text file, with extension .snt.
    - **alphabet [str]** -- the alphabet file of the language of the
      text.

    *Keyword arguments:*

    - *Generic options:*

      - **start_on_space [bool]** -- start the search at any position
        in the text, even before a space; should only be used for
        morphological searches (default: False).
      - **char_by_char [bool]** -- character by character tokenization
        mode, useful for languages like Thai (default: False).
      - **morpho [list(str)]** -- morphological mode dictionaries (bin
        format) to be used, if needed by some .fst2 dictionaries.
      - **korean [bool]** -- specify the dictionary is in korean
        (default: False).
      - **arabic_rules [str]** -- Arabic typographic rule
        configuration file path.
      - **sntdir [str]** -- put produced files in 'sntdir' instead of
        the text directory; 'sntdir' must end with a file separator.
      - **negation_operator [str]** -- negation operator used in
        Locate patterns: 'minus' or 'tilde' (default).

    - *Search limit options:*

      - **number_of_matches [int]** -- stop after the first N matches
        (default: all matches).

    - *Maximum iterations per token options:*

      - **stop_token_count [list(int_1, int_2)]** -- emit a warning
        after 'int_1' iterations on a token and stop after 'int_2'
        iterations; 'int_1' may be None to only set the stop value.

    - *Matching mode options:*

      - **match_mode [str]** -- UnitexConstants.MATCH_MODE_SHORTEST,
        UnitexConstants.MATCH_MODE_LONGEST (default) or
        UnitexConstants.MATCH_MODE_ALL.

    - *Output options:*

      - **output_mode [str]** -- UnitexConstants.OUTPUT_MODE_IGNORE
        (default), UnitexConstants.OUTPUT_MODE_MERGE or
        UnitexConstants.OUTPUT_MODE_REPLACE.
      - **protect_dic_chars [bool]** -- in 'merge' or 'replace' mode,
        protect some input characters with a backslash to avoid
        producing bad lines like 3,14,.PI.NUM (default: True).
      - **variable [list(str_1, str_2)]** -- set an output variable
        named str_1 with content str_2 (str_2 must be ASCII).

    - *Ambiguous output options:*

      - **ambiguous_outputs [bool]** -- allow the production of
        several matches with same input but different outputs; if
        False, one is arbitrarily chosen and kept (default: True).
      - **variable_error [str]** -- UnitexConstants.ON_ERROR_EXIT,
        UnitexConstants.ON_ERROR_IGNORE (default) or
        UnitexConstants.ON_ERROR_BACKTRACK.

    *Return [bool]:*

      **True** if it succeeds, **False** otherwise.
    """
    options = LocateOptions()
    options.load(kwargs)

    if exists(grammar) is False:
        raise UnitexException("[LOCATE] Grammar file '%s' doesn't exists" % grammar)
    if exists(text) is False:
        raise UnitexException("[LOCATE] Text file '%s' doesn't exists" % text)
    if exists(alphabet) is False:
        raise UnitexException("[LOCATE] Alphabet file '%s' doesn't exists" % alphabet)

    command = ["UnitexTool", "Locate"]
    command.append("--text=%s" % text)
    command.append("--alphabet=%s" % alphabet)

    if options["morpho"] is not None:
        command.append("--morpho=%s" % ",".join(options["morpho"]))

    if options["start_on_space"] is False:
        command.append("--dont_start_on_space")
    else:
        command.append("--start_on_space")

    if options["char_by_char"] is False:
        command.append("--word_by_word")
    else:
        command.append("--char_by_char")

    if options["sntdir"] is not None:
        command.append("--sntdir=%s" % options["sntdir"])
    if options["korean"] is True:
        command.append("--korean")
    if options["arabic_rules"] is not None:
        command.append("--arabic_rules=%s" % options["arabic_rules"])
    if options["negation_operator"] is not None:
        command.append("--negation_operator=%s" % options["negation_operator"])

    if options["number_of_matches"] is None:
        command.append("--all")
    else:
        command.append("--number_of_matches=%s" % options["number_of_matches"])

    if options["stop_token_count"] is not None:
        # BUG FIX: the original indexed the literal key string
        # 'stop_token_count[0]' and then referenced an undefined bare
        # name 'stop_token_count'; index the option value instead.
        stop_token_count = options["stop_token_count"]
        if stop_token_count[0] is None:
            command.append("--stop_token_count=%s" % stop_token_count[1])
        else:
            command.append("--stop_token_count=%s,%s" % (stop_token_count[0], stop_token_count[1]))

    if options["match_mode"] == UnitexConstants.MATCH_MODE_LONGEST:
        command.append("--longest_matches")
    elif options["match_mode"] == UnitexConstants.MATCH_MODE_SHORTEST:
        command.append("--shortest_matches")
    elif options["match_mode"] == UnitexConstants.MATCH_MODE_ALL:
        command.append("--all_matches")

    if options["output_mode"] == UnitexConstants.OUTPUT_MODE_IGNORE:
        command.append("--ignore")
    elif options["output_mode"] == UnitexConstants.OUTPUT_MODE_MERGE:
        command.append("--merge")
    elif options["output_mode"] == UnitexConstants.OUTPUT_MODE_REPLACE:
        command.append("--replace")

    if options["protect_dic_chars"] is True:
        command.append("--protect_dic_chars")

    if options["variable"] is not None:
        command.append("--variable=%s=%s" % (options["variable"][0], options["variable"][1]))

    if options["ambiguous_outputs"] is True:
        command.append("--ambiguous_outputs")
    else:
        command.append("--no_ambiguous_outputs")

    if options["variable_error"] == UnitexConstants.ON_ERROR_IGNORE:
        command.append("--ignore_variable_error")
    elif options["variable_error"] == UnitexConstants.ON_ERROR_EXIT:
        command.append("--exit_on_variable_error")
    elif options["variable_error"] == UnitexConstants.ON_ERROR_BACKTRACK:
        command.append("--backtrack_on_variable_error")

    command.append(grammar)
    command.append("-qutf8-no-bom")

    command = " ".join(command)

    _LOGGER.info("Locating pattern '%s'..." % grammar)
    _LOGGER.debug("Command: %s", command)
    ret = _unitex.unitex_tool(command)

    return ret
def normalize(text, **kwargs):
    """
    Normalize the separators of a text (space, tab, newline).

    Every sequence of separators containing at least one newline is
    replaced by a single newline; every other separator sequence is
    replaced by a single space. The function also checks the syntax of
    lexical tags found in the text: all sequences in curly brackets
    should be the sentence delimiter {S}, the stop marker {STOP}, or
    valid DELAF entries ({aujourd'hui,.ADV}).

    **NOTE:** the modified version of the text is saved in a file with
    the .snt extension.

    **WARNING:** if a normalization rule file is specified, its rules
    are applied before anything else, so be careful if those rules
    manipulate separators.

    *Arguments:*

    - **text [str]** -- the text file to normalize.

    *Keyword arguments:*

    - **no_carriage_return [bool]** -- turn every separator sequence
      into a single space (default: False).
    - **input_offsets [str]** -- base offset file to be used.
    - **output_offsets [str]** -- offset file to be produced.
    - **replacement_rules [str]** -- normalization rule file to be
      used; by default only { and } are replaced by [ and ].
    - **no_separator_normalization [bool]** -- only apply the rules
      given with 'replacement_rules' (default: False).

    *Return [bool]:*

      **True** if it succeeds, **False** otherwise.
    """
    options = NormalizeOptions()
    options.load(kwargs)

    if not exists(text):
        raise UnitexException("[NORMALIZE] Text file '%s' doesn't exists" % text)

    args = ["UnitexTool", "Normalize"]

    if options["no_carriage_return"] is True:
        args.append("--no_carriage_return")

    # The three path-valued options map directly onto flags of the
    # same name and are emitted only when provided.
    for key in ("input_offsets", "output_offsets", "replacement_rules"):
        if options[key] is not None:
            args.append("--%s=%s" % (key, options[key]))

    if options["no_separator_normalization"] is True:
        args.append("--no_separator_normalization")

    args.extend([text, "-qutf8-no-bom"])
    command_line = " ".join(args)

    _LOGGER.info("Normalizing text '%s'..." % text)
    _LOGGER.debug("Command: %s", command_line)
    return _unitex.unitex_tool(command_line)
def sort_txt(text, **kwargs):
    """
    This function carries out a lexicographical sorting of the lines
    of file <text>, in place.

    By default, the sorting is performed in the order of Unicode
    characters, removing duplicate lines.

    *Arguments:*

    - **text [str]** -- the text file to sort.

    *Keyword arguments:*

    - **duplicates [bool]** -- keep duplicate lines (default: False).
    - **reverse [bool]** -- sort in descending order (default: False).
    - **sort_order [str]** -- sort using the alphabet order defined in
      this file; if missing, the order of Unicode characters is used.
    - **line_info [str]** -- backup the number of lines of the result
      file in this file.
    - **thai [bool]** -- option for sorting Thai text
      (default: False).
    - **factorize_inflectional_codes [bool]** -- make two entries
      X,Y.Z:A and X,Y.Z:B become a single entry X,Y.Z:A:B
      (default: False).

    *Return [bool]:*

      **True** if it succeeds, **False** otherwise.
    """
    options = SortTxtOptions()
    options.load(kwargs)

    if exists(text) is False:
        raise UnitexException("[SORTTXT] Text file '%s' doesn't exists" % text)

    command = ["UnitexTool", "SortTxt"]

    if options["duplicates"] is False:
        command.append("--no_duplicates")
    else:
        command.append("--duplicates")

    if options["reverse"] is True:
        command.append("--reverse")

    # BUG FIX: both tests were inverted ('is None'), so the flags were
    # emitted as '--sort_order=None' / '--line_info=None' exactly when
    # the user did NOT supply them, and dropped when the user did.
    if options["sort_order"] is not None:
        command.append("--sort_order=%s" % options["sort_order"])
    if options["line_info"] is not None:
        command.append("--line_info=%s" % options["line_info"])

    if options["thai"] is True:
        command.append("--thai")
    if options["factorize_inflectional_codes"] is True:
        command.append("--factorize_inflectional_codes")

    command.append(text)
    command.append("-qutf8-no-bom")

    command = " ".join(command)

    _LOGGER.info("Sorting file '%s'..." % text)
    _LOGGER.debug("Command: %s", command)
    ret = _unitex.unitex_tool(command)

    return ret
def tokenize(text, alphabet, **kwargs):
    """
    Tokenize a text into lexical units.

    The <text> parameter is the complete path of the text file,
    without omitting the .snt extension. Each unit is coded as a
    whole; the list of units is saved in tokens.txt and the coded
    representation of the text in the binary file text.cod. The
    function also produces:

    - tok_by_freq.txt: units sorted by frequency;
    - tok_by_alph.txt: units sorted alphabetically;
    - stats.n: number of sentence separators, units, simple words and
      numbers;
    - enter.pos: binary file listing the positions of newlines in the
      text, used by 'concord' to synchronize occurrence positions
      with the text file (a newline counts as two characters, a space
      as one).

    All produced files are saved in the text directory.

    *Arguments:*

    - **text [str]** -- the text file to tokenize (.snt format).
    - **alphabet [str]** -- the alphabet file.

    *Keyword arguments:*

    - *Generic options:*

      - **char_by_char [bool]** -- tokenize character by character,
        except for the sentence delimiter {S}, the stop marker {STOP}
        and lexical tags like {today,.ADV}, which remain single units
        (default: False).
      - **tokens [str]** -- a tokens.txt file to load and modify,
        instead of creating a new one from scratch.

    - *Offsets options:*

      - **input_offsets [str]** -- base offset file to be used.
      - **output_offsets [str]** -- offset file to be produced.

    *Return [bool]:*

      **True** if it succeeds, **False** otherwise.
    """
    options = TokenizeOptions()
    options.load(kwargs)

    if not exists(text):
        raise UnitexException("[TOKENIZE] Text file '%s' doesn't exists" % text)
    if not exists(alphabet):
        raise UnitexException("[TOKENIZE] Alphabet file '%s' doesn't exists" % alphabet)

    args = ["UnitexTool", "Tokenize"]
    args.append("--alphabet=%s" % alphabet)
    args.append("--char_by_char" if options["char_by_char"] is True else "--word_by_word")

    # Path-valued options map directly onto flags of the same name.
    for key in ("tokens", "input_offsets", "output_offsets"):
        if options[key] is not None:
            args.append("--%s=%s" % (key, options[key]))

    args.extend([text, "-qutf8-no-bom"])
    command_line = " ".join(args)

    _LOGGER.info("Tokenizing file '%s'..." % text)
    _LOGGER.debug("Command: %s", command_line)
    return _unitex.unitex_tool(command_line)
def txt2tfst(text, alphabet, **kwargs):
    """
    Build the automaton of a text.

    If the text is segmented into sentences, one automaton is built per
    sentence; otherwise the text is arbitrarily cut into sequences of 2000
    tokens and one automaton is produced per sequence.

    Two files are written in the directory of the text: 'text.tfst' (the
    automaton itself) and 'text.tind'.

    *Arguments:*

    - **text [str]** -- the path to the text file in .snt format.

    - **alphabet [str]** -- the alphabet file.

    *Keyword arguments:*

    - **clean [bool]** -- indicates whether the rule of conservation of
      the best paths (see section 7.2.4) should be applied
      (default: False).

    - **normalization_grammar [str]** -- name of a normalization grammar
      that is to be applied to the text automaton.

    - **tagset [str]** -- Elag tagset file to use to normalize
      dictionary entries.

    - **korean [bool]** -- tells the function that it works on Korean
      (default: False).

    *Return [bool]:*

    **True** if it succeeds, **False** otherwise.
    """
    options = Txt2TFstOptions()
    options.load(kwargs)

    # Fail early when the required input files are missing.
    if not exists(text):
        raise UnitexException("[TXT2TFST] Text file '%s' doesn't exists" % text)
    if not exists(alphabet):
        raise UnitexException("[TXT2TFST] Alphabet file '%s' doesn't exists" % alphabet)

    # Assemble the UnitexTool command line piece by piece.
    command = ["UnitexTool", "Txt2Tfst", "--alphabet=%s" % alphabet]

    if options["clean"] is not False:
        command.append("--clean")

    normalization_grammar = options["normalization_grammar"]
    if normalization_grammar is not None:
        command.append("--normalization_grammar=%s" % normalization_grammar)

    tagset = options["tagset"]
    if tagset is not None:
        command.append("--tagset=%s" % tagset)

    if options["korean"] is not False:
        command.append("--korean")

    command += [text, "-qutf8-no-bom"]
    command_line = " ".join(command)

    _LOGGER.info("Building text automaton for '%s'..." % text)
    _LOGGER.debug("Command: %s", command_line)
    return _unitex.unitex_tool(command_line)
| gpl-3.0 |
p0cisk/Quantum-GIS | python/ext-libs/requests/packages/urllib3/response.py | 152 | 18542 | from __future__ import absolute_import
from contextlib import contextmanager
import zlib
import io
from socket import timeout as SocketTimeout
from socket import error as SocketError
from ._collections import HTTPHeaderDict
from .exceptions import (
ProtocolError, DecodeError, ReadTimeoutError, ResponseNotChunked
)
from .packages.six import string_types as basestring, binary_type, PY3
from .packages.six.moves import http_client as httplib
from .connection import HTTPException, BaseSSLError
from .util.response import is_fp_closed, is_response_to_head
class DeflateDecoder(object):
    """Incremental decoder for ``Content-Encoding: deflate`` bodies.

    Some servers send raw deflate streams (no zlib header) even though the
    header advertises ``deflate``.  The first decode attempt assumes a
    zlib-wrapped stream; if zlib rejects it, all input buffered so far is
    replayed through a raw-deflate decompressor.
    """

    def __init__(self):
        self._first_try = True
        self._data = binary_type()
        self._obj = zlib.decompressobj()

    def __getattr__(self, name):
        # Delegate everything else (flush, unused_data, ...) to the
        # underlying zlib decompress object.
        return getattr(self._obj, name)

    def decompress(self, data):
        # Empty input passes straight through so EOF chunks never raise.
        if not data:
            return data
        if self._first_try:
            return self._first_try_decompress(data)
        return self._obj.decompress(data)

    def _first_try_decompress(self, data):
        # Buffer the input so it can be replayed if the zlib-wrapped
        # assumption turns out to be wrong.
        self._data += data
        try:
            return self._obj.decompress(data)
        except zlib.error:
            # Switch to a raw deflate stream and replay everything seen.
            self._first_try = False
            self._obj = zlib.decompressobj(-zlib.MAX_WBITS)
            try:
                return self.decompress(self._data)
            finally:
                self._data = None
class GzipDecoder(object):
    """Thin incremental wrapper around zlib configured for gzip streams."""

    def __init__(self):
        # 16 + MAX_WBITS tells zlib to expect a gzip header and trailer.
        self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)

    def __getattr__(self, name):
        # Expose the underlying zlib object's API (flush, unused_data, ...).
        return getattr(self._obj, name)

    def decompress(self, data):
        # Empty input passes straight through so EOF chunks never raise.
        return self._obj.decompress(data) if data else data
def _get_decoder(mode):
    # 'gzip' gets the gzip-aware decoder; anything else (i.e. 'deflate')
    # falls back to the header-tolerant deflate decoder.
    return GzipDecoder() if mode == 'gzip' else DeflateDecoder()
class HTTPResponse(io.IOBase):
    """
    HTTP Response container.
    Backwards-compatible to httplib's HTTPResponse but the response ``body`` is
    loaded and decoded on-demand when the ``data`` property is accessed. This
    class is also compatible with the Python standard library's :mod:`io`
    module, and can hence be treated as a readable object in the context of that
    framework.
    Extra parameters for behaviour not present in httplib.HTTPResponse:
    :param preload_content:
        If True, the response's body will be preloaded during construction.
    :param decode_content:
        If True, will attempt to decode the body based on the
        'content-encoding' header (e.g. 'gzip' and 'deflate'); if False the
        raw bytes are returned unchanged.
    :param original_response:
        When this HTTPResponse wrapper is generated from an httplib.HTTPResponse
        object, it's convenient to include the original for debug purposes. It's
        otherwise unused.
    """
    # Content-encodings this class knows how to decode (see _get_decoder).
    CONTENT_DECODERS = ['gzip', 'deflate']
    REDIRECT_STATUSES = [301, 302, 303, 307, 308]
    def __init__(self, body='', headers=None, status=0, version=0, reason=None,
                 strict=0, preload_content=True, decode_content=True,
                 original_response=None, pool=None, connection=None):
        if isinstance(headers, HTTPHeaderDict):
            self.headers = headers
        else:
            self.headers = HTTPHeaderDict(headers)
        self.status = status
        self.version = version
        self.reason = reason
        self.strict = strict
        self.decode_content = decode_content
        # The decoder is created lazily by _init_decoder() once the
        # content-encoding is known.
        self._decoder = None
        self._body = None
        self._fp = None
        self._original_response = original_response
        # Raw byte count pulled off the wire so far; exposed via tell().
        self._fp_bytes_read = 0
        # A string/bytes body is stored directly; a file-like body is kept
        # as the file pointer to read from on demand.
        if body and isinstance(body, (basestring, binary_type)):
            self._body = body
        self._pool = pool
        self._connection = connection
        if hasattr(body, 'read'):
            self._fp = body
        # Are we using the chunked-style of transfer encoding?
        self.chunked = False
        self.chunk_left = None
        tr_enc = self.headers.get('transfer-encoding', '').lower()
        # Don't incur the penalty of creating a list and then discarding it
        encodings = (enc.strip() for enc in tr_enc.split(","))
        if "chunked" in encodings:
            self.chunked = True
        # If requested, preload the body.
        if preload_content and not self._body:
            self._body = self.read(decode_content=decode_content)
    def get_redirect_location(self):
        """
        Should we redirect and where to?
        :returns: Truthy redirect location string if we got a redirect status
            code and valid location. ``None`` if redirect status and no
            location. ``False`` if not a redirect status code.
        """
        if self.status in self.REDIRECT_STATUSES:
            return self.headers.get('location')
        return False
    def release_conn(self):
        # Hand the underlying connection back to its pool, if both are known.
        if not self._pool or not self._connection:
            return
        self._pool._put_conn(self._connection)
        self._connection = None
    @property
    def data(self):
        # For backwards-compat with urllib3 0.4 and earlier.
        if self._body:
            return self._body
        if self._fp:
            return self.read(cache_content=True)
    def tell(self):
        """
        Obtain the number of bytes pulled over the wire so far. May differ from
        the amount of content returned by :meth:``HTTPResponse.read`` if bytes
        are encoded on the wire (e.g, compressed).
        """
        return self._fp_bytes_read
    def _init_decoder(self):
        """
        Set-up the _decoder attribute if necessary.
        """
        # Note: content-encoding value should be case-insensitive, per RFC 7230
        # Section 3.2
        content_encoding = self.headers.get('content-encoding', '').lower()
        if self._decoder is None and content_encoding in self.CONTENT_DECODERS:
            self._decoder = _get_decoder(content_encoding)
    def _decode(self, data, decode_content, flush_decoder):
        """
        Decode the data passed in and potentially flush the decoder.
        """
        try:
            if decode_content and self._decoder:
                data = self._decoder.decompress(data)
        except (IOError, zlib.error) as e:
            content_encoding = self.headers.get('content-encoding', '').lower()
            raise DecodeError(
                "Received response with content-encoding: %s, but "
                "failed to decode it." % content_encoding, e)
        if flush_decoder and decode_content:
            data += self._flush_decoder()
        return data
    def _flush_decoder(self):
        """
        Flushes the decoder. Should only be called if the decoder is actually
        being used.
        """
        if self._decoder:
            buf = self._decoder.decompress(b'')
            return buf + self._decoder.flush()
        return b''
    @contextmanager
    def _error_catcher(self):
        """
        Catch low-level python exceptions, instead re-raising urllib3
        variants, so that low-level exceptions are not leaked in the
        high-level api.
        On exit, release the connection back to the pool.
        """
        clean_exit = False
        try:
            try:
                yield
            except SocketTimeout:
                # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but
                # there is yet no clean way to get at it from this context.
                raise ReadTimeoutError(self._pool, None, 'Read timed out.')
            except BaseSSLError as e:
                # FIXME: Is there a better way to differentiate between SSLErrors?
                if 'read operation timed out' not in str(e):  # Defensive:
                    # This shouldn't happen but just in case we're missing an edge
                    # case, let's avoid swallowing SSL errors.
                    raise
                raise ReadTimeoutError(self._pool, None, 'Read timed out.')
            except (HTTPException, SocketError) as e:
                # This includes IncompleteRead.
                raise ProtocolError('Connection broken: %r' % e, e)
            # If no exception is thrown, we should avoid cleaning up
            # unnecessarily.
            clean_exit = True
        finally:
            # If we didn't terminate cleanly, we need to throw away our
            # connection.
            if not clean_exit:
                # The response may not be closed but we're not going to use it
                # anymore so close it now to ensure that the connection is
                # released back to the pool.
                if self._original_response:
                    self._original_response.close()
                # Closing the response may not actually be sufficient to close
                # everything, so if we have a hold of the connection close that
                # too.
                if self._connection:
                    self._connection.close()
            # If we hold the original response but it's closed now, we should
            # return the connection back to the pool.
            if self._original_response and self._original_response.isclosed():
                self.release_conn()
    def read(self, amt=None, decode_content=None, cache_content=False):
        """
        Similar to :meth:`httplib.HTTPResponse.read`, but with two additional
        parameters: ``decode_content`` and ``cache_content``.
        :param amt:
            How much of the content to read. If specified, caching is skipped
            because it doesn't make sense to cache partial content as the full
            response.
        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.
        :param cache_content:
            If True, will save the returned data such that the same result is
            returned despite of the state of the underlying file object. This
            is useful if you want the ``.data`` property to continue working
            after having ``.read()`` the file object. (Overridden if ``amt`` is
            set.)
        """
        self._init_decoder()
        if decode_content is None:
            decode_content = self.decode_content
        if self._fp is None:
            return
        flush_decoder = False
        data = None
        with self._error_catcher():
            if amt is None:
                # cStringIO doesn't like amt=None
                data = self._fp.read()
                flush_decoder = True
            else:
                cache_content = False
                data = self._fp.read(amt)
                if amt != 0 and not data:  # Platform-specific: Buggy versions of Python.
                    # Close the connection when no data is returned
                    #
                    # This is redundant to what httplib/http.client _should_
                    # already do. However, versions of python released before
                    # December 15, 2012 (http://bugs.python.org/issue16298) do
                    # not properly close the connection in all cases. There is
                    # no harm in redundantly calling close.
                    self._fp.close()
                    flush_decoder = True
        if data:
            self._fp_bytes_read += len(data)
            data = self._decode(data, decode_content, flush_decoder)
            if cache_content:
                self._body = data
        return data
    def stream(self, amt=2**16, decode_content=None):
        """
        A generator wrapper for the read() method. A call will block until
        ``amt`` bytes have been read from the connection or until the
        connection is closed.
        :param amt:
            How much of the content to read. The generator will return up to
            as much data per iteration, but may return less. This is
            particularly likely when using compressed data. However, the
            empty string will never be returned.
        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.
        """
        if self.chunked:
            for line in self.read_chunked(amt, decode_content=decode_content):
                yield line
        else:
            while not is_fp_closed(self._fp):
                data = self.read(amt=amt, decode_content=decode_content)
                if data:
                    yield data
    @classmethod
    def from_httplib(ResponseCls, r, **response_kw):
        """
        Given an :class:`httplib.HTTPResponse` instance ``r``, return a
        corresponding :class:`urllib3.response.HTTPResponse` object.
        Remaining parameters are passed to the HTTPResponse constructor, along
        with ``original_response=r``.
        """
        headers = r.msg
        if not isinstance(headers, HTTPHeaderDict):
            if PY3:  # Python 3
                headers = HTTPHeaderDict(headers.items())
            else:  # Python 2
                headers = HTTPHeaderDict.from_httplib(headers)
        # HTTPResponse objects in Python 3 don't have a .strict attribute
        strict = getattr(r, 'strict', 0)
        resp = ResponseCls(body=r,
                           headers=headers,
                           status=r.status,
                           version=r.version,
                           reason=r.reason,
                           strict=strict,
                           original_response=r,
                           **response_kw)
        return resp
    # Backwards-compatibility methods for httplib.HTTPResponse
    def getheaders(self):
        return self.headers
    def getheader(self, name, default=None):
        return self.headers.get(name, default)
    # Overrides from io.IOBase
    def close(self):
        # Closes the file pointer and the underlying connection; the response
        # cannot be read from afterwards.
        if not self.closed:
            self._fp.close()
        if self._connection:
            self._connection.close()
    @property
    def closed(self):
        if self._fp is None:
            return True
        elif hasattr(self._fp, 'closed'):
            return self._fp.closed
        elif hasattr(self._fp, 'isclosed'):  # Python 2
            return self._fp.isclosed()
        else:
            return True
    def fileno(self):
        if self._fp is None:
            raise IOError("HTTPResponse has no file to get a fileno from")
        elif hasattr(self._fp, "fileno"):
            return self._fp.fileno()
        else:
            raise IOError("The file-like object this HTTPResponse is wrapped "
                          "around has no file descriptor")
    def flush(self):
        if self._fp is not None and hasattr(self._fp, 'flush'):
            return self._fp.flush()
    def readable(self):
        # This method is required for `io` module compatibility.
        return True
    def readinto(self, b):
        # This method is required for `io` module compatibility.
        temp = self.read(len(b))
        if len(temp) == 0:
            return 0
        else:
            b[:len(temp)] = temp
            return len(temp)
    def _update_chunk_length(self):
        # First, we'll figure out length of a chunk and then
        # we'll try to read it from socket.
        if self.chunk_left is not None:
            return
        line = self._fp.fp.readline()
        # Drop any chunk extensions after ';' (RFC 7230 section 4.1.1).
        line = line.split(b';', 1)[0]
        try:
            self.chunk_left = int(line, 16)
        except ValueError:
            # Invalid chunked protocol response, abort.
            self.close()
            raise httplib.IncompleteRead(line)
    def _handle_chunk(self, amt):
        """
        Read up to ``amt`` bytes of the current chunk (all of it when ``amt``
        is None), consuming the trailing CRLF whenever the chunk is finished.
        """
        returned_chunk = None
        if amt is None:
            chunk = self._fp._safe_read(self.chunk_left)
            returned_chunk = chunk
            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
            self.chunk_left = None
        elif amt < self.chunk_left:
            # Partial read: the remainder of this chunk is left for later.
            value = self._fp._safe_read(amt)
            self.chunk_left = self.chunk_left - amt
            returned_chunk = value
        elif amt == self.chunk_left:
            value = self._fp._safe_read(amt)
            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
            self.chunk_left = None
            returned_chunk = value
        else:  # amt > self.chunk_left
            returned_chunk = self._fp._safe_read(self.chunk_left)
            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
            self.chunk_left = None
        return returned_chunk
    def read_chunked(self, amt=None, decode_content=None):
        """
        Similar to :meth:`HTTPResponse.read`, but with an additional
        parameter: ``decode_content``.
        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.
        """
        self._init_decoder()
        # FIXME: Rewrite this method and make it a class with a better structured logic.
        if not self.chunked:
            raise ResponseNotChunked(
                "Response is not chunked. "
                "Header 'transfer-encoding: chunked' is missing.")
        # Don't bother reading the body of a HEAD request.
        if self._original_response and is_response_to_head(self._original_response):
            self._original_response.close()
            return
        with self._error_catcher():
            while True:
                self._update_chunk_length()
                if self.chunk_left == 0:
                    # A zero-length chunk marks the end of the body.
                    break
                chunk = self._handle_chunk(amt)
                decoded = self._decode(chunk, decode_content=decode_content,
                                       flush_decoder=False)
                if decoded:
                    yield decoded
            if decode_content:
                # On CPython and PyPy, we should never need to flush the
                # decoder. However, on Jython we *might* need to, so
                # lets defensively do it anyway.
                decoded = self._flush_decoder()
                if decoded:  # Platform-specific: Jython.
                    yield decoded
            # Chunk content ends with \r\n: discard it.
            while True:
                line = self._fp.fp.readline()
                if not line:
                    # Some sites may not end with '\r\n'.
                    break
                if line == b'\r\n':
                    break
            # We read everything; close the "file".
            if self._original_response:
                self._original_response.close()
| gpl-2.0 |
mrkm4ntr/incubator-airflow | airflow/providers/google/cloud/operators/stackdriver.py | 6 | 42186 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from typing import Optional, Sequence, Union
from google.api_core.gapic_v1.method import DEFAULT
from airflow.models import BaseOperator
from airflow.providers.google.cloud.hooks.stackdriver import StackdriverHook
from airflow.utils.decorators import apply_defaults
class StackdriverListAlertPoliciesOperator(BaseOperator):
    """
    Fetches all the Alert Policies identified by the filter passed as
    filter parameter. The desired return type can be specified by the
    format parameter, the supported formats are "dict", "json" and None
    which returns python dictionary, stringified JSON and protobuf
    respectively.
    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:StackdriverListAlertPoliciesOperator`
    :param format_: (Optional) Desired output format of the result. The
        supported formats are "dict", "json" and None which returns
        python dictionary, stringified JSON and protobuf respectively.
    :type format_: str
    :param filter_: If provided, this field specifies the criteria that must be met by alert
        policies to be included in the response.
        For more details, see https://cloud.google.com/monitoring/api/v3/sorting-and-filtering.
    :type filter_: str
    :param order_by: A comma-separated list of fields by which to sort the result.
        Supports the same set of field references as the ``filter`` field. Entries
        can be prefixed with a minus sign to sort by the field in descending order.
        For more details, see https://cloud.google.com/monitoring/api/v3/sorting-and-filtering.
    :type order_by: str
    :param page_size: The maximum number of resources contained in the
        underlying API response. If page streaming is performed per-
        resource, this parameter does not affect the return value. If page
        streaming is performed per-page, this determines the maximum number
        of resources in a page.
    :type page_size: int
    :param retry: A retry object used to retry requests. If ``None`` is
        specified, requests will be retried using a default configuration.
    :type retry: str
    :param timeout: The amount of time, in seconds, to wait
        for the request to complete. Note that if ``retry`` is
        specified, the timeout applies to each individual attempt.
    :type timeout: float
    :param metadata: Additional metadata that is provided to the method.
    :type metadata: str
    :param gcp_conn_id: (Optional) The connection ID used to connect to Google
        Cloud Platform.
    :type gcp_conn_id: str
    :param project_id: The project to fetch alerts from.
    :type project_id: str
    :param delegate_to: The account to impersonate using domain-wide delegation of authority,
        if any. For this to work, the service account making the request must have
        domain-wide delegation enabled.
    :type delegate_to: str
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    :type impersonation_chain: Union[str, Sequence[str]]
    """

    template_fields = (
        'filter_',
        'impersonation_chain',
    )
    ui_color = "#e5ffcc"

    # pylint: disable=too-many-arguments
    @apply_defaults
    def __init__(
        self,
        *,
        format_: Optional[str] = None,
        filter_: Optional[str] = None,
        order_by: Optional[str] = None,
        page_size: Optional[int] = None,
        retry: Optional[str] = DEFAULT,
        timeout: Optional[float] = DEFAULT,
        metadata: Optional[str] = None,
        gcp_conn_id: str = 'google_cloud_default',
        project_id: Optional[str] = None,
        delegate_to: Optional[str] = None,
        impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.format_ = format_
        self.filter_ = filter_
        self.order_by = order_by
        self.page_size = page_size
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        self.gcp_conn_id = gcp_conn_id
        self.project_id = project_id
        self.delegate_to = delegate_to
        self.impersonation_chain = impersonation_chain
        # Hook is created lazily in execute().
        self.hook = None

    def execute(self, context):
        """List the alert policies matching ``filter_`` and return them."""
        # BUG FIX: ``page_size`` defaults to None, and formatting None through
        # a '%d' placeholder raises inside the logging machinery so the log
        # record is lost. '%s' renders both ints and None safely.
        self.log.info(
            'List Alert Policies: Project id: %s Format: %s Filter: %s Order By: %s Page Size: %s',
            self.project_id,
            self.format_,
            self.filter_,
            self.order_by,
            self.page_size,
        )
        if self.hook is None:
            # Instantiate the hook on first use so the operator can be
            # constructed without credentials being available.
            self.hook = StackdriverHook(
                gcp_conn_id=self.gcp_conn_id,
                delegate_to=self.delegate_to,
                impersonation_chain=self.impersonation_chain,
            )
        return self.hook.list_alert_policies(
            project_id=self.project_id,
            format_=self.format_,
            filter_=self.filter_,
            order_by=self.order_by,
            page_size=self.page_size,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )
class StackdriverEnableAlertPoliciesOperator(BaseOperator):
    """
    Enables one or more disabled alerting policies identified by filter
    parameter. Inoperative in case the policy is already enabled.
    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:StackdriverEnableAlertPoliciesOperator`
    :param filter_: If provided, this field specifies the criteria that
        must be met by alert policies to be enabled.
        For more details, see https://cloud.google.com/monitoring/api/v3/sorting-and-filtering.
    :type filter_: str
    :param retry: A retry object used to retry requests. If ``None`` is
        specified, requests will be retried using a default configuration.
    :type retry: str
    :param timeout: The amount of time, in seconds, to wait
        for the request to complete. Note that if ``retry`` is
        specified, the timeout applies to each individual attempt.
    :type timeout: float
    :param metadata: Additional metadata that is provided to the method.
    :type metadata: str
    :param gcp_conn_id: (Optional) The connection ID used to connect to Google
        Cloud Platform.
    :type gcp_conn_id: str
    :param project_id: The project in which alert needs to be enabled.
    :type project_id: str
    :param delegate_to: The account to impersonate using domain-wide delegation of authority,
        if any. For this to work, the service account making the request must have
        domain-wide delegation enabled.
    :type delegate_to: str
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    :type impersonation_chain: Union[str, Sequence[str]]
    """

    ui_color = "#e5ffcc"
    template_fields = (
        'filter_',
        'impersonation_chain',
    )

    @apply_defaults
    def __init__(
        self,
        *,
        filter_: Optional[str] = None,
        retry: Optional[str] = DEFAULT,
        timeout: Optional[float] = DEFAULT,
        metadata: Optional[str] = None,
        gcp_conn_id: str = 'google_cloud_default',
        project_id: Optional[str] = None,
        delegate_to: Optional[str] = None,
        impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        # API request parameters.
        self.filter_ = filter_
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        # Connection / credential parameters.
        self.gcp_conn_id = gcp_conn_id
        self.project_id = project_id
        self.delegate_to = delegate_to
        self.impersonation_chain = impersonation_chain
        # The hook is built lazily on the first execute() call.
        self.hook = None

    def execute(self, context):
        """Enable every alerting policy matching ``filter_``."""
        self.log.info(
            'Enable Alert Policies: Project id: %s Filter: %s',
            self.project_id,
            self.filter_,
        )
        if self.hook is None:
            self.hook = StackdriverHook(
                gcp_conn_id=self.gcp_conn_id,
                delegate_to=self.delegate_to,
                impersonation_chain=self.impersonation_chain,
            )
        self.hook.enable_alert_policies(
            filter_=self.filter_,
            project_id=self.project_id,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )
# Operator that disables alerting policies matching a filter.
class StackdriverDisableAlertPoliciesOperator(BaseOperator):
    """
    Disables one or more enabled alerting policies identified by filter
    parameter. Inoperative in case the policy is already disabled.
    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:StackdriverDisableAlertPoliciesOperator`
    :param filter_: If provided, this field specifies the criteria that
        must be met by alert policies to be disabled.
        For more details, see https://cloud.google.com/monitoring/api/v3/sorting-and-filtering.
    :type filter_: str
    :param retry: A retry object used to retry requests. If ``None`` is
        specified, requests will be retried using a default configuration.
    :type retry: str
    :param timeout: The amount of time, in seconds, to wait
        for the request to complete. Note that if ``retry`` is
        specified, the timeout applies to each individual attempt.
    :type timeout: float
    :param metadata: Additional metadata that is provided to the method.
    :type metadata: str
    :param gcp_conn_id: (Optional) The connection ID used to connect to Google
        Cloud Platform.
    :type gcp_conn_id: str
    :param project_id: The project in which alert needs to be disabled.
    :type project_id: str
    :param delegate_to: The account to impersonate using domain-wide delegation of authority,
        if any. For this to work, the service account making the request must have
        domain-wide delegation enabled.
    :type delegate_to: str
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    :type impersonation_chain: Union[str, Sequence[str]]
    """

    ui_color = "#e5ffcc"
    template_fields = (
        'filter_',
        'impersonation_chain',
    )

    @apply_defaults
    def __init__(
        self,
        *,
        filter_: Optional[str] = None,
        retry: Optional[str] = DEFAULT,
        timeout: Optional[float] = DEFAULT,
        metadata: Optional[str] = None,
        gcp_conn_id: str = 'google_cloud_default',
        project_id: Optional[str] = None,
        delegate_to: Optional[str] = None,
        impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        # API request parameters.
        self.filter_ = filter_
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        # Connection / credential parameters.
        self.gcp_conn_id = gcp_conn_id
        self.project_id = project_id
        self.delegate_to = delegate_to
        self.impersonation_chain = impersonation_chain
        # The hook is built lazily on the first execute() call.
        self.hook = None

    def execute(self, context):
        """Disable every alerting policy matching ``filter_``."""
        self.log.info(
            'Disable Alert Policies: Project id: %s Filter: %s',
            self.project_id,
            self.filter_,
        )
        if self.hook is None:
            self.hook = StackdriverHook(
                gcp_conn_id=self.gcp_conn_id,
                delegate_to=self.delegate_to,
                impersonation_chain=self.impersonation_chain,
            )
        self.hook.disable_alert_policies(
            filter_=self.filter_,
            project_id=self.project_id,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )
class StackdriverUpsertAlertOperator(BaseOperator):
    """
    Creates a new alert or updates an existing policy identified by
    the name field in the alerts parameter.
    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:StackdriverUpsertAlertOperator`
    :param alerts: A JSON string or file that specifies all the alerts that needs
        to be either created or updated. For more details, see
        https://cloud.google.com/monitoring/api/ref_v3/rest/v3/projects.alertPolicies#AlertPolicy.
        (templated)
    :type alerts: str
    :param retry: A retry object used to retry requests. If ``None`` is
        specified, requests will be retried using a default configuration.
    :type retry: str
    :param timeout: The amount of time, in seconds, to wait
        for the request to complete. Note that if ``retry`` is
        specified, the timeout applies to each individual attempt.
    :type timeout: float
    :param metadata: Additional metadata that is provided to the method.
    :type metadata: str
    :param gcp_conn_id: (Optional) The connection ID used to connect to Google
        Cloud Platform.
    :type gcp_conn_id: str
    :param project_id: The project in which alert needs to be created/updated.
    :type project_id: str
    :param delegate_to: The account to impersonate using domain-wide delegation of authority,
        if any. For this to work, the service account making the request must have
        domain-wide delegation enabled.
    :type delegate_to: str
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    :type impersonation_chain: Union[str, Sequence[str]]
    """

    template_fields = (
        'alerts',
        'impersonation_chain',
    )
    template_ext = ('.json',)
    ui_color = "#e5ffcc"

    @apply_defaults
    def __init__(
        self,
        *,
        alerts: str,
        retry: Optional[str] = DEFAULT,
        timeout: Optional[float] = DEFAULT,
        metadata: Optional[str] = None,
        gcp_conn_id: str = 'google_cloud_default',
        project_id: Optional[str] = None,
        delegate_to: Optional[str] = None,
        impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        # API request parameters.
        self.alerts = alerts
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        # Connection / credential parameters.
        self.gcp_conn_id = gcp_conn_id
        self.project_id = project_id
        self.delegate_to = delegate_to
        self.impersonation_chain = impersonation_chain
        # The hook is built lazily on the first execute() call.
        self.hook = None

    def execute(self, context):
        """Create or update the alert policies described by ``alerts``."""
        self.log.info(
            'Upsert Alert Policies: Alerts: %s Project id: %s',
            self.alerts,
            self.project_id,
        )
        if self.hook is None:
            self.hook = StackdriverHook(
                gcp_conn_id=self.gcp_conn_id,
                delegate_to=self.delegate_to,
                impersonation_chain=self.impersonation_chain,
            )
        self.hook.upsert_alert(
            alerts=self.alerts,
            project_id=self.project_id,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )
class StackdriverDeleteAlertOperator(BaseOperator):
    """
    Deletes an alerting policy.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:StackdriverDeleteAlertOperator`

    :param name: The alerting policy to delete, in the form
        ``projects/[PROJECT_ID]/alertPolicies/[ALERT_POLICY_ID]``.
    :type name: str
    :param retry: A retry object used to retry requests. If ``None`` is
        specified, requests will be retried using a default configuration.
    :type retry: str
    :param timeout: The amount of time, in seconds, to wait for the request to
        complete. If ``retry`` is specified, the timeout applies to each
        individual attempt.
    :type timeout: float
    :param metadata: Additional metadata that is provided to the method.
    :type metadata: str
    :param gcp_conn_id: (Optional) The connection ID used to connect to Google
        Cloud Platform.
    :type gcp_conn_id: str
    :param project_id: The project from which the alert needs to be deleted.
    :type project_id: str
    :param delegate_to: The account to impersonate using domain-wide delegation
        of authority, if any. The service account making the request must have
        domain-wide delegation enabled.
    :type delegate_to: str
    :param impersonation_chain: Optional service account (or chained list of
        accounts) to impersonate using short-term credentials. A string grants
        the Token Creator role from the originating account; a sequence chains
        the grants along the list (templated).
    :type impersonation_chain: Union[str, Sequence[str]]
    """

    # Rendered through Jinja templating before execution.
    template_fields = (
        'name',
        'impersonation_chain',
    )
    ui_color = "#e5ffcc"

    @apply_defaults
    def __init__(
        self,
        *,
        name: str,
        retry: Optional[str] = DEFAULT,
        timeout: Optional[float] = DEFAULT,
        metadata: Optional[str] = None,
        gcp_conn_id: str = 'google_cloud_default',
        project_id: Optional[str] = None,
        delegate_to: Optional[str] = None,
        impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        # Connection / identity configuration.
        self.gcp_conn_id = gcp_conn_id
        self.project_id = project_id
        self.delegate_to = delegate_to
        self.impersonation_chain = impersonation_chain
        # Target policy and per-call options.
        self.name = name
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        # Built lazily on the first execute() call.
        self.hook = None

    def execute(self, context):
        """Delete the alerting policy identified by ``self.name``."""
        self.log.info('Delete Alert Policy: Project id: %s Name: %s', self.project_id, self.name)
        hook = self.hook
        if hook is None:
            hook = StackdriverHook(
                gcp_conn_id=self.gcp_conn_id,
                delegate_to=self.delegate_to,
                impersonation_chain=self.impersonation_chain,
            )
            self.hook = hook
        hook.delete_alert_policy(
            name=self.name,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )
class StackdriverListNotificationChannelsOperator(BaseOperator):
    """
    Fetches all the Notification Channels identified by the filter passed as
    filter parameter. The desired return type can be specified by the
    format parameter, the supported formats are "dict", "json" and None
    which returns python dictionary, stringified JSON and protobuf
    respectively.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:StackdriverListNotificationChannelsOperator`

    :param format_: (Optional) Desired output format of the result. The
        supported formats are "dict", "json" and None which returns
        python dictionary, stringified JSON and protobuf respectively.
    :type format_: str
    :param filter_: If provided, this field specifies the criteria that
        must be met by notification channels to be included in the response.
        For more details, see https://cloud.google.com/monitoring/api/v3/sorting-and-filtering.
    :type filter_: str
    :param order_by: A comma-separated list of fields by which to sort the result.
        Supports the same set of field references as the ``filter`` field. Entries
        can be prefixed with a minus sign to sort by the field in descending order.
        For more details, see https://cloud.google.com/monitoring/api/v3/sorting-and-filtering.
    :type order_by: str
    :param page_size: The maximum number of resources contained in the
        underlying API response. If page streaming is performed per-
        resource, this parameter does not affect the return value. If page
        streaming is performed per-page, this determines the maximum number
        of resources in a page.
    :type page_size: int
    :param retry: A retry object used to retry requests. If ``None`` is
        specified, requests will be retried using a default configuration.
    :type retry: str
    :param timeout: The amount of time, in seconds, to wait
        for the request to complete. Note that if ``retry`` is
        specified, the timeout applies to each individual attempt.
    :type timeout: float
    :param metadata: Additional metadata that is provided to the method.
    :type metadata: str
    :param gcp_conn_id: (Optional) The connection ID used to connect to Google
        Cloud Platform.
    :type gcp_conn_id: str
    :param project_id: The project to fetch notification channels from.
    :type project_id: str
    :param delegate_to: The account to impersonate using domain-wide delegation of authority,
        if any. For this to work, the service account making the request must have
        domain-wide delegation enabled.
    :type delegate_to: str
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    :type impersonation_chain: Union[str, Sequence[str]]
    """

    # Rendered by Jinja templating before execute() runs.
    template_fields = (
        'filter_',
        'impersonation_chain',
    )
    ui_color = "#e5ffcc"

    # pylint: disable=too-many-arguments
    @apply_defaults
    def __init__(
        self,
        *,
        format_: Optional[str] = None,
        filter_: Optional[str] = None,
        order_by: Optional[str] = None,
        page_size: Optional[int] = None,
        retry: Optional[str] = DEFAULT,
        timeout: Optional[float] = DEFAULT,
        metadata: Optional[str] = None,
        gcp_conn_id: str = 'google_cloud_default',
        project_id: Optional[str] = None,
        delegate_to: Optional[str] = None,
        impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.format_ = format_
        self.filter_ = filter_
        self.order_by = order_by
        self.page_size = page_size
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        self.gcp_conn_id = gcp_conn_id
        self.project_id = project_id
        self.delegate_to = delegate_to
        self.impersonation_chain = impersonation_chain
        # Built lazily on the first execute() call.
        self.hook = None

    def execute(self, context):
        """List notification channels and return them in the requested format."""
        # BUGFIX: use %s instead of %d for page_size. page_size defaults to
        # None, and formatting None with %d makes the logging call fail
        # internally (the record is dropped with a "Logging error" traceback).
        self.log.info(
            'List Notification Channels: Project id: %s Format: %s Filter: %s Order By: %s Page Size: %s',
            self.project_id,
            self.format_,
            self.filter_,
            self.order_by,
            self.page_size,
        )
        if self.hook is None:
            self.hook = StackdriverHook(
                gcp_conn_id=self.gcp_conn_id,
                delegate_to=self.delegate_to,
                impersonation_chain=self.impersonation_chain,
            )
        # Returned value is pushed to XCom by Airflow.
        return self.hook.list_notification_channels(
            format_=self.format_,
            project_id=self.project_id,
            filter_=self.filter_,
            order_by=self.order_by,
            page_size=self.page_size,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )
class StackdriverEnableNotificationChannelsOperator(BaseOperator):
    """
    Enables one or more disabled notification channels identified by the
    ``filter_`` parameter. Channels that are already enabled are unaffected.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:StackdriverEnableNotificationChannelsOperator`

    :param filter_: If provided, this field specifies the criteria that
        must be met by notification channels to be enabled. For more details, see
        https://cloud.google.com/monitoring/api/v3/sorting-and-filtering.
    :type filter_: str
    :param retry: A retry object used to retry requests. If ``None`` is
        specified, requests will be retried using a default configuration.
    :type retry: str
    :param timeout: The amount of time, in seconds, to wait for the request to
        complete. If ``retry`` is specified, the timeout applies to each
        individual attempt.
    :type timeout: float
    :param metadata: Additional metadata that is provided to the method.
    :type metadata: str
    :param gcp_conn_id: (Optional) The connection ID used to connect to Google
        Cloud Platform.
    :type gcp_conn_id: str
    :param project_id: The project in which the channels should be enabled.
    :type project_id: str
    :param delegate_to: The account to impersonate using domain-wide delegation
        of authority, if any. The service account making the request must have
        domain-wide delegation enabled.
    :type delegate_to: str
    :param impersonation_chain: Optional service account (or chained list of
        accounts) to impersonate using short-term credentials (templated).
    :type impersonation_chain: Union[str, Sequence[str]]
    """

    # Rendered through Jinja templating before execution.
    template_fields = (
        'filter_',
        'impersonation_chain',
    )
    ui_color = "#e5ffcc"

    @apply_defaults
    def __init__(
        self,
        *,
        filter_: Optional[str] = None,
        retry: Optional[str] = DEFAULT,
        timeout: Optional[float] = DEFAULT,
        metadata: Optional[str] = None,
        gcp_conn_id: str = 'google_cloud_default',
        project_id: Optional[str] = None,
        delegate_to: Optional[str] = None,
        impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        # Connection / identity configuration.
        self.gcp_conn_id = gcp_conn_id
        self.project_id = project_id
        self.delegate_to = delegate_to
        self.impersonation_chain = impersonation_chain
        # Selection criteria and per-call options.
        self.filter_ = filter_
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        # Built lazily on the first execute() call.
        self.hook = None

    def execute(self, context):
        """Enable every notification channel matching ``self.filter_``."""
        self.log.info(
            'Enable Notification Channels: Project id: %s Filter: %s', self.project_id, self.filter_
        )
        hook = self.hook
        if hook is None:
            hook = StackdriverHook(
                gcp_conn_id=self.gcp_conn_id,
                delegate_to=self.delegate_to,
                impersonation_chain=self.impersonation_chain,
            )
            self.hook = hook
        hook.enable_notification_channels(
            filter_=self.filter_,
            project_id=self.project_id,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )
class StackdriverDisableNotificationChannelsOperator(BaseOperator):
    """
    Disables one or more enabled notification channels identified by the
    ``filter_`` parameter. Channels that are already disabled are unaffected.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:StackdriverDisableNotificationChannelsOperator`

    :param filter_: If provided, this field specifies the criteria that
        must be met by notification channels to be disabled. For more details, see
        https://cloud.google.com/monitoring/api/v3/sorting-and-filtering.
    :type filter_: str
    :param retry: A retry object used to retry requests. If ``None`` is
        specified, requests will be retried using a default configuration.
    :type retry: str
    :param timeout: The amount of time, in seconds, to wait for the request to
        complete. If ``retry`` is specified, the timeout applies to each
        individual attempt.
    :type timeout: float
    :param metadata: Additional metadata that is provided to the method.
    :type metadata: str
    :param gcp_conn_id: (Optional) The connection ID used to connect to Google
        Cloud Platform.
    :type gcp_conn_id: str
    :param project_id: The project in which the channels should be disabled.
    :type project_id: str
    :param delegate_to: The account to impersonate using domain-wide delegation
        of authority, if any. The service account making the request must have
        domain-wide delegation enabled.
    :type delegate_to: str
    :param impersonation_chain: Optional service account (or chained list of
        accounts) to impersonate using short-term credentials (templated).
    :type impersonation_chain: Union[str, Sequence[str]]
    """

    # Rendered through Jinja templating before execution.
    template_fields = (
        'filter_',
        'impersonation_chain',
    )
    ui_color = "#e5ffcc"

    @apply_defaults
    def __init__(
        self,
        *,
        filter_: Optional[str] = None,
        retry: Optional[str] = DEFAULT,
        timeout: Optional[float] = DEFAULT,
        metadata: Optional[str] = None,
        gcp_conn_id: str = 'google_cloud_default',
        project_id: Optional[str] = None,
        delegate_to: Optional[str] = None,
        impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        # Connection / identity configuration.
        self.gcp_conn_id = gcp_conn_id
        self.project_id = project_id
        self.delegate_to = delegate_to
        self.impersonation_chain = impersonation_chain
        # Selection criteria and per-call options.
        self.filter_ = filter_
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        # Built lazily on the first execute() call.
        self.hook = None

    def execute(self, context):
        """Disable every notification channel matching ``self.filter_``."""
        self.log.info(
            'Disable Notification Channels: Project id: %s Filter: %s', self.project_id, self.filter_
        )
        hook = self.hook
        if hook is None:
            hook = StackdriverHook(
                gcp_conn_id=self.gcp_conn_id,
                delegate_to=self.delegate_to,
                impersonation_chain=self.impersonation_chain,
            )
            self.hook = hook
        hook.disable_notification_channels(
            filter_=self.filter_,
            project_id=self.project_id,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )
class StackdriverUpsertNotificationChannelOperator(BaseOperator):
    """
    Creates a new notification channel or updates an existing one, matched by
    the ``name`` field inside the ``channels`` payload.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:StackdriverUpsertNotificationChannelOperator`

    :param channels: A JSON string or file that specifies all the notification
        channels to be either created or updated. For more details, see
        https://cloud.google.com/monitoring/api/ref_v3/rest/v3/projects.notificationChannels.
        (templated)
    :type channels: str
    :param retry: A retry object used to retry requests. If ``None`` is
        specified, requests will be retried using a default configuration.
    :type retry: str
    :param timeout: The amount of time, in seconds, to wait for the request to
        complete. If ``retry`` is specified, the timeout applies to each
        individual attempt.
    :type timeout: float
    :param metadata: Additional metadata that is provided to the method.
    :type metadata: str
    :param gcp_conn_id: (Optional) The connection ID used to connect to Google
        Cloud Platform.
    :type gcp_conn_id: str
    :param project_id: The project in which notification channels need to be
        created/updated.
    :type project_id: str
    :param delegate_to: The account to impersonate using domain-wide delegation
        of authority, if any. The service account making the request must have
        domain-wide delegation enabled.
    :type delegate_to: str
    :param impersonation_chain: Optional service account (or chained list of
        accounts) to impersonate using short-term credentials (templated).
    :type impersonation_chain: Union[str, Sequence[str]]
    """

    # Rendered by Jinja templating before execute() runs.
    template_fields = (
        'channels',
        'impersonation_chain',
    )
    # A templated 'channels' value ending in '.json' is read from that file.
    template_ext = ('.json',)
    ui_color = "#e5ffcc"

    @apply_defaults
    def __init__(
        self,
        *,
        channels: str,
        retry: Optional[str] = DEFAULT,
        # FIX: annotation was Optional[str]; timeout is seconds (float), which
        # matches the docstring and every sibling Stackdriver operator here.
        timeout: Optional[float] = DEFAULT,
        metadata: Optional[str] = None,
        gcp_conn_id: str = 'google_cloud_default',
        project_id: Optional[str] = None,
        delegate_to: Optional[str] = None,
        impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.channels = channels
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        self.gcp_conn_id = gcp_conn_id
        self.project_id = project_id
        self.delegate_to = delegate_to
        self.impersonation_chain = impersonation_chain
        # Built lazily on the first execute() call.
        self.hook = None

    def execute(self, context):
        """Create or update the channels described by ``self.channels``."""
        self.log.info(
            'Upsert Notification Channels: Channels: %s Project id: %s', self.channels, self.project_id
        )
        if self.hook is None:
            self.hook = StackdriverHook(
                gcp_conn_id=self.gcp_conn_id,
                delegate_to=self.delegate_to,
                impersonation_chain=self.impersonation_chain,
            )
        self.hook.upsert_channel(
            channels=self.channels,
            project_id=self.project_id,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )
class StackdriverDeleteNotificationChannelOperator(BaseOperator):
    """
    Deletes a notification channel.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:StackdriverDeleteNotificationChannelOperator`

    :param name: The notification channel to delete, in the form
        ``projects/[PROJECT_ID]/notificationChannels/[CHANNEL_ID]``.
    :type name: str
    :param retry: A retry object used to retry requests. If ``None`` is
        specified, requests will be retried using a default configuration.
    :type retry: str
    :param timeout: The amount of time, in seconds, to wait for the request to
        complete. If ``retry`` is specified, the timeout applies to each
        individual attempt.
    :type timeout: float
    :param metadata: Additional metadata that is provided to the method.
    :type metadata: str
    :param gcp_conn_id: (Optional) The connection ID used to connect to Google
        Cloud Platform.
    :type gcp_conn_id: str
    :param project_id: The project from which the notification channel needs
        to be deleted.
    :type project_id: str
    :param delegate_to: The account to impersonate using domain-wide delegation
        of authority, if any. The service account making the request must have
        domain-wide delegation enabled.
    :type delegate_to: str
    :param impersonation_chain: Optional service account (or chained list of
        accounts) to impersonate using short-term credentials (templated).
    :type impersonation_chain: Union[str, Sequence[str]]
    """

    # Rendered through Jinja templating before execution.
    template_fields = (
        'name',
        'impersonation_chain',
    )
    ui_color = "#e5ffcc"

    @apply_defaults
    def __init__(
        self,
        *,
        name: str,
        retry: Optional[str] = DEFAULT,
        timeout: Optional[float] = DEFAULT,
        metadata: Optional[str] = None,
        gcp_conn_id: str = 'google_cloud_default',
        project_id: Optional[str] = None,
        delegate_to: Optional[str] = None,
        impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        # Connection / identity configuration.
        self.gcp_conn_id = gcp_conn_id
        self.project_id = project_id
        self.delegate_to = delegate_to
        self.impersonation_chain = impersonation_chain
        # Target channel and per-call options.
        self.name = name
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        # Built lazily on the first execute() call.
        self.hook = None

    def execute(self, context):
        """Delete the notification channel identified by ``self.name``."""
        self.log.info('Delete Notification Channel: Project id: %s Name: %s', self.project_id, self.name)
        hook = self.hook
        if hook is None:
            hook = StackdriverHook(
                gcp_conn_id=self.gcp_conn_id,
                delegate_to=self.delegate_to,
                impersonation_chain=self.impersonation_chain,
            )
            self.hook = hook
        hook.delete_notification_channel(
            name=self.name, retry=self.retry, timeout=self.timeout, metadata=self.metadata
        )
| apache-2.0 |
levi-rs/traw | tests/test_client.py | 1 | 126791 | from datetime import datetime as dt
import sys
import mock
import pytest
import traw
from traw import models
from traw.exceptions import TRAWClientError, UnknownCustomStatusError
# Credential / endpoint stubs used to construct traw.Client in these tests.
USER = 'mock username'
PASS = 'mock password'
URL = 'mock url'
# Canned API response payloads, keyed the way the TestRail API returns them.
CASE1 = {'name': 'case1', 'id': 991}
CASE2 = {'name': 'case2', 'id': 992}
CASE3 = {'name': 'case3', 'id': 993}
# Individual configs; group_id ties each config to a config group below.
CONF1 = {'group_id': 1, 'id': 1, 'name': 'config 1 name'}
CONF2 = {'group_id': 1, 'id': 2, 'name': 'config 2 name'}
CONF3 = {'group_id': 2, 'id': 3, 'name': 'config 3 name'}
CONF4 = {'group_id': 2, 'id': 4, 'name': 'config 4 name'}
CONF5 = {'group_id': 3, 'id': 5, 'name': 'config 5 name'}
CONF6 = {'group_id': 3, 'id': 6, 'name': 'config 6 name'}
CG1 = {'name': 'configgroup1', 'id': 661, 'project_id': 15,
       'configs': [CONF1, CONF2]}
CG2 = {'name': 'configgroup2', 'id': 662, 'project_id': 15,
       'configs': [CONF3, CONF4]}
CG3 = {'name': 'configgroup3', 'id': 663, 'project_id': 15,
       'configs': [CONF5, CONF6]}
CT1 = {'name': 'casetype1', 'id': 331}
CT2 = {'name': 'casetype2', 'id': 332}
CT3 = {'name': 'casetype3', 'id': 333}
MILE1 = {'name': 'milestone1'}
MILE2 = {'name': 'milestone2'}
MILE3 = {'name': 'milestone3'}
PRIO1 = {'name': 'priority1', 'id': 111}
PRIO2 = {'name': 'priority2', 'id': 112}
PRIO3 = {'name': 'priority3', 'id': 113}
PROJ1 = {'name': 'project1'}
PROJ2 = {'name': 'project2'}
PROJ3 = {'name': 'project3'}
RESU1 = {'name': 'result1', 'id': 771}
RESU2 = {'name': 'result2', 'id': 772}
RESU3 = {'name': 'result3', 'id': 773}
RUN1 = {'name': 'run1', 'id': 881}
RUN2 = {'name': 'run2', 'id': 882}
RUN3 = {'name': 'run3', 'id': 883}
RUN4 = {'name': 'run4', 'id': 884}
SECT1 = {'name': 'section1', 'id': 991}
SECT2 = {'name': 'section2', 'id': 992}
SECT3 = {'name': 'section3', 'id': 993}
# Statuses include differently-cased labels to exercise status lookups.
STAT1 = {'name': 'status1', 'id': 221, 'label': 'Passed'}
STAT2 = {'name': 'status2', 'id': 222, 'label': 'Failed'}
STAT3 = {'name': 'status3', 'id': 223, 'label': 'failed'}
STAT4 = {'name': 'status4', 'id': 8, 'label': 'custom-failed'}
SUIT1 = {'name': 'suite1', 'id': 551}
SUIT2 = {'name': 'suite2', 'id': 552}
SUIT3 = {'name': 'suite3', 'id': 553}
TEMP1 = {'name': 'template1', 'id': 991}
TEMP2 = {'name': 'template2', 'id': 992}
TEMP3 = {'name': 'template3', 'id': 993}
TEST1 = {'name': 'test1', 'id': 441}
TEST2 = {'name': 'test2', 'id': 442}
TEST3 = {'name': 'test3', 'id': 443}
USER1 = {'name': 'user1'}
USER2 = {'name': 'user2'}
USER3 = {'name': 'user3'}
def test___init__():
    """Constructing a Client attaches a traw.api.API instance as ``api``."""
    client = traw.Client(username=USER, password=PASS, url=URL)
    assert hasattr(client, 'api')
    assert isinstance(client.api, traw.api.API)
def test_add_exception_no_obj(client):
    """ Verify the Client raises an exception if add is called directly """
    with pytest.raises(TypeError) as exc:
        client.add()
    # The missing-positional-argument wording differs between Python 2 and 3.
    if sys.version_info.major == 2:
        assert "takes exactly 2 arguments (1 given)" in str(exc)
    else:
        assert "required positional argument: 'obj'" in str(exc)
def test_add_exception_w_obj(client):
    """ Verify the Client raises an exception if add is called with unsupported type """
    with pytest.raises(TypeError) as exc:
        client.add(1)
    assert "support adding objects of type" in str(exc)
def test_add_config(client):
    """client.add(Config) posts only API fields and returns a new Config."""
    CONFIG_ID = 15
    CONFIG_GROUP_ID = 15
    config_config = {traw.const.NAME: 'mock name',
                     'group_id': CONFIG_GROUP_ID}
    config = models.Config(client, dict(extra='extra', **config_config))
    client.api.config_add.return_value = dict(id=CONFIG_ID, **config_config)
    response = client.add(config)
    assert isinstance(response, models.Config)
    assert response.id == CONFIG_ID
    assert client.api.config_add.called
    # Only whitelisted fields are sent; the 'extra' key must be stripped.
    assert 'mock name' in str(client.api.config_add.call_args)
    assert 'extra' not in str(client.api.config_add.call_args)
def test_add_config_group(client):
    """client.add(ConfigGroup) posts only API fields and returns a new ConfigGroup."""
    CONFIG_GROUP_ID = 15
    PROJECT_ID = 15
    config_group_config = {traw.const.NAME: 'mock name',
                           'project_id': PROJECT_ID}
    con_grp = models.ConfigGroup(client, dict(extra='extra', **config_group_config))
    client.api.config_group_add.return_value = dict(id=CONFIG_GROUP_ID, **config_group_config)
    response = client.add(con_grp)
    assert isinstance(response, models.ConfigGroup)
    assert response.id == CONFIG_GROUP_ID
    assert client.api.config_group_add.called
    assert 'mock name' in str(client.api.config_group_add.call_args)
    assert 'extra' not in str(client.api.config_group_add.call_args)
def test_add_milestone(client):
    """client.add(Milestone) resolves the project and posts milestone fields."""
    MILESTONE_ID = 111
    PROJECT_ID = 15
    milestone_config = {traw.const.NAME: 'mock name',
                        traw.const.DESCRIPTION: 'mock description',
                        traw.const.DUE_ON: 123456,
                        traw.const.START_ON: 12345,
                        traw.const.PROJECT_ID: PROJECT_ID}
    milestone = models.Milestone(client, dict(extra='extra', **milestone_config))
    client.api.milestone_add.return_value = dict(id=MILESTONE_ID, **milestone_config)
    # Adding a milestone looks up its project, so stub out client.project.
    with mock.patch.object(client, 'project') as proj_mock:
        proj_mock.return_value = models.Project(client, {'id': PROJECT_ID})
        response = client.add(milestone)
    assert isinstance(response, models.Milestone)
    assert response.id == MILESTONE_ID
    assert client.api.milestone_add.called
    assert 'mock name' in str(client.api.milestone_add.call_args)
    assert 'extra' not in str(client.api.milestone_add.call_args)
def test_add_project(client):
    """client.add(Project) posts project fields and returns a new Project."""
    PROJECT_ID = 15
    project_config = {traw.const.NAME: 'mock name',
                      traw.const.ANNOUNCEMENT: 'mock announcement',
                      traw.const.SHOW_ANNOUNCEMENT: False,
                      traw.const.SUITE_MODE: 1}
    project = models.Project(client, dict(extra='extra', **project_config))
    client.api.project_add.return_value = dict(id=PROJECT_ID, **project_config)
    response = client.add(project)
    assert isinstance(response, models.Project)
    assert response.id == PROJECT_ID
    assert client.api.project_add.called
    assert 'mock name' in str(client.api.project_add.call_args)
    assert 'extra' not in str(client.api.project_add.call_args)
def test_add_run_no_case_ids(client):
    """client.add(Run) with include_all set and no explicit case IDs."""
    RUN_ID = 111
    PROJECT_ID = 15
    run_config = {traw.const.NAME: 'mock name',
                  traw.const.DESCRIPTION: 'mock description',
                  traw.const.MILESTONE_ID: '22',
                  traw.const.ASSIGNEDTO_ID: '33',
                  traw.const.INCLUDE_ALL: True,
                  traw.const.CASE_IDS: list(),
                  traw.const.PROJECT_ID: PROJECT_ID}
    run = models.Run(client, dict(extra='extra', **run_config))
    client.api.run_add.return_value = dict(id=RUN_ID, **run_config)
    # Adding a run looks up its project, so stub out client.project.
    with mock.patch.object(client, 'project') as proj_mock:
        proj_mock.return_value = models.Project(client, {'id': PROJECT_ID})
        response = client.add(run)
    assert isinstance(response, models.Run)
    assert response.id == RUN_ID
    assert client.api.run_add.called
    assert 'mock name' in str(client.api.run_add.call_args)
    assert 'extra' not in str(client.api.run_add.call_args)
def test_add_result(client):
    """client.add(Result) resolves the test and posts result fields."""
    RESULT_ID = 111
    TEST_ID = 1155
    result_config = {traw.const.TEST_ID: 998877,
                     traw.const.STATUS_ID: 8,
                     traw.const.COMMENT: 'mock comment',
                     traw.const.VERSION: 'VER.SI.ON.RC',
                     traw.const.ELAPSED: 12345,
                     traw.const.DEFECTS: 'DEF1,DEF2',
                     traw.const.ASSIGNEDTO_ID: 77}
    result = models.Result(client, dict(extra='extra', **result_config))
    client.api.result_add.return_value = dict(id=RESULT_ID, **result_config)
    # Adding a result looks up its test, so stub out client.test.
    with mock.patch.object(client, 'test') as test_mock:
        test_mock.return_value = models.Test(client, {'id': TEST_ID})
        response = client.add(result)
    assert isinstance(response, models.Result)
    assert response.id == RESULT_ID
    assert client.api.result_add.called
    assert 'mock comment' in str(client.api.result_add.call_args)
    assert 'extra' not in str(client.api.result_add.call_args)
def test_add_run_with_case_ids(client):
    """client.add(Run) forwards an explicit case ID list to the API."""
    RUN_ID = 111
    PROJECT_ID = 15
    run_config = {traw.const.NAME: 'mock name',
                  traw.const.DESCRIPTION: 'mock description',
                  traw.const.MILESTONE_ID: '22',
                  traw.const.ASSIGNEDTO_ID: '33',
                  traw.const.INCLUDE_ALL: True,
                  traw.const.CASE_IDS: [1, 2, 3, 4],
                  traw.const.PROJECT_ID: PROJECT_ID}
    run = models.Run(client, dict(extra='extra', **run_config))
    client.api.run_add.return_value = dict(id=RUN_ID, **run_config)
    with mock.patch.object(client, 'project') as proj_mock:
        proj_mock.return_value = models.Project(client, {'id': PROJECT_ID})
        response = client.add(run)
    assert isinstance(response, models.Run)
    assert response.id == RUN_ID
    assert client.api.run_add.called
    assert 'mock name' in str(client.api.run_add.call_args)
    # The case ID list must survive the round trip to the API call.
    assert '[1, 2, 3, 4]' in str(client.api.run_add.call_args)
    assert 'extra' not in str(client.api.run_add.call_args)
def test_add_section(client):
    """client.add(Section) succeeds for a single-suite (suite_mode 1) project."""
    SECTION_ID = 14
    PROJECT_ID = 15
    PROJECT_DICT = {'id': PROJECT_ID, 'suite_mode': 1}
    section_config = {traw.const.NAME: 'mock name',
                      traw.const.DESCRIPTION: 'mock description',
                      traw.const.PROJECT_ID: PROJECT_ID}
    section = models.Section(client, dict(extra='extra', **section_config))
    client.api.section_add.return_value = dict(id=SECTION_ID, **section_config)
    with mock.patch.object(client, 'project') as proj_mock:
        proj_mock.return_value = models.Project(client, PROJECT_DICT)
        response = client.add(section)
    assert isinstance(response, models.Section)
    assert response.id == SECTION_ID
    assert client.api.section_add.called
    assert 'mock name' in str(client.api.section_add.call_args)
    assert 'extra' not in str(client.api.section_add.call_args)
def test_add_section_w_suite(client):
    """client.add(Section) with a suite ID works for a multi-suite project."""
    SECTION_ID = 13
    SUITE_ID = 14
    PROJECT_ID = 15
    PROJECT_DICT = {'id': PROJECT_ID, 'suite_mode': 2}
    section_config = {traw.const.NAME: 'mock name',
                      traw.const.DESCRIPTION: 'mock description',
                      traw.const.PROJECT_ID: PROJECT_ID,
                      traw.const.SUITE_ID: SUITE_ID}
    section = models.Section(client, dict(extra='extra', **section_config))
    client.api.section_add.return_value = dict(id=SECTION_ID, **section_config)
    with mock.patch.object(client, 'project') as proj_mock:
        proj_mock.return_value = models.Project(client, PROJECT_DICT)
        response = client.add(section)
    assert isinstance(response, models.Section)
    assert response.id == SECTION_ID
    assert client.api.section_add.called
    assert 'mock name' in str(client.api.section_add.call_args)
    assert 'extra' not in str(client.api.section_add.call_args)
def test_add_section_exc(client):
    """A section without a suite in a multi-suite project raises ValueError."""
    SECTION_ID = 13
    PROJECT_ID = 15
    PROJECT_DICT = {'id': PROJECT_ID, 'suite_mode': 2}
    section_config = {traw.const.NAME: 'mock name',
                      traw.const.DESCRIPTION: 'mock description',
                      traw.const.PROJECT_ID: PROJECT_ID}
    section = models.Section(client, dict(extra='extra', **section_config))
    client.api.section_add.return_value = dict(id=SECTION_ID, **section_config)
    with mock.patch.object(client, 'project') as proj_mock:
        proj_mock.return_value = models.Project(client, PROJECT_DICT)
        with pytest.raises(ValueError) as exc:
            client.add(section)
    assert "not in Single Suite mode" in str(exc)
    # The API must never be hit when validation fails client-side.
    assert not client.api.section_add.called
    proj_mock.assert_called_once_with(PROJECT_ID)
def test_add_sub_milestone(client):
SUB_MILESTONE_ID = 111
PARENT_ID = 222
PROJECT_ID = 15
milestone_config = {traw.const.NAME: 'mock name',
traw.const.DESCRIPTION: 'mock description',
traw.const.DUE_ON: 123456,
traw.const.START_ON: 12345,
traw.const.PROJECT_ID: PROJECT_ID}
milestone = models.Milestone(client, dict(extra='extra', **milestone_config))
sub_milestone = milestone.add_parent(models.Milestone(client, {'id': PARENT_ID}))
client.api.milestone_add.return_value = dict(id=SUB_MILESTONE_ID,
parent_id=PARENT_ID,
**milestone_config)
with mock.patch.object(client, 'project') as proj_mock:
proj_mock.return_value = models.Project(client, {'id': PROJECT_ID})
response = client.add(sub_milestone)
assert isinstance(response, models.SubMilestone)
assert response.id == SUB_MILESTONE_ID
assert client.api.milestone_add.called
assert 'mock name' in str(client.api.milestone_add.call_args)
assert 'extra' not in str(client.api.milestone_add.call_args)
def test_add_suite(client):
    """ Verify adding a models.Suite returns the created Suite """
    suite_id = 111
    project_id = 15
    config = {traw.const.NAME: 'mock name',
              traw.const.DESCRIPTION: 'mock description',
              traw.const.PROJECT_ID: project_id}
    suite = models.Suite(client, dict(extra='extra', **config))
    client.api.suite_add.return_value = dict(id=suite_id, **config)
    with mock.patch.object(client, 'project') as proj_mock:
        proj_mock.return_value = models.Project(client, {'id': project_id})
        result = client.add(suite)
    assert isinstance(result, models.Suite)
    assert result.id == suite_id
    assert client.api.suite_add.called
    call_repr = str(client.api.suite_add.call_args)
    assert 'mock name' in call_repr
    assert 'extra' not in call_repr
def test_close_exception_no_obj(client):
    """ Verify the Client raises an exception if close is called directly """
    with pytest.raises(TypeError) as excinfo:
        client.close()
    # Python 2 and 3 word the missing-argument TypeError differently
    expected = ("takes exactly 2 arguments (1 given)"
                if sys.version_info.major == 2
                else "required positional argument: 'obj'")
    assert expected in str(excinfo)
def test_close_exception_w_obj(client):
    """ Verify the Client raises an exception if close is called with an
    unsupported type
    """
    with pytest.raises(TypeError) as exc:
        # ints are not registered with any close handler
        client.close(1)
    assert "support closing objects of type" in str(exc)
def test_close_run(client):
    """ Verify closing a models.Run returns the closed Run """
    run_id = 111
    project_id = 15
    config = {traw.const.NAME: 'mock name',
              traw.const.DESCRIPTION: 'mock description',
              traw.const.MILESTONE_ID: '22',
              traw.const.ASSIGNEDTO_ID: '33',
              traw.const.INCLUDE_ALL: True,
              traw.const.CASE_IDS: list(),
              traw.const.PROJECT_ID: project_id}
    run = models.Run(client, dict(extra='extra', **config))
    client.api.run_close.return_value = dict(id=run_id, **config)
    with mock.patch.object(client, 'project') as proj_mock:
        proj_mock.return_value = models.Project(client, {'id': project_id})
        result = client.close(run)
    assert isinstance(result, models.Run)
    assert result.id == run_id
    assert client.api.run_close.called
    call_repr = str(client.api.run_close.call_args)
    assert 'mock name' not in call_repr
    assert 'extra' not in call_repr
def test_delete_exception_no_obj(client):
    """ Verify the Client raises an exception if delete is called directly """
    with pytest.raises(TypeError) as excinfo:
        client.delete()
    # Python 2 and 3 word the missing-argument TypeError differently
    expected = ("takes exactly 2 arguments (1 given)"
                if sys.version_info.major == 2
                else "required positional argument: 'obj'")
    assert expected in str(excinfo)
def test_delete_exception_w_obj(client):
    """ Verify the Client raises an exception if delete is called with an
    unsupported type
    """
    with pytest.raises(TypeError) as exc:
        # ints are not registered with any delete handler
        client.delete(1)
    assert "support deleting objects of type" in str(exc)
def test_delete_config(client):
    """ Verify deleting a models.Config calls the config delete endpoint """
    CONFIG_GROUP_ID = 456
    CONFIG_ID = 123
    PROJECT_ID = 15
    config_config = {traw.const.NAME: 'mock name',
                     traw.const.PROJECT_ID: PROJECT_ID,
                     'group_id': CONFIG_GROUP_ID}
    config = models.Config(client, dict(id=CONFIG_ID, **config_config))
    # BUG FIX: the stub belongs on ``config_delete`` (the endpoint exercised
    # and asserted below) -- the original stubbed ``project_delete``
    client.api.config_delete.return_value = dict()
    response = client.delete(config)
    assert response is None
    client.api.config_delete.assert_called_once_with(CONFIG_ID)
def test_delete_config_group(client):
    """ Verify deleting a models.ConfigGroup calls through to the API """
    config_group_id = 456
    project_id = 15
    cfg = {traw.const.NAME: 'mock name',
           traw.const.PROJECT_ID: project_id}
    con_grp = models.ConfigGroup(client, dict(id=config_group_id, **cfg))
    client.api.config_group_delete.return_value = dict()
    assert client.delete(con_grp) is None
    client.api.config_group_delete.assert_called_once_with(config_group_id)
def test_delete_milestone(client):
    """ Verify deleting a models.Milestone calls through to the API """
    milestone_id = 111
    project_id = 15
    cfg = {traw.const.NAME: 'mock name',
           traw.const.DESCRIPTION: 'mock description',
           traw.const.DUE_ON: 123456,
           traw.const.START_ON: 12345,
           traw.const.PROJECT_ID: project_id}
    milestone = models.Milestone(client, dict(id=milestone_id, **cfg))
    client.api.milestone_delete.return_value = dict()
    with mock.patch.object(client, 'project') as proj_mock:
        proj_mock.return_value = models.Project(client, {'id': project_id})
        assert client.delete(milestone) is None
    client.api.milestone_delete.assert_called_once_with(milestone_id)
def test_delete_project(client):
    """ Verify deleting a models.Project calls through to the API """
    project_id = 15
    cfg = {traw.const.NAME: 'mock name',
           traw.const.ANNOUNCEMENT: 'mock announcement',
           traw.const.SHOW_ANNOUNCEMENT: False,
           traw.const.SUITE_MODE: 1}
    project = models.Project(client, dict(id=project_id, **cfg))
    client.api.project_delete.return_value = dict()
    assert client.delete(project) is None
    client.api.project_delete.assert_called_once_with(project_id)
def test_delete_run(client):
    """ Verify deleting a models.Run calls through to the API """
    run_id = 111
    project_id = 15
    cfg = {traw.const.NAME: 'mock name',
           traw.const.DESCRIPTION: 'mock description',
           traw.const.MILESTONE_ID: '22',
           traw.const.ASSIGNEDTO_ID: '33',
           traw.const.INCLUDE_ALL: True,
           traw.const.CASE_IDS: list(),
           traw.const.PROJECT_ID: project_id}
    run = models.Run(client, dict(id=run_id, **cfg))
    client.api.run_delete.return_value = dict()
    with mock.patch.object(client, 'project') as proj_mock:
        proj_mock.return_value = models.Project(client, {'id': project_id})
        assert client.delete(run) is None
    client.api.run_delete.assert_called_once_with(run_id)
def test_delete_section(client):
    """ Verify deleting a models.Section calls through to the API """
    section_id = 111
    project_id = 15
    cfg = {traw.const.NAME: 'mock name',
           traw.const.DESCRIPTION: 'mock description',
           traw.const.PROJECT_ID: project_id}
    section = models.Section(client, dict(id=section_id, **cfg))
    client.api.section_delete.return_value = dict()
    with mock.patch.object(client, 'project') as proj_mock:
        proj_mock.return_value = models.Project(client, {'id': project_id})
        assert client.delete(section) is None
    client.api.section_delete.assert_called_once_with(section_id)
def test_delete_suite(client):
    """ Verify deleting a models.Suite calls through to the API """
    suite_id = 111
    project_id = 15
    cfg = {traw.const.NAME: 'mock name',
           traw.const.DESCRIPTION: 'mock description',
           traw.const.PROJECT_ID: project_id}
    suite = models.Suite(client, dict(id=suite_id, **cfg))
    client.api.suite_delete.return_value = dict()
    with mock.patch.object(client, 'project') as proj_mock:
        proj_mock.return_value = models.Project(client, {'id': project_id})
        assert client.delete(suite) is None
    client.api.suite_delete.assert_called_once_with(suite_id)
def test_update_exception_no_obj(client):
    """ Verify the Client raises an exception if update is called directly """
    with pytest.raises(TypeError) as excinfo:
        client.update()
    # Python 2 and 3 word the missing-argument TypeError differently
    expected = ("takes exactly 2 arguments (1 given)"
                if sys.version_info.major == 2
                else "required positional argument: 'obj'")
    assert expected in str(excinfo)
def test_update_exception_w_obj(client):
    """ Verify the Client raises an exception if update is called with an
    unsupported type
    """
    with pytest.raises(TypeError) as exc:
        # ints are not registered with any update handler
        client.update(1)
    assert "support updating objects of type" in str(exc)
def test_update_config(client):
    """ Verify updating a models.Config returns the refreshed Config """
    config_id = 16
    config_group_id = 17
    cfg = {traw.const.NAME: 'mock name',
           'group_id': config_group_id}
    config = models.Config(client, dict(extra='extra', **cfg))
    client.api.config_update.return_value = dict(id=config_id, **cfg)
    result = client.update(config)
    assert isinstance(result, models.Config)
    assert result.id == config_id
    assert client.api.config_update.called
    call_repr = str(client.api.config_update.call_args)
    assert 'mock name' in call_repr
    assert 'extra' not in call_repr
def test_update_config_group(client):
    """ Verify updating a models.ConfigGroup returns the refreshed group """
    config_group_id = 17
    cfg = {traw.const.NAME: 'mock name'}
    con_grp = models.ConfigGroup(client, dict(extra='extra', **cfg))
    client.api.config_group_update.return_value = dict(id=config_group_id, **cfg)
    result = client.update(con_grp)
    assert isinstance(result, models.ConfigGroup)
    assert result.id == config_group_id
    assert client.api.config_group_update.called
    call_repr = str(client.api.config_group_update.call_args)
    assert 'mock name' in call_repr
    assert 'extra' not in call_repr
def test_update_milestone(client):
    """ Verify updating a models.Milestone returns the refreshed Milestone """
    milestone_id = 111
    project_id = 15
    cfg = {traw.const.NAME: 'mock name',
           traw.const.DESCRIPTION: 'mock description',
           traw.const.DUE_ON: 123456,
           traw.const.START_ON: 12345,
           traw.const.PROJECT_ID: project_id}
    milestone = models.Milestone(client, dict(extra='extra', **cfg))
    client.api.milestone_update.return_value = dict(id=milestone_id, **cfg)
    with mock.patch.object(client, 'project') as proj_mock:
        proj_mock.return_value = models.Project(client, {'id': project_id})
        result = client.update(milestone)
    assert isinstance(result, models.Milestone)
    assert result.id == milestone_id
    assert client.api.milestone_update.called
    call_repr = str(client.api.milestone_update.call_args)
    assert 'mock name' in call_repr
    assert 'extra' not in call_repr
def test_update_project(client):
    """ Verify updating a models.Project returns the refreshed Project """
    project_id = 15
    cfg = {traw.const.NAME: 'mock name',
           traw.const.ANNOUNCEMENT: 'mock announcement',
           traw.const.SHOW_ANNOUNCEMENT: False,
           traw.const.SUITE_MODE: 1,
           traw.const.ID: project_id}
    project = models.Project(client, dict(extra='extra', **cfg))
    client.api.project_update.return_value = cfg
    result = client.update(project)
    assert isinstance(result, models.Project)
    assert result.id == project_id
    assert client.api.project_update.called
    call_repr = str(client.api.project_update.call_args)
    assert 'mock name' in call_repr
    assert 'extra' not in call_repr
def test_update_run_include_all(client):
    """ Verify updating an include-all models.Run returns the refreshed Run """
    run_id = 111
    project_id = 15
    cfg = {traw.const.NAME: 'mock name',
           traw.const.DESCRIPTION: 'mock description',
           traw.const.MILESTONE_ID: '22',
           traw.const.ASSIGNEDTO_ID: '33',
           traw.const.INCLUDE_ALL: True,
           traw.const.CASE_IDS: list(),
           traw.const.PROJECT_ID: project_id}
    run = models.Run(client, dict(extra='extra', **cfg))
    client.api.run_update.return_value = dict(id=run_id, **cfg)
    with mock.patch.object(client, 'project') as proj_mock:
        proj_mock.return_value = models.Project(client, {'id': project_id})
        result = client.update(run)
    assert isinstance(result, models.Run)
    assert result.id == run_id
    assert client.api.run_update.called
    call_repr = str(client.api.run_update.call_args)
    assert 'mock name' in call_repr
    assert 'extra' not in call_repr
def test_update_run_w_case_ids(client):
    """ Verify updating a models.Run forwards its explicit case IDs """
    run_id = 111
    project_id = 15
    # NOTE(review): INCLUDE_ALL is True even though explicit CASE_IDS are
    # supplied; the assertion below shows the client still serializes the
    # IDs -- confirm whether include_all=False was intended here
    cfg = {traw.const.NAME: 'mock name',
           traw.const.DESCRIPTION: 'mock description',
           traw.const.MILESTONE_ID: '22',
           traw.const.ASSIGNEDTO_ID: '33',
           traw.const.INCLUDE_ALL: True,
           traw.const.CASE_IDS: [1, 2, 3],
           traw.const.PROJECT_ID: project_id}
    run = models.Run(client, dict(extra='extra', **cfg))
    client.api.run_update.return_value = dict(id=run_id, **cfg)
    with mock.patch.object(client, 'project') as proj_mock:
        proj_mock.return_value = models.Project(client, {'id': project_id})
        result = client.update(run)
    assert isinstance(result, models.Run)
    assert result.id == run_id
    assert client.api.run_update.called
    call_repr = str(client.api.run_update.call_args)
    assert 'mock name' in call_repr
    assert '1,2,3' in call_repr
    assert 'extra' not in call_repr
def test_update_section(client):
    """ Verify updating a models.Section returns the refreshed Section """
    SECTION_ID = 111
    PROJECT_ID = 15
    # Fixed copy-paste from the milestone tests: sections carry no
    # due_on/start_on fields, so those keys are dropped from the config
    # (matches the section configs used by the other section tests here)
    section_config = {traw.const.NAME: 'mock name',
                      traw.const.DESCRIPTION: 'mock description',
                      traw.const.PROJECT_ID: PROJECT_ID}
    section = models.Section(client, dict(extra='extra', **section_config))
    client.api.section_update.return_value = dict(id=SECTION_ID, **section_config)
    with mock.patch.object(client, 'project') as proj_mock:
        proj_mock.return_value = models.Project(client, {'id': PROJECT_ID})
        response = client.update(section)
    assert isinstance(response, models.Section)
    assert response.id == SECTION_ID
    assert client.api.section_update.called
    assert 'mock name' in str(client.api.section_update.call_args)
    assert 'extra' not in str(client.api.section_update.call_args)
def test_update_sub_milestone(client):
    """ Verify updating a sub milestone returns a models.SubMilestone """
    sub_milestone_id = 111
    parent_id = 222
    project_id = 15
    cfg = {traw.const.NAME: 'mock name',
           traw.const.DESCRIPTION: 'mock description',
           traw.const.DUE_ON: 123456,
           traw.const.START_ON: 12345,
           traw.const.PROJECT_ID: project_id}
    milestone = models.Milestone(client, dict(extra='extra', **cfg))
    sub_milestone = milestone.add_parent(models.Milestone(client, {'id': parent_id}))
    client.api.milestone_update.return_value = dict(id=sub_milestone_id,
                                                    parent_id=parent_id,
                                                    **cfg)
    with mock.patch.object(client, 'project') as proj_mock:
        proj_mock.return_value = models.Project(client, {'id': project_id})
        result = client.update(sub_milestone)
    assert isinstance(result, models.SubMilestone)
    assert result.id == sub_milestone_id
    assert client.api.milestone_update.called
    call_repr = str(client.api.milestone_update.call_args)
    assert 'mock name' in call_repr
    assert 'extra' not in call_repr
def test_update_suite(client):
    """ Verify updating a models.Suite returns the refreshed Suite """
    suite_id = 111
    project_id = 15
    cfg = {traw.const.NAME: 'mock name',
           traw.const.DESCRIPTION: 'mock description',
           traw.const.PROJECT_ID: project_id}
    suite = models.Suite(client, dict(extra='extra', **cfg))
    client.api.suite_update.return_value = dict(id=suite_id, **cfg)
    with mock.patch.object(client, 'project') as proj_mock:
        proj_mock.return_value = models.Project(client, {'id': project_id})
        result = client.update(suite)
    assert isinstance(result, models.Suite)
    assert result.id == suite_id
    assert client.api.suite_update.called
    call_repr = str(client.api.suite_update.call_args)
    assert 'mock name' in call_repr
    assert 'extra' not in call_repr
def test_case(client):
    """ Verify case method returns a new models.Case instance if called without
    any parameters
    """
    case = client.case()
    assert isinstance(case, models.Case)
    # A freshly constructed Case has every exposed attribute unset
    for attr in ('created_by', 'created_on', 'estimate', 'estimate_forecast',
                 'id', 'milestone', 'priority', 'suite', 'template', 'title',
                 'case_type', 'updated_by', 'updated_on'):
        assert getattr(case, attr) is None
def test_case_by_id(client):
    """ Verify calling ``client.case(123)`` with an ID returns that case """
    case_id = 1234
    client.api.case_by_id.return_value = {'id': case_id}
    result = client.case(case_id)
    assert isinstance(result, models.Case)
    assert result.id == case_id
    client.api.case_by_id.assert_called_once_with(case_id)
def test_cases_exc(client):
    """ Verify the Client's ``cases`` method throws an exception if called """
    with pytest.raises(NotImplementedError) as excinfo:
        client.cases()
    assert 'You must pass in models.Project or int object' in str(excinfo)
    assert not client.api.cases_by_project_id.called
def test_cases_by_project_id(client):
    """ Verify calling ``client.cases(123)`` with an ID returns case
    generator
    """
    project_id = 15
    client.api.project_by_id.return_value = {'id': project_id, 'suite_mode': 1}
    client.api.cases_by_project_id.return_value = [CASE1, CASE2, CASE3]
    cases = client.cases(project_id)
    # Consume the generator and pin each yielded case in order
    for expected_id in (991, 992, 993):
        case = next(cases)
        assert isinstance(case, models.Case)
        assert case.id == expected_id
    client.api.project_by_id.assert_called_once_with(project_id)
    client.api.cases_by_project_id.assert_called_once_with(project_id)
def test_cases_by_project(client):
    """ Verify calling ``client.cases(Project)`` returns a case generator """
    project_id = 15
    project_dict = {'id': project_id, 'suite_mode': 1}
    client.api.project_by_id.return_value = project_dict
    client.api.cases_by_project_id.return_value = [CASE1, CASE2, CASE3]
    cases = client.cases(models.Project(client, project_dict))
    # Consume the generator and pin each yielded case in order
    for expected_id in (991, 992, 993):
        case = next(cases)
        assert isinstance(case, models.Case)
        assert case.id == expected_id
    client.api.project_by_id.assert_called_once_with(project_id)
    client.api.cases_by_project_id.assert_called_once_with(project_id)
def test_cases_by_project_and_suite_and_section(client):
    """ Verify calling ``client.cases(Project, Suite, Section)`` returns
    case generator
    """
    project_id, suite_id, section_id = 15, 16, 17
    project_dict = {'id': project_id, 'suite_mode': 2}
    client.api.project_by_id.return_value = project_dict
    client.api.cases_by_project_id.return_value = [CASE1, ]
    cases = client.cases(models.Project(client, project_dict),
                         models.Suite(client, {'id': suite_id}),
                         models.Section(client, {'id': section_id}))
    case = next(cases)
    assert isinstance(case, models.Case)
    assert case.id == 991
    client.api.project_by_id.assert_called_once_with(project_id)
    client.api.cases_by_project_id.assert_called_once_with(
        project_id, suite_id=suite_id, section_id=section_id)
def test_cases_by_project_and_suite_id_and_section_id(client):
    """ Verify calling ``client.cases(Project, 16, 17)`` returns
    case generator
    """
    project_id, suite_id, section_id = 15, 16, 17
    project_dict = {'id': project_id, 'suite_mode': 2}
    client.api.project_by_id.return_value = project_dict
    client.api.cases_by_project_id.return_value = [CASE1, ]
    cases = client.cases(models.Project(client, project_dict), suite_id, section_id)
    case = next(cases)
    assert isinstance(case, models.Case)
    assert case.id == 991
    client.api.project_by_id.assert_called_once_with(project_id)
    client.api.cases_by_project_id.assert_called_once_with(
        project_id, suite_id=suite_id, section_id=section_id)
def test_cases_by_project_exc_1(client):
    """ Verify calling ``client.cases(Project)`` when the project is a
    suite_mode of 2 raises an exception
    """
    project_id = 15
    project_dict = {'id': project_id, 'suite_mode': 2}
    client.api.project_by_id.return_value = project_dict
    client.api.cases_by_project_id.return_value = [SECT1, ]
    with pytest.raises(TypeError) as excinfo:
        list(client.cases(models.Project(client, project_dict)))
    assert 'suite_mode of 2' in str(excinfo)
    client.api.project_by_id.assert_called_once_with(project_id)
    assert not client.api.cases_by_project_id.called
def test_cases_by_project_exc_2(client):
    """ Verify calling ``client.cases(Project, 'asdf')`` when the project
    is a suite_mode of 2 raises an exception
    """
    project_id = 15
    project_dict = {'id': project_id, 'suite_mode': 2}
    client.api.project_by_id.return_value = project_dict
    client.api.cases_by_project_id.return_value = [CASE1, ]
    with pytest.raises(TypeError) as excinfo:
        list(client.cases(models.Project(client, project_dict), 'asdf'))
    assert 'models.Suite' in str(excinfo)
    assert 'int ID of a suite in testrail' in str(excinfo)
    client.api.project_by_id.assert_called_once_with(project_id)
    assert not client.api.cases_by_project_id.called
def test_cases_by_project_exc_3(client):
    """ Verify calling ``client.cases(Project, None, 'asdf')`` when the project
    is a suite_mode of 2 raises an exception
    """
    project_id = 15
    suite_id = 16
    project_dict = {'id': project_id, 'suite_mode': 2}
    client.api.project_by_id.return_value = project_dict
    client.api.cases_by_project_id.return_value = [CASE1, ]
    with pytest.raises(TypeError) as excinfo:
        list(client.cases(models.Project(client, project_dict), suite_id, 'asdf'))
    assert 'models.Section' in str(excinfo)
    assert 'int ID of a section in testrail' in str(excinfo)
    client.api.project_by_id.assert_called_once_with(project_id)
    assert not client.api.cases_by_project_id.called
def test_cases_by_project_w_case_type(client):
    """ Verify calling ``client.cases(Project)`` with case_type """
    project_id = 15
    project_dict = {'id': project_id, 'suite_mode': 1}
    client.api.project_by_id.return_value = project_dict
    target = models.Project(client, project_dict)
    list(client.cases(target, case_type=models.CaseType(client, {'id': 11})))
    client.api.project_by_id.assert_called_once_with(project_id)
    client.api.cases_by_project_id.assert_called_once_with(project_id, type_id='11')
def test_cases_by_project_w_case_type_list(client):
    """ Verify calling ``client.cases(Project)`` with a case_type list """
    project_id = 15
    project_dict = {'id': project_id, 'suite_mode': 1}
    client.api.project_by_id.return_value = project_dict
    target = models.Project(client, project_dict)
    types = [models.CaseType(client, {'id': 11}),
             models.CaseType(client, {'id': 12})]
    list(client.cases(target, case_type=types))
    client.api.project_by_id.assert_called_once_with(project_id)
    client.api.cases_by_project_id.assert_called_once_with(project_id, type_id='11,12')
def test_cases_by_project_w_case_type_id(client):
    """ Verify calling ``client.cases(Project)`` with a case_type int ID """
    project_id = 15
    project_dict = {'id': project_id, 'suite_mode': 1}
    client.api.project_by_id.return_value = project_dict
    list(client.cases(models.Project(client, project_dict), case_type=11))
    client.api.project_by_id.assert_called_once_with(project_id)
    client.api.cases_by_project_id.assert_called_once_with(project_id, type_id='11')
def test_cases_by_project_w_case_type_id_list(client):
    """ Verify calling ``client.cases(Project)`` with a case_type ID list """
    project_id = 15
    project_dict = {'id': project_id, 'suite_mode': 1}
    client.api.project_by_id.return_value = project_dict
    list(client.cases(models.Project(client, project_dict), case_type=[11, 12]))
    client.api.project_by_id.assert_called_once_with(project_id)
    client.api.cases_by_project_id.assert_called_once_with(project_id, type_id='11,12')
def test_cases_by_project_w_case_type_exc(client):
    """ Verify calling ``client.cases(Project)`` with case_type exception """
    project_id = 15
    project_dict = {'id': project_id, 'suite_mode': 1}
    client.api.project_by_id.return_value = project_dict
    with pytest.raises(TypeError) as excinfo:
        list(client.cases(models.Project(client, project_dict), case_type='asdf'))
    msg = str(excinfo)
    assert 'asdf' in msg
    assert str(models.CaseType) in msg
    assert str(int) in msg
def test_cases_by_project_w_int_created_after(client):
    """ Verify calling ``client.cases(Project)`` with an int created_after """
    project_id = 15
    project_dict = {'id': project_id, 'suite_mode': 1}
    client.api.project_by_id.return_value = project_dict
    list(client.cases(models.Project(client, project_dict), created_after=1112))
    client.api.project_by_id.assert_called_once_with(project_id)
    client.api.cases_by_project_id.assert_called_once_with(project_id, created_after=1112)
def test_cases_by_project_w_float_created_after(client):
    """ Verify calling ``client.cases(Project)`` with a float created_after
    is truncated to an int timestamp
    """
    project_id = 15
    project_dict = {'id': project_id, 'suite_mode': 1}
    client.api.project_by_id.return_value = project_dict
    list(client.cases(models.Project(client, project_dict), created_after=11.12))
    client.api.project_by_id.assert_called_once_with(project_id)
    client.api.cases_by_project_id.assert_called_once_with(project_id, created_after=11)
def test_cases_by_project_w_datetime_created_after(client):
    """ Verify calling ``client.cases(Project)`` with a datetime
    created_after is converted to an int timestamp
    """
    project_id = 15
    project_dict = {'id': project_id, 'suite_mode': 1}
    client.api.project_by_id.return_value = project_dict
    cutoff = dt.fromtimestamp(33.22)
    list(client.cases(models.Project(client, project_dict), created_after=cutoff))
    client.api.project_by_id.assert_called_once_with(project_id)
    client.api.cases_by_project_id.assert_called_once_with(project_id, created_after=33)
def test_cases_by_project_w_created_after_exc(client):
    """ Verify calling ``client.cases(Project)`` with created_after exception """
    project_id = 15
    project_dict = {'id': project_id, 'suite_mode': 1}
    client.api.project_by_id.return_value = project_dict
    with pytest.raises(TypeError) as excinfo:
        list(client.cases(
            models.Project(client, {'id': 1234}), created_after='asdf'))
    assert 'created/updated after/before' in str(excinfo)
    assert 'asdf' in str(excinfo)
    assert not client.api.cases_by_project_id.called
def test_cases_by_project_w_int_created_before(client):
    """ Verify calling ``client.cases(Project)`` with an int created_before """
    project_id = 15
    project_dict = {'id': project_id, 'suite_mode': 1}
    client.api.project_by_id.return_value = project_dict
    list(client.cases(models.Project(client, project_dict), created_before=1112))
    client.api.project_by_id.assert_called_once_with(project_id)
    client.api.cases_by_project_id.assert_called_once_with(project_id, created_before=1112)
def test_cases_by_project_w_float_created_before(client):
    """ Verify calling ``client.cases(Project)`` with a float created_before
    is truncated to an int timestamp
    """
    project_id = 15
    project_dict = {'id': project_id, 'suite_mode': 1}
    client.api.project_by_id.return_value = project_dict
    list(client.cases(models.Project(client, project_dict), created_before=11.12))
    client.api.project_by_id.assert_called_once_with(project_id)
    client.api.cases_by_project_id.assert_called_once_with(project_id, created_before=11)
def test_cases_by_project_w_datetime_created_before(client):
    """ Verify calling ``client.cases(Project)`` with a datetime
    created_before is converted to an int timestamp
    """
    project_id = 15
    project_dict = {'id': project_id, 'suite_mode': 1}
    client.api.project_by_id.return_value = project_dict
    cutoff = dt.fromtimestamp(33.22)
    list(client.cases(models.Project(client, project_dict), created_before=cutoff))
    client.api.project_by_id.assert_called_once_with(project_id)
    client.api.cases_by_project_id.assert_called_once_with(project_id, created_before=33)
def test_cases_by_project_w_created_before_exc(client):
    """ Verify calling ``client.cases(Project)`` with created_before exception """
    project_id = 15
    project_dict = {'id': project_id, 'suite_mode': 1}
    client.api.project_by_id.return_value = project_dict
    with pytest.raises(TypeError) as excinfo:
        list(client.cases(models.Project(client, project_dict), created_before='asdf'))
    assert 'created/updated after/before' in str(excinfo)
    assert 'asdf' in str(excinfo)
    assert not client.api.cases_by_project_id.called
def test_cases_by_project_w_created_by_user(client):
    """ Verify calling ``client.cases(Project)`` with a created_by User """
    project_id = 15
    project_dict = {'id': project_id, 'suite_mode': 1}
    client.api.project_by_id.return_value = project_dict
    target = models.Project(client, project_dict)
    list(client.cases(target, created_by=models.User(client, {'id': 11})))
    client.api.project_by_id.assert_called_once_with(project_id)
    client.api.cases_by_project_id.assert_called_once_with(project_id, created_by='11')
def test_cases_by_project_w_created_by_user_list(client):
    """ Verify calling ``client.cases(Project)`` with a created_by User list """
    project_id = 15
    project_dict = {'id': project_id, 'suite_mode': 1}
    client.api.project_by_id.return_value = project_dict
    users = [models.User(client, {'id': 11}), models.User(client, {'id': 12})]
    list(client.cases(models.Project(client, project_dict), created_by=users))
    client.api.project_by_id.assert_called_once_with(project_id)
    client.api.cases_by_project_id.assert_called_once_with(project_id, created_by='11,12')
def test_cases_by_project_w_created_by_user_id(client):
    """ Verify calling ``client.cases(Project)`` with a created_by int ID """
    project_id = 15
    project_dict = {'id': project_id, 'suite_mode': 1}
    client.api.project_by_id.return_value = project_dict
    list(client.cases(models.Project(client, project_dict), created_by=11))
    client.api.project_by_id.assert_called_once_with(project_id)
    client.api.cases_by_project_id.assert_called_once_with(project_id, created_by='11')
def test_cases_by_project_w_created_by_user_id_list(client):
    """ Verify calling ``client.cases(Project)`` with a created_by ID list """
    project_id = 15
    project_dict = {'id': project_id, 'suite_mode': 1}
    client.api.project_by_id.return_value = project_dict
    list(client.cases(models.Project(client, project_dict), created_by=[11, 12]))
    client.api.project_by_id.assert_called_once_with(project_id)
    client.api.cases_by_project_id.assert_called_once_with(project_id, created_by='11,12')
def test_cases_by_project_w_created_by_exc(client):
    """ Verify calling ``client.cases(Project)`` with created_by exception """
    project_id = 15
    project_dict = {'id': project_id, 'suite_mode': 1}
    client.api.project_by_id.return_value = project_dict
    with pytest.raises(TypeError) as excinfo:
        list(client.cases(models.Project(client, project_dict), created_by='asdf'))
    msg = str(excinfo)
    assert 'asdf' in msg
    assert str(models.User) in msg
    assert str(int) in msg
def test_cases_by_project_w_milestone(client):
    """ Verify calling ``client.cases(Project)`` with a Milestone """
    project_id = 15
    project_dict = {'id': project_id, 'suite_mode': 1}
    client.api.project_by_id.return_value = project_dict
    target = models.Project(client, project_dict)
    list(client.cases(target, milestone=models.Milestone(client, {'id': 11})))
    client.api.project_by_id.assert_called_once_with(project_id)
    client.api.cases_by_project_id.assert_called_once_with(project_id, milestone_id='11')
def test_cases_by_project_w_milestone_list(client):
    """ Verify calling ``client.cases(Project)`` with a Milestone list """
    project_id = 15
    project_dict = {'id': project_id, 'suite_mode': 1}
    client.api.project_by_id.return_value = project_dict
    milestones = [models.Milestone(client, {'id': 11}),
                  models.Milestone(client, {'id': 12})]
    list(client.cases(models.Project(client, project_dict), milestone=milestones))
    client.api.project_by_id.assert_called_once_with(project_id)
    client.api.cases_by_project_id.assert_called_once_with(project_id, milestone_id='11,12')
def test_cases_by_project_w_milestone_id(client):
    """ Verify calling ``client.cases(Project)`` with a milestone int ID """
    project_id = 15
    project_dict = {'id': project_id, 'suite_mode': 1}
    client.api.project_by_id.return_value = project_dict
    list(client.cases(models.Project(client, project_dict), milestone=11))
    client.api.project_by_id.assert_called_once_with(project_id)
    client.api.cases_by_project_id.assert_called_once_with(project_id, milestone_id='11')
def test_cases_by_project_w_milestone_id_list(client):
    """ Verify calling ``client.cases(Project)`` with a milestone ID list """
    project_id = 15
    project_dict = {'id': project_id, 'suite_mode': 1}
    client.api.project_by_id.return_value = project_dict
    list(client.cases(models.Project(client, project_dict), milestone=[11, 12]))
    client.api.project_by_id.assert_called_once_with(project_id)
    client.api.cases_by_project_id.assert_called_once_with(project_id, milestone_id='11,12')
def test_cases_by_project_w_milestone_exc(client):
    """ Verify calling ``client.cases(Project)`` with milestone exception """
    project_id = 15
    project_dict = {'id': project_id, 'suite_mode': 1}
    client.api.project_by_id.return_value = project_dict
    with pytest.raises(TypeError) as excinfo:
        list(client.cases(models.Project(client, project_dict), milestone='asdf'))
    msg = str(excinfo)
    assert 'asdf' in msg
    assert str(models.Milestone) in msg
    assert str(int) in msg
def test_cases_by_project_w_sub_milestone(client):
    """ Verify calling ``client.cases(Project)`` with a SubMilestone """
    project_id = 15
    project_dict = {'id': project_id, 'suite_mode': 1}
    client.api.project_by_id.return_value = project_dict
    target = models.Project(client, project_dict)
    list(client.cases(target, milestone=models.SubMilestone(client, {'id': 11})))
    client.api.project_by_id.assert_called_once_with(project_id)
    client.api.cases_by_project_id.assert_called_once_with(project_id, milestone_id='11')
def test_cases_by_project_w_sub_milestone_list(client):
    """ Verify calling ``client.cases(Project)`` with a SubMilestone list """
    project_id = 15
    project_dict = {'id': project_id, 'suite_mode': 1}
    client.api.project_by_id.return_value = project_dict
    subs = [models.SubMilestone(client, {'id': 11}),
            models.SubMilestone(client, {'id': 12})]
    list(client.cases(models.Project(client, project_dict), milestone=subs))
    client.api.project_by_id.assert_called_once_with(project_id)
    client.api.cases_by_project_id.assert_called_once_with(project_id, milestone_id='11,12')
def test_cases_by_project_w_priority(client):
    """ Verify ``client.cases(Project)`` forwards a Priority's ID """
    pid = 15
    proj_data = {'id': pid, 'suite_mode': 1}
    client.api.project_by_id.return_value = proj_data
    proj = models.Project(client, proj_data)
    prio = models.Priority(client, {'id': 11})
    list(client.cases(proj, priority=prio))
    client.api.cases_by_project_id.assert_called_once_with(pid, priority_id='11')
    client.api.project_by_id.assert_called_once_with(pid)
def test_cases_by_project_w_priority_list(client):
    """ Verify ``client.cases(Project)`` joins Priority IDs with commas """
    pid = 15
    proj_data = {'id': pid, 'suite_mode': 1}
    client.api.project_by_id.return_value = proj_data
    proj = models.Project(client, proj_data)
    prios = [models.Priority(client, {'id': i}) for i in (11, 12)]
    list(client.cases(proj, priority=prios))
    client.api.cases_by_project_id.assert_called_once_with(pid, priority_id='11,12')
    client.api.project_by_id.assert_called_once_with(pid)
def test_cases_by_project_w_priority_id(client):
    """ Verify ``client.cases(Project)`` accepts a bare priority ID """
    pid = 15
    proj_data = {'id': pid, 'suite_mode': 1}
    client.api.project_by_id.return_value = proj_data
    proj = models.Project(client, proj_data)
    list(client.cases(proj, priority=11))
    client.api.cases_by_project_id.assert_called_once_with(pid, priority_id='11')
    client.api.project_by_id.assert_called_once_with(pid)
def test_cases_by_project_w_priority_id_list(client):
    """ Verify ``client.cases(Project)`` joins bare priority IDs with commas """
    pid = 15
    proj_data = {'id': pid, 'suite_mode': 1}
    client.api.project_by_id.return_value = proj_data
    proj = models.Project(client, proj_data)
    list(client.cases(proj, priority=[11, 12]))
    client.api.cases_by_project_id.assert_called_once_with(pid, priority_id='11,12')
    client.api.project_by_id.assert_called_once_with(pid)
def test_cases_by_project_w_priority_exc(client):
    """ Verify ``client.cases(Project)`` raises TypeError for a bad priority """
    PROJECT_ID = 15
    PROJECT_DICT = {'id': PROJECT_ID, 'suite_mode': 1}
    project = models.Project(client, PROJECT_DICT)
    client.api.project_by_id.return_value = PROJECT_DICT
    with pytest.raises(TypeError) as exc:
        list(client.cases(project, priority='asdf'))
    # Assert on the exception message itself, not the ExceptionInfo repr
    assert 'asdf' in str(exc.value)
    assert str(models.Priority) in str(exc.value)
    assert str(int) in str(exc.value)
def test_cases_by_project_w_template(client):
    """ Verify ``client.cases(Project)`` forwards a Template's ID """
    pid = 15
    proj_data = {'id': pid, 'suite_mode': 1}
    client.api.project_by_id.return_value = proj_data
    proj = models.Project(client, proj_data)
    tmpl = models.Template(client, {'id': 11})
    list(client.cases(proj, template=tmpl))
    client.api.cases_by_project_id.assert_called_once_with(pid, template_id='11')
    client.api.project_by_id.assert_called_once_with(pid)
def test_cases_by_project_w_template_list(client):
    """ Verify ``client.cases(Project)`` joins Template IDs with commas """
    pid = 15
    proj_data = {'id': pid, 'suite_mode': 1}
    client.api.project_by_id.return_value = proj_data
    proj = models.Project(client, proj_data)
    tmpls = [models.Template(client, {'id': i}) for i in (11, 12)]
    list(client.cases(proj, template=tmpls))
    client.api.cases_by_project_id.assert_called_once_with(pid, template_id='11,12')
    client.api.project_by_id.assert_called_once_with(pid)
def test_cases_by_project_w_template_id(client):
    """ Verify ``client.cases(Project)`` accepts a bare template ID """
    pid = 15
    proj_data = {'id': pid, 'suite_mode': 1}
    client.api.project_by_id.return_value = proj_data
    proj = models.Project(client, proj_data)
    list(client.cases(proj, template=11))
    client.api.cases_by_project_id.assert_called_once_with(pid, template_id='11')
    client.api.project_by_id.assert_called_once_with(pid)
def test_cases_by_project_w_template_id_list(client):
    """ Verify ``client.cases(Project)`` joins bare template IDs with commas """
    pid = 15
    proj_data = {'id': pid, 'suite_mode': 1}
    client.api.project_by_id.return_value = proj_data
    proj = models.Project(client, proj_data)
    list(client.cases(proj, template=[11, 12]))
    client.api.cases_by_project_id.assert_called_once_with(pid, template_id='11,12')
    client.api.project_by_id.assert_called_once_with(pid)
def test_cases_by_project_w_template_exc(client):
    """ Verify ``client.cases(Project)`` raises TypeError for a bad template """
    PROJECT_ID = 15
    PROJECT_DICT = {'id': PROJECT_ID, 'suite_mode': 1}
    project = models.Project(client, PROJECT_DICT)
    client.api.project_by_id.return_value = PROJECT_DICT
    with pytest.raises(TypeError) as exc:
        list(client.cases(project, template='asdf'))
    # Assert on the exception message itself, not the ExceptionInfo repr
    assert 'asdf' in str(exc.value)
    assert str(models.Template) in str(exc.value)
    assert str(int) in str(exc.value)
def test_cases_by_project_w_int_updated_after(client):
    """ Verify ``client.cases(Project)`` passes an int updated_after through """
    pid = 15
    proj_data = {'id': pid, 'suite_mode': 1}
    client.api.project_by_id.return_value = proj_data
    proj = models.Project(client, proj_data)
    list(client.cases(proj, updated_after=1112))
    client.api.cases_by_project_id.assert_called_once_with(pid, updated_after=1112)
    client.api.project_by_id.assert_called_once_with(pid)
def test_cases_by_project_w_float_updated_after(client):
    """ Verify ``client.cases(Project)`` truncates a float updated_after """
    pid = 15
    proj_data = {'id': pid, 'suite_mode': 1}
    client.api.project_by_id.return_value = proj_data
    proj = models.Project(client, proj_data)
    list(client.cases(proj, updated_after=11.12))
    client.api.cases_by_project_id.assert_called_once_with(pid, updated_after=11)
    client.api.project_by_id.assert_called_once_with(pid)
def test_cases_by_project_w_datetime_updated_after(client):
    """ Verify ``client.cases(Project)`` converts a datetime updated_after
    to an integer timestamp
    """
    pid = 15
    proj_data = {'id': pid, 'suite_mode': 1}
    client.api.project_by_id.return_value = proj_data
    proj = models.Project(client, proj_data)
    after_dt = dt.fromtimestamp(33.22)
    list(client.cases(proj, updated_after=after_dt))
    client.api.cases_by_project_id.assert_called_once_with(pid, updated_after=33)
    client.api.project_by_id.assert_called_once_with(pid)
def test_cases_by_project_w_updated_after_exc(client):
    """ Verify ``client.cases(Project)`` raises TypeError for a bad updated_after """
    PROJECT_ID = 15
    PROJECT_DICT = {'id': PROJECT_ID, 'suite_mode': 1}
    client.api.project_by_id.return_value = PROJECT_DICT
    with pytest.raises(TypeError) as exc:
        list(client.cases(
            models.Project(client, {'id': 1234}), updated_after='asdf'))
    # Assert on the exception message itself, not the ExceptionInfo repr
    assert 'created/updated after/before' in str(exc.value)
    assert 'asdf' in str(exc.value)
    assert not client.api.cases_by_project_id.called
def test_cases_by_project_w_int_updated_before(client):
    """ Verify ``client.cases(Project)`` passes an int updated_before through """
    pid = 15
    proj_data = {'id': pid, 'suite_mode': 1}
    client.api.project_by_id.return_value = proj_data
    proj = models.Project(client, proj_data)
    list(client.cases(proj, updated_before=1112))
    client.api.cases_by_project_id.assert_called_once_with(pid, updated_before=1112)
    client.api.project_by_id.assert_called_once_with(pid)
def test_cases_by_project_w_float_updated_before(client):
    """ Verify ``client.cases(Project)`` truncates a float updated_before """
    pid = 15
    proj_data = {'id': pid, 'suite_mode': 1}
    client.api.project_by_id.return_value = proj_data
    proj = models.Project(client, proj_data)
    list(client.cases(proj, updated_before=11.12))
    client.api.cases_by_project_id.assert_called_once_with(pid, updated_before=11)
    client.api.project_by_id.assert_called_once_with(pid)
def test_cases_by_project_w_datetime_updated_before(client):
    """ Verify ``client.cases(Project)`` converts a datetime updated_before
    to an integer timestamp
    """
    pid = 15
    proj_data = {'id': pid, 'suite_mode': 1}
    client.api.project_by_id.return_value = proj_data
    proj = models.Project(client, proj_data)
    before_dt = dt.fromtimestamp(33.22)
    list(client.cases(proj, updated_before=before_dt))
    client.api.cases_by_project_id.assert_called_once_with(pid, updated_before=33)
    client.api.project_by_id.assert_called_once_with(pid)
def test_cases_by_project_w_updated_before_exc(client):
    """ Verify ``client.cases(Project)`` raises TypeError for a bad updated_before """
    PROJECT_ID = 15
    PROJECT_DICT = {'id': PROJECT_ID, 'suite_mode': 1}
    project = models.Project(client, PROJECT_DICT)
    client.api.project_by_id.return_value = PROJECT_DICT
    with pytest.raises(TypeError) as exc:
        list(client.cases(project, updated_before='asdf'))
    # Assert on the exception message itself, not the ExceptionInfo repr
    assert 'created/updated after/before' in str(exc.value)
    assert 'asdf' in str(exc.value)
    assert not client.api.cases_by_project_id.called
def test_cases_by_project_w_updated_by_user(client):
    """ Verify ``client.cases(Project)`` forwards a User's ID as updated_by """
    pid = 15
    proj_data = {'id': pid, 'suite_mode': 1}
    client.api.project_by_id.return_value = proj_data
    proj = models.Project(client, proj_data)
    editor = models.User(client, {'id': 11})
    list(client.cases(proj, updated_by=editor))
    client.api.cases_by_project_id.assert_called_once_with(pid, updated_by='11')
    client.api.project_by_id.assert_called_once_with(pid)
def test_cases_by_project_w_updated_by_user_list(client):
    """ Verify ``client.cases(Project)`` joins User IDs for updated_by """
    pid = 15
    proj_data = {'id': pid, 'suite_mode': 1}
    client.api.project_by_id.return_value = proj_data
    proj = models.Project(client, proj_data)
    editors = [models.User(client, {'id': i}) for i in (11, 12)]
    list(client.cases(proj, updated_by=editors))
    client.api.cases_by_project_id.assert_called_once_with(pid, updated_by='11,12')
    client.api.project_by_id.assert_called_once_with(pid)
def test_cases_by_project_w_updated_by_user_id(client):
    """ Verify ``client.cases(Project)`` accepts a bare user ID for updated_by """
    pid = 15
    proj_data = {'id': pid, 'suite_mode': 1}
    client.api.project_by_id.return_value = proj_data
    proj = models.Project(client, proj_data)
    list(client.cases(proj, updated_by=11))
    client.api.cases_by_project_id.assert_called_once_with(pid, updated_by='11')
    client.api.project_by_id.assert_called_once_with(pid)
def test_cases_by_project_w_updated_by_user_id_list(client):
    """ Verify ``client.cases(Project)`` joins bare user IDs for updated_by """
    pid = 15
    proj_data = {'id': pid, 'suite_mode': 1}
    client.api.project_by_id.return_value = proj_data
    proj = models.Project(client, proj_data)
    list(client.cases(proj, updated_by=[11, 12]))
    client.api.cases_by_project_id.assert_called_once_with(pid, updated_by='11,12')
    client.api.project_by_id.assert_called_once_with(pid)
def test_cases_by_project_w_updated_by_exc(client):
    """ Verify ``client.cases(Project)`` raises TypeError for a bad updated_by """
    PROJECT_ID = 15
    PROJECT_DICT = {'id': PROJECT_ID, 'suite_mode': 1}
    project = models.Project(client, PROJECT_DICT)
    client.api.project_by_id.return_value = PROJECT_DICT
    with pytest.raises(TypeError) as exc:
        list(client.cases(project, updated_by='asdf'))
    # Assert on the exception message itself, not the ExceptionInfo repr
    assert 'asdf' in str(exc.value)
    assert str(models.User) in str(exc.value)
    assert str(int) in str(exc.value)
def test_case_type_exc(client):
    """ Verify ``client.case_type()`` with no args raises NotImplementedError """
    with pytest.raises(NotImplementedError) as exc:
        client.case_type()
    # Assert on the exception message itself, not the ExceptionInfo repr
    assert 'You must pass in int object' in str(exc.value)
    assert not client.api.case_types.called
def test_case_type_by_int(client):
    """ Verify ``client.case_type(int)`` returns the matching CaseType """
    client.api.case_types.return_value = [CT1, CT2, CT3]
    result = client.case_type(332)
    assert isinstance(result, models.CaseType)
    assert result.id == 332
    assert client.api.case_types.called
def test_case_type_by_int_exc(client):
    """ Verify ``client.case_type(int)`` raises TRAWClientError for an unknown ID """
    client.api.case_types.return_value = [CT1, CT2, CT3]
    with pytest.raises(TRAWClientError) as exc:
        client.case_type(334)
    # Assert on the exception message itself, not the ExceptionInfo repr
    assert 'id of 334' in str(exc.value)
    assert client.api.case_types.called
def test_case_types(client):
    """ Verify ``client.case_types()`` yields CaseType objects in order """
    client.api.case_types.return_value = [CT1, CT2, CT3]
    ct_gen = client.case_types()
    for expected in ('casetype1', 'casetype2', 'casetype3'):
        case_type = next(ct_gen)
        assert isinstance(case_type, models.CaseType)
        assert case_type.name == expected
    assert client.api.case_types.call_args == mock.call()
def test_config(client):
    """ Verify ``client.config()`` with no parameters builds an empty Config """
    config = client.config()
    assert isinstance(config, models.Config)
    for attr in ('name', 'id', 'config_group'):
        assert getattr(config, attr) is None
def test_config_by_project(client):
    """ Verify ``client.config(project, config_id)`` resolves the Config
    belonging to the given project
    """
    pid = 15
    conf_id = 4
    client.api.config_groups.return_value = [CG1, CG2, CG3]
    client.api.project_by_id.return_value = {'id': pid}
    proj = models.Project(client, {'id': pid})
    config = client.config(proj, conf_id)
    assert isinstance(config, models.Config)
    assert config.id == conf_id
    assert config.id == CONF4['id']
    assert config.name == CONF4['name']
    assert config.project.id == pid
    client.api.config_groups.assert_called_once_with(pid)
def test_config_by_project_id(client):
    """ Verify ``client.config(project_id, config_id)`` resolves the Config
    when given bare integer IDs
    """
    pid = 15
    conf_id = 4
    client.api.config_groups.return_value = [CG1, CG2, CG3]
    client.api.project_by_id.return_value = {'id': pid}
    config = client.config(pid, conf_id)
    assert isinstance(config, models.Config)
    assert config.id == conf_id
    assert config.id == CONF4['id']
    assert config.name == CONF4['name']
    assert config.project.id == pid
    client.api.config_groups.assert_called_once_with(pid)
def test_config_by_project_exc(client):
    """ Verify ``client.config(project, config_id)`` raises TRAWClientError
    when no config with that ID exists
    """
    PROJECT_ID = 15
    PROJECT = models.Project(client, {'id': PROJECT_ID})
    CONFIG_ID = 444
    client.api.config_groups.return_value = [CG1, CG2, CG3]
    client.api.project_by_id.return_value = {'id': PROJECT_ID}
    with pytest.raises(TRAWClientError) as exc:
        client.config(PROJECT, CONFIG_ID)
    # Assert on the exception message itself, not the ExceptionInfo repr
    assert 'with id of 444' in str(exc.value)
    client.api.config_groups.assert_called_once_with(PROJECT_ID)
def test_config_group(client):
    """ Verify ``client.config_group()`` with no parameters builds an
    empty ConfigGroup
    """
    group = client.config_group()
    assert isinstance(group, models.ConfigGroup)
    for attr in ('name', 'id', 'project'):
        assert getattr(group, attr) is None
    assert list(group.configs) == []
def test_config_group_by_project(client):
    """ Verify ``client.config_group(project, group_id)`` resolves the
    matching ConfigGroup
    """
    pid = 15
    group_id = 662
    client.api.config_groups.return_value = [CG1, CG2, CG3]
    client.api.project_by_id.return_value = {'id': pid}
    proj = models.Project(client, {'id': pid})
    group = client.config_group(proj, group_id)
    assert isinstance(group, models.ConfigGroup)
    assert group.id == group_id
    assert group.name == 'configgroup2'
    assert group.project.id == pid
    client.api.config_groups.assert_called_once_with(pid)
def test_config_group_by_project_id(client):
    """ Verify ``client.config_group(project_id, group_id)`` resolves the
    matching ConfigGroup from bare integer IDs
    """
    pid = 15
    group_id = 662
    client.api.config_groups.return_value = [CG1, CG2, CG3]
    client.api.project_by_id.return_value = {'id': pid}
    group = client.config_group(pid, group_id)
    assert isinstance(group, models.ConfigGroup)
    assert group.id == group_id
    assert group.name == 'configgroup2'
    assert group.project.id == pid
    client.api.config_groups.assert_called_once_with(pid)
def test_config_group_exc(client):
    """ Verify ``client.config_group(...)`` raises TRAWClientError for an
    unrecognized config group ID
    """
    PROJECT_ID = 15
    CONFIG_GROUP_ID = 666
    client.api.config_groups.return_value = [CG1, CG2, CG3]
    client.api.project_by_id.return_value = {'id': PROJECT_ID}
    with pytest.raises(TRAWClientError) as exc:
        client.config_group(PROJECT_ID, CONFIG_GROUP_ID)
    # Assert on the exception message itself, not the ExceptionInfo repr
    assert "models.ConfigGroup" in str(exc.value)
    assert "id of 666" in str(exc.value)
    client.api.config_groups.assert_called_once_with(PROJECT_ID)
def test_config_groups_by_project(client):
    """ Verify ``client.config_groups(project)`` yields ConfigGroups in order """
    pid = 15
    proj = models.Project(client, {'id': pid})
    client.api.config_groups.return_value = [CG1, CG2, CG3]
    group_gen = client.config_groups(proj)
    for expected_id in (661, 662, 663):
        group = next(group_gen)
        assert isinstance(group, models.ConfigGroup)
        assert group.id == expected_id
    client.api.config_groups.assert_called_once_with(pid)
def test_config_groups_by_project_id(client):
    """ Verify ``client.config_groups(project_id)`` yields ConfigGroups
    in order when given a bare integer ID
    """
    client.api.config_groups.return_value = [CG1, CG2, CG3]
    group_gen = client.config_groups(1234)
    for expected_id in (661, 662, 663):
        group = next(group_gen)
        assert isinstance(group, models.ConfigGroup)
        assert group.id == expected_id
    client.api.config_groups.assert_called_once_with(1234)
def test_config_groups(client):
    """ Verify ``client.config_groups()`` with no parameters raises
    NotImplementedError
    """
    with pytest.raises(NotImplementedError) as exc:
        client.config_groups()
    # Assert on the exception message itself, not the ExceptionInfo repr
    assert 'models.Project or int' in str(exc.value)
def test_milestone(client):
    """ Verify ``client.milestone()`` with no args builds an empty Milestone """
    mile = client.milestone()
    assert isinstance(mile, models.Milestone)
    assert mile._content == {}
def test_milestone_by_id(client):
    """ Verify ``client.milestone(id)`` fetches that milestone by ID """
    client.api.milestone_by_id.return_value = {'id': 1234}
    mile = client.milestone(1234)
    assert isinstance(mile, models.Milestone)
    assert mile.id == 1234
    client.api.milestone_by_id.assert_called_once_with(1234)
def test_milestones_exception(client):
    """ Verify ``client.milestones()`` with no parameters raises
    NotImplementedError
    """
    with pytest.raises(NotImplementedError) as exc:
        client.milestones()
    # Assert on the exception message itself, not the ExceptionInfo repr
    assert 'models.Project or int' in str(exc.value)
def test_milestones_by_project_w_defaults(client):
    """ Verify ``client.milestones(Project)`` yields Milestones with the
    default completed/started filters (None, None)
    """
    pid = 15
    proj = models.Project(client, {'id': pid})
    client.api.milestones.return_value = [MILE1, MILE2, MILE3]
    mile_gen = client.milestones(proj)
    for expected in ('milestone1', 'milestone2', 'milestone3'):
        mile = next(mile_gen)
        assert isinstance(mile, models.Milestone)
        assert mile.name == expected
    assert client.api.milestones.call_args == mock.call(proj.id, None, None)
def test_milestones_by_project_w_params(client):
    """ Verify ``client.milestones(Project)`` forwards explicit
    is_completed/is_started filters to the API
    """
    pid = 15
    proj = models.Project(client, {'id': pid})
    client.api.milestones.return_value = [MILE1, MILE2, MILE3]
    mile_gen = client.milestones(proj, is_completed=False, is_started=True)
    for expected in ('milestone1', 'milestone2', 'milestone3'):
        mile = next(mile_gen)
        assert isinstance(mile, models.Milestone)
        assert mile.name == expected
    assert client.api.milestones.call_args == mock.call(proj.id, False, True)
def test_milestones_by_project_id_w_defaults(client):
    """ Verify ``client.milestones(project_id)`` yields Milestones with the
    default filters when given a bare integer ID
    """
    pid = 15
    client.api.milestones.return_value = [MILE1, MILE2, MILE3]
    mile_gen = client.milestones(pid)
    for expected in ('milestone1', 'milestone2', 'milestone3'):
        mile = next(mile_gen)
        assert isinstance(mile, models.Milestone)
        assert mile.name == expected
    assert client.api.milestones.call_args == mock.call(pid, None, None)
def test_milestones_by_project_id_w_params(client):
    """ Verify ``client.milestones(project_id)`` forwards positional
    is_completed/is_started filters to the API
    """
    pid = 15
    client.api.milestones.return_value = [MILE1, MILE2, MILE3]
    mile_gen = client.milestones(pid, True, False)
    for expected in ('milestone1', 'milestone2', 'milestone3'):
        mile = next(mile_gen)
        assert isinstance(mile, models.Milestone)
        assert mile.name == expected
    assert client.api.milestones.call_args == mock.call(pid, True, False)
def test_milestones_by_project_id_is_completed_exception(client):
    """ Verify milestones raises TypeError when is_completed is not a bool """
    with pytest.raises(TypeError) as exc:
        next(client.milestones(15, 1234, False))
    # Assert on the exception message itself, not the ExceptionInfo repr
    assert '1234' in str(exc.value)
    assert 'is_completed' in str(exc.value)
def test_milestones_by_project_id_is_started_exception(client):
    """ Verify milestones raises TypeError when is_started is not a bool """
    with pytest.raises(TypeError) as exc:
        next(client.milestones(15, False, 'asdf'))
    # Assert on the exception message itself, not the ExceptionInfo repr
    assert 'asdf' in str(exc.value)
    assert 'is_started' in str(exc.value)
def test_milestones_by_project_is_completed_exception(client):
    """ Verify milestones raises TypeError when is_completed is not a bool,
    given a Project instance
    """
    with pytest.raises(TypeError) as exc:
        next(client.milestones(models.Project(client, {'id': 15}), 1234, False))
    # Assert on the exception message itself, not the ExceptionInfo repr
    assert '1234' in str(exc.value)
    assert 'is_completed' in str(exc.value)
def test_milestones_by_project_is_started_exception(client):
    """ Verify milestones raises TypeError when is_started is not a bool,
    given a Project instance
    """
    with pytest.raises(TypeError) as exc:
        next(client.milestones(models.Project(client, {'id': 15}), False, 'asdf'))
    # Assert on the exception message itself, not the ExceptionInfo repr
    assert 'asdf' in str(exc.value)
    assert 'is_started' in str(exc.value)
def test_plan(client):
    """ Verify ``client.plan()`` with no parameters builds a new, empty Plan """
    plan = client.plan()
    assert isinstance(plan, models.Plan)
    # TODO: add attribute checks (announcement, completed_on, is_completed,
    # show_announcement, suite_mode, url) once Plan is more than a stub
def test_plan_by_id(client):
    """ Verify ``client.plan(id)`` fetches that plan by ID """
    client.api.plan_by_id.return_value = {'id': 1234}
    plan = client.plan(1234)
    assert isinstance(plan, models.Plan)
    assert plan.id == 1234
    client.api.plan_by_id.assert_called_once_with(1234)
def test_priority_exc(client):
    """ Verify ``client.priority()`` with no args raises NotImplementedError """
    with pytest.raises(NotImplementedError) as exc:
        client.priority()
    # Assert on the exception message itself, not the ExceptionInfo repr
    assert 'You must pass in int object' in str(exc.value)
    assert not client.api.priorities.called
def test_priority_by_int(client):
    """ Verify ``client.priority(int)`` returns the matching Priority """
    client.api.priorities.return_value = [PRIO1, PRIO2, PRIO3]
    result = client.priority(112)
    assert isinstance(result, models.Priority)
    assert result.id == 112
    assert client.api.priorities.called
def test_priority_by_int_exc(client):
    """ Verify ``client.priority(int)`` raises TRAWClientError for an unknown ID """
    client.api.priorities.return_value = [PRIO1, PRIO2, PRIO3]
    with pytest.raises(TRAWClientError) as exc:
        client.priority(114)
    # Assert on the exception message itself, not the ExceptionInfo repr
    assert 'id of 114' in str(exc.value)
    assert client.api.priorities.called
def test_priorities(client):
    """ Verify ``client.priorities()`` yields Priority objects in order """
    client.api.priorities.return_value = (p for p in [PRIO1, PRIO2, PRIO3])
    prio_gen = client.priorities()
    for expected in ('priority1', 'priority2', 'priority3'):
        prio = next(prio_gen)
        assert isinstance(prio, models.Priority)
        assert prio.name == expected
    assert client.api.priorities.call_args == mock.call()
def test_project(client):
    """ Verify ``client.project()`` with no parameters builds an empty Project """
    proj = client.project()
    assert isinstance(proj, models.Project)
    for attr in ('announcement', 'completed_on', 'suite_mode', 'url'):
        assert getattr(proj, attr) is None
    assert proj.is_completed is False
    assert proj.show_announcement is False
def test_project_by_id(client):
    """ Verify ``client.project(int)`` returns that specific Project """
    proj_id = 1234
    proj_data = {"announcement": "mock announcement",
                 "completed_on": None,
                 "id": proj_id,
                 "is_completed": False,
                 "name": "Project 1234",
                 "show_announcement": False,
                 "url": "http://<server>/index.php?/projects/overview/1234",
                 "suite_mode": 1}
    client.api.project_by_id.return_value = proj_data
    proj = client.project(proj_id)
    assert isinstance(proj, models.Project)
    assert proj.id == proj_id
    client.api.project_by_id.assert_called_once_with(proj_id)
def test_projects_exception(client):
    """ Verify ``projects`` raises TypeError when both filter flags are True """
    with pytest.raises(TypeError) as exc:
        next(client.projects(active_only=True, completed_only=True))
    # Assert on the exception message itself, not the ExceptionInfo repr
    assert 'but not both' in str(exc.value)
def test_projects(client):
    """ Verify ``client.projects()`` maps the active/completed flags to the
    corresponding API argument (None, 0, 1)
    """
    client.api.projects.side_effect = [[PROJ1], [PROJ2], [PROJ3]]
    first = next(client.projects())
    assert isinstance(first, models.Project)
    assert first.name == 'project1'
    assert client.api.projects.call_args == mock.call(None)
    second = next(client.projects(active_only=True))
    assert isinstance(second, models.Project)
    assert second.name == 'project2'
    assert client.api.projects.call_args == mock.call(0)
    third = next(client.projects(completed_only=True))
    assert isinstance(third, models.Project)
    assert third.name == 'project3'
    assert client.api.projects.call_args == mock.call(1)
def test_result(client):
    """ Verify ``client.result()`` with no parameters builds an empty Result """
    result = client.result()
    assert isinstance(result, models.Result)
    for attr in ('assigned_to', 'comment', 'created_by', 'created_on',
                 'elapsed', 'status', 'test', 'version'):
        assert getattr(result, attr) is None
    assert list(result.defects) == []
def test_results_exc(client):
    """ Verify ``client.results()`` with no args raises NotImplementedError """
    with pytest.raises(NotImplementedError) as exc:
        client.results()
    # Assert on the exception message itself, not the ExceptionInfo repr
    assert 'You must pass in models.Test or int object' in str(exc.value)
    assert not client.api.results_by_test_id.called
def test_results_by_run(client):
    """ Verify ``client.results(Run)`` yields that run's results in order """
    client.api.results_by_run_id.return_value = [RESU1, RESU2, RESU3]
    result_gen = client.results(models.Run(client, {'id': 1234}))
    for expected_id in (771, 772, 773):
        result = next(result_gen)
        assert isinstance(result, models.Result)
        assert result.id == expected_id
    client.api.results_by_run_id.assert_called_once_with(1234)
def test_results_by_run_id(client):
    """ Verify ``client.results(id, obj_type=models.Run)`` yields run results
    and forwards the limit
    """
    client.api.results_by_run_id.return_value = [RESU1, RESU2]
    result_gen = client.results(1234, obj_type=models.Run, limit=2)
    for expected_id in (771, 772):
        result = next(result_gen)
        assert isinstance(result, models.Result)
        assert result.id == expected_id
    client.api.results_by_run_id.assert_called_once_with(1234, limit=2)
def test_results_by_run_id_exc_1(client):
    """ Verify ``client.results(123, obj_type='asdf')`` raises TypeError """
    with pytest.raises(TypeError) as exc:
        next(client.results(1234, obj_type='asdf'))
    # Assert on the exception message itself, not the ExceptionInfo repr
    assert str(models.Run) in str(exc.value)
    assert str(models.Test) in str(exc.value)
    assert 'asdf' in str(exc.value)
def test_results_by_test_id(client):
    """ Verify ``client.results(id)`` yields test results and forwards
    the limit
    """
    client.api.results_by_test_id.return_value = [RESU1, RESU2]
    result_gen = client.results(1234, limit=2)
    for expected_id in (771, 772):
        result = next(result_gen)
        assert isinstance(result, models.Result)
        assert result.id == expected_id
    client.api.results_by_test_id.assert_called_once_with(1234, limit=2)
def test_results_by_test_id_with_status(client):
    """ Verify ``client.results(id, with_status=Status)`` filters by status """
    client.api.results_by_test_id.return_value = [RESU1, ]
    status = models.Status(client, {'id': 234})
    result = next(client.results(1234, with_status=status))
    assert isinstance(result, models.Result)
    assert result.id == 771
    client.api.results_by_test_id.assert_called_once_with(1234, status_id='234')
def test_results_by_test_id_with_int_status(client):
    """ Verify ``client.results(id, with_status=int)`` filters by status ID """
    client.api.results_by_test_id.return_value = [RESU1, ]
    result = next(client.results(1234, with_status=111))
    assert isinstance(result, models.Result)
    assert result.id == 771
    client.api.results_by_test_id.assert_called_once_with(1234, status_id='111')
def test_results_by_test_id_with_2_status(client):
    """ Verify ``client.results(id)`` joins multiple Status IDs with commas """
    client.api.results_by_test_id.return_value = [RESU1, ]
    statuses = (models.Status(client, {'id': 234}),
                models.Status(client, {'id': 345}))
    result = next(client.results(1234, with_status=statuses))
    assert isinstance(result, models.Result)
    assert result.id == 771
    client.api.results_by_test_id.assert_called_once_with(1234, status_id='234,345')
def test_results_by_test_id_with_2_status_ids(client):
    """ Verify ``client.results(id)`` joins multiple bare status IDs with commas """
    client.api.results_by_test_id.return_value = [RESU1, ]
    result = next(client.results(1234, with_status=(234, 345)))
    assert isinstance(result, models.Result)
    assert result.id == 771
    client.api.results_by_test_id.assert_called_once_with(1234, status_id='234,345')
def test_results_by_test_id_exc_1(client):
    """ Verify ``client.results(id, with_status=float)`` raises TypeError """
    with pytest.raises(TypeError) as exc:
        next(client.results(1234, with_status=111.11))
    # Assert on the exception message itself, not the ExceptionInfo repr
    assert str(None) in str(exc.value)
    assert str(int) in str(exc.value)
    assert str(models.Status) in str(exc.value)
    assert 'with_status' in str(exc.value)
    assert str(111.11) in str(exc.value)
    assert not client.api.results_by_test_id.called
def test_results_by_test_id_exc_2(client):
""" Verify calling ``client.results(123, status)`` throws an exception """
status1 = models.Status(client, {'id': 234})
status2 = models.Status(client, {'id': 345})
with pytest.raises(TypeError) as exc:
next(client.results(1234, with_status=(status1, 'asdf', status2)))
assert str(None) in str(exc)
assert str(int) in str(exc)
assert str(models.Status) in str(exc)
assert 'with_status' in str(exc)
assert 'asdf' in str(exc)
assert not client.api.results_by_test_id.called
def test_results_by_test(client):
    """ Verify ``client.results(Test)`` with a models.Test object yields one
    models.Result per API row, calling the API with the test's int ID.
    """
    client.api.results_by_test_id.return_value = [RESU1, RESU2, RESU3]
    results = client.results(models.Test(client, {'id': 1234}))
    result1 = next(results)
    assert isinstance(result1, models.Result)
    assert result1.id == 771
    result2 = next(results)
    assert isinstance(result2, models.Result)
    assert result2.id == 772
    result3 = next(results)
    assert isinstance(result3, models.Result)
    assert result3.id == 773
    client.api.results_by_test_id.assert_called_once_with(1234)


def test_results_by_test_with_status(client):
    """ Verify ``client.results(Test, with_status=Status)`` forwards the
    status object's ID as the string ``status_id='234'``.
    """
    client.api.results_by_test_id.return_value = [RESU1, ]
    status = models.Status(client, {'id': 234})
    results = client.results(models.Test(client, {'id': 1234}), with_status=status)
    result1 = next(results)
    assert isinstance(result1, models.Result)
    assert result1.id == 771
    client.api.results_by_test_id.assert_called_once_with(1234, status_id='234')


def test_results_by_test_with_2_status(client):
    """ Verify ``client.results(Test, with_status=(s1, s2))`` joins the two
    status IDs into a comma-separated ``status_id`` string.
    """
    client.api.results_by_test_id.return_value = [RESU1, ]
    status1 = models.Status(client, {'id': 234})
    status2 = models.Status(client, {'id': 345})
    results = client.results(models.Test(client, {'id': 1234}), with_status=(status1, status2))
    result1 = next(results)
    assert isinstance(result1, models.Result)
    assert result1.id == 771
    client.api.results_by_test_id.assert_called_once_with(1234, status_id='234,345')
def test_run(client):
    """ Verify ``client.run()`` with no arguments returns a fresh, empty
    models.Run: every scalar field is None, collection fields are empty,
    and the boolean flags carry their documented defaults
    (``include_all=True``, ``is_completed=False``).
    """
    run = client.run()
    assert isinstance(run, models.Run)
    assert run.assigned_to is None
    assert run.blocked_count is None
    assert list(run.cases) == list()
    assert run.completed_on is None
    assert list(run.configs) == list()
    assert run.created_by is None
    assert run.created_on is None
    assert run.description is None
    assert run.failed_count is None
    assert run.include_all is True
    assert run.is_completed is False
    assert run.milestone is None
    assert run.name is None
    assert run.passed_count is None
    assert run.plan is None
    assert run.project is None
    assert run.retest_count is None
    assert run.suite is None
    assert run.untested_count is None
    assert run.url is None
def test_run_by_id(client):
    """Verify ``client.run(1234)`` fetches and wraps the matching run."""
    run_id = 1234
    client.api.run_by_id.return_value = {'id': run_id}
    fetched = client.run(run_id)
    client.api.run_by_id.assert_called_once_with(run_id)
    assert isinstance(fetched, models.Run)
    assert fetched.id == run_id
def test_runs_exc(client):
    """ Verify ``client.runs()`` with no arguments raises
    NotImplementedError and never touches the API.
    """
    with pytest.raises(NotImplementedError) as exc:
        client.runs()
    assert 'You must pass in models.Project or int object' in str(exc)
    assert not client.api.runs_by_project_id.called


def test_runs_by_project_id(client):
    """ Verify ``client.runs(1234)`` with an int project ID returns a run
    generator yielding one models.Run per API row.
    """
    client.api.runs_by_project_id.return_value = [RUN1, RUN2, RUN3]
    runs = client.runs(1234)
    run1 = next(runs)
    assert isinstance(run1, models.Run)
    assert run1.id == 881
    run2 = next(runs)
    assert isinstance(run2, models.Run)
    assert run2.id == 882
    run3 = next(runs)
    assert isinstance(run3, models.Run)
    assert run3.id == 883
    client.api.runs_by_project_id.assert_called_once_with(1234)


def test_runs_by_project(client):
    """ Verify ``client.runs(Project)`` with a models.Project returns a run
    generator, calling the API with the project's int ID.
    """
    client.api.runs_by_project_id.return_value = [RUN1, RUN2, RUN3]
    runs = client.runs(models.Project(client, {'id': 1234}))
    run1 = next(runs)
    assert isinstance(run1, models.Run)
    assert run1.id == 881
    run2 = next(runs)
    assert isinstance(run2, models.Run)
    assert run2.id == 882
    run3 = next(runs)
    assert isinstance(run3, models.Run)
    assert run3.id == 883
    client.api.runs_by_project_id.assert_called_once_with(1234)
def test_runs_by_project_w_limit(client):
    """ Verify ``client.runs(Project, limit=2)`` forwards ``limit`` unchanged. """
    list(client.runs(models.Project(client, {'id': 1234}), limit=2))
    client.api.runs_by_project_id.assert_called_once_with(1234, limit=2)


def test_runs_by_project_w_int_created_after(client):
    """ Verify an int ``created_after`` timestamp is forwarded unchanged. """
    list(client.runs(models.Project(client, {'id': 1234}), created_after=1112))
    client.api.runs_by_project_id.assert_called_once_with(1234, created_after=1112)


def test_runs_by_project_w_float_created_after(client):
    """ Verify a float ``created_after`` timestamp is truncated to an int. """
    list(client.runs(models.Project(client, {'id': 1234}), created_after=11.12))
    client.api.runs_by_project_id.assert_called_once_with(1234, created_after=11)


def test_runs_by_project_w_datetime_created_after(client):
    """ Verify a datetime ``created_after`` is converted to an int epoch
    timestamp (33.22 -> 33).
    """
    ca_dt = dt.fromtimestamp(33.22)
    list(client.runs(models.Project(client, {'id': 1234}), created_after=ca_dt))
    client.api.runs_by_project_id.assert_called_once_with(1234, created_after=33)


def test_runs_by_project_w_created_after_exc(client):
    """ Verify a non-temporal ``created_after`` value ('asdf') raises
    TypeError and the API is never called.
    """
    with pytest.raises(TypeError) as exc:
        list(client.runs(
            models.Project(client, {'id': 1234}), created_after='asdf'))
    assert 'created/updated after/before' in str(exc)
    assert 'asdf' in str(exc)
    assert not client.api.runs_by_project_id.called
def test_runs_by_project_w_int_created_before(client):
    """ Verify an int ``created_before`` timestamp is forwarded unchanged. """
    list(client.runs(models.Project(client, {'id': 1234}), created_before=1112))
    client.api.runs_by_project_id.assert_called_once_with(1234, created_before=1112)


def test_runs_by_project_w_float_created_before(client):
    """ Verify a float ``created_before`` timestamp is truncated to an int. """
    list(client.runs(models.Project(client, {'id': 1234}), created_before=11.12))
    client.api.runs_by_project_id.assert_called_once_with(1234, created_before=11)


def test_runs_by_project_w_datetime_created_before(client):
    """ Verify a datetime ``created_before`` is converted to an int epoch
    timestamp (33.22 -> 33).
    """
    ca_dt = dt.fromtimestamp(33.22)
    list(client.runs(models.Project(client, {'id': 1234}), created_before=ca_dt))
    client.api.runs_by_project_id.assert_called_once_with(1234, created_before=33)


def test_runs_by_project_w_created_before_exc(client):
    """ Verify a non-temporal ``created_before`` value ('asdf') raises
    TypeError and the API is never called.
    """
    with pytest.raises(TypeError) as exc:
        list(client.runs(
            models.Project(client, {'id': 1234}), created_before='asdf'))
    assert 'created/updated after/before' in str(exc)
    assert 'asdf' in str(exc)
    assert not client.api.runs_by_project_id.called
def test_runs_by_project_w_created_by_user(client):
    """ Verify a models.User ``created_by`` is serialized to its str ID. """
    user = models.User(client, {'id': 11})
    list(client.runs(models.Project(client, {'id': 1234}), created_by=user))
    client.api.runs_by_project_id.assert_called_once_with(1234, created_by='11')


def test_runs_by_project_w_created_by_user_list(client):
    """ Verify a list of models.User ``created_by`` values is joined into a
    comma-separated ID string.
    """
    users = [models.User(client, {'id': 11}), models.User(client, {'id': 12})]
    list(client.runs(models.Project(client, {'id': 1234}), created_by=users))
    client.api.runs_by_project_id.assert_called_once_with(1234, created_by='11,12')


def test_runs_by_project_w_created_by_user_id(client):
    """ Verify an int ``created_by`` user ID is serialized to a string. """
    list(client.runs(models.Project(client, {'id': 1234}), created_by=11))
    client.api.runs_by_project_id.assert_called_once_with(1234, created_by='11')


def test_runs_by_project_w_created_by_user_id_list(client):
    """ Verify a list of int ``created_by`` user IDs is joined into a
    comma-separated string.
    """
    list(client.runs(models.Project(client, {'id': 1234}), created_by=[11, 12]))
    client.api.runs_by_project_id.assert_called_once_with(1234, created_by='11,12')


def test_runs_by_project_w_created_by_exc(client):
    """ Verify a string ``created_by`` raises TypeError naming the accepted
    types (models.User, int) and the offending value.
    """
    with pytest.raises(TypeError) as exc:
        list(client.runs(models.Project(client, {'id': 1234}), created_by='asdf'))
    assert 'asdf' in str(exc)
    assert str(models.User) in str(exc)
    assert str(int) in str(exc)


def test_runs_by_project_id_w_created_by_user(client):
    """ Verify ``created_by=User`` also works when the project is an int ID. """
    user = models.User(client, {'id': 11})
    list(client.runs(1234, created_by=user))
    client.api.runs_by_project_id.assert_called_once_with(1234, created_by='11')


def test_runs_by_project_id_w_created_by_user_id(client):
    """ Verify ``created_by=11`` also works when the project is an int ID. """
    list(client.runs(1234, created_by=11))
    client.api.runs_by_project_id.assert_called_once_with(1234, created_by='11')


def test_runs_by_project_id_w_created_by_exc(client):
    """ Verify a string ``created_by`` raises TypeError when the project is
    an int ID.
    """
    with pytest.raises(TypeError) as exc:
        list(client.runs(1234, created_by='asdf'))
    assert 'asdf' in str(exc)
    assert str(models.User) in str(exc)
    assert str(int) in str(exc)
def test_runs_by_project_w_is_completed_true(client):
    """ Verify ``is_completed=True`` is serialized to the API as int 1. """
    list(client.runs(models.Project(client, {'id': 1234}), is_completed=True))
    client.api.runs_by_project_id.assert_called_once_with(1234, is_completed=1)


# NOTE(review): "copmleted" is a typo in the test name; renaming it to
# test_runs_by_project_w_is_completed_false would be safe (pytest discovers
# tests by name and nothing else references it).
def test_runs_by_project_w_is_copmleted_false(client):
    """ Verify ``is_completed=False`` is serialized to the API as int 0. """
    list(client.runs(models.Project(client, {'id': 1234}), is_completed=False))
    client.api.runs_by_project_id.assert_called_once_with(1234, is_completed=0)


def test_runs_by_project_w_is_completed_exc(client):
    """ Verify a non-bool ``is_completed`` ('asdf') raises TypeError naming
    the accepted values.
    """
    with pytest.raises(TypeError) as exc:
        list(client.runs(models.Project(client, {'id': 1234}), is_completed='asdf'))
    assert 'asdf' in str(exc)
    assert 'None, True, or False' in str(exc)


def test_runs_by_project_id_w_is_completed_true(client):
    """ Verify ``is_completed=True`` works when the project is an int ID. """
    list(client.runs(1234, is_completed=True))
    client.api.runs_by_project_id.assert_called_once_with(1234, is_completed=1)


def test_runs_by_project_id_w_is_completed_false(client):
    """ Verify ``is_completed=False`` works when the project is an int ID. """
    list(client.runs(1234, is_completed=False))
    client.api.runs_by_project_id.assert_called_once_with(1234, is_completed=0)


# NOTE(review): name says "is_complete" while the sibling tests say
# "is_completed" — harmless, but worth normalizing for grep-ability.
def test_runs_by_project_id_w_is_complete_exc(client):
    """ Verify a non-bool ``is_completed`` raises TypeError when the project
    is an int ID.
    """
    with pytest.raises(TypeError) as exc:
        list(client.runs(1234, is_completed='asdf'))
    assert 'asdf' in str(exc)
    assert 'None, True, or False' in str(exc)
def test_runs_by_project_w_milestone(client):
    """ Verify a models.Milestone ``milestone`` is serialized to its str ID
    as ``milestone_id``.
    """
    ms = models.Milestone(client, {'id': 22})
    list(client.runs(models.Project(client, {'id': 1234}), milestone=ms))
    client.api.runs_by_project_id.assert_called_once_with(1234, milestone_id='22')


def test_runs_by_project_w_milestone_id(client):
    """ Verify an int ``milestone`` ID is serialized to a string. """
    list(client.runs(models.Project(client, {'id': 1234}), milestone=22))
    client.api.runs_by_project_id.assert_called_once_with(1234, milestone_id='22')


def test_runs_by_project_w_milestone_exc(client):
    """ Verify a string ``milestone`` raises TypeError naming the accepted
    types (Milestone, SubMilestone, int) and the offending value.
    """
    with pytest.raises(TypeError) as exc:
        list(client.runs(models.Project(client, {'id': 1234}), milestone='asdf'))
    assert 'asdf' in str(exc)
    assert str(models.Milestone) in str(exc)
    assert str(models.SubMilestone) in str(exc)
    assert str(int) in str(exc)


def test_runs_by_project_id_w_milestone(client):
    """ Verify ``milestone=Milestone`` works when the project is an int ID. """
    ms = models.Milestone(client, {'id': 22})
    list(client.runs(1234, milestone=ms))
    client.api.runs_by_project_id.assert_called_once_with(1234, milestone_id='22')


def test_runs_by_project_id_w_milestone_id(client):
    """ Verify ``milestone=22`` works when the project is an int ID. """
    list(client.runs(1234, milestone=22))
    client.api.runs_by_project_id.assert_called_once_with(1234, milestone_id='22')


def test_runs_by_project_id_w_milestone_exc(client):
    """ Verify a string ``milestone`` raises TypeError when the project is
    an int ID.
    """
    with pytest.raises(TypeError) as exc:
        list(client.runs(1234, milestone='asdf'))
    assert 'asdf' in str(exc)
    assert str(models.Milestone) in str(exc)
    assert str(models.SubMilestone) in str(exc)
    assert str(int) in str(exc)
def test_runs_by_project_w_suite(client):
    """ Verify a models.Suite ``suite`` filter is serialized to its str ID
    as ``suite_id``.
    """
    suite = models.Suite(client, {'id': 22})
    list(client.runs(models.Project(client, {'id': 1234}), suite=suite))
    client.api.runs_by_project_id.assert_called_once_with(1234, suite_id='22')


def test_runs_by_project_w_suite_id(client):
    """ Verify an int ``suite`` ID is serialized to a string. """
    list(client.runs(models.Project(client, {'id': 1234}), suite=22))
    client.api.runs_by_project_id.assert_called_once_with(1234, suite_id='22')


def test_runs_by_project_w_suite_exc(client):
    """ Verify a string ``suite`` raises TypeError naming the accepted types
    (models.Suite, int) and the offending value.
    """
    with pytest.raises(TypeError) as exc:
        list(client.runs(models.Project(client, {'id': 1234}), suite='asdf'))
    assert 'asdf' in str(exc)
    assert str(models.Suite) in str(exc)
    assert str(int) in str(exc)


def test_runs_by_project_id_w_suite(client):
    """ Verify ``suite=Suite`` works when the project is an int ID. """
    suite = models.Suite(client, {'id': 22})
    list(client.runs(1234, suite=suite))
    client.api.runs_by_project_id.assert_called_once_with(1234, suite_id='22')


def test_runs_by_project_id_w_suite_id(client):
    """ Verify ``suite=22`` works when the project is an int ID. """
    list(client.runs(1234, suite=22))
    client.api.runs_by_project_id.assert_called_once_with(1234, suite_id='22')


def test_runs_by_project_id_w_suite_exc(client):
    """ Verify a string ``suite`` raises TypeError when the project is an
    int ID.
    """
    with pytest.raises(TypeError) as exc:
        list(client.runs(1234, suite='asdf'))
    assert 'asdf' in str(exc)
    assert str(models.Suite) in str(exc)
    assert str(int) in str(exc)
def test_section(client):
    """ Verify ``client.section()`` with no arguments returns a fresh, empty
    models.Section whose fields are all None.
    """
    section = client.section()
    assert isinstance(section, models.Section)
    assert section.depth is None
    assert section.description is None
    assert section.display_order is None
    assert section.name is None
    assert section.parent is None
    assert section.project is None
    assert section.suite is None


def test_section_by_id(client):
    """ Verify ``client.section(1234)`` fetches and wraps that section. """
    client.api.section_by_id.return_value = {'id': 1234}
    section = client.section(1234)
    assert isinstance(section, models.Section)
    assert section.id == 1234
    client.api.section_by_id.assert_called_once_with(1234)


def test_sections_exc(client):
    """ Verify ``client.sections()`` with no arguments raises
    NotImplementedError and never touches the API.
    """
    with pytest.raises(NotImplementedError) as exc:
        client.sections()
    assert 'You must pass in models.Project or int object' in str(exc)
    assert not client.api.sections_by_project_id.called
def test_sections_by_project_id(client):
    """ Verify ``client.sections(15)`` on a single-suite project
    (``suite_mode`` 1) yields models.Section objects; the suite argument
    passed through to the API is None.
    """
    PROJECT_ID = 15
    client.api.project_by_id.return_value = {'id': PROJECT_ID, 'suite_mode': 1}
    client.api.sections_by_project_id.return_value = [SECT1, SECT2, SECT3]
    sections = client.sections(PROJECT_ID)
    section1 = next(sections)
    assert isinstance(section1, models.Section)
    assert section1.id == 991
    section2 = next(sections)
    assert isinstance(section2, models.Section)
    assert section2.id == 992
    section3 = next(sections)
    assert isinstance(section3, models.Section)
    assert section3.id == 993
    client.api.project_by_id.assert_called_once_with(PROJECT_ID)
    # Second positional arg is the suite ID; None for single-suite projects.
    client.api.sections_by_project_id.assert_called_once_with(PROJECT_ID, None)


def test_sections_by_project(client):
    """ Verify ``client.sections(Project)`` on a single-suite project
    (``suite_mode`` 1) yields models.Section objects.
    """
    PROJECT_ID = 15
    PROJECT_DICT = {'id': PROJECT_ID, 'suite_mode': 1}
    client.api.project_by_id.return_value = PROJECT_DICT
    client.api.sections_by_project_id.return_value = [SECT1, SECT2, SECT3]
    sections = client.sections(models.Project(client, PROJECT_DICT))
    section1 = next(sections)
    assert isinstance(section1, models.Section)
    assert section1.id == 991
    section2 = next(sections)
    assert isinstance(section2, models.Section)
    assert section2.id == 992
    section3 = next(sections)
    assert isinstance(section3, models.Section)
    assert section3.id == 993
    client.api.project_by_id.assert_called_once_with(PROJECT_ID)
    client.api.sections_by_project_id.assert_called_once_with(PROJECT_ID, None)


def test_sections_by_project_and_suite(client):
    """ Verify ``client.sections(Project, Suite)`` on a multi-suite project
    (``suite_mode`` 2) forwards the suite's int ID to the API.
    """
    PROJECT_ID = 15
    PROJECT_DICT = {'id': PROJECT_ID, 'suite_mode': 2}
    SUITE_ID = 16
    SUITE_DICT = {'id': SUITE_ID}
    client.api.project_by_id.return_value = PROJECT_DICT
    client.api.sections_by_project_id.return_value = [SECT1, ]
    project = models.Project(client, PROJECT_DICT)
    suite = models.Suite(client, SUITE_DICT)
    sections = client.sections(project, suite)
    section1 = next(sections)
    assert isinstance(section1, models.Section)
    assert section1.id == 991
    client.api.project_by_id.assert_called_once_with(PROJECT_ID)
    client.api.sections_by_project_id.assert_called_once_with(PROJECT_ID, SUITE_ID)


def test_sections_by_project_and_suite_id(client):
    """ Verify ``client.sections(Project, 16)`` accepts a raw int suite ID
    on a multi-suite project.
    """
    PROJECT_ID = 15
    PROJECT_DICT = {'id': PROJECT_ID, 'suite_mode': 2}
    SUITE_ID = 16
    client.api.project_by_id.return_value = PROJECT_DICT
    client.api.sections_by_project_id.return_value = [SECT1, ]
    project = models.Project(client, PROJECT_DICT)
    sections = client.sections(project, SUITE_ID)
    section1 = next(sections)
    assert isinstance(section1, models.Section)
    assert section1.id == 991
    client.api.project_by_id.assert_called_once_with(PROJECT_ID)
    client.api.sections_by_project_id.assert_called_once_with(PROJECT_ID, SUITE_ID)
def test_sections_by_project_exc_1(client):
    """ Verify ``client.sections(Project)`` without a suite raises TypeError
    when the project is multi-suite (``suite_mode`` 2), after looking up the
    project but before calling the sections API.
    """
    PROJECT_ID = 15
    PROJECT_DICT = {'id': PROJECT_ID, 'suite_mode': 2}
    client.api.project_by_id.return_value = PROJECT_DICT
    client.api.sections_by_project_id.return_value = [SECT1, ]
    with pytest.raises(TypeError) as exc:
        list(client.sections(models.Project(client, PROJECT_DICT)))
    assert 'suite_mode of 2' in str(exc)
    client.api.project_by_id.assert_called_once_with(PROJECT_ID)
    assert not client.api.sections_by_project_id.called


def test_sections_by_project_exc_2(client):
    """ Verify ``client.sections(Project, 'asdf')`` raises TypeError for a
    suite argument that is neither a models.Suite nor an int, without
    calling the sections API.
    """
    PROJECT_ID = 15
    PROJECT_DICT = {'id': PROJECT_ID, 'suite_mode': 2}
    client.api.project_by_id.return_value = PROJECT_DICT
    client.api.sections_by_project_id.return_value = [SECT1, ]
    with pytest.raises(TypeError) as exc:
        list(client.sections(models.Project(client, PROJECT_DICT), 'asdf'))
    assert 'models.Suite' in str(exc)
    assert 'int ID of a suite in testrail' in str(exc)
    client.api.project_by_id.assert_called_once_with(PROJECT_ID)
    assert not client.api.sections_by_project_id.called
def test_custom_status_exc(client):
    """ Verify ``client.custom_status()`` with no arguments raises
    NotImplementedError and never touches the API.
    """
    with pytest.raises(NotImplementedError) as exc:
        client.custom_status()
    assert 'You must pass in int object' in str(exc)
    assert not client.api.statuses.called


def test_custom_status_by_int(client):
    """ Verify ``client.custom_status(3)`` resolves custom status 3 to the
    matching models.Status (id 8, per the STAT fixtures).
    """
    client.api.statuses.return_value = [STAT1, STAT2, STAT3, STAT4]
    status = client.custom_status(3)
    assert isinstance(status, models.Status)
    assert status.id == 8
    assert client.api.statuses.called


def test_custom_status_by_int_exc(client):
    """ Verify ``client.custom_status(30)`` raises UnknownCustomStatusError
    when no custom status matches.
    """
    client.api.statuses.return_value = [STAT1, STAT2, STAT3, STAT4]
    with pytest.raises(UnknownCustomStatusError) as exc:
        client.custom_status(30)
    # NOTE(review): 'ID 3' is a substring of 'ID 30', so this assertion is
    # weaker than it looks — it passes regardless of whether the message
    # reports 3 or 30. Consider asserting 'with custom status ID 30' after
    # confirming the message format.
    assert 'with custom status ID 3' in str(exc)


def test_custom_status_by_name(client):
    """ Verify ``client.custom_status('custom_status3')`` resolves the name
    to the matching models.Status (id 8).
    """
    client.api.statuses.return_value = [STAT1, STAT2, STAT3, STAT4]
    status = client.custom_status('custom_status3')
    assert isinstance(status, models.Status)
    assert status.id == 8
    assert client.api.statuses.called


def test_custom_status_by_name_exc_1(client):
    """ Verify a malformed name raises UnknownCustomStatusError before the
    API is consulted.
    """
    with pytest.raises(UnknownCustomStatusError) as exc:
        client.custom_status('bogus_custom_status3')
    assert "be of format 'custom_statusX'" in str(exc)
    assert not client.api.statuses.called


def test_custom_status_by_name_exc_2(client):
    """ Verify an out-of-range custom status number (9) raises
    UnknownCustomStatusError before the API is consulted.
    """
    with pytest.raises(UnknownCustomStatusError) as exc:
        client.custom_status('custom_status9')
    assert "is between 1 and 7" in str(exc)
    assert not client.api.statuses.called
def test_status_exc(client):
    """ Verify ``client.status()`` with no arguments raises
    NotImplementedError and never touches the API.
    """
    with pytest.raises(NotImplementedError) as exc:
        client.status()
    assert 'You must pass in int object' in str(exc)
    assert not client.api.statuses.called


def test_status_by_int(client):
    """ Verify ``client.status(222)`` resolves an int ID to the matching
    models.Status.
    """
    client.api.statuses.return_value = [STAT1, STAT2, STAT3]
    status = client.status(222)
    assert isinstance(status, models.Status)
    assert status.id == 222
    assert client.api.statuses.called


def test_status_by_int_exc(client):
    """ Verify an unmatched int ID raises TRAWClientError after consulting
    the API.
    """
    client.api.statuses.return_value = [STAT1, STAT2, STAT3]
    with pytest.raises(TRAWClientError) as exc:
        client.status(224)
    assert 'id of 224' in str(exc)
    assert client.api.statuses.called


def test_status_by_label_not_strict(client):
    """ Verify label lookup is case-insensitive by default: 'failed' matches
    STAT2, whose label differs only in case.
    """
    client.api.statuses.return_value = [STAT1, STAT2, STAT3]
    status = client.status('failed')
    assert isinstance(status, models.Status)
    assert status.id == STAT2['id']
    # The match was case-insensitive: labels differ, lowercased they agree.
    assert status.label != 'failed'
    assert status.label.lower() == 'failed'
    assert client.api.statuses.called


def test_status_by_label_exc(client):
    """ Verify an unmatched label raises TRAWClientError after consulting
    the API.
    """
    client.api.statuses.return_value = [STAT1, STAT2, STAT3]
    with pytest.raises(TRAWClientError) as exc:
        client.status('bad status')
    assert 'label of bad status' in str(exc)
    assert client.api.statuses.called


def test_status_by_label_strict(client):
    """ Verify ``strict=True`` makes label lookup case-sensitive: 'failed'
    now matches STAT3, whose label is exactly 'failed'.
    """
    client.api.statuses.return_value = [STAT1, STAT2, STAT3]
    status = client.status('failed', strict=True)
    assert isinstance(status, models.Status)
    assert status.id == STAT3['id']
    assert status.label == 'failed'
    assert client.api.statuses.called
def test_statuses(client):
    """ Verify ``client.statuses()`` yields one models.Status per API row,
    calling the API with no arguments.
    """
    client.api.statuses.return_value = [STAT1, STAT2, STAT3]
    stat_gen = client.statuses()
    stat1 = next(stat_gen)
    assert isinstance(stat1, models.Status)
    assert stat1.name == 'status1'
    stat2 = next(stat_gen)
    assert isinstance(stat2, models.Status)
    assert stat2.name == 'status2'
    stat3 = next(stat_gen)
    assert isinstance(stat3, models.Status)
    assert stat3.name == 'status3'
    assert client.api.statuses.call_args == mock.call()
def test_suite(client):
    """ Verify ``client.suite()`` with no arguments returns a fresh, empty
    models.Suite: scalar fields None and the boolean flags default to False.
    """
    suite = client.suite()
    assert isinstance(suite, models.Suite)
    assert suite.completed_on is None
    assert suite.description is None
    assert suite.is_baseline is False
    assert suite.is_completed is False
    assert suite.is_master is False
    assert suite.name is None
    assert suite.project is None
    assert suite.url is None
def test_suite_by_id(client):
    """Verify ``client.suite(1234)`` fetches and wraps the matching suite."""
    suite_id = 1234
    client.api.suite_by_id.return_value = {'id': suite_id}
    fetched = client.suite(suite_id)
    client.api.suite_by_id.assert_called_once_with(suite_id)
    assert isinstance(fetched, models.Suite)
    assert fetched.id == suite_id
def test_suites_exc(client):
    """ Verify ``client.suites()`` with no arguments raises
    NotImplementedError and never touches the API.
    """
    with pytest.raises(NotImplementedError) as exc:
        client.suites()
    assert 'You must pass in models.Project or int object' in str(exc)
    assert not client.api.suites_by_project_id.called


def test_suites_by_project_id(client):
    """ Verify ``client.suites(1234)`` with an int project ID yields one
    models.Suite per API row.
    """
    client.api.suites_by_project_id.return_value = [SUIT1, SUIT2, SUIT3]
    suites = client.suites(1234)
    suite1 = next(suites)
    assert isinstance(suite1, models.Suite)
    assert suite1.id == 551
    suite2 = next(suites)
    assert isinstance(suite2, models.Suite)
    assert suite2.id == 552
    suite3 = next(suites)
    assert isinstance(suite3, models.Suite)
    assert suite3.id == 553
    client.api.suites_by_project_id.assert_called_once_with(1234)


def test_suites_by_project(client):
    """ Verify ``client.suites(Project)`` calls the API with the project's
    int ID and yields models.Suite objects.
    """
    client.api.suites_by_project_id.return_value = [SUIT1, SUIT2, SUIT3]
    suites = client.suites(models.Project(client, {'id': 1234}))
    suite1 = next(suites)
    assert isinstance(suite1, models.Suite)
    assert suite1.id == 551
    suite2 = next(suites)
    assert isinstance(suite2, models.Suite)
    assert suite2.id == 552
    suite3 = next(suites)
    assert isinstance(suite3, models.Suite)
    assert suite3.id == 553
    client.api.suites_by_project_id.assert_called_once_with(1234)
def test_template_exc(client):
    """ Verify ``client.template()`` with no arguments raises
    NotImplementedError and never touches the API.
    """
    with pytest.raises(NotImplementedError) as exc:
        client.template()
    assert 'You must pass in int object' in str(exc)
    assert not client.api.templates.called


def test_template_by_int(client):
    """ Verify ``client.template(992)`` resolves an int ID to the matching
    models.Template, searching templates across the client's projects
    (``client.projects`` is patched to a single project here).
    """
    client.api.templates.return_value = [TEMP1, TEMP2, TEMP3]
    with mock.patch.object(client, 'projects') as proj_mock:
        proj_mock.return_value = [models.Project(client, {'id': 123}), ]
        template = client.template(992)
    assert isinstance(template, models.Template)
    assert template.id == 992
    assert client.api.templates.called


def test_template_by_int_exc(client):
    """ Verify an unmatched template ID raises TRAWClientError after
    consulting the API.
    """
    client.api.templates.return_value = [TEMP1, TEMP2, TEMP3]
    with mock.patch.object(client, 'projects') as proj_mock:
        proj_mock.return_value = [models.Project(client, {'id': 123}), ]
        with pytest.raises(TRAWClientError) as exc:
            client.template(994)
    assert 'id of 994' in str(exc)
    assert client.api.templates.called


def test_templates_exception(client):
    """ Verify ``client.templates()`` with no arguments raises
    NotImplementedError.
    """
    with pytest.raises(NotImplementedError) as exc:
        client.templates()
    assert 'models.Project or int' in str(exc)
def test_templates_by_project(client):
    """ Verify ``client.templates(Project)`` yields one models.Template per
    API row, calling the API with the project's int ID.
    """
    PROJECT_ID = 15
    PROJECT = models.Project(client, {'id': PROJECT_ID})
    client.api.templates.return_value = [TEMP1, TEMP2, TEMP3]
    temp_gen = client.templates(PROJECT)
    temp1 = next(temp_gen)
    assert isinstance(temp1, models.Template)
    assert temp1.name == 'template1'
    temp2 = next(temp_gen)
    assert isinstance(temp2, models.Template)
    assert temp2.name == 'template2'
    temp3 = next(temp_gen)
    assert isinstance(temp3, models.Template)
    assert temp3.name == 'template3'
    assert client.api.templates.call_args == mock.call(PROJECT.id)


def test_templates_by_project_id(client):
    """ Verify ``client.templates(15)`` with an int project ID yields one
    models.Template per API row.
    """
    PROJECT_ID = 15
    client.api.templates.return_value = [TEMP1, TEMP2, TEMP3]
    temp_gen = client.templates(PROJECT_ID)
    temp1 = next(temp_gen)
    assert isinstance(temp1, models.Template)
    assert temp1.name == 'template1'
    temp2 = next(temp_gen)
    assert isinstance(temp2, models.Template)
    assert temp2.name == 'template2'
    temp3 = next(temp_gen)
    assert isinstance(temp3, models.Template)
    assert temp3.name == 'template3'
    assert client.api.templates.call_args == mock.call(PROJECT_ID)
def test_test_exc(client):
    """ Verify ``client.test()`` with no arguments raises
    NotImplementedError and never touches the API.
    """
    with pytest.raises(NotImplementedError) as exc:
        client.test()
    assert 'You must pass in int object' in str(exc)
    assert not client.api.test.called
def test_test_by_id(client):
    """Verify ``client.test(1234)`` fetches and wraps the matching test."""
    test_id = 1234
    client.api.test_by_id.return_value = {'id': test_id}
    fetched = client.test(test_id)
    client.api.test_by_id.assert_called_once_with(test_id)
    assert isinstance(fetched, models.Test)
    assert fetched.id == test_id
def test_tests_exc(client):
    """ Verify ``client.tests()`` with no arguments raises
    NotImplementedError and never touches the API.
    """
    with pytest.raises(NotImplementedError) as exc:
        client.tests()
    assert 'You must pass in models.Run or int object' in str(exc)
    assert not client.api.tests_by_run_id.called


def test_tests_by_run_id(client):
    """ Verify ``client.tests(1234)`` with an int run ID yields one
    models.Test per API row; the status filter passed to the API is None.
    """
    client.api.tests_by_run_id.return_value = [TEST1, TEST2, TEST3]
    tests = client.tests(1234)
    test1 = next(tests)
    assert isinstance(test1, models.Test)
    assert test1.id == 441
    test2 = next(tests)
    assert isinstance(test2, models.Test)
    assert test2.id == 442
    test3 = next(tests)
    assert isinstance(test3, models.Test)
    assert test3.id == 443
    # Second positional arg is the serialized status filter (None = no filter).
    client.api.tests_by_run_id.assert_called_once_with(1234, None)


def test_tests_by_run_id_with_status(client):
    """ Verify ``with_status=Status`` is serialized to the status's str ID. """
    client.api.tests_by_run_id.return_value = [TEST1, ]
    status = models.Status(client, {'id': 234})
    tests = client.tests(1234, with_status=status)
    test1 = next(tests)
    assert isinstance(test1, models.Test)
    assert test1.id == 441
    client.api.tests_by_run_id.assert_called_once_with(1234, '234')


def test_tests_by_run_id_with_2_status(client):
    """ Verify a tuple of two Status objects is joined into a
    comma-separated ID string.
    """
    client.api.tests_by_run_id.return_value = [TEST1, ]
    status1 = models.Status(client, {'id': 234})
    status2 = models.Status(client, {'id': 345})
    tests = client.tests(1234, with_status=(status1, status2))
    test1 = next(tests)
    assert isinstance(test1, models.Test)
    assert test1.id == 441
    client.api.tests_by_run_id.assert_called_once_with(1234, '234,345')


def test_tests_by_run_id_exc_1(client):
    """ Verify an int ``with_status`` raises TypeError — unlike
    ``client.results``, tests accept only None, models.Status, or an
    iterable of models.Status (ints are rejected).
    """
    with pytest.raises(TypeError) as exc:
        next(client.tests(1234, with_status=111))
    assert "None, models.Status" in str(exc)
    assert "iterable of models.Status objects" in str(exc)
    assert str(int) in str(exc)
    assert not client.api.tests_by_run_id.called


def test_tests_by_run_id_exc_2(client):
    """ Verify an int element inside a with_status iterable raises
    TypeError without calling the API.
    """
    status1 = models.Status(client, {'id': 234})
    status2 = models.Status(client, {'id': 345})
    with pytest.raises(TypeError) as exc:
        next(client.tests(1234, with_status=(status1, 111, status2)))
    assert "None, models.Status" in str(exc)
    assert "iterable of models.Status objects" in str(exc)
    assert str(int) in str(exc)
    assert not client.api.tests_by_run_id.called
def test_tests_by_run(client):
""" Verify calling ``client.tests(Run)`` with an ID returns test generator """
client.api.tests_by_run_id.return_value = [TEST1, TEST2, TEST3]
tests = client.tests(models.Run(client, {'id': 1234}))
test1 = next(tests)
assert isinstance(test1, models.Test)
assert test1.id == 441
test2 = next(tests)
assert isinstance(test2, models.Test)
assert test2.id == 442
test3 = next(tests)
assert isinstance(test3, models.Test)
assert test3.id == 443
client.api.tests_by_run_id.assert_called_once_with(1234, None)
def test_tests_by_run_with_status(client):
    """Verify ``client.tests(Run)`` forwards a single status filter as its ID string."""
    client.api.tests_by_run_id.return_value = [TEST1, ]
    run = models.Run(client, {'id': 1234})
    only_status = models.Status(client, {'id': 234})
    first = next(client.tests(run, with_status=only_status))
    assert isinstance(first, models.Test)
    assert first.id == 441
    client.api.tests_by_run_id.assert_called_once_with(1234, '234')
def test_tests_by_run_with_2_status(client):
    """Verify ``client.tests(Run)`` joins two status filters into one CSV argument."""
    client.api.tests_by_run_id.return_value = [TEST1, ]
    run = models.Run(client, {'id': 1234})
    statuses = tuple(models.Status(client, {'id': sid}) for sid in (234, 345))
    first = next(client.tests(run, with_status=statuses))
    assert isinstance(first, models.Test)
    assert first.id == 441
    client.api.tests_by_run_id.assert_called_once_with(1234, '234,345')
def test_tests_by_run_exc_1(client):
    """Verify a non-Status ``with_status`` paired with a Run raises TypeError."""
    run = models.Run(client, {'id': 1234})
    with pytest.raises(TypeError) as exc:
        next(client.tests(run, with_status=111))
    message = str(exc)
    assert "None, models.Status" in message
    assert "iterable of models.Status objects" in message
    assert str(int) in message
    assert not client.api.tests_by_run_id.called
def test_tests_by_run_exc_2(client):
    """Verify a mixed status iterable paired with a Run raises TypeError."""
    good = [models.Status(client, {'id': sid}) for sid in (234, 345)]
    run = models.Run(client, {'id': 1234})
    with pytest.raises(TypeError) as exc:
        next(client.tests(run, with_status=(good[0], 111, good[1])))
    message = str(exc)
    assert "None, models.Status" in message
    assert "iterable of models.Status objects" in message
    assert str(int) in message
    assert not client.api.tests_by_run_id.called
def test_user(client):
    """A parameterless ``user()`` call returns a blank User with every field unset."""
    blank = client.user()
    for attr in ('email', 'id', 'is_active', 'name'):
        assert getattr(blank, attr) is None
def test_user_by_email(client):
    """``user(email)`` resolves a specific user through the user_by_email API."""
    email = 'mock.user@mock.com'
    client.api.user_by_email.return_value = {"email": email}
    found = client.user(email)
    assert isinstance(found, models.User)
    assert found.email == email
    client.api.user_by_email.assert_called_once_with(email)
def test_user_by_email_exc(client):
    """A string without an '@' is rejected with ValueError before any API call."""
    bad_email = 'not valid'
    client.api.user_by_email.return_value = {"email": bad_email}
    with pytest.raises(ValueError) as exc:
        client.user(bad_email)
    assert 'must be a string that includes an "@"' in str(exc)
    assert not client.api.user_by_email.called
def test_user_by_id(client):
    """``user(int)`` resolves a specific user by numeric ID through the API."""
    uid = 1234
    client.api.user_by_id.return_value = {"id": uid}
    found = client.user(uid)
    assert isinstance(found, models.User)
    assert found.id == uid
    client.api.user_by_id.assert_called_once_with(uid)
def test_users(client):
    """``users()`` yields a User object per API record, in order, via one argless call."""
    client.api.users.return_value = [USER1, USER2, USER3]
    gen = client.users()
    for expected_name in ('user1', 'user2', 'user3'):
        user = next(gen)
        assert isinstance(user, models.User)
        assert user.name == expected_name
    assert client.api.users.call_args == mock.call()
def test_change_cache_timeout_single_change(client):
    """change_cache_timeout updates the timeout for exactly one object type."""
    client.api.cache_timeouts = {client.api: {}}
    assert client.api.cache_timeouts[client.api] == {}
    client.change_cache_timeout(30, models.Project)
    assert client.api.cache_timeouts[client.api][models.Project] == 30
def test_change_cache_timeout_change_all(client):
    """change_cache_timeout without a type updates every exported model class."""
    client.api.cache_timeouts = {client.api: {}}
    assert client.api.cache_timeouts[client.api] == {}
    client.change_cache_timeout(30)
    for cls_name in models.__all__:
        model_cls = getattr(models, cls_name)
        assert client.api.cache_timeouts[client.api][model_cls] == 30
def test_change_cache_timeout_exc(client):
    """Passing a non-model class to change_cache_timeout raises a TypeError naming it."""
    bad_cls = type(1234)
    with pytest.raises(TypeError) as exc:
        client.change_cache_timeout(30, bad_cls)
    assert "found class of type {0}".format(bad_cls) in str(exc)
def test_clear_cache(client):
    """clear_cache clears the per-method cache of every cached API endpoint."""
    client.clear_cache()
    cached_methods = (
        'case_by_id', 'case_types', 'config_groups', 'milestone_by_id',
        'milestones', 'plan_by_id', 'priorities', 'project_by_id',
        'projects', 'results_by_test_id', 'run_by_id',
        'runs_by_project_id', 'section_by_id', 'sections_by_project_id',
        'statuses', 'suite_by_id', 'suites_by_project_id', 'templates',
        'test_by_id', 'tests_by_run_id', 'user_by_email', 'user_by_id',
        'users',
    )
    for name in cached_methods:
        assert getattr(client.api, name).cache.clear.called
| mit |
RaoUmer/django | django/contrib/auth/tokens.py | 4 | 2631 | from datetime import date
from django.conf import settings
from django.utils.http import int_to_base36, base36_to_int
from django.utils.crypto import constant_time_compare, salted_hmac
from django.utils import six
class PasswordResetTokenGenerator(object):
    """
    Strategy object used to generate and check tokens for the password
    reset mechanism.
    """
    def make_token(self, user):
        """
        Returns a token that can be used once to do a password reset
        for the given user.
        """
        # Token format: "<days-since-2001-base36>-<truncated-hmac>".
        return self._make_token_with_timestamp(user, self._num_days(self._today()))

    def check_token(self, user, token):
        """
        Check that a password reset token is correct for a given user.

        Returns False for malformed tokens, tampered tokens, and tokens
        older than settings.PASSWORD_RESET_TIMEOUT_DAYS.
        """
        # Parse the token
        try:
            ts_b36, hash = token.split("-")
        except ValueError:
            return False

        try:
            ts = base36_to_int(ts_b36)
        except ValueError:
            return False

        # Check that the timestamp/uid has not been tampered with.
        # constant_time_compare avoids leaking match length via timing.
        if not constant_time_compare(self._make_token_with_timestamp(user, ts), token):
            return False

        # Check the timestamp is within limit
        if (self._num_days(self._today()) - ts) > settings.PASSWORD_RESET_TIMEOUT_DAYS:
            return False

        return True

    def _make_token_with_timestamp(self, user, timestamp):
        # timestamp is number of days since 2001-1-1. Converted to
        # base 36, this gives us a 3 digit string until about 2121
        ts_b36 = int_to_base36(timestamp)

        # By hashing on the internal state of the user and using state
        # that is sure to change (the password salt will change as soon as
        # the password is set, at least for current Django auth, and
        # last_login will also change), we produce a hash that will be
        # invalid as soon as it is used.
        # We limit the hash to 20 chars to keep URL short
        key_salt = "django.contrib.auth.tokens.PasswordResetTokenGenerator"

        # Ensure results are consistent across DB backends
        login_timestamp = user.last_login.replace(microsecond=0, tzinfo=None)

        value = (six.text_type(user.id) + user.password +
                 six.text_type(login_timestamp) + six.text_type(timestamp))
        # NOTE(review): the local name `hash` shadows the builtin; harmless
        # here but worth renaming in a behavior-changing pass.
        # [::2] keeps every other hex digit -> 20 chars, to keep URLs short.
        hash = salted_hmac(key_salt, value).hexdigest()[::2]
        return "%s-%s" % (ts_b36, hash)

    def _num_days(self, dt):
        # Days elapsed since the 2001-01-01 epoch used by the token scheme.
        return (dt - date(2001, 1, 1)).days

    def _today(self):
        # Used for mocking in tests
        return date.today()

# Module-level singleton used throughout django.contrib.auth.
default_token_generator = PasswordResetTokenGenerator()
| bsd-3-clause |
michdolan/samples | metrics.py | 1 | 3282 | """
Nuke 9 exposed a Python API for retrieving performance metrics from nodes
during rendering, which had previously only been possible via command-line on
Linux and was limited.
Using that API, this script extends Nuke's standard node performance profiling
with additional data as to where each node's cost fits into the comp. Like the
builtin method, we color the nodes green to red based on performance, the
color being proportional to its time between the fastest and slowest nodes.
This script can take into account any or all node performance profile instead
of just the default engine profile, and can optionally analyze specific node
classes at a time.
We also convert the standard microsecond timing into a more readable clock
representation, and a linear percentage [0.0, 1.0] of where the node fits in
with siblings, performance-wise.
"""
from datetime import timedelta
import operator
import nuke
# Performance-profile categories whose wall times are summed per node.
# Nuke's builtin profiling reportedly uses only the engine profile; this
# script aggregates all four (see module docstring above).
PROFILES = [
    nuke.PROFILE_STORE,
    nuke.PROFILE_VALIDATE,
    nuke.PROFILE_REQUEST,
    nuke.PROFILE_ENGINE,
]
def allNodes(type_=None):
""""
Recursive version of nuke.allNodes with class filtering support
:param type_: str
Nuke node class name
:return: list of nuke nodes
"""
with nuke.root():
nodes = nuke.allNodes(recurseGroups=True)
if type_ is not None:
nodes = filter(lambda n: n.Class() == type_, nodes)
return nodes
def run(type_=None):
    """
    Process performance on all nodes, calculated upstream of the currently
    viewed node, coloring and labeling each node by its share of total cost.

    :param type_: optional Nuke node class name restricting the analysis
    """
    frame = int(nuke.root()['frame'].value())

    # Force Nuke to recalculate the image, with the performance timers active
    nuke.memory('free')
    nuke.clearRAMCache()
    nuke.clearDiskCache()
    nuke.startPerformanceTimers()

    try:
        # Retrieve node being viewed
        target = nuke.activeViewer().node().input(0)
    except AttributeError:
        nuke.critical("Please view a node to calculate performance from")
        return

    # Writing a null image to force Nuke to process the image
    write = nuke.nodes.Write(file='temp.null', file_type='null')
    write.setInput(0, target)
    nuke.execute(write, frame, frame, continueOnError=True)
    nuke.delete(write)

    # Get total timings for all nodes
    metrics = {}
    for node in allNodes(type_):
        total = 0
        for prof in PROFILES:
            total += node.performanceInfo(prof)['timeTakenWall']
        metrics[node] = abs(total)  # Sometimes slow nodes return negative?

    # Stop timers, which will clear Nuke's builtin timer coloring/labeling
    nuke.stopPerformanceTimers()

    if not metrics:
        # BUGFIX: nothing matched (e.g. class filter excluded everything);
        # the original would raise IndexError on perf[0] below.
        return

    # Sort nodes by time and calculate range
    perf = sorted(metrics.items(), key=operator.itemgetter(1))
    minT = perf[0][1]
    maxT = perf[-1][1]
    # BUGFIX: guard against ZeroDivisionError when every node reports the
    # same time (rangeT == 0); all nodes then get amt == 0.0 (fastest/green).
    rangeT = (maxT - minT) or 1

    for node, t in perf:
        # Interpolated amount of time in range (reverse lerp)
        amt = float(t - minT) / rangeT

        # Get Nuke friendly tile_color string
        r = int(amt * 255)
        g = int((1.0 - amt) * 255)
        color = '0x%02x%02x%02xff' % (r, g, 0)

        # Mark node
        node['tile_color'].fromScript(color)
        node['label'].setValue('%s\n%f%%' % (timedelta(microseconds=t), amt))
# Allow running directly from Nuke's script editor / command line.
if __name__ == '__main__':
    run()
| mit |
tuulos/luigi | test/lock_test.py | 34 | 2946 | # -*- coding: utf-8 -*-
#
# Copyright 2012-2015 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import subprocess
import tempfile
import mock
from helpers import unittest
import luigi
import luigi.lock
import luigi.notifications
luigi.notifications.DEBUG = True
class TestCmd(unittest.TestCase):
    """Tests for luigi.lock.getpcmd."""

    def test_getpcmd(self):
        """A live child process reports its full command line (or a bracketed kernel-style name)."""
        proc = subprocess.Popen(["sleep", "1"])
        reported = luigi.lock.getpcmd(proc.pid)
        self.assertTrue(reported in ["sleep 1", '[sleep]'])
        proc.kill()
class LockTest(unittest.TestCase):
    """Tests for luigi.lock.acquire_for / get_info using a throwaway pid dir."""

    def setUp(self):
        # Fresh, empty pid directory per test; get_info derives the pid file
        # path for the current (test-runner) process.
        self.pid_dir = tempfile.mkdtemp()
        self.pid, self.cmd, self.pid_file = luigi.lock.get_info(self.pid_dir)

    def tearDown(self):
        # Not every test creates the pid file, hence the existence check.
        if os.path.exists(self.pid_file):
            os.remove(self.pid_file)
        os.rmdir(self.pid_dir)

    def test_get_info(self):
        """get_info reports the command line of an arbitrary pid, non-ASCII intact."""
        p = subprocess.Popen(["yes", "à我ф"], stdout=subprocess.PIPE)
        pid, cmd, pid_file = luigi.lock.get_info(self.pid_dir, p.pid)
        p.kill()
        self.assertEqual(cmd, 'yes à我ф')

    def test_acquiring_free_lock(self):
        """A lock with no pid file is acquired."""
        acquired = luigi.lock.acquire_for(self.pid_dir)
        self.assertTrue(acquired)

    def test_acquiring_taken_lock(self):
        """A lock already held by this same command is refused by default."""
        with open(self.pid_file, 'w') as f:
            f.write('%d\n' % (self.pid, ))
        acquired = luigi.lock.acquire_for(self.pid_dir)
        self.assertFalse(acquired)

    def test_acquiring_partially_taken_lock(self):
        """With num_available=2 a second holder may still acquire; pid file is world-writable."""
        with open(self.pid_file, 'w') as f:
            f.write('%d\n' % (self.pid, ))
        acquired = luigi.lock.acquire_for(self.pid_dir, 2)
        self.assertTrue(acquired)
        s = os.stat(self.pid_file)
        self.assertEqual(s.st_mode & 0o777, 0o777)

    def test_acquiring_lock_from_missing_process(self):
        """A pid file referring to a dead process does not block acquisition."""
        fake_pid = 99999
        with open(self.pid_file, 'w') as f:
            f.write('%d\n' % (fake_pid, ))
        acquired = luigi.lock.acquire_for(self.pid_dir)
        self.assertTrue(acquired)
        s = os.stat(self.pid_file)
        self.assertEqual(s.st_mode & 0o777, 0o777)

    @mock.patch('os.kill')
    def test_take_lock_with_kill(self, kill_fn):
        """With kill_signal set, the existing holder is signalled and the lock taken."""
        with open(self.pid_file, 'w') as f:
            f.write('%d\n' % (self.pid,))
        kill_signal = 77777
        acquired = luigi.lock.acquire_for(self.pid_dir, kill_signal=kill_signal)
        self.assertTrue(acquired)
        kill_fn.assert_called_once_with(self.pid, kill_signal)
| apache-2.0 |
AladdinSonni/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/tool/bot/expectedfailures_unittest.py | 122 | 5271 | # Copyright (c) 2009 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import unittest2 as unittest
from webkitpy.tool.bot.expectedfailures import ExpectedFailures
class MockResults(object):
    """Minimal stand-in for a layout-test results object.

    Exposes just the two accessors ExpectedFailures consults: the list of
    failing test names and the failure-reporting limit.
    """

    def __init__(self, failing_tests=None, failure_limit=10):
        # BUGFIX: the original used a mutable default (failing_tests=[]),
        # which is shared across all instances created without the argument;
        # mutating one instance's list would leak into every other.
        self._failing_tests = [] if failing_tests is None else failing_tests
        self._failure_limit_count = failure_limit

    def failure_limit_count(self):
        """Return the maximum number of failures the mocked run reports (may be None)."""
        return self._failure_limit_count

    def failing_tests(self):
        """Return the list of failing test names."""
        return self._failing_tests
class ExpectedFailuresTest(unittest.TestCase):
    """Unit tests for ExpectedFailures' trust and expectation bookkeeping."""

    def _assert_can_trust(self, results, can_trust):
        # Helper: checks the private classifier that decides whether a
        # results object is trustworthy enough to record expectations from.
        self.assertEqual(ExpectedFailures._should_trust(results), can_trust)

    def test_can_trust_results(self):
        """Results are trusted only when failures exist and stay under the limit."""
        self._assert_can_trust(None, False)
        self._assert_can_trust(MockResults(failing_tests=[], failure_limit=None), False)
        self._assert_can_trust(MockResults(failing_tests=[], failure_limit=10), False)
        self._assert_can_trust(MockResults(failing_tests=[1], failure_limit=None), False)
        self._assert_can_trust(MockResults(failing_tests=[1], failure_limit=2), True)
        # At or over the limit the run may have been truncated -> untrusted.
        self._assert_can_trust(MockResults(failing_tests=[1], failure_limit=1), False)
        self._assert_can_trust(MockResults(failing_tests=[1, 2], failure_limit=1), False)

    def _assert_expected(self, expected_failures, failures, expected):
        # Helper: checks whether a new failure set is fully covered by the
        # previously recorded expectations.
        self.assertEqual(expected_failures.failures_were_expected(MockResults(failures)), expected)

    def test_failures_were_expected(self):
        """Each update() replaces the expected set; only subsets of it are 'expected'."""
        failures = ExpectedFailures()
        failures.update(MockResults(['foo.html']))
        self._assert_expected(failures, ['foo.html'], True)
        self._assert_expected(failures, ['bar.html'], False)
        self._assert_expected(failures, ['bar.html', 'foo.html'], False)
        failures.update(MockResults(['baz.html']))
        self._assert_expected(failures, ['baz.html'], True)
        self._assert_expected(failures, ['foo.html'], False)
        failures.update(MockResults([]))
        self._assert_expected(failures, ['baz.html'], False)
        self._assert_expected(failures, ['foo.html'], False)

    def test_unexpected_failures_observed(self):
        """unexpected_failures_observed returns only failures outside the expected set."""
        failures = ExpectedFailures()
        failures.update(MockResults(['foo.html']))
        self.assertEqual(failures.unexpected_failures_observed(MockResults(['foo.html', 'bar.html'])), set(['bar.html']))
        self.assertEqual(failures.unexpected_failures_observed(MockResults(['baz.html'])), set(['baz.html']))
        unbounded_results = MockResults(['baz.html', 'qux.html', 'taco.html'], failure_limit=3)
        self.assertEqual(failures.unexpected_failures_observed(unbounded_results), set(['baz.html', 'qux.html', 'taco.html']))
        unbounded_results_with_existing_failure = MockResults(['foo.html', 'baz.html', 'qux.html', 'taco.html'], failure_limit=4)
        self.assertEqual(failures.unexpected_failures_observed(unbounded_results_with_existing_failure), set(['baz.html', 'qux.html', 'taco.html']))

    def test_unexpected_failures_observed_when_tree_is_hosed(self):
        """When the recorded run itself hit the failure limit, no answer (None) is given."""
        failures = ExpectedFailures()
        failures.update(MockResults(['foo.html', 'banana.html'], failure_limit=2))
        self.assertEqual(failures.unexpected_failures_observed(MockResults(['foo.html', 'bar.html'])), None)
        self.assertEqual(failures.unexpected_failures_observed(MockResults(['baz.html'])), None)
        unbounded_results = MockResults(['baz.html', 'qux.html', 'taco.html'], failure_limit=3)
        self.assertEqual(failures.unexpected_failures_observed(unbounded_results), None)
        unbounded_results_with_existing_failure = MockResults(['foo.html', 'baz.html', 'qux.html', 'taco.html'], failure_limit=4)
        self.assertEqual(failures.unexpected_failures_observed(unbounded_results_with_existing_failure), None)
| bsd-3-clause |
TathagataChakraborti/resource-conflicts | PLANROB-2015/seq-sat-lama/py2.5/lib/python2.5/email/quoprimime.py | 93 | 10839 | # Copyright (C) 2001-2006 Python Software Foundation
# Author: Ben Gertzfield
# Contact: email-sig@python.org
"""Quoted-printable content transfer encoding per RFCs 2045-2047.
This module handles the content transfer encoding method defined in RFC 2045
to encode US ASCII-like 8-bit data called `quoted-printable'. It is used to
safely encode text that is in a character set similar to the 7-bit US ASCII
character set, but that includes some 8-bit characters that are normally not
allowed in email bodies or headers.
Quoted-printable is very space-inefficient for encoding binary files; use the
email.base64MIME module for that instead.
This module provides an interface to encode and decode both headers and bodies
with quoted-printable encoding.
RFC 2045 defines a method for including character set information in an
`encoded-word' in a header. This method is commonly used for 8-bit real names
in To:/From:/Cc: etc. fields, as well as Subject: lines.
This module does not do the line wrapping or end-of-line character
conversion necessary for proper internationalized headers; it only
does dumb encoding and decoding. To deal with the various line
wrapping issues, use the email.Header module.
"""
# Public API of this module.
__all__ = [
    'body_decode',
    'body_encode',
    'body_quopri_check',
    'body_quopri_len',
    'decode',
    'decodestring',
    'encode',
    'encodestring',
    'header_decode',
    'header_encode',
    'header_quopri_check',
    'header_quopri_len',
    'quote',
    'unquote',
    ]

import re

from string import hexdigits
from email.utils import fix_eols

# Canonical CRLF and bare-LF line separators used by the codecs below.
CRLF = '\r\n'
NL = '\n'

# See also Charset.py
# Overhead of the RFC 2047 encoded-word chrome around each chunk
# ("=?" + "?q?" + "?=" is 7 characters).
MISC_LEN = 7

# Characters that must be escaped in header ('Q') encoding: anything
# outside the listed safe set.
hqre = re.compile(r'[^-a-zA-Z0-9!*+/ ]')
# Characters that must be escaped in body quoted-printable encoding.
bqre = re.compile(r'[^ !-<>-~\t]')
# Helpers
def header_quopri_check(c):
    """Return True if the character should be escaped with header quopri."""
    return hqre.match(c) is not None
def body_quopri_check(c):
    """Return True if the character should be escaped with body quopri."""
    return bqre.match(c) is not None
def header_quopri_len(s):
    """Return the length of str when it is encoded with header quopri."""
    # Escaped characters expand to three bytes ("=XX"); all others stay one.
    return sum(3 if hqre.match(c) else 1 for c in s)
def body_quopri_len(str):
    """Return the length of str when it is encoded with body quopri."""
    # Escaped characters expand to three bytes ("=XX"); all others stay one.
    return sum(3 if bqre.match(c) else 1 for c in str)
def _max_append(L, s, maxlen, extra=''):
if not L:
L.append(s.lstrip())
elif len(L[-1]) + len(s) <= maxlen:
L[-1] += extra + s
else:
L.append(s.lstrip())
def unquote(s):
    """Turn a string in the form =AB to the ASCII character with value 0xab"""
    code = int(s[1:3], 16)
    return chr(code)
def quote(c):
    """Return the quoted-printable =XX escape (uppercase hex) for one character."""
    return "={0:02X}".format(ord(c))
def header_encode(header, charset="iso-8859-1", keep_eols=False,
                  maxlinelen=76, eol=NL):
    """Encode a single header line with quoted-printable (like) encoding.

    Defined in RFC 2045, this `Q' encoding is similar to quoted-printable, but
    used specifically for email header fields to allow charsets with mostly 7
    bit characters (and some 8 bit) to remain more or less readable in non-RFC
    2045 aware mail clients.

    charset names the character set to use to encode the header.  It defaults
    to iso-8859-1.

    The resulting string will be in the form:

    "=?charset?q?I_f=E2rt_in_your_g=E8n=E8ral_dire=E7tion?\\n
      =?charset?q?Silly_=C8nglish_Kn=EEghts?="

    with each line wrapped safely at, at most, maxlinelen characters (defaults
    to 76 characters).  If maxlinelen is None, the entire string is encoded in
    one chunk with no splitting.

    End-of-line characters (\\r, \\n, \\r\\n) will be automatically converted
    to the canonical email line separator \\r\\n unless the keep_eols
    parameter is True (the default is False).

    Each line of the header will be terminated in the value of eol, which
    defaults to "\\n".  Set this to "\\r\\n" if you are using the result of
    this function directly in email.
    """
    # Return empty headers unchanged
    if not header:
        return header

    if not keep_eols:
        header = fix_eols(header)

    # Quopri encode each line, in encoded chunks no greater than maxlinelen in
    # length, after the RFC chrome is added in.
    quoted = []
    if maxlinelen is None:
        # An obnoxiously large number that's good enough
        max_encoded = 100000
    else:
        # Budget per chunk once "=?charset?q?...?=" chrome is accounted for.
        max_encoded = maxlinelen - len(charset) - MISC_LEN - 1

    for c in header:
        # Space may be represented as _ instead of =20 for readability
        if c == ' ':
            _max_append(quoted, '_', max_encoded)
        # These characters can be included verbatim
        elif not hqre.match(c):
            _max_append(quoted, c, max_encoded)
        # Otherwise, replace with hex value like =E2
        else:
            _max_append(quoted, "=%02X" % ord(c), max_encoded)

    # Now add the RFC chrome to each encoded chunk and glue the chunks
    # together.  BAW: should we be able to specify the leading whitespace in
    # the joiner?
    joiner = eol + ' '
    return joiner.join(['=?%s?q?%s?=' % (charset, line) for line in quoted])
def encode(body, binary=False, maxlinelen=76, eol=NL):
    """Encode with quoted-printable, wrapping at maxlinelen characters.

    If binary is False (the default), end-of-line characters will be converted
    to the canonical email end-of-line sequence \\r\\n.  Otherwise they will
    be left verbatim.

    Each line of encoded text will end with eol, which defaults to "\\n".  Set
    this to "\\r\\n" if you will be using the result of this function directly
    in an email.

    Each line will be wrapped at, at most, maxlinelen characters (defaults to
    76 characters).  Long lines will have the `soft linefeed' quoted-printable
    character "=" appended to them, so the decoded text will be identical to
    the original text.
    """
    if not body:
        return body

    if not binary:
        body = fix_eols(body)

    # BAW: We're accumulating the body text by string concatenation.  That
    # can't be very efficient, but I don't have time now to rewrite it.  It
    # just feels like this algorithm could be more efficient.
    encoded_body = ''
    lineno = -1
    # Preserve line endings here so we can check later to see an eol needs to
    # be added to the output later.
    lines = body.splitlines(1)
    for line in lines:
        # But strip off line-endings for processing this line.
        if line.endswith(CRLF):
            line = line[:-2]
        elif line[-1] in CRLF:
            line = line[:-1]

        lineno += 1
        encoded_line = ''
        prev = None
        linelen = len(line)
        # Now we need to examine every character to see if it needs to be
        # quopri encoded.  BAW: again, string concatenation is inefficient.
        for j in range(linelen):
            c = line[j]
            prev = c
            if bqre.match(c):
                c = quote(c)
            elif j+1 == linelen:
                # Check for whitespace at end of line; special case
                # (trailing space/tab must be escaped per RFC 2045, but that
                # is deferred to the end-of-line handling below).
                if c not in ' \t':
                    encoded_line += c
                prev = c
                continue
            # Check to see to see if the line has reached its maximum length
            if len(encoded_line) + len(c) >= maxlinelen:
                # Emit a soft line break ("=") and start a fresh output line.
                encoded_body += encoded_line + '=' + eol
                encoded_line = ''
            encoded_line += c

        # Now at end of line..
        if prev and prev in ' \t':
            # Special case for whitespace at end of file
            if lineno + 1 == len(lines):
                prev = quote(prev)
                if len(encoded_line) + len(prev) > maxlinelen:
                    encoded_body += encoded_line + '=' + eol + prev
                else:
                    encoded_body += encoded_line + prev
            # Just normal whitespace at end of line
            else:
                # Protect the trailing whitespace with a soft break.
                encoded_body += encoded_line + prev + '=' + eol
            encoded_line = ''
        # Now look at the line we just finished and it has a line ending, we
        # need to add eol to the end of the line.
        if lines[lineno].endswith(CRLF) or lines[lineno][-1] in CRLF:
            encoded_body += encoded_line + eol
        else:
            encoded_body += encoded_line
        encoded_line = ''
    return encoded_body
# For convenience and backwards compatibility w/ standard base64 module
# (these are plain aliases of encode(), not wrappers).
body_encode = encode
encodestring = encode
# BAW: I'm not sure if the intent was for the signature of this function to be
# the same as base64MIME.decode() or not...
def decode(encoded, eol=NL):
    """Decode a quoted-printable string.

    Lines are separated with eol, which defaults to \\n.
    """
    if not encoded:
        return encoded
    # BAW: see comment in encode() above.  Again, we're building up the
    # decoded string with string concatenation, which could be done much more
    # efficiently.
    decoded = ''

    for line in encoded.splitlines():
        line = line.rstrip()
        if not line:
            # A blank input line decodes to a bare line separator.
            decoded += eol
            continue

        i = 0
        n = len(line)
        while i < n:
            c = line[i]
            # BUGFIX: the original used the Python-2-only '<>' operator,
            # which is a SyntaxError on Python 3; '!=' is valid on both.
            if c != '=':
                decoded += c
                i += 1
            # Otherwise, c == "=".  Are we at the end of the line?  If so, add
            # a soft line break.
            elif i+1 == n:
                i += 1
                continue
            # Decode if in form =AB
            elif i+2 < n and line[i+1] in hexdigits and line[i+2] in hexdigits:
                decoded += unquote(line[i:i+3])
                i += 3
            # Otherwise, not in form =AB, pass literally
            else:
                decoded += c
                i += 1

            if i == n:
                decoded += eol
    # Special case if original string did not end with eol
    if not encoded.endswith(eol) and decoded.endswith(eol):
        decoded = decoded[:-1]
    return decoded
# For convenience and backwards compatibility w/ standard base64 module
# (these are plain aliases of decode(), not wrappers).
body_decode = decode
decodestring = decode
def _unquote_match(match):
    """Turn a match in the form =AB to the ASCII character with value 0xab"""
    return unquote(match.group(0))
# Header decoding is done a bit differently
def header_decode(s):
    """Decode a string encoded with RFC 2045 MIME header `Q' encoding.

    This function does not parse a full MIME header value encoded with
    quoted-printable (like =?iso-8895-1?q?Hello_World?=) -- please use
    the high level email.Header class for that functionality.
    """
    # In header encoding, a literal underscore stands for a space.
    spaced = s.replace('_', ' ')
    # Then expand every =XX escape into the character it denotes.
    return re.sub(r'=\w{2}', _unquote_match, spaced)
| mit |
NetIQ/mongo-connector | mongo_connector/connector.py | 1 | 33340 | # Copyright 2013-2014 MongoDB, Inc.
#
# Portions related to enabling timezone awareness flag and purging
# the password files to avoid a possible security risk scenario.
# are copyrighted to:
# Copyright (c) 2015, NetIQ Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Discovers the mongo cluster and starts the connector.
"""
import json
import logging
import logging.handlers
import optparse
import os
import pymongo
import re
import shutil
import sys
import threading
import time
import imp
from mongo_connector import constants, errors, util
from mongo_connector.locking_dict import LockingDict
from mongo_connector.oplog_manager import OplogThread
from mongo_connector.doc_managers import doc_manager_simulator as simulator
from pymongo import MongoClient
class Connector(threading.Thread):
"""Checks the cluster for shards to tail.
"""
    def __init__(self, address, oplog_checkpoint, target_url, ns_set,
                 u_key, auth_key, doc_manager=None, auth_username=None,
                 collection_dump=True, batch_size=constants.DEFAULT_BATCH_SIZE,
                 fields=None, dest_mapping={},
                 auto_commit_interval=constants.DEFAULT_COMMIT_INTERVAL,
                 continue_on_error=False):
        """Set up the connector thread: load doc-manager modules, construct
        one DocManager per target URL, and prepare the oplog progress file.

        NOTE(review): ``dest_mapping={}`` is a mutable default argument,
        shared across all Connector instances created without it — worth
        changing to None-plus-fallback in a behavior-changing pass.
        """
        if target_url and not doc_manager:
            raise errors.ConnectorError("Cannot create a Connector with a "
                                        "target URL but no doc manager!")

        def is_string(s):
            # Py2/py3-compatible string check: basestring only exists on py2.
            try:
                return isinstance(s, basestring)
            except NameError:
                return isinstance(s, str)

        def load_doc_manager(path):
            # Import a doc-manager module from an arbitrary file path,
            # preferring importlib (py3) and falling back to imp (py2).
            name, _ = os.path.splitext(os.path.basename(path))
            try:
                import importlib.machinery
                loader = importlib.machinery.SourceFileLoader(name, path)
                module = loader.load_module(name)
            except ImportError:
                module = imp.load_source(name, path)
            return module

        doc_manager_modules = None
        if doc_manager is not None:
            # backwards compatilibity: doc_manager may be a string
            if is_string(doc_manager):
                doc_manager_modules = [load_doc_manager(doc_manager)]
            # doc_manager is a list
            else:
                doc_manager_modules = []
                for dm in doc_manager:
                    doc_manager_modules.append(load_doc_manager(dm))

        super(Connector, self).__init__()

        #can_run is set to false when we join the thread
        self.can_run = True

        #The name of the file that stores the progress of the OplogThreads
        self.oplog_checkpoint = oplog_checkpoint

        #main address - either mongos for sharded setups or a primary otherwise
        self.address = address

        #The URLs of each target system, respectively
        if is_string(target_url):
            self.target_urls = [target_url]
        elif target_url:
            self.target_urls = list(target_url)
        else:
            self.target_urls = None

        #The set of relevant namespaces to consider
        self.ns_set = ns_set

        #The dict of source namespace to destination namespace
        self.dest_mapping = dest_mapping

        #Whether the collection dump gracefully handles exceptions
        self.continue_on_error = continue_on_error

        #The key that is a unique document identifier for the target system.
        #Not necessarily the mongo unique key.
        self.u_key = u_key

        #Password for authentication
        self.auth_key = auth_key

        #Username for authentication
        self.auth_username = auth_username

        #The set of OplogThreads created
        self.shard_set = {}

        #Boolean chooses whether to dump the entire collection if no timestamp
        # is present in the config file
        self.collection_dump = collection_dump

        #Num entries to process before updating config file with current pos
        self.batch_size = batch_size

        #Dict of OplogThread/timestamp pairs to record progress
        self.oplog_progress = LockingDict()

        # List of fields to export
        self.fields = fields

        try:
            docman_kwargs = {"unique_key": u_key,
                             "namespace_set": ns_set,
                             "auto_commit_interval": auto_commit_interval}

            # No doc managers specified, using simulator
            if doc_manager is None:
                self.doc_managers = [simulator.DocManager(**docman_kwargs)]
            else:
                self.doc_managers = []
                for i, d in enumerate(doc_manager_modules):
                    # self.target_urls may be shorter than
                    # self.doc_managers, or left as None
                    if self.target_urls and i < len(self.target_urls):
                        target_url = self.target_urls[i]
                    else:
                        target_url = None

                    if target_url:
                        self.doc_managers.append(
                            d.DocManager(self.target_urls[i],
                                         **docman_kwargs))
                    else:
                        self.doc_managers.append(
                            d.DocManager(**docman_kwargs))
                # If more target URLs were given than doc managers, may need
                # to create additional doc managers
                # NOTE(review): if doc managers are given but target_url is
                # None, self.target_urls is None and this slice would raise
                # TypeError — confirm callers always pass matching URLs.
                for url in self.target_urls[i + 1:]:
                    self.doc_managers.append(
                        doc_manager_modules[-1].DocManager(url,
                                                           **docman_kwargs))
        except errors.ConnectionFailed:
            err_msg = "MongoConnector: Could not connect to target system"
            logging.critical(err_msg)
            self.can_run = False
            return

        if self.oplog_checkpoint is not None:
            if not os.path.exists(self.oplog_checkpoint):
                info_str = ("MongoConnector: Can't find %s, "
                            "attempting to create an empty progress log" %
                            self.oplog_checkpoint)
                logging.info(info_str)
                try:
                    # Create oplog progress file
                    open(self.oplog_checkpoint, "w").close()
                except IOError as e:
                    logging.critical("MongoConnector: Could not "
                                     "create a progress log: %s" %
                                     str(e))
                    sys.exit(2)
            else:
                # File exists: require read or write access before starting.
                if (not os.access(self.oplog_checkpoint, os.W_OK)
                        and not os.access(self.oplog_checkpoint, os.R_OK)):
                    logging.critical("Invalid permissions on %s! Exiting" %
                                     (self.oplog_checkpoint))
                    sys.exit(2)
def join(self):
    """Stop the connector thread and wait for it to finish.

    Clears the ``can_run`` flag (polled by the run() loop), stops every
    doc manager, then joins the underlying thread.
    """
    # can_run is checked each iteration of run(); clearing it makes the
    # main loop exit on its next pass.
    self.can_run = False
    for dm in self.doc_managers:
        dm.stop()
    threading.Thread.join(self)
def write_oplog_progress(self):
    """Write oplog progress to the checkpoint file provided by the user.

    The current file is moved aside as ``<file>.backup`` while the new
    contents are written; if a write fails, the backup contents are
    restored and the loop stops. The backup is removed on exit.
    """
    if self.oplog_checkpoint is None:
        return None

    # Keep the previous progress file around so it can be restored if
    # the rewrite fails partway through.
    backup_file = self.oplog_checkpoint + '.backup'
    os.rename(self.oplog_checkpoint, backup_file)

    # For each of the threads, write its progress to the file.
    with open(self.oplog_checkpoint, 'w') as dest:
        with self.oplog_progress as oplog_prog:
            oplog_dict = oplog_prog.get_dict()
            for oplog, time_stamp in oplog_dict.items():
                # Each entry is serialized as a JSON pair of
                # [str(oplog), timestamp-as-long].
                oplog_str = str(oplog)
                timestamp = util.bson_ts_to_long(time_stamp)
                json_str = json.dumps([oplog_str, timestamp])
                try:
                    dest.write(json_str)
                except IOError:
                    # Wipe the partial write, then restore the backup.
                    # (The original called shutil.copyfile with file
                    # objects, which raises TypeError -- copyfile takes
                    # path strings; copyfileobj takes file objects --
                    # and truncated at the current offset instead of
                    # the start of the file.)
                    dest.seek(0)
                    dest.truncate()
                    with open(backup_file, 'r') as backup:
                        shutil.copyfileobj(backup, dest)
                    break

    os.remove(backup_file)
def read_oplog_progress(self):
    """Read oplog progress from the checkpoint file provided by the user.

    This method is only called once, before any threads are spawned.
    Entries are stored as a flat JSON list alternating oplog string and
    timestamp-as-long; each pair is loaded into ``self.oplog_progress``
    with the timestamp converted back to a BSON timestamp.
    """
    if self.oplog_checkpoint is None:
        return None

    # Check for an empty or missing file before attempting to parse it.
    try:
        if os.stat(self.oplog_checkpoint).st_size == 0:
            logging.info("MongoConnector: Empty oplog progress file.")
            return None
    except OSError:
        return None

    # Use a context manager so the file is closed on every exit path
    # (the original had three separate manual close() sites).
    with open(self.oplog_checkpoint, 'r') as source:
        try:
            data = json.load(source)
        except ValueError:  # empty or corrupt file
            reason = "It may be empty or corrupt."
            logging.info("MongoConnector: Can't read oplog progress file. %s" %
                         (reason))
            return None

    oplog_dict = self.oplog_progress.get_dict()
    # data is [oplog_str_0, ts_0, oplog_str_1, ts_1, ...]
    for count in range(0, len(data), 2):
        oplog_str = data[count]
        time_stamp = data[count + 1]
        # stored as bson_ts
        oplog_dict[oplog_str] = util.long_to_bson_ts(time_stamp)
def run(self):
    """Discovers the mongo cluster and creates a thread for each primary.

    Detects whether ``self.address`` points at a mongos (sharded
    cluster) or a single replica set, then starts one OplogThread per
    primary and supervises it: progress is checkpointed every second,
    and the connector shuts down if any OplogThread dies unexpectedly.
    """
    main_conn = MongoClient(self.address, tz_aware=True)
    if self.auth_key is not None:
        main_conn['admin'].authenticate(self.auth_username, self.auth_key)
    self.read_oplog_progress()
    conn_type = None

    # "isdbgrid" only succeeds against a mongos; an OperationFailure
    # means we are talking to a plain replica set member instead.
    try:
        main_conn.admin.command("isdbgrid")
    except pymongo.errors.OperationFailure:
        conn_type = "REPLSET"

    if conn_type == "REPLSET":
        # Make sure we are connected to a replica set
        is_master = main_conn.admin.command("isMaster")
        if not "setName" in is_master:
            logging.error(
                'No replica set at "%s"! A replica set is required '
                'to run mongo-connector. Shutting down...' % self.address
            )
            return

        # Establish a connection to the replica set as a whole
        main_conn.disconnect()
        main_conn = MongoClient(self.address,
                                replicaSet=is_master['setName'],
                                tz_aware=True)
        if self.auth_key is not None:
            main_conn.admin.authenticate(self.auth_username, self.auth_key)

        #non sharded configuration
        oplog_coll = main_conn['local']['oplog.rs']

        oplog = OplogThread(
            primary_conn=main_conn,
            main_address=self.address,
            oplog_coll=oplog_coll,
            is_sharded=False,
            doc_manager=self.doc_managers,
            oplog_progress_dict=self.oplog_progress,
            namespace_set=self.ns_set,
            auth_key=self.auth_key,
            auth_username=self.auth_username,
            repl_set=is_master['setName'],
            collection_dump=self.collection_dump,
            batch_size=self.batch_size,
            fields=self.fields,
            dest_mapping=self.dest_mapping,
            continue_on_error=self.continue_on_error
        )
        self.shard_set[0] = oplog
        logging.info('MongoConnector: Starting connection thread %s' %
                     main_conn)
        oplog.start()

        # Supervision loop: checkpoint progress once per second and
        # shut everything down if the OplogThread stops on its own.
        while self.can_run:
            if not self.shard_set[0].running:
                logging.error("MongoConnector: OplogThread"
                              " %s unexpectedly stopped! Shutting down" %
                              (str(self.shard_set[0])))
                self.oplog_thread_join()
                for dm in self.doc_managers:
                    dm.stop()
                return

            self.write_oplog_progress()
            time.sleep(1)

    else:  # sharded cluster
        # Poll config.shards so shards added at runtime get a thread too.
        while self.can_run is True:

            for shard_doc in main_conn['config']['shards'].find():
                shard_id = shard_doc['_id']
                if shard_id in self.shard_set:
                    # Already have a thread for this shard: just check
                    # its health and checkpoint progress.
                    if not self.shard_set[shard_id].running:
                        logging.error("MongoConnector: OplogThread "
                                      "%s unexpectedly stopped! Shutting "
                                      "down" %
                                      (str(self.shard_set[shard_id])))
                        self.oplog_thread_join()
                        for dm in self.doc_managers:
                            dm.stop()
                        return

                    self.write_oplog_progress()
                    time.sleep(1)
                    continue
                try:
                    # shard host strings look like "replSetName/host:port,..."
                    repl_set, hosts = shard_doc['host'].split('/')
                except ValueError:
                    cause = "The system only uses replica sets!"
                    logging.error("MongoConnector: %s", cause)
                    self.oplog_thread_join()
                    for dm in self.doc_managers:
                        dm.stop()
                    return

                shard_conn = MongoClient(hosts, replicaSet=repl_set, tz_aware=True)
                oplog_coll = shard_conn['local']['oplog.rs']

                oplog = OplogThread(
                    primary_conn=shard_conn,
                    main_address=self.address,
                    oplog_coll=oplog_coll,
                    is_sharded=True,
                    doc_manager=self.doc_managers,
                    oplog_progress_dict=self.oplog_progress,
                    namespace_set=self.ns_set,
                    auth_key=self.auth_key,
                    auth_username=self.auth_username,
                    collection_dump=self.collection_dump,
                    batch_size=self.batch_size,
                    fields=self.fields,
                    dest_mapping=self.dest_mapping,
                    continue_on_error=self.continue_on_error
                )
                self.shard_set[shard_id] = oplog
                msg = "Starting connection thread"
                logging.info("MongoConnector: %s %s" % (msg, shard_conn))
                oplog.start()

    self.oplog_thread_join()
    self.write_oplog_progress()
def oplog_thread_join(self):
    """Signal every OplogThread to stop and wait for each to exit."""
    logging.info('MongoConnector: Stopping all OplogThreads')
    for oplog_thread in self.shard_set.values():
        oplog_thread.join()
def main():
    """ Starts the mongo connector (assuming CLI)

    Parses command-line options, configures logging, loads doc managers,
    builds the namespace mapping, then constructs and starts a Connector
    and blocks until it exits or the user interrupts.
    """
    parser = optparse.OptionParser()

    #-m is for the main address, which is a host:port pair, ideally of the
    #mongos. For non sharded clusters, it can be the primary.
    parser.add_option("-m", "--main", action="store", type="string",
                      dest="main_addr", default="localhost:27217",
                      help="""Specify the main address, which is a"""
                      """ host:port pair. For sharded clusters, this"""
                      """ should be the mongos address. For individual"""
                      """ replica sets, supply the address of the"""
                      """ primary. For example, `-m localhost:27217`"""
                      """ would be a valid argument to `-m`. Don't use"""
                      """ quotes around the address.""")

    #-o is to specify the oplog-config file. This file is used by the system
    #to store the last timestamp read on a specific oplog. This allows for
    #quick recovery from failure.
    parser.add_option("-o", "--oplog-ts", action="store", type="string",
                      dest="oplog_config", default="config.txt",
                      help="""Specify the name of the file that stores the """
                      """oplog progress timestamps. """
                      """This file is used by the system to store the last """
                      """timestamp read on a specific oplog. This allows """
                      """for quick recovery from failure. By default this """
                      """is `config.txt`, which starts off empty. An empty """
                      """file causes the system to go through all the mongo """
                      """oplog and sync all the documents. Whenever the """
                      """cluster is restarted, it is essential that the """
                      """oplog-timestamp config file be emptied - otherwise """
                      """the connector will miss some documents and behave """
                      """incorrectly.""")

    #--no-dump specifies whether we should read an entire collection from
    #scratch if no timestamp is found in the oplog_config.
    parser.add_option("--no-dump", action="store_true", default=False, help=
                      "If specified, this flag will ensure that "
                      "mongo_connector won't read the entire contents of a "
                      "namespace iff --oplog-ts points to an empty file.")

    #--batch-size specifies num docs to read from oplog before updating the
    #--oplog-ts config file with current oplog position
    parser.add_option("--batch-size", action="store",
                      default=constants.DEFAULT_BATCH_SIZE, type="int",
                      help="Specify an int to update the --oplog-ts "
                      "config file with latest position of oplog every "
                      "N documents. By default, the oplog config isn't "
                      "updated until we've read through the entire oplog. "
                      "You may want more frequent updates if you are at risk "
                      "of falling behind the earliest timestamp in the oplog")

    #-t is to specify the URL to the target system being used.
    parser.add_option("-t", "--target-url", "--target-urls", action="store",
                      type="string", dest="urls", default=None, help=
                      """Specify the URL to each target system being """
                      """used. For example, if you were using Solr out of """
                      """the box, you could use '-t """
                      """http://localhost:8080/solr' with the """
                      """SolrDocManager to establish a proper connection. """
                      """URLs should be specified in the same order as """
                      """their respective doc managers in the """
                      """--doc-managers option.  URLs are assigned to doc """
                      """managers respectively. Additional doc managers """
                      """are implied to have no target URL. Additional """
                      """URLs are implied to have the same doc manager """
                      """type as the last doc manager for which a URL was """
                      """specified. """
                      """Don't use quotes around addresses. """)

    #-n is to specify the namespaces we want to consider. The default
    #considers all the namespaces
    parser.add_option("-n", "--namespace-set", action="store", type="string",
                      dest="ns_set", default=None, help=
                      """Used to specify the namespaces we want to """
                      """consider. For example, if we wished to store all """
                      """documents from the test.test and alpha.foo """
                      """namespaces, we could use `-n test.test,alpha.foo`. """
                      """The default is to consider all the namespaces, """
                      """excluding the system and config databases, and """
                      """also ignoring the "system.indexes" collection in """
                      """any database.""")

    #-u is to specify the mongoDB field that will serve as the unique key
    #for the target system,
    parser.add_option("-u", "--unique-key", action="store", type="string",
                      dest="u_key", default="_id", help=
                      """The name of the MongoDB field that will serve """
                      """as the unique key for the target system. """
                      """Note that this option does not apply """
                      """when targeting another MongoDB cluster. """
                      """Defaults to "_id".""")

    #-f is to specify the authentication key file. This file is used by mongos
    #to authenticate connections to the shards, and we'll use it in the oplog
    #threads.
    parser.add_option("-f", "--password-file", action="store", type="string",
                      dest="auth_file", default=None, help=
                      """Used to store the password for authentication."""
                      """ Use this option if you wish to specify a"""
                      """ username and password but don't want to"""
                      """ type in the password. The contents of this"""
                      """ file should be the password for the admin user.""")

    #-p is to specify the password used for authentication.
    parser.add_option("-p", "--password", action="store", type="string",
                      dest="password", default=None, help=
                      """Used to specify the password."""
                      """ This is used by mongos to authenticate"""
                      """ connections to the shards, and in the"""
                      """ oplog threads. If authentication is not used, then"""
                      """ this field can be left empty as the default """)

    #-a is to specify the username for authentication.
    parser.add_option("-a", "--admin-username", action="store", type="string",
                      dest="admin_name", default="__system", help=
                      """Used to specify the username of an admin user to """
                      """authenticate with. To use authentication, the user """
                      """must specify both an admin username and a keyFile. """
                      """The default username is '__system'""")

    #-d is to specify the doc manager file.
    parser.add_option("-d", "--docManager", "--doc-managers", action="store",
                      type="string", dest="doc_managers", default=None, help=
                      """Used to specify the path to each doc manager """
                      """file that will be used. DocManagers should be """
                      """specified in the same order as their respective """
                      """target addresses in the --target-urls option. """
                      """URLs are assigned to doc managers """
                      """respectively. Additional doc managers are """
                      """implied to have no target URL. Additional URLs """
                      """are implied to have the same doc manager type as """
                      """the last doc manager for which a URL was """
                      """specified. By default, Mongo Connector will use """
                      """'doc_manager_simulator.py'.  It is recommended """
                      """that all doc manager files be kept in the """
                      """doc_managers folder in mongo-connector. For """
                      """more information about making your own doc """
                      """manager, see 'Writing Your Own DocManager' """
                      """section of the wiki""")

    #-g is the destination namespace
    parser.add_option("-g", "--dest-namespace-set", action="store",
                      type="string", dest="dest_ns_set", default=None, help=
                      """Specify a destination namespace mapping. Each """
                      """namespace provided in the --namespace-set option """
                      """will be mapped respectively according to this """
                      """comma-separated list. These lists must have """
                      """equal length. The default is to use the identity """
                      """mapping. This is currently only implemented """
                      """for mongo-to-mongo connections.""")

    #-s is to enable syslog logging.
    parser.add_option("-s", "--enable-syslog", action="store_true",
                      dest="enable_syslog", default=False, help=
                      """Used to enable logging to syslog."""
                      """ Use -l to specify syslog host.""")

    #--syslog-host is to specify the syslog host.
    parser.add_option("--syslog-host", action="store", type="string",
                      dest="syslog_host", default="localhost:514", help=
                      """Used to specify the syslog host."""
                      """ The default is 'localhost:514'""")

    #--syslog-facility is to specify the syslog facility.
    parser.add_option("--syslog-facility", action="store", type="string",
                      dest="syslog_facility", default="user", help=
                      """Used to specify the syslog facility."""
                      """ The default is 'user'""")

    #-i to specify the list of fields to export
    parser.add_option("-i", "--fields", action="store", type="string",
                      dest="fields", default=None, help=
                      """Used to specify the list of fields to export. """
                      """Specify a field or fields to include in the export. """
                      """Use a comma separated list of fields to specify multiple """
                      """fields. The '_id', 'ns' and '_ts' fields are always """
                      """exported.""")

    #--auto-commit-interval to specify auto commit time interval
    parser.add_option("--auto-commit-interval", action="store",
                      dest="commit_interval", type="int",
                      default=constants.DEFAULT_COMMIT_INTERVAL,
                      help="""Seconds in-between calls for the Doc Manager"""
                      """ to commit changes to the target system. A value of"""
                      """ 0 means to commit after every write operation."""
                      """ When left unset, Mongo Connector will not make"""
                      """ explicit commits. Some systems have"""
                      """ their own mechanism for adjusting a commit"""
                      """ interval, which should be preferred to this"""
                      """ option.""")

    #--continue-on-error to continue to upsert documents during a collection
    #dump, even if the documents cannot be inserted for some reason
    parser.add_option("--continue-on-error", action="store_true",
                      dest="continue_on_error", default=False, help=
                      "By default, if any document fails to upsert"
                      " during a collection dump, the entire operation fails."
                      " When this flag is enabled, normally fatal errors"
                      " will be caught and logged, allowing the collection"
                      " dump to continue.\n"
                      "Note: Applying oplog operations to an incomplete"
                      " set of documents due to errors may cause undefined"
                      " behavior. Use this flag to dump only.")

    #-v enables vebose logging
    parser.add_option("-v", "--verbose", action="store_true",
                      dest="verbose", default=False,
                      help="Sets verbose logging to be on.")

    #-w enable logging to a file
    parser.add_option("-w", "--logfile", dest="logfile",
                      help=("Log all output to a file rather than stream to "
                            "stderr. Omit to stream to stderr."))

    (options, args) = parser.parse_args()

    logger = logging.getLogger()
    loglevel = logging.INFO
    if options.verbose:
        loglevel = logging.DEBUG
    logger.setLevel(loglevel)

    if options.enable_syslog and options.logfile:
        print ("You cannot specify syslog and a logfile simultaneously, please"
               " choose the logging method you would prefer.")
        sys.exit(1)

    if options.enable_syslog:
        syslog_info = options.syslog_host.split(":")
        syslog_host = logging.handlers.SysLogHandler(
            address=(syslog_info[0], int(syslog_info[1])),
            facility=options.syslog_facility
        )
        syslog_host.setLevel(loglevel)
        logger.addHandler(syslog_host)
    elif options.logfile is not None:
        log_out = logging.FileHandler(options.logfile)
        log_out.setLevel(loglevel)
        log_out.setFormatter(logging.Formatter(
            '%(asctime)s - %(levelname)s - %(message)s'))
        logger.addHandler(log_out)
    else:
        log_out = logging.StreamHandler()
        log_out.setLevel(loglevel)
        log_out.setFormatter(logging.Formatter(
            '%(asctime)s - %(levelname)s - %(message)s'))
        logger.addHandler(log_out)

    logger.info('Beginning Mongo Connector')

    # Get DocManagers and target URLs
    # Each DocManager is assigned the respective (same-index) target URL
    # Additional DocManagers may be specified that take no target URL
    doc_managers = options.doc_managers
    doc_managers = doc_managers.split(",") if doc_managers else doc_managers
    target_urls = options.urls.split(",") if options.urls else None

    if options.doc_managers is None:
        logger.info('No doc managers specified, using simulator.')

    if options.ns_set is None:
        ns_set = []
    else:
        ns_set = options.ns_set.split(',')

    if options.dest_ns_set is None:
        dest_ns_set = ns_set
    else:
        dest_ns_set = options.dest_ns_set.split(',')

    # Both branches below that reject bad input call sys.exit, so
    # dest_mapping is always populated before it is used; the default
    # here just makes that invariant explicit.
    dest_mapping = {}
    if len(dest_ns_set) != len(ns_set):
        logger.error("Destination namespace must be the same length as the "
                     "origin namespace!")
        sys.exit(1)
    elif len(set(ns_set)) + len(set(dest_ns_set)) != 2 * len(ns_set):
        logger.error("Namespace set and destination namespace set should not "
                     "contain any duplicates!")
        sys.exit(1)
    else:
        ## Create a mapping of source ns to dest ns as a dict
        dest_mapping = dict(zip(ns_set, dest_ns_set))

    fields = options.fields
    if fields is not None:
        fields = options.fields.split(',')

    def purge(dir, pattern):
        """Delete every file in *dir* whose name matches *pattern*."""
        regexp = re.compile(pattern)
        for f in os.listdir(dir):
            if regexp.search(f):
                logger.debug("Deleting: %s" % f)
                os.remove(os.path.join(dir, f))

    key = None
    if options.auth_file is not None:
        try:
            with open(options.auth_file) as auth_fd:
                key = auth_fd.read()
            # Strip ALL whitespace (including the trailing newline) from
            # the key. The original code discarded re.sub's return value
            # (strings are immutable), leaving the key polluted with
            # whitespace and breaking authentication.
            key = re.sub(r'\s', '', key)
            dirname, filename = os.path.split(os.path.abspath(options.auth_file))
            fname, extn = os.path.splitext(filename)
            # NOTE(review): this removes every file in the directory that
            # shares the auth file's extension, not just the auth file
            # itself -- preserved as-is, but confirm this is intended.
            pattern = "%s%s$" % (".*\\", extn)
            purge(dirname, pattern)
        except IOError:
            logger.error('Could not parse password authentication file!')
            sys.exit(1)

    if options.password is not None:
        key = options.password

    if key is None and options.admin_name != "__system":
        logger.error("Admin username specified without password!")
        sys.exit(1)

    if options.commit_interval is not None and options.commit_interval < 0:
        raise ValueError("--auto-commit-interval must be non-negative")

    connector = Connector(
        address=options.main_addr,
        oplog_checkpoint=options.oplog_config,
        target_url=target_urls,
        ns_set=ns_set,
        u_key=options.u_key,
        auth_key=key,
        doc_manager=doc_managers,
        auth_username=options.admin_name,
        collection_dump=(not options.no_dump),
        batch_size=options.batch_size,
        fields=fields,
        dest_mapping=dest_mapping,
        auto_commit_interval=options.commit_interval,
        continue_on_error=options.continue_on_error
    )
    connector.start()

    # Block until the connector dies or the user interrupts.
    while True:
        try:
            time.sleep(3)
            if not connector.is_alive():
                break
        except KeyboardInterrupt:
            logging.info("Caught keyboard interrupt, exiting!")
            connector.join()
            break
if __name__ == '__main__':
main()
| apache-2.0 |
stscieisenhamer/glue | glue/plugins/tools/spectrum_tool/qt/profile_viewer.py | 4 | 13923 | from __future__ import absolute_import, division, print_function
import numpy as np
from matplotlib.transforms import blended_transform_factory
from glue.core.callback_property import CallbackProperty, add_callback
PICK_THRESH = 30 # pixel distance threshold for picking
class Grip(object):
    """Base class for interactive handles ("grips") on a ProfileViewer.

    A grip owns an optional matplotlib artist (created via
    ``_artist_factory``) and receives the viewer's mouse events through
    ``select``/``drag``/``release``/``dblclick``. Subclasses implement
    the abstract hooks.
    """

    def __init__(self, viewer, artist=True):
        # The ProfileViewer this grip is attached to
        self.viewer = viewer
        # When False, the grip ignores events and its artist is hidden
        self.enabled = True

        self.artist = None
        if artist:
            self.artist = self._artist_factory()

    def remove(self):
        raise NotImplementedError()

    def _artist_factory(self):
        """Return the artist object that renders this grip."""
        raise NotImplementedError()

    def pick_dist(self, x, y):
        """
        Return the distance, in pixels,
        between a point in (x,y) data space and
        the grip
        """
        raise NotImplementedError()

    def dblclick(self, x, y):
        """Respond to a double-click event

        Default is to ignore
        """
        pass

    def select(self, x, y):
        """
        Process a selection event (click) at x,y
        """
        raise NotImplementedError()

    def drag(self, x, y):
        """
        Process a drag to x, y
        """
        raise NotImplementedError()

    def release(self):
        """
        Process a release
        """
        raise NotImplementedError()

    def disable(self):
        """Hide the grip and stop it from responding to events."""
        self.enabled = False
        if self.artist is not None:
            self.artist.set_visible(False)
            self.viewer.axes.figure.canvas.draw()

    def enable(self):
        """Show the grip and let it respond to events again."""
        self.enabled = True
        if self.artist is not None:
            self.artist.set_visible(True)
            self.viewer.axes.figure.canvas.draw()
class ValueGrip(Grip):
    """A grip marking a single x value, rendered as a vertical line.

    Clicking within PICK_THRESH pixels of the line starts a drag that
    follows the mouse; double-clicking jumps the value to the click.
    """

    value = CallbackProperty(None)

    def __init__(self, viewer, artist=True):
        super(ValueGrip, self).__init__(viewer, artist)
        # True while a click-drag is in progress
        self._drag = False

    def _artist_factory(self):
        return ValueArtist(self)

    def dblclick(self, x, y):
        """Jump the grip straight to the double-clicked x position."""
        self.value = x

    def pick_dist(self, x, y):
        """Horizontal pixel distance between (x, y) and the grip line."""
        data_points = [[x, y], [self.value, y]]
        pixel_points = self.viewer.axes.transData.transform(data_points)
        return abs(pixel_points[1, 0] - pixel_points[0, 0])

    def select(self, x, y):
        """Begin a drag, but only if the click landed near the grip."""
        if self.pick_dist(x, y) > PICK_THRESH:
            return
        self._drag = True

    def drag(self, x, y):
        if self._drag:
            self.value = x

    def release(self):
        self._drag = False
class RangeGrip(Grip):
    """A grip marking a (lo, hi) interval on the x axis.

    Clicking near an edge resizes that edge, clicking near the center
    translates the whole range, and clicking far from the grip begins a
    brand-new range anchored at the click ("new mode").
    """

    # The current (lo, hi) interval in data coordinates
    range = CallbackProperty((None, None))

    def __init__(self, viewer):
        super(RangeGrip, self).__init__(viewer)

        # track state during drags
        self._move = None    # which part is dragged: 'left'/'center'/'right'
        self._ref = None     # range at drag start (center translation)
        self._refx = None    # x at drag start (center translation)
        self._refnew = None  # anchor x while defining a new range

    def _artist_factory(self):
        return RangeArtist(self)

    def pick_dist(self, x, y):
        """Pixel distance from (x, y) to the nearest of the range's
        left edge, right edge, or midpoint."""
        xy = np.array([[x, y],
                       [self.range[0], y],
                       [self.range[1], y],
                       [sum(self.range) / 2, y]])
        xypix = self.viewer.axes.transData.transform(xy)
        dx = np.abs(xypix[1:] - xypix[0])[:, 0]
        return min(dx)

    def select(self, x, y):
        # A far-away click starts a brand-new range instead of
        # manipulating the existing one.
        if self.pick_dist(x, y) > PICK_THRESH:
            return self.new_select(x, y)

        # Decide which part was grabbed by quarter-widths: left quarter
        # resizes the left edge, middle half translates, right quarter
        # resizes the right edge.
        cen = sum(self.range) / 2.
        wid = self.range[1] - self.range[0]
        if x < cen - wid / 4.:
            self._move = 'left'
        elif x < cen + wid / 4.:
            self._move = 'center'
            self._ref = self.range
            self._refx = x
        else:
            self._move = 'right'

    def new_select(self, x, y):
        """
        Begin a selection in "new range" mode.
        In this mode, the previous grip position is ignored,
        and the new range is defined by the select/release positions
        """
        self._refnew = x
        self.range = (x, x)

    def new_drag(self, x, y):
        """
        Drag the selection in "new mode"
        """
        if self._refnew is not None:
            self._set_range(self._refnew, x)

    def drag(self, x, y):
        if self._refnew is not None:
            return self.new_drag(x, y)

        # Dragging an edge past the opposite edge flips which edge is
        # being moved, so the range never inverts.
        if self._move == 'left':
            if x > self.range[1]:
                self._move = 'right'
            self._set_range(x, self.range[1])

        elif self._move == 'center':
            dx = (x - self._refx)
            self._set_range(self._ref[0] + dx, self._ref[1] + dx)
        else:
            if x < self.range[0]:
                self._move = 'left'
            self._set_range(self.range[0], x)

    def _set_range(self, lo, hi):
        # Normalize so the stored range is always (min, max)
        self.range = min(lo, hi), max(lo, hi)

    def release(self):
        self._move = None
        self._ref = None
        self._refx = None
        self._refnew = None
class ValueArtist(object):
    """Draws a ValueGrip as a vertical line spanning the axes height."""

    def __init__(self, grip, **kwargs):
        self.grip = grip
        # Redraw the line whenever the grip's value changes
        add_callback(grip, 'value', self._update)
        ax = self.grip.viewer.axes

        kwargs.setdefault('lw', 2)
        kwargs.setdefault('alpha', 0.5)
        kwargs.setdefault('c', '#ffb304')
        # x in data coordinates, y in axes-fraction coordinates, so the
        # line always spans the full vertical extent of the axes
        trans = blended_transform_factory(ax.transData, ax.transAxes)

        self._line, = ax.plot([grip.value, grip.value], [0, 1],
                              transform=trans, **kwargs)

    def _update(self, value):
        self._line.set_xdata([value, value])
        self._line.axes.figure.canvas.draw()

    def set_visible(self, visible):
        self._line.set_visible(visible)
class RangeArtist(object):
    """Draws a RangeGrip as vertical lines at each edge joined by a
    horizontal bar at mid-height (an H-shaped glyph)."""

    def __init__(self, grip, **kwargs):
        self.grip = grip
        # Redraw whenever the grip's range changes
        add_callback(grip, 'range', self._update)
        ax = grip.viewer.axes
        # x in data coordinates, y in axes-fraction coordinates
        trans = blended_transform_factory(ax.transData, ax.transAxes)

        kwargs.setdefault('lw', 2)
        kwargs.setdefault('alpha', 0.5)
        kwargs.setdefault('c', '#ffb304')
        self._line, = ax.plot(self.x, self.y, transform=trans, **kwargs)

    @property
    def x(self):
        # Vertex x values: left edge (down/up), center bar, right edge
        l, r = self.grip.range
        return [l, l, l, r, r, r]

    @property
    def y(self):
        # Paired with x above: full-height edges and a mid-height bar
        return [0, 1, .5, .5, 0, 1]

    def _update(self, rng):
        self._line.set_xdata(self.x)
        self._line.axes.figure.canvas.draw()

    def set_visible(self, visible):
        self._line.set_visible(visible)
def _build_axes(figure):
    """Create the (main, residual) axes pair for a ProfileViewer.

    The two axes share their x axis, and offset notation is disabled on
    every axis so tick labels show absolute values.
    """
    residual_axes = figure.add_subplot(122)
    main_axes = figure.add_subplot(121, sharex=residual_axes)

    for axes in (main_axes, residual_axes):
        axes.xaxis.get_major_formatter().set_useOffset(False)
        axes.yaxis.get_major_formatter().set_useOffset(False)

    return main_axes, residual_axes
class ProfileViewer(object):
    """An interactive 1D profile plot with an optional residuals panel.

    Hosts draggable grips (values and ranges), forwards matplotlib
    mouse events to the active grip, and plots model fits plus their
    residuals below the main profile.
    """

    # Factories used by new_value_grip / new_range_grip; subclasses may
    # override these to customize grip behavior
    value_cls = ValueGrip
    range_cls = RangeGrip

    def __init__(self, figure):
        self.axes, self.resid_axes = _build_axes(figure)

        self._artist = None
        self._resid_artist = None
        self._x = self._xatt = self._y = self._yatt = None
        self._resid = None
        self.connect()

        self._fit_artists = []
        self.active_grip = None  # which grip should receive events?
        self.grips = []
        self._xlabel = ''

    def set_xlabel(self, xlabel):
        self._xlabel = xlabel

    def autoscale_ylim(self):
        """Rescale y limits of both panels to the data inside the
        current x limits (NaNs ignored, 5% headroom added)."""
        x, y = self._x, self._y
        xlim = self.axes.get_xlim()
        mask = (xlim[0] <= x) & (x <= xlim[1])
        ymask = y[mask]
        if ymask.size == 0:
            return

        ylim = np.nan_to_num(np.array([np.nanmin(ymask), np.nanmax(ymask)]))
        self.axes.set_ylim(ylim[0], ylim[1] + .05 * (ylim[1] - ylim[0]))

        if self._resid is None:
            return
        assert self._resid.size == y.size

        ymask = self._resid[mask]
        ylim = np.nan_to_num([np.nanmin(ymask), np.nanmax(ymask)])
        diff = .05 * (ylim[1] - ylim[0])
        self.resid_axes.set_ylim(ylim[0] - diff, ylim[1] + diff)

    def _relayout(self):
        # With a residual plot: shrink the main axes to the top of the
        # figure, show the residual axes underneath, and move the x
        # label (and tick labels) to the bottom panel.
        if self._resid_artist is not None:
            self.axes.set_position([0.1, .35, .88, .6])
            self.resid_axes.set_position([0.1, .15, .88, .2])
            self.resid_axes.set_xlabel(self._xlabel)
            self.resid_axes.set_visible(True)
            self.axes.set_xlabel('')
            [t.set_visible(False) for t in self.axes.get_xticklabels()]
        else:
            self.resid_axes.set_visible(False)
            self.axes.set_position([0.1, .15, .88, .83])
            self.axes.set_xlabel(self._xlabel)
            [t.set_visible(True) for t in self.axes.get_xticklabels()]

    def set_profile(self, x, y, xatt=None, yatt=None, **kwargs):
        """
        Set a new line profile

        :param x: X-coordinate data
        :type x: array-like

        :param y: Y-coordinate data
        :type y: array-like

        :param xatt: ComponentID associated with X axis
        :type xatt: :class:`~glue.core.data.ComponentID`

        :param yatt: ComponentID associated with Y axis
        :type yatt: :class:`~glue.core.data.ComponentID`

        Extra kwargs are passed to matplotlib.plot, to
        customize plotting

        Returns the created MPL artist
        """
        self.clear_fit()
        self._x = np.asarray(x).ravel()
        self._xatt = xatt
        self._y = np.asarray(y).ravel()
        self._yatt = yatt
        if self._artist is not None:
            self._artist.remove()

        kwargs.setdefault('drawstyle', 'steps-mid')

        self._artist = self.axes.plot(x, y, **kwargs)[0]
        self._relayout()
        self._redraw()

        return self._artist

    def clear_fit(self):
        """Remove any fit overlay and the residual plot."""
        for a in self._fit_artists:
            a.remove()
        self._fit_artists = []
        if self._resid_artist is not None:
            self._resid_artist.remove()
            self._resid_artist = None

    def connect(self):
        # Subscribe to the canvas mouse events; ids are kept so
        # disconnect() can unsubscribe later
        connect = self.axes.figure.canvas.mpl_connect
        self._down_id = connect('button_press_event', self._on_down)
        self._up_id = connect('button_release_event', self._on_up)
        self._move_id = connect('motion_notify_event', self._on_move)

    def disconnect(self):
        # mpl_disconnect returns None, which also clears the stored ids
        off = self.axes.figure.canvas.mpl_disconnect
        self._down_id = off(self._down_id)
        self._up_id = off(self._up_id)
        self._move_id = off(self._move_id)

    def _on_down(self, event):
        # Route button-press events to the active grip (double-clicks
        # go to dblclick, single clicks to select)
        if not event.inaxes:
            return

        if event.dblclick:
            if self.active_grip is not None:
                self.active_grip.dblclick(event.xdata, event.ydata)
            return

        if self.active_grip is not None and self.active_grip.enabled:
            self.active_grip.select(event.xdata, event.ydata)

    def _on_up(self, event):
        if not event.inaxes:
            return
        if self.active_grip is None or not self.active_grip.enabled:
            return

        self.active_grip.release()

    def _on_move(self, event):
        # Only drag while the left mouse button (button 1) is held
        if not event.inaxes or event.button != 1:
            return
        if self.active_grip is None or not self.active_grip.enabled:
            return

        self.active_grip.drag(event.xdata, event.ydata)

    def _redraw(self):
        self.axes.figure.canvas.draw()

    def profile_data(self, xlim=None):
        """Return the (x, y) profile arrays, optionally restricted to
        the interval *xlim*. Raises ValueError if no profile is set."""
        if self._x is None or self._y is None:
            raise ValueError("Must set profile first")

        x = self._x
        y = self._y
        if xlim is not None:
            mask = (min(xlim) <= x) & (x <= max(xlim))
            x = x[mask]
            y = y[mask]

        return x, y

    def fit(self, fitter, xlim=None):
        """Fit *fitter* to the profile (optionally windowed to *xlim*)
        and return (result, x, y, dy). dy is currently always None."""
        try:
            x, y = self.profile_data(xlim)
            dy = None
        except ValueError:
            raise ValueError("Must set profile before fitting")

        result = fitter.build_and_fit(x, y)

        return result, x, y, dy

    def plot_fit(self, fitter, fit_result):
        """Overlay *fit_result* on the profile and show its residuals
        in the lower panel."""
        self.clear_fit()
        x = self._x
        y = fitter.predict(fit_result, x)
        self._fit_artists = fitter.plot(fit_result, self.axes, x)
        resid = self._y - y
        self._resid = resid
        self._resid_artist, = self.resid_axes.plot(x, resid, 'k')
        self.autoscale_ylim()
        self._relayout()

    def new_value_grip(self, callback=None):
        """
        Create and return new ValueGrip

        :param callback: A callback function to be invoked
        whenever the grip.value property changes
        """
        result = self.value_cls(self)
        result.value = self._center[0]

        if callback is not None:
            add_callback(result, 'value', callback)
        self.grips.append(result)
        self.active_grip = result
        return result

    def new_range_grip(self, callback=None):
        """
        Create and return new RangeGrip

        :param callback: A callback function to be invoked
        whenever the grip.range property changes
        """
        result = self.range_cls(self)
        center = self._center[0]
        width = self._width

        # New range starts centered, spanning half the visible width
        result.range = center - width / 4, center + width / 4

        if callback is not None:
            add_callback(result, 'range', callback)

        self.grips.append(result)
        self.active_grip = result

        return result

    @property
    def _center(self):
        """Return the data coordinates of the axes center, as (x, y)"""
        xy = self.axes.transAxes.transform([(.5, .5)])
        xy = self.axes.transData.inverted().transform(xy)
        return tuple(xy.ravel())

    @property
    def _width(self):
        """Return the X-width of axes in data units"""
        xlim = self.axes.get_xlim()
        return xlim[1] - xlim[0]

    def pick_grip(self, x, y):
        """
        Given a coordinate in Data units,
        return the enabled Grip object nearest
        that point, or None if none are nearby
        """
        grips = [h for h in self.grips if h.enabled]
        if not grips:
            return

        dist, grip = min((h.pick_dist(x, y), h)
                         for h in grips)
        if dist < PICK_THRESH:
            return grip
| bsd-3-clause |
ento/elm-doc | src/elm_doc/tasks/assets.py | 1 | 6493 | from pathlib import Path
import re
import gzip
import tarfile
from elm_doc.run_config import Build
from elm_doc.utils import Namespace
# Gzipped archive of all frontend assets, shipped inside this package.
tarball = Path(__file__).parent.parent / 'assets' / 'assets.tar.gz'

# Help pages extracted from the archive and served as-is.
bundled_helps = [
    'assets/help/documentation-format.md',
    'assets/help/design-guidelines.md',
]

# Every file expected inside the tarball; entries ending in .gz are
# decompressed (and CSS rewritten) after extraction.
bundled_assets = bundled_helps + [
    'artifacts/elm.js',
    'artifacts/LICENSE',
    'assets/favicon.ico',
    'assets/fonts/6xK1dSBYKcSV-LCoeQqfX1RYOo3qPZ7jsDJB9cme_xc.woff2',
    'assets/fonts/6xK1dSBYKcSV-LCoeQqfX1RYOo3qPZ7jsDJT9g.woff2',
    'assets/fonts/6xK1dSBYKcSV-LCoeQqfX1RYOo3qPZ7ksDJB9cme_xc.woff2',
    'assets/fonts/6xK1dSBYKcSV-LCoeQqfX1RYOo3qPZ7ksDJT9g.woff2',
    'assets/fonts/6xK1dSBYKcSV-LCoeQqfX1RYOo3qPZ7nsDI.woff2',
    'assets/fonts/6xK1dSBYKcSV-LCoeQqfX1RYOo3qPZ7nsDJB9cme.woff2',
    'assets/fonts/6xK1dSBYKcSV-LCoeQqfX1RYOo3qPZ7osDJB9cme_xc.woff2',
    'assets/fonts/6xK1dSBYKcSV-LCoeQqfX1RYOo3qPZ7osDJT9g.woff2',
    'assets/fonts/6xK1dSBYKcSV-LCoeQqfX1RYOo3qPZ7psDJB9cme_xc.woff2',
    'assets/fonts/6xK1dSBYKcSV-LCoeQqfX1RYOo3qPZ7psDJT9g.woff2',
    'assets/fonts/6xK1dSBYKcSV-LCoeQqfX1RYOo3qPZ7qsDJB9cme_xc.woff2',
    'assets/fonts/6xK1dSBYKcSV-LCoeQqfX1RYOo3qPZ7qsDJT9g.woff2',
    'assets/fonts/6xK1dSBYKcSV-LCoeQqfX1RYOo3qPZ7rsDJB9cme_xc.woff2',
    'assets/fonts/6xK1dSBYKcSV-LCoeQqfX1RYOo3qPZ7rsDJT9g.woff2',
    'assets/fonts/6xK3dSBYKcSV-LCoeQqfX1RYOo3qN67lqDY.woff2',
    'assets/fonts/6xK3dSBYKcSV-LCoeQqfX1RYOo3qN67lujVj9_mf.woff2',
    'assets/fonts/6xK3dSBYKcSV-LCoeQqfX1RYOo3qNK7lqDY.woff2',
    'assets/fonts/6xK3dSBYKcSV-LCoeQqfX1RYOo3qNK7lujVj9_mf.woff2',
    'assets/fonts/6xK3dSBYKcSV-LCoeQqfX1RYOo3qNa7lqDY.woff2',
    'assets/fonts/6xK3dSBYKcSV-LCoeQqfX1RYOo3qNa7lujVj9_mf.woff2',
    'assets/fonts/6xK3dSBYKcSV-LCoeQqfX1RYOo3qNq7lqDY.woff2',
    'assets/fonts/6xK3dSBYKcSV-LCoeQqfX1RYOo3qNq7lujVj9_mf.woff2',
    'assets/fonts/6xK3dSBYKcSV-LCoeQqfX1RYOo3qO67lqDY.woff2',
    'assets/fonts/6xK3dSBYKcSV-LCoeQqfX1RYOo3qO67lujVj9_mf.woff2',
    'assets/fonts/6xK3dSBYKcSV-LCoeQqfX1RYOo3qOK7l.woff2',
    'assets/fonts/6xK3dSBYKcSV-LCoeQqfX1RYOo3qOK7lujVj9w.woff2',
    'assets/fonts/6xK3dSBYKcSV-LCoeQqfX1RYOo3qPK7lqDY.woff2',
    'assets/fonts/6xK3dSBYKcSV-LCoeQqfX1RYOo3qPK7lujVj9_mf.woff2',
    'assets/fonts/6xKwdSBYKcSV-LCoeQqfX1RYOo3qPZZclSdg18S0xR41YDw.woff2',
    'assets/fonts/6xKwdSBYKcSV-LCoeQqfX1RYOo3qPZZclSdg18Smxg.woff2',
    'assets/fonts/6xKwdSBYKcSV-LCoeQqfX1RYOo3qPZZclSdh18S0xR41YDw.woff2',
    'assets/fonts/6xKwdSBYKcSV-LCoeQqfX1RYOo3qPZZclSdh18Smxg.woff2',
    'assets/fonts/6xKwdSBYKcSV-LCoeQqfX1RYOo3qPZZclSdi18S0xR41YDw.woff2',
    'assets/fonts/6xKwdSBYKcSV-LCoeQqfX1RYOo3qPZZclSdi18Smxg.woff2',
    'assets/fonts/6xKwdSBYKcSV-LCoeQqfX1RYOo3qPZZclSdj18S0xR41YDw.woff2',
    'assets/fonts/6xKwdSBYKcSV-LCoeQqfX1RYOo3qPZZclSdj18Smxg.woff2',
    'assets/fonts/6xKwdSBYKcSV-LCoeQqfX1RYOo3qPZZclSdo18S0xR41YDw.woff2',
    'assets/fonts/6xKwdSBYKcSV-LCoeQqfX1RYOo3qPZZclSdo18Smxg.woff2',
    'assets/fonts/6xKwdSBYKcSV-LCoeQqfX1RYOo3qPZZclSds18Q.woff2',
    'assets/fonts/6xKwdSBYKcSV-LCoeQqfX1RYOo3qPZZclSds18S0xR41.woff2',
    'assets/fonts/6xKwdSBYKcSV-LCoeQqfX1RYOo3qPZZclSdv18S0xR41YDw.woff2',
    'assets/fonts/6xKwdSBYKcSV-LCoeQqfX1RYOo3qPZZclSdv18Smxg.woff2',
    'assets/fonts/6xKydSBYKcSV-LCoeQqfX1RYOo3ig4vwkxdu3cOWxy40.woff2',
    'assets/fonts/6xKydSBYKcSV-LCoeQqfX1RYOo3ig4vwkxduz8A.woff2',
    'assets/fonts/6xKydSBYKcSV-LCoeQqfX1RYOo3ig4vwlBdu3cOWxy40.woff2',
    'assets/fonts/6xKydSBYKcSV-LCoeQqfX1RYOo3ig4vwlBduz8A.woff2',
    'assets/fonts/6xKydSBYKcSV-LCoeQqfX1RYOo3ig4vwlxdu.woff2',
    'assets/fonts/6xKydSBYKcSV-LCoeQqfX1RYOo3ig4vwlxdu3cOWxw.woff2',
    'assets/fonts/6xKydSBYKcSV-LCoeQqfX1RYOo3ig4vwmBdu3cOWxy40.woff2',
    'assets/fonts/6xKydSBYKcSV-LCoeQqfX1RYOo3ig4vwmBduz8A.woff2',
    'assets/fonts/6xKydSBYKcSV-LCoeQqfX1RYOo3ig4vwmRdu3cOWxy40.woff2',
    'assets/fonts/6xKydSBYKcSV-LCoeQqfX1RYOo3ig4vwmRduz8A.woff2',
    'assets/fonts/6xKydSBYKcSV-LCoeQqfX1RYOo3ig4vwmhdu3cOWxy40.woff2',
    'assets/fonts/6xKydSBYKcSV-LCoeQqfX1RYOo3ig4vwmhduz8A.woff2',
    'assets/fonts/6xKydSBYKcSV-LCoeQqfX1RYOo3ig4vwmxdu3cOWxy40.woff2',
    'assets/fonts/6xKydSBYKcSV-LCoeQqfX1RYOo3ig4vwmxduz8A.woff2',
    'assets/fonts/HI_SiYsKILxRpg3hIP6sJ7fM7PqlM-vWjMY.woff2',
    'assets/fonts/HI_SiYsKILxRpg3hIP6sJ7fM7PqlM-vWnsUnxlC9.woff2',
    'assets/fonts/HI_SiYsKILxRpg3hIP6sJ7fM7PqlMOvWjMY.woff2',
    'assets/fonts/HI_SiYsKILxRpg3hIP6sJ7fM7PqlMOvWnsUnxlC9.woff2',
    'assets/fonts/HI_SiYsKILxRpg3hIP6sJ7fM7PqlMuvWjMY.woff2',
    'assets/fonts/HI_SiYsKILxRpg3hIP6sJ7fM7PqlMuvWnsUnxlC9.woff2',
    'assets/fonts/HI_SiYsKILxRpg3hIP6sJ7fM7PqlOevWjMY.woff2',
    'assets/fonts/HI_SiYsKILxRpg3hIP6sJ7fM7PqlOevWnsUnxlC9.woff2',
    'assets/fonts/HI_SiYsKILxRpg3hIP6sJ7fM7PqlPevW.woff2',
    'assets/fonts/HI_SiYsKILxRpg3hIP6sJ7fM7PqlPevWnsUnxg.woff2',
    'assets/fonts/HI_SiYsKILxRpg3hIP6sJ7fM7PqlPuvWjMY.woff2',
    'assets/fonts/HI_SiYsKILxRpg3hIP6sJ7fM7PqlPuvWnsUnxlC9.woff2',
    'assets/fonts/_hints_off.css.gz',
    'assets/fonts/_hints_on.css.gz',
    'assets/highlight/LICENSE',
    'assets/highlight/highlight.pack.js',
    'assets/highlight/styles/default.css',
    'assets/style.css',
    'assets/LICENSE',
]
class actions(Namespace):
    """Task actions for installing the bundled frontend assets."""

    def extract_assets(run_config: Build):
        """Extract the bundled tarball into the build output directory,
        then decompress every .gz asset next to its compressed copy.
        """
        # NOTE(review): extractall trusts member paths inside the archive;
        # presumably acceptable only because the tarball ships with this
        # package rather than coming from user input — confirm.
        with tarfile.open(str(tarball)) as f:
            f.extractall(str(run_config.output_path))
        # decompress .gz files
        for asset in bundled_assets:
            if Path(asset).suffix == '.gz':
                src_path = run_config.output_path / asset
                # Strip the trailing .gz: foo.css.gz -> foo.css
                write_to = src_path.parent / src_path.stem
                decompress_and_rewrite(src_path, write_to, run_config.mount_point)
def decompress_and_rewrite(source: Path, target: Path, mount_point: str):
    """Decompress the gzipped *source* file into *target*.

    For CSS targets, every ``/assets/`` URL prefix is rewritten to
    ``<mount_point>/assets/`` and *source* is replaced with a
    re-compressed copy of the rewritten content, so the ``.gz`` and
    plain variants stay in sync.

    :param source: path of the gzipped input file
    :param target: path for the decompressed (possibly rewritten) output
    :param mount_point: URL prefix under which the docs will be served
    """
    rewrite = target.suffix == '.css'
    # Read the whole file at once: substituting within fixed-size chunks
    # (the previous streaming approach) silently misses an '/assets/'
    # occurrence that straddles a chunk boundary.  The bundled assets are
    # small, so buffering them fully is cheap.
    with gzip.open(str(source), 'rb') as f:
        content = f.read()
    if rewrite:
        content = content.replace(
            b'/assets/', mount_point.encode('utf8') + b'/assets/')
    with target.open('wb') as g:
        g.write(content)
    if rewrite:
        # re-compress the rewritten content
        with gzip.open(str(source), 'wb') as g:
            g.write(content)
| bsd-3-clause |
uclouvain/osis_louvain | base/migrations/0301_auto_20180703_1611.py | 1 | 6243 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2018-07-03 14:11
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration (2018-07-03).

    Reorders teaching materials and refreshes field options
    (verbose names, choices, defaults) on education group models;
    no new tables or columns are created.
    """

    dependencies = [
        ('base', '0300_auto_20180703_0941'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='teachingmaterial',
            options={'ordering': ('learning_unit_year', 'order'), 'verbose_name_plural': 'bibliographies'},
        ),
        migrations.AlterField(
            model_name='educationgroup',
            name='end_year',
            field=models.IntegerField(blank=True, null=True, verbose_name='end'),
        ),
        migrations.AlterField(
            model_name='educationgroup',
            name='start_year',
            field=models.IntegerField(blank=True, null=True, verbose_name='start'),
        ),
        migrations.AlterField(
            model_name='educationgrouptype',
            name='category',
            field=models.CharField(choices=[('TRAINING', 'TRAINING'), ('MINI_TRAINING', 'MINI_TRAINING'), ('GROUP', 'GROUP')], default='TRAINING', max_length=25, verbose_name='type'),
        ),
        migrations.AlterField(
            model_name='educationgrouptype',
            name='name',
            field=models.CharField(max_length=255, verbose_name='training_type'),
        ),
        migrations.AlterField(
            model_name='educationgroupyear',
            name='academic_type',
            field=models.CharField(blank=True, choices=[('NON_ACADEMIC', 'NON_ACADEMIC'), ('NON_ACADEMIC_CREF', 'NON_ACADEMIC_CREF'), ('ACADEMIC', 'ACADEMIC')], max_length=20, null=True, verbose_name='academic_type'),
        ),
        migrations.AlterField(
            model_name='educationgroupyear',
            name='active',
            field=models.CharField(choices=[('ACTIVE', 'ACTIVE'), ('INACTIVE', 'INACTIVE'), ('RE_REGISTRATION', 'RE_REGISTRATION')], default='ACTIVE', max_length=20, verbose_name='status'),
        ),
        migrations.AlterField(
            model_name='educationgroupyear',
            name='admission_exam',
            field=models.BooleanField(default=False, verbose_name='admission_exam'),
        ),
        migrations.AlterField(
            model_name='educationgroupyear',
            name='dissertation',
            field=models.BooleanField(default=False, verbose_name='dissertation'),
        ),
        migrations.AlterField(
            model_name='educationgroupyear',
            name='duration',
            field=models.IntegerField(blank=True, null=True, verbose_name='duration'),
        ),
        migrations.AlterField(
            model_name='educationgroupyear',
            name='duration_unit',
            field=models.CharField(blank=True, choices=[('QUADRIMESTER', 'QUADRIMESTER'), ('TRIMESTER', 'TRIMESTER'), ('MONTH', 'MONTH'), ('WEEK', 'WEEK'), ('DAY', 'DAY')], default='QUADRIMESTER', max_length=40, null=True),
        ),
        migrations.AlterField(
            model_name='educationgroupyear',
            name='enrollment_enabled',
            field=models.BooleanField(default=False, verbose_name='enrollment_enabled'),
        ),
        migrations.AlterField(
            model_name='educationgroupyear',
            name='funding',
            field=models.BooleanField(default=False, verbose_name='funding'),
        ),
        migrations.AlterField(
            model_name='educationgroupyear',
            name='funding_cud',
            field=models.BooleanField(default=False, verbose_name='funding_cud'),
        ),
        migrations.AlterField(
            model_name='educationgroupyear',
            name='funding_direction',
            field=models.CharField(blank=True, max_length=1, null=True, verbose_name='funding_direction'),
        ),
        migrations.AlterField(
            model_name='educationgroupyear',
            name='funding_direction_cud',
            field=models.CharField(blank=True, max_length=1, null=True, verbose_name='cud_funding_direction'),
        ),
        migrations.AlterField(
            model_name='educationgroupyear',
            name='internship',
            field=models.CharField(blank=True, choices=[('YES', 'YES'), ('NO', 'NO'), ('OPTIONAL', 'OPTIONAL')], max_length=20, null=True, verbose_name='internship'),
        ),
        migrations.AlterField(
            model_name='educationgroupyear',
            name='keywords',
            field=models.CharField(blank=True, max_length=320, null=True, verbose_name='keywords'),
        ),
        migrations.AlterField(
            model_name='educationgroupyear',
            name='other_campus_activities',
            field=models.CharField(blank=True, choices=[('YES', 'YES'), ('NO', 'NO'), ('OPTIONAL', 'OPTIONAL')], max_length=20, null=True, verbose_name='other_campus_activities'),
        ),
        migrations.AlterField(
            model_name='educationgroupyear',
            name='other_language_activities',
            field=models.CharField(blank=True, choices=[('YES', 'YES'), ('NO', 'NO'), ('OPTIONAL', 'OPTIONAL')], max_length=20, null=True, verbose_name='other_language_activities'),
        ),
        migrations.AlterField(
            model_name='educationgroupyear',
            name='partial_deliberation',
            field=models.BooleanField(default=False, verbose_name='partial_deliberation'),
        ),
        migrations.AlterField(
            model_name='educationgroupyear',
            name='primary_language',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='reference.Language', verbose_name='primary_language'),
        ),
        migrations.AlterField(
            model_name='educationgroupyear',
            name='schedule_type',
            field=models.CharField(choices=[('DAILY', 'DAILY'), ('SHIFTED', 'SHIFTED'), ('ADAPTED', 'ADAPTED')], default='DAILY', max_length=20, verbose_name='schedule_type'),
        ),
        migrations.AlterField(
            model_name='educationgroupyear',
            name='university_certificate',
            field=models.BooleanField(default=False, verbose_name='university_certificate'),
        ),
    ]
| agpl-3.0 |
nzavagli/UnrealPy | UnrealPyEmbed/Development/Python/2015.08.07-Python2710-x64-Source-vs2015/Python27/Source/Python-2.7.10/Lib/distutils/tests/test_sysconfig.py | 38 | 5594 | """Tests for distutils.sysconfig."""
import os
import test
import unittest
import shutil
import subprocess
import sys
import textwrap
from distutils import sysconfig
from distutils.tests import support
from test.test_support import TESTFN
class SysconfigTestCase(support.EnvironGuard,
                        unittest.TestCase):
    """Tests for distutils.sysconfig and its agreement with the
    top-level sysconfig module."""

    def setUp(self):
        super(SysconfigTestCase, self).setUp()
        # Path of a temporary makefile created by individual tests;
        # removed in tearDown when set.
        self.makefile = None

    def tearDown(self):
        if self.makefile is not None:
            os.unlink(self.makefile)
        self.cleanup_testfn()
        super(SysconfigTestCase, self).tearDown()

    def cleanup_testfn(self):
        """Remove TESTFN whether it was created as a file or a directory."""
        path = test.test_support.TESTFN
        if os.path.isfile(path):
            os.remove(path)
        elif os.path.isdir(path):
            shutil.rmtree(path)

    def test_get_python_lib(self):
        lib_dir = sysconfig.get_python_lib()
        # XXX doesn't work on Linux when Python was never installed before
        #self.assertTrue(os.path.isdir(lib_dir), lib_dir)
        # test for pythonxx.lib?
        self.assertNotEqual(sysconfig.get_python_lib(),
                            sysconfig.get_python_lib(prefix=TESTFN))
        # Compare with the stdlib's own sysconfig module.
        _sysconfig = __import__('sysconfig')
        res = sysconfig.get_python_lib(True, True)
        self.assertEqual(_sysconfig.get_path('platstdlib'), res)

    def test_get_python_inc(self):
        inc_dir = sysconfig.get_python_inc()
        # This is not much of a test.  We make sure Python.h exists
        # in the directory returned by get_python_inc() but we don't know
        # it is the correct file.
        self.assertTrue(os.path.isdir(inc_dir), inc_dir)
        python_h = os.path.join(inc_dir, "Python.h")
        self.assertTrue(os.path.isfile(python_h), python_h)

    def test_parse_makefile_base(self):
        self.makefile = test.test_support.TESTFN
        fd = open(self.makefile, 'w')
        try:
            fd.write(r"CONFIG_ARGS=  '--arg1=optarg1' 'ENV=LIB'" '\n')
            fd.write('VAR=$OTHER\nOTHER=foo')
        finally:
            fd.close()
        d = sysconfig.parse_makefile(self.makefile)
        self.assertEqual(d, {'CONFIG_ARGS': "'--arg1=optarg1' 'ENV=LIB'",
                             'OTHER': 'foo'})

    def test_parse_makefile_literal_dollar(self):
        # A doubled '$$' in a makefile must parse as a literal dollar sign.
        self.makefile = test.test_support.TESTFN
        fd = open(self.makefile, 'w')
        try:
            fd.write(r"CONFIG_ARGS=  '--arg1=optarg1' 'ENV=\$$LIB'" '\n')
            fd.write('VAR=$OTHER\nOTHER=foo')
        finally:
            fd.close()
        d = sysconfig.parse_makefile(self.makefile)
        self.assertEqual(d, {'CONFIG_ARGS': r"'--arg1=optarg1' 'ENV=\$LIB'",
                             'OTHER': 'foo'})

    def test_sysconfig_module(self):
        import sysconfig as global_sysconfig
        self.assertEqual(global_sysconfig.get_config_var('CFLAGS'), sysconfig.get_config_var('CFLAGS'))
        self.assertEqual(global_sysconfig.get_config_var('LDFLAGS'), sysconfig.get_config_var('LDFLAGS'))

    @unittest.skipIf(sysconfig.get_config_var('CUSTOMIZED_OSX_COMPILER'),'compiler flags customized')
    def test_sysconfig_compiler_vars(self):
        # On OS X, binary installers support extension module building on
        # various levels of the operating system with differing Xcode
        # configurations.  This requires customization of some of the
        # compiler configuration directives to suit the environment on
        # the installed machine.  Some of these customizations may require
        # running external programs and, so, are deferred until needed by
        # the first extension module build.  With Python 3.3, only
        # the Distutils version of sysconfig is used for extension module
        # builds, which happens earlier in the Distutils tests.  This may
        # cause the following tests to fail since no tests have caused
        # the global version of sysconfig to call the customization yet.
        # The solution for now is to simply skip this test in this case.
        # The longer-term solution is to only have one version of sysconfig.
        import sysconfig as global_sysconfig
        if sysconfig.get_config_var('CUSTOMIZED_OSX_COMPILER'):
            self.skipTest('compiler flags customized')
        self.assertEqual(global_sysconfig.get_config_var('LDSHARED'), sysconfig.get_config_var('LDSHARED'))
        self.assertEqual(global_sysconfig.get_config_var('CC'), sysconfig.get_config_var('CC'))

    def test_customize_compiler_before_get_config_vars(self):
        # Issue #21923: test that a Distribution compiler
        # instance can be called without an explicit call to
        # get_config_vars().
        with open(TESTFN, 'w') as f:
            f.writelines(textwrap.dedent('''\
                from distutils.core import Distribution
                config = Distribution().get_command_obj('config')
                # try_compile may pass or it may fail if no compiler
                # is found but it should not raise an exception.
                rc = config.try_compile('int x;')
                '''))
        p = subprocess.Popen([str(sys.executable), TESTFN],
                             stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT,
                             universal_newlines=True)
        outs, errs = p.communicate()
        self.assertEqual(0, p.returncode, "Subprocess failed: " + outs)
def test_suite():
    """Build the unittest suite for this module."""
    return unittest.TestSuite([unittest.makeSuite(SysconfigTestCase)])
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    test.test_support.run_unittest(test_suite())
| mit |
KyoheiG3/grpc | src/python/src/grpc/framework/base/interfaces_test_case.py | 15 | 11724 | # Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Abstract tests against the interfaces of the base layer of RPC Framework."""
import threading
import time
from grpc.framework.base import interfaces
from grpc.framework.base import util
from grpc.framework.foundation import stream
from grpc.framework.foundation import stream_testing
from grpc.framework.foundation import stream_util
# Base time unit (seconds) from which the other timing constants derive.
TICK = 0.1
# Timeout used by individual test operations.
SMALL_TIMEOUT = TICK * 50
# Number of tickets sent in the streaming test.
STREAM_LENGTH = 100

# Names of the operations implemented by TestServicer.
SYNCHRONOUS_ECHO = 'synchronous echo'
ASYNCHRONOUS_ECHO = 'asynchronous echo'
IMMEDIATE_FAILURE = 'immediate failure'
TRIGGERED_FAILURE = 'triggered failure'
WAIT_ON_CONDITION = 'wait on condition'

# Baseline operation-statistics mapping: every outcome observed zero times.
EMPTY_OUTCOME_DICT = {
    interfaces.Outcome.COMPLETED: 0,
    interfaces.Outcome.CANCELLED: 0,
    interfaces.Outcome.EXPIRED: 0,
    interfaces.Outcome.RECEPTION_FAILURE: 0,
    interfaces.Outcome.TRANSMISSION_FAILURE: 0,
    interfaces.Outcome.SERVICER_FAILURE: 0,
    interfaces.Outcome.SERVICED_FAILURE: 0,
}
def _synchronous_echo(output_consumer):
    """Create a pass-through consumer that relays values unchanged to
    *output_consumer*."""
    def identity(value):
        return value
    return stream_util.TransformingConsumer(identity, output_consumer)
class AsynchronousEcho(stream.Consumer):
    """A stream.Consumer that echoes its input to another stream.Consumer.

    Values are relayed on a thread-pool worker rather than on the caller's
    thread; a lock-protected queue preserves arrival order while at most
    one worker ("spinner") drains it at a time.
    """

    def __init__(self, output_consumer, pool):
        self._lock = threading.Lock()
        self._output_consumer = output_consumer
        self._pool = pool
        # Pending (value, complete) pairs awaiting relay.
        self._queue = []
        # True while a pool worker is actively draining the queue.
        self._spinning = False

    def _spin(self, value, complete):
        # Worker loop: relay the given item, then keep draining the queue
        # until it is empty, at which point the spinner retires.
        while True:
            if value:
                if complete:
                    self._output_consumer.consume_and_terminate(value)
                else:
                    self._output_consumer.consume(value)
            elif complete:
                self._output_consumer.terminate()
            with self._lock:
                if self._queue:
                    value, complete = self._queue.pop(0)
                else:
                    self._spinning = False
                    return

    def consume(self, value):
        # Enqueue if a spinner is active; otherwise start one.
        with self._lock:
            if self._spinning:
                self._queue.append((value, False))
            else:
                self._spinning = True
                self._pool.submit(self._spin, value, False)

    def terminate(self):
        with self._lock:
            if self._spinning:
                self._queue.append((None, True))
            else:
                self._spinning = True
                self._pool.submit(self._spin, None, True)

    def consume_and_terminate(self, value):
        with self._lock:
            if self._spinning:
                self._queue.append((value, True))
            else:
                self._spinning = True
                self._pool.submit(self._spin, value, True)
class TestServicer(interfaces.Servicer):
    """An interfaces.Servicer instrumented for testing.

    Dispatches on the operation name to one of the behaviors named by
    the module-level constants (echoes, immediate/deferred failures,
    or blocking until release() is called).
    """

    def __init__(self, pool):
        self._pool = pool
        self.condition = threading.Condition()
        # Set by release(); WAIT_ON_CONDITION operations block until then.
        self._released = False

    def service(self, name, context, output_consumer):
        if name == SYNCHRONOUS_ECHO:
            return _synchronous_echo(output_consumer)
        elif name == ASYNCHRONOUS_ECHO:
            return AsynchronousEcho(output_consumer, self._pool)
        elif name == IMMEDIATE_FAILURE:
            raise ValueError()
        elif name == TRIGGERED_FAILURE:
            raise NotImplementedError
        elif name == WAIT_ON_CONDITION:
            # Block until release() signals, then behave like the
            # synchronous echo.
            with self.condition:
                while not self._released:
                    self.condition.wait()
            return _synchronous_echo(output_consumer)
        else:
            raise NotImplementedError()

    def release(self):
        """Unblock any operations waiting in WAIT_ON_CONDITION."""
        with self.condition:
            self._released = True
            self.condition.notify_all()
class EasyServicedIngestor(interfaces.ServicedIngestor):
    """A trivial implementation of interfaces.ServicedIngestor.

    Ignores the operation context and always hands back the single
    consumer supplied at construction time.
    """

    def __init__(self, consumer):
        self._wrapped_consumer = consumer

    def consumer(self, operation_context):
        """See interfaces.ServicedIngestor.consumer for specification."""
        return self._wrapped_consumer
class FrontAndBackTest(object):
    """A test suite usable against any joined Front and Back.

    Mix-in classes are expected to provide ``self.front`` and
    ``self.back`` before these tests run.
    """

    # Pylint doesn't know that this is a unittest.TestCase mix-in.
    # pylint: disable=invalid-name

    def testSimplestCall(self):
        """Tests the absolute simplest call - a one-ticket fire-and-forget."""
        self.front.operate(
            SYNCHRONOUS_ECHO, None, True, SMALL_TIMEOUT,
            util.none_serviced_subscription(), 'test trace ID')
        util.wait_for_idle(self.front)
        self.assertEqual(
            1, self.front.operation_stats()[interfaces.Outcome.COMPLETED])
        # Assuming nothing really pathological (such as pauses on the order of
        # SMALL_TIMEOUT interfering with this test) there are two different ways
        # the back could have experienced execution up to this point:
        # (1) The ticket is still either in the front waiting to be transmitted
        # or is somewhere on the link between the front and the back. The back has
        # no idea that this test is even happening. Calling wait_for_idle on it
        # would do no good because in this case the back is idle and the call would
        # return with the ticket bound for it still in the front or on the link.
        back_operation_stats = self.back.operation_stats()
        first_back_possibility = EMPTY_OUTCOME_DICT
        # (2) The ticket arrived at the back and the back completed the operation.
        second_back_possibility = dict(EMPTY_OUTCOME_DICT)
        second_back_possibility[interfaces.Outcome.COMPLETED] = 1
        self.assertIn(
            back_operation_stats, (first_back_possibility, second_back_possibility))
        # It's true that if the ticket had arrived at the back and the back had
        # begun processing that wait_for_idle could hold test execution until the
        # back completed the operation, but that doesn't really collapse the
        # possibility space down to one solution.

    def testEntireEcho(self):
        """Tests a very simple one-ticket-each-way round-trip."""
        test_payload = 'test payload'
        test_consumer = stream_testing.TestConsumer()
        subscription = util.full_serviced_subscription(
            EasyServicedIngestor(test_consumer))
        self.front.operate(
            ASYNCHRONOUS_ECHO, test_payload, True, SMALL_TIMEOUT, subscription,
            'test trace ID')
        util.wait_for_idle(self.front)
        util.wait_for_idle(self.back)
        self.assertEqual(
            1, self.front.operation_stats()[interfaces.Outcome.COMPLETED])
        self.assertEqual(
            1, self.back.operation_stats()[interfaces.Outcome.COMPLETED])
        # The one payload should have arrived exactly once, as a terminating
        # consume_and_terminate call.
        self.assertListEqual([(test_payload, True)], test_consumer.calls)

    def testBidirectionalStreamingEcho(self):
        """Tests sending multiple tickets each way."""
        test_payload_template = 'test_payload: %03d'
        test_payloads = [test_payload_template % i for i in range(STREAM_LENGTH)]
        test_consumer = stream_testing.TestConsumer()
        subscription = util.full_serviced_subscription(
            EasyServicedIngestor(test_consumer))
        operation = self.front.operate(
            SYNCHRONOUS_ECHO, None, False, SMALL_TIMEOUT, subscription,
            'test trace ID')
        for test_payload in test_payloads:
            operation.consumer.consume(test_payload)
        operation.consumer.terminate()
        util.wait_for_idle(self.front)
        util.wait_for_idle(self.back)
        self.assertEqual(
            1, self.front.operation_stats()[interfaces.Outcome.COMPLETED])
        self.assertEqual(
            1, self.back.operation_stats()[interfaces.Outcome.COMPLETED])
        # All payloads echoed back, in order.
        self.assertListEqual(test_payloads, test_consumer.values())

    def testCancellation(self):
        """Tests cancelling a long-lived operation."""
        test_consumer = stream_testing.TestConsumer()
        subscription = util.full_serviced_subscription(
            EasyServicedIngestor(test_consumer))
        operation = self.front.operate(
            ASYNCHRONOUS_ECHO, None, False, SMALL_TIMEOUT, subscription,
            'test trace ID')
        operation.cancel()
        util.wait_for_idle(self.front)
        self.assertEqual(
            1, self.front.operation_stats()[interfaces.Outcome.CANCELLED])
        util.wait_for_idle(self.back)
        self.assertListEqual([], test_consumer.calls)
        # Assuming nothing really pathological (such as pauses on the order of
        # SMALL_TIMEOUT interfering with this test) there are two different ways
        # the back could have experienced execution up to this point:
        # (1) Both tickets are still either in the front waiting to be transmitted
        # or are somewhere on the link between the front and the back. The back has
        # no idea that this test is even happening. Calling wait_for_idle on it
        # would do no good because in this case the back is idle and the call would
        # return with the tickets bound for it still in the front or on the link.
        back_operation_stats = self.back.operation_stats()
        first_back_possibility = EMPTY_OUTCOME_DICT
        # (2) Both tickets arrived within SMALL_TIMEOUT of one another at the back.
        # The back started processing based on the first ticket and then stopped
        # upon receiving the cancellation ticket.
        second_back_possibility = dict(EMPTY_OUTCOME_DICT)
        second_back_possibility[interfaces.Outcome.CANCELLED] = 1
        self.assertIn(
            back_operation_stats, (first_back_possibility, second_back_possibility))

    def testExpiration(self):
        """Tests that operations time out."""
        timeout = TICK * 2
        allowance = TICK  # How much extra time to allow for scheduling latency.
        condition = threading.Condition()
        test_payload = 'test payload'
        subscription = util.termination_only_serviced_subscription()
        start_time = time.time()
        # Single-element lists used as mutable cells written by the callback.
        outcome_cell = [None]
        termination_time_cell = [None]
        def termination_action(outcome):
            with condition:
                outcome_cell[0] = outcome
                termination_time_cell[0] = time.time()
                condition.notify()
        with condition:
            operation = self.front.operate(
                SYNCHRONOUS_ECHO, test_payload, False, timeout, subscription,
                'test trace ID')
            operation.context.add_termination_callback(termination_action)
            while outcome_cell[0] is None:
                condition.wait()
        duration = termination_time_cell[0] - start_time
        # The operation must not expire early, and should expire promptly.
        self.assertLessEqual(timeout, duration)
        self.assertLess(duration, timeout + allowance)
        self.assertEqual(interfaces.Outcome.EXPIRED, outcome_cell[0])
        util.wait_for_idle(self.front)
        self.assertEqual(
            1, self.front.operation_stats()[interfaces.Outcome.EXPIRED])
        util.wait_for_idle(self.back)
        self.assertLessEqual(
            1, self.back.operation_stats()[interfaces.Outcome.EXPIRED])
| bsd-3-clause |
stvstnfrd/edx-platform | common/djangoapps/student/migrations/0033_userprofile_state.py | 8 | 1586 | # Generated by Django 2.2.12 on 2020-04-30 20:49
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: add an optional US-state field to
    UserProfile, with the two-letter postal abbreviations as choices."""

    dependencies = [
        ('student', '0032_removed_logout_view_configuration'),
    ]

    operations = [
        migrations.AddField(
            model_name='userprofile',
            name='state',
            field=models.CharField(blank=True, choices=[('AL', 'Alabama'), ('AK', 'Alaska'), ('AZ', 'Arizona'), ('AR', 'Arkansas'), ('AA', 'Armed Forces Americas'), ('AE', 'Armed Forces Europe'), ('AP', 'Armed Forces Pacific'), ('CA', 'California'), ('CO', 'Colorado'), ('CT', 'Connecticut'), ('DE', 'Delaware'), ('DC', 'District Of Columbia'), ('FL', 'Florida'), ('GA', 'Georgia'), ('HI', 'Hawaii'), ('ID', 'Idaho'), ('IL', 'Illinois'), ('IN', 'Indiana'), ('IA', 'Iowa'), ('KS', 'Kansas'), ('KY', 'Kentucky'), ('LA', 'Louisiana'), ('ME', 'Maine'), ('MD', 'Maryland'), ('MA', 'Massachusetts'), ('MI', 'Michigan'), ('MN', 'Minnesota'), ('MS', 'Mississippi'), ('MO', 'Missouri'), ('MT', 'Montana'), ('NE', 'Nebraska'), ('NV', 'Nevada'), ('NH', 'New Hampshire'), ('NJ', 'New Jersey'), ('NM', 'New Mexico'), ('NY', 'New York'), ('NC', 'North Carolina'), ('ND', 'North Dakota'), ('OH', 'Ohio'), ('OK', 'Oklahoma'), ('OR', 'Oregon'), ('PA', 'Pennsylvania'), ('RI', 'Rhode Island'), ('SC', 'South Carolina'), ('SD', 'South Dakota'), ('TN', 'Tennessee'), ('TX', 'Texas'), ('UT', 'Utah'), ('VT', 'Vermont'), ('VA', 'Virginia'), ('WA', 'Washington'), ('WV', 'West Virginia'), ('WI', 'Wisconsin'), ('WY', 'Wyoming')], max_length=2, null=True),
        ),
    ]
| agpl-3.0 |
qwefi/nova | nova/cmd/baremetal_manage.py | 4 | 7164 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Interactive shell based on Django:
#
# Copyright (c) 2005, the Lawrence Journal-World
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of Django nor the names of its contributors may be
# used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
CLI interface for nova bare-metal management.
"""
import os
import sys
from oslo.config import cfg
from nova import config
from nova.openstack.common import cliutils
from nova.openstack.common import log as logging
from nova import version
from nova.virt.baremetal.db import migration as bmdb_migration
CONF = cfg.CONF
# Decorators for actions
def args(*arg_spec, **kwarg_spec):
    """Decorator recording an argparse argument spec on a command method.

    Each application prepends ``(arg_spec, kwarg_spec)`` to the
    function's ``args`` attribute, so specs are later read in the same
    top-to-bottom order the decorators appear in the source.
    """
    def _decorator(func):
        registered = func.__dict__.setdefault('args', [])
        registered.insert(0, (arg_spec, kwarg_spec))
        return func
    return _decorator
class BareMetalDbCommands(object):
    """Class for managing the bare-metal database."""

    def __init__(self):
        pass

    @args('--version', dest='version', metavar='<version>',
          help='Bare-metal Database version')
    def sync(self, version=None):
        """Sync the database up to the most recent version.

        :param version: target schema version; None means latest.
        """
        bmdb_migration.db_sync(version)

    def version(self):
        """Print the current database version."""
        v = bmdb_migration.db_version()
        print(v)
        # return for unittest
        return v
# Maps CLI category names to the classes providing their sub-commands.
CATEGORIES = {
    'db': BareMetalDbCommands,
}
def methods_of(obj):
    """Get all callable methods of an object that don't start with underscore.

    returns a list of tuples of the form (method_name, method)
    """
    return [(name, getattr(obj, name))
            for name in dir(obj)
            if not name.startswith('_') and callable(getattr(obj, name))]
def add_command_parsers(subparsers):
    """Build the 'bash-completion' parser plus one sub-parser per category.

    Every public method of a category's command object becomes an action
    sub-parser, with argument specs replayed from the @args decorator.
    """
    completion_parser = subparsers.add_parser('bash-completion')
    completion_parser.add_argument('query_category', nargs='?')
    for category_name, command_class in CATEGORIES.items():
        command_object = command_class()
        category_parser = subparsers.add_parser(category_name)
        category_parser.set_defaults(command_object=command_object)
        action_subparsers = category_parser.add_subparsers(dest='action')
        for action_name, action_fn in methods_of(command_object):
            action_parser = action_subparsers.add_parser(action_name)
            action_kwargs = []
            for arg_spec, kwarg_spec in getattr(action_fn, 'args', []):
                action_kwargs.append(kwarg_spec['dest'])
                # Prefix dest so option values cannot collide with the
                # parser's own attributes (e.g. 'action').
                kwarg_spec['dest'] = 'action_kwarg_' + kwarg_spec['dest']
                action_parser.add_argument(*arg_spec, **kwarg_spec)
            action_parser.set_defaults(action_fn=action_fn)
            action_parser.set_defaults(action_kwargs=action_kwargs)
            action_parser.add_argument('action_args', nargs='*')
# Top-level 'category' sub-command option; oslo.config invokes
# add_command_parsers to populate the available sub-commands.
category_opt = cfg.SubCommandOpt('category',
                                 title='Command categories',
                                 help='Available categories',
                                 handler=add_command_parsers)
def main():
    """Parse options and call the appropriate class/method.

    Returns a process exit code (0 success, 1 bad arguments, 2 config
    error), or re-raises the action's exception after logging a message.
    """
    CONF.register_cli_opt(category_opt)
    try:
        config.parse_args(sys.argv)
        logging.setup("nova")
    except cfg.ConfigFilesNotFoundError:
        # Config file present but unreadable: attempt to re-exec as the
        # file's owner via sudo.
        # NOTE(review): `_` (gettext) is not imported in this module —
        # presumably installed into builtins by nova's config/gettext
        # bootstrap; verify before relying on it.
        cfgfile = CONF.config_file[-1] if CONF.config_file else None
        if cfgfile and not os.access(cfgfile, os.R_OK):
            st = os.stat(cfgfile)
            print(_("Could not read %s. Re-running with sudo") % cfgfile)
            try:
                # 'sudo -u #<uid>' runs as that numeric user id.
                os.execvp('sudo', ['sudo', '-u', '#%s' % st.st_uid] + sys.argv)
            except Exception:
                print(_('sudo failed, continuing as if nothing happened'))
        print(_('Please re-run nova-manage as root.'))
        return(2)
    if CONF.category.name == "version":
        print(version.version_string_with_package())
        return(0)
    if CONF.category.name == "bash-completion":
        # With no query, list all categories; otherwise list the chosen
        # category's actions.
        if not CONF.category.query_category:
            print(" ".join(CATEGORIES.keys()))
        elif CONF.category.query_category in CATEGORIES:
            fn = CATEGORIES[CONF.category.query_category]
            command_object = fn()
            actions = methods_of(command_object)
            print(" ".join([k for (k, v) in actions]))
        return(0)
    fn = CONF.category.action_fn
    # Positional args and string kwargs are decoded to unicode (Python 2
    # byte strings from argv).
    fn_args = [arg.decode('utf-8') for arg in CONF.category.action_args]
    fn_kwargs = {}
    for k in CONF.category.action_kwargs:
        v = getattr(CONF.category, 'action_kwarg_' + k)
        if v is None:
            continue
        if isinstance(v, basestring):
            v = v.decode('utf-8')
        fn_kwargs[k] = v
    # call the action with the remaining arguments
    # check arguments
    try:
        cliutils.validate_args(fn, *fn_args, **fn_kwargs)
    except cliutils.MissingArgs as e:
        # On missing args, show the action's docstring as usage help.
        print(fn.__doc__)
        print(e)
        return(1)
    try:
        fn(*fn_args, **fn_kwargs)
        return(0)
    except Exception:
        print(_("Command failed, please check log for more info"))
        raise
| apache-2.0 |
horance-liu/tensorflow | tensorflow/contrib/learn/python/learn/estimators/estimator_test.py | 21 | 53471 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Estimator."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import itertools
import json
import os
import tempfile
import numpy as np
import six
from six.moves import xrange # pylint: disable=redefined-builtin
from google.protobuf import text_format
from tensorflow.contrib import learn
from tensorflow.contrib import lookup
from tensorflow.contrib.framework.python.ops import variables
from tensorflow.contrib.layers.python.layers import feature_column as feature_column_lib
from tensorflow.contrib.layers.python.layers import optimizers
from tensorflow.contrib.learn.python.learn import experiment
from tensorflow.contrib.learn.python.learn import models
from tensorflow.contrib.learn.python.learn import monitors as monitors_lib
from tensorflow.contrib.learn.python.learn.datasets import base
from tensorflow.contrib.learn.python.learn.estimators import _sklearn
from tensorflow.contrib.learn.python.learn.estimators import constants
from tensorflow.contrib.learn.python.learn.estimators import estimator
from tensorflow.contrib.learn.python.learn.estimators import linear
from tensorflow.contrib.learn.python.learn.estimators import model_fn
from tensorflow.contrib.learn.python.learn.estimators import run_config
from tensorflow.contrib.learn.python.learn.utils import input_fn_utils
from tensorflow.contrib.metrics.python.ops import metric_ops
from tensorflow.contrib.testing.python.framework import util_test
from tensorflow.python.client import session as session_lib
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.lib.io import file_io
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import parsing_ops
from tensorflow.python.ops import variables as variables_lib
from tensorflow.python.platform import gfile
from tensorflow.python.platform import test
from tensorflow.python.saved_model import loader
from tensorflow.python.saved_model import tag_constants
from tensorflow.python.summary import summary
from tensorflow.python.training import basic_session_run_hooks
from tensorflow.python.training import checkpoint_state_pb2
from tensorflow.python.training import input as input_lib
from tensorflow.python.training import monitored_session
from tensorflow.python.training import saver as saver_lib
from tensorflow.python.training import session_run_hook
from tensorflow.python.util import compat
# Feature dimensionality of the demo datasets loaded via `base` below.
_BOSTON_INPUT_DIM = 13
_IRIS_INPUT_DIM = 4
def boston_input_fn(num_epochs=None):
  """Input fn yielding Boston housing features [-1, 13] and labels [-1, 1]."""
  dataset = base.load_boston()
  feature_tensor = array_ops.reshape(
      constant_op.constant(dataset.data), [-1, _BOSTON_INPUT_DIM])
  features = input_lib.limit_epochs(feature_tensor, num_epochs=num_epochs)
  labels = array_ops.reshape(constant_op.constant(dataset.target), [-1, 1])
  return features, labels
def iris_input_fn():
  """Input fn yielding Iris features [-1, 4] and integer class labels [-1]."""
  dataset = base.load_iris()
  flat_features = constant_op.constant(dataset.data)
  features = array_ops.reshape(flat_features, [-1, _IRIS_INPUT_DIM])
  labels = array_ops.reshape(constant_op.constant(dataset.target), [-1])
  return features, labels
def iris_input_fn_labels_dict():
  """Like iris_input_fn, but wraps the labels tensor in a dict."""
  dataset = base.load_iris()
  features = array_ops.reshape(
      constant_op.constant(dataset.data), [-1, _IRIS_INPUT_DIM])
  label_tensor = array_ops.reshape(constant_op.constant(dataset.target), [-1])
  return features, {'labels': label_tensor}
def boston_eval_fn():
  """Eval input fn: the full Boston dataset concatenated with itself."""
  dataset = base.load_boston()
  n_examples = len(dataset.target)
  features = array_ops.reshape(
      constant_op.constant(dataset.data), [n_examples, _BOSTON_INPUT_DIM])
  labels = array_ops.reshape(
      constant_op.constant(dataset.target), [n_examples, 1])
  doubled_features = array_ops.concat([features, features], 0)
  doubled_labels = array_ops.concat([labels, labels], 0)
  return doubled_features, doubled_labels
def extract(data, key):
  """Return data[key] when data is a dict (key must be present); otherwise
  return data unchanged."""
  if not isinstance(data, dict):
    return data
  assert key in data
  return data[key]
def linear_model_params_fn(features, labels, mode, params):
  """Linear regression model_fn taking its learning rate from `params`."""
  feature_tensor = extract(features, 'input')
  label_tensor = extract(labels, 'labels')
  assert mode in (model_fn.ModeKeys.TRAIN, model_fn.ModeKeys.EVAL,
                  model_fn.ModeKeys.INFER)
  prediction, loss = models.linear_regression_zero_init(
      feature_tensor, label_tensor)
  train_op = optimizers.optimize_loss(
      loss, variables.get_global_step(), optimizer='Adagrad',
      learning_rate=params['learning_rate'])
  return prediction, loss, train_op
def linear_model_fn(features, labels, mode):
  """Linear regression model_fn with a fixed 0.1 Adagrad learning rate."""
  features = extract(features, 'input')
  labels = extract(labels, 'labels')
  assert mode in (model_fn.ModeKeys.TRAIN, model_fn.ModeKeys.EVAL,
                  model_fn.ModeKeys.INFER)
  if isinstance(features, dict):
    # Unpack a single-entry feature dict down to its sole tensor.
    (_, features), = features.items()
  prediction, loss = models.linear_regression_zero_init(features, labels)
  train_op = optimizers.optimize_loss(
      loss, variables.get_global_step(), optimizer='Adagrad',
      learning_rate=0.1)
  return prediction, loss, train_op
def linear_model_fn_with_model_fn_ops(features, labels, mode):
  """Same as linear_model_fn, but returns `ModelFnOps`."""
  assert mode in (model_fn.ModeKeys.TRAIN, model_fn.ModeKeys.EVAL,
                  model_fn.ModeKeys.INFER)
  prediction, loss = models.linear_regression_zero_init(features, labels)
  train_op = optimizers.optimize_loss(
      loss, variables.get_global_step(), optimizer='Adagrad',
      learning_rate=0.1)
  return model_fn.ModelFnOps(
      mode=mode,
      predictions=prediction,
      loss=loss,
      train_op=train_op)
def logistic_model_no_mode_fn(features, labels):
  """Three-class logistic regression model_fn without a `mode` argument."""
  features = extract(features, 'input')
  labels = extract(labels, 'labels')
  one_hot_labels = array_ops.one_hot(labels, 3, 1, 0)
  prediction, loss = models.logistic_regression_zero_init(
      features, one_hot_labels)
  train_op = optimizers.optimize_loss(
      loss, variables.get_global_step(), optimizer='Adagrad',
      learning_rate=0.1)
  predictions = {
      'class': math_ops.argmax(prediction, 1),
      'prob': prediction,
  }
  return predictions, loss, train_op
# Fixture file contents used by the export/assets tests below.
VOCAB_FILE_CONTENT = 'emerson\nlake\npalmer\n'
EXTRA_FILE_CONTENT = 'kermit\npiggy\nralph\n'
def _build_estimator_for_export_tests(tmpdir):
  """Fit a small LinearRegressor on Iris and return it with a serving fn.

  Returns (estimator, serving_input_fn) where the serving fn additionally
  creates a vocab-file-backed lookup op so asset export gets exercised.
  """
  def _input_fn():
    iris = base.load_iris()
    return {
        'feature': constant_op.constant(
            iris.data, dtype=dtypes.float32)
    }, constant_op.constant(
        iris.target, shape=[150], dtype=dtypes.int32)
  feature_columns = [
      feature_column_lib.real_valued_column(
          'feature', dimension=4)
  ]
  est = linear.LinearRegressor(feature_columns)
  est.fit(input_fn=_input_fn, steps=20)
  feature_spec = feature_column_lib.create_feature_spec_for_parsing(
      feature_columns)
  serving_input_fn = input_fn_utils.build_parsing_serving_input_fn(feature_spec)
  # hack in an op that uses an asset, in order to test asset export.
  # this is not actually valid, of course.
  def serving_input_fn_with_asset():
    features, labels, inputs = serving_input_fn()
    # Write the vocab file at call time so it exists under tmpdir and is
    # picked up as a SavedModel asset.
    vocab_file_name = os.path.join(tmpdir, 'my_vocab_file')
    vocab_file = gfile.GFile(vocab_file_name, mode='w')
    vocab_file.write(VOCAB_FILE_CONTENT)
    vocab_file.close()
    hashtable = lookup.HashTable(
        lookup.TextFileStringTableInitializer(vocab_file_name), 'x')
    features['bogus_lookup'] = hashtable.lookup(
        math_ops.to_int64(features['feature']))
    return input_fn_utils.InputFnOps(features, labels, inputs)
  return est, serving_input_fn_with_asset
def _build_estimator_for_resource_export_test():
  """Train an Estimator whose model_fn owns lookup-table resources.

  Returns (estimator, serving_input_fn) for resource-export tests.
  """
  def _input_fn():
    iris = base.load_iris()
    return {
        'feature': constant_op.constant(iris.data, dtype=dtypes.float32)
    }, constant_op.constant(
        iris.target, shape=[150], dtype=dtypes.int32)
  feature_columns = [
      feature_column_lib.real_valued_column('feature', dimension=4)
  ]
  def resource_constant_model_fn(unused_features, unused_labels, mode):
    """A model_fn that loads a constant from a resource and serves it."""
    assert mode in (model_fn.ModeKeys.TRAIN, model_fn.ModeKeys.EVAL,
                    model_fn.ModeKeys.INFER)
    const = constant_op.constant(-1, dtype=dtypes.int64)
    table = lookup.MutableHashTable(
        dtypes.string, dtypes.int64, const, name='LookupTableModel')
    update_global_step = variables.get_global_step().assign_add(1)
    if mode in (model_fn.ModeKeys.TRAIN, model_fn.ModeKeys.EVAL):
      key = constant_op.constant(['key'])
      value = constant_op.constant([42], dtype=dtypes.int64)
      train_op_1 = table.insert(key, value)
      # A second table, named as training-only state, populated alongside
      # the model table.
      training_state = lookup.MutableHashTable(
          dtypes.string, dtypes.int64, const, name='LookupTableTrainingState')
      training_op_2 = training_state.insert(key, value)
      return (const, const,
              control_flow_ops.group(train_op_1, training_op_2,
                                     update_global_step))
    if mode == model_fn.ModeKeys.INFER:
      key = constant_op.constant(['key'])
      prediction = table.lookup(key)
      return prediction, const, update_global_step
  est = estimator.Estimator(model_fn=resource_constant_model_fn)
  est.fit(input_fn=_input_fn, steps=1)
  feature_spec = feature_column_lib.create_feature_spec_for_parsing(
      feature_columns)
  serving_input_fn = input_fn_utils.build_parsing_serving_input_fn(feature_spec)
  return est, serving_input_fn
class CheckCallsMonitor(monitors_lib.BaseMonitor):
  """Monitor asserting step_begin/step_end each fire `expect_calls` times."""

  def __init__(self, expect_calls):
    super(CheckCallsMonitor, self).__init__()
    # Counters stay None until training actually begins.
    self.begin_calls = None
    self.end_calls = None
    self.expect_calls = expect_calls

  def begin(self, max_steps):
    self.begin_calls = 0
    self.end_calls = 0

  def step_begin(self, step):
    self.begin_calls += 1
    return {}

  def step_end(self, step, outputs):
    self.end_calls += 1
    return False

  def end(self):
    expected = self.expect_calls
    assert self.end_calls == expected and self.begin_calls == expected
def _model_fn_ops(
    expected_features, expected_labels, actual_features, actual_labels, mode):
  """Build trivial ModelFnOps whose ops are gated on equality asserts
  between the expected and actual features/labels."""
  feature_checks = [
      check_ops.assert_equal(
          expected_features[key], actual_features[key], name='assert_%s' % key)
      for key in expected_features
  ]
  label_check = check_ops.assert_equal(
      expected_labels, actual_labels, name='assert_labels')
  assert_ops = tuple(feature_checks + [label_check])
  with ops.control_dependencies(assert_ops):
    return model_fn.ModelFnOps(
        mode=mode,
        predictions=constant_op.constant(0.),
        loss=constant_op.constant(0.),
        train_op=variables.get_global_step().assign_add(1))
def _make_input_fn(features, labels):
  """Return an input_fn that emits constant tensors for features/labels."""
  def _input_fn():
    feature_tensors = {
        name: constant_op.constant(value)
        for name, value in six.iteritems(features)
    }
    return feature_tensors, constant_op.constant(labels)
  return _input_fn
class EstimatorModelFnTest(test.TestCase):
  """Tests of how Estimator invokes a user-supplied model_fn: argument
  forwarding, functools.partial support, model_dir plumbing, validation of
  the returned (predictions, loss, train_op), and Scaffold usage."""
  def testModelFnArgs(self):
    # Verifies features/labels/mode/params/config are all forwarded.
    features = {'x': 42., 'y': 43.}
    labels = 44.
    expected_params = {'some_param': 'some_value'}
    expected_config = run_config.RunConfig()
    expected_config.i_am_test = True
    # TODO(ptucker): We have to roll our own mock since Estimator._get_arguments
    # doesn't work with mock fns.
    model_fn_call_count = [0]
    # `features` and `labels` are passed by position, `arg0` and `arg1` here.
    def _model_fn(arg0, arg1, mode, params, config):
      model_fn_call_count[0] += 1
      self.assertItemsEqual(features.keys(), arg0.keys())
      self.assertEqual(model_fn.ModeKeys.TRAIN, mode)
      self.assertEqual(expected_params, params)
      self.assertTrue(config.i_am_test)
      return _model_fn_ops(features, labels, arg0, arg1, mode)
    est = estimator.Estimator(
        model_fn=_model_fn, params=expected_params, config=expected_config)
    self.assertEqual(0, model_fn_call_count[0])
    est.fit(input_fn=_make_input_fn(features, labels), steps=1)
    self.assertEqual(1, model_fn_call_count[0])
  def testPartialModelFnArgs(self):
    # Same as above, but with foo/bar pre-bound via functools.partial.
    features = {'x': 42., 'y': 43.}
    labels = 44.
    expected_params = {'some_param': 'some_value'}
    expected_config = run_config.RunConfig()
    expected_config.i_am_test = True
    expected_foo = 45.
    expected_bar = 46.
    # TODO(ptucker): We have to roll our own mock since Estimator._get_arguments
    # doesn't work with mock fns.
    model_fn_call_count = [0]
    # `features` and `labels` are passed by position, `arg0` and `arg1` here.
    def _model_fn(arg0, arg1, foo, mode, params, config, bar):
      model_fn_call_count[0] += 1
      self.assertEqual(expected_foo, foo)
      self.assertEqual(expected_bar, bar)
      self.assertItemsEqual(features.keys(), arg0.keys())
      self.assertEqual(model_fn.ModeKeys.TRAIN, mode)
      self.assertEqual(expected_params, params)
      self.assertTrue(config.i_am_test)
      return _model_fn_ops(features, labels, arg0, arg1, mode)
    partial_model_fn = functools.partial(
        _model_fn, foo=expected_foo, bar=expected_bar)
    est = estimator.Estimator(
        model_fn=partial_model_fn, params=expected_params,
        config=expected_config)
    self.assertEqual(0, model_fn_call_count[0])
    est.fit(input_fn=_make_input_fn(features, labels), steps=1)
    self.assertEqual(1, model_fn_call_count[0])
  def testModelFnWithModelDir(self):
    # A model_fn accepting `model_dir` receives the estimator's model_dir.
    expected_param = {'some_param': 'some_value'}
    expected_model_dir = tempfile.mkdtemp()
    def _argument_checker(features, labels, mode, params, config=None,
                          model_dir=None):
      _, _, _ = features, labels, config
      self.assertEqual(model_fn.ModeKeys.TRAIN, mode)
      self.assertEqual(expected_param, params)
      self.assertEqual(model_dir, expected_model_dir)
      return (constant_op.constant(0.), constant_op.constant(0.),
              variables.get_global_step().assign_add(1))
    est = estimator.Estimator(model_fn=_argument_checker,
                              params=expected_param,
                              model_dir=expected_model_dir)
    est.fit(input_fn=boston_input_fn, steps=1)
  def testInvalidModelFn_no_train_op(self):
    # fit() must reject a model_fn that returns no train_op.
    def _invalid_model_fn(features, labels):
      # pylint: disable=unused-argument
      w = variables_lib.Variable(42.0, 'weight')
      update_global_step = variables.get_global_step().assign_add(1)
      with ops.control_dependencies([update_global_step]):
        loss = 100.0 - w
      return None, loss, None
    est = estimator.Estimator(model_fn=_invalid_model_fn)
    with self.assertRaisesRegexp(ValueError, 'Missing train_op'):
      est.fit(input_fn=boston_input_fn, steps=1)
  def testInvalidModelFn_no_loss(self):
    # evaluate() must reject a model_fn that returns no loss in EVAL mode.
    def _invalid_model_fn(features, labels, mode):
      # pylint: disable=unused-argument
      w = variables_lib.Variable(42.0, 'weight')
      loss = 100.0 - w
      update_global_step = variables.get_global_step().assign_add(1)
      with ops.control_dependencies([update_global_step]):
        train_op = w.assign_add(loss / 100.0)
      predictions = loss
      if mode == model_fn.ModeKeys.EVAL:
        loss = None
      return predictions, loss, train_op
    est = estimator.Estimator(model_fn=_invalid_model_fn)
    est.fit(input_fn=boston_input_fn, steps=1)
    with self.assertRaisesRegexp(ValueError, 'Missing loss'):
      est.evaluate(input_fn=boston_eval_fn, steps=1)
  def testInvalidModelFn_no_prediction(self):
    # evaluate()/predict() must reject a model_fn with no predictions.
    def _invalid_model_fn(features, labels):
      # pylint: disable=unused-argument
      w = variables_lib.Variable(42.0, 'weight')
      loss = 100.0 - w
      update_global_step = variables.get_global_step().assign_add(1)
      with ops.control_dependencies([update_global_step]):
        train_op = w.assign_add(loss / 100.0)
      return None, loss, train_op
    est = estimator.Estimator(model_fn=_invalid_model_fn)
    est.fit(input_fn=boston_input_fn, steps=1)
    with self.assertRaisesRegexp(ValueError, 'Missing prediction'):
      est.evaluate(input_fn=boston_eval_fn, steps=1)
    with self.assertRaisesRegexp(ValueError, 'Missing prediction'):
      est.predict(input_fn=boston_input_fn)
    with self.assertRaisesRegexp(ValueError, 'Missing prediction'):
      est.predict(
          input_fn=functools.partial(
              boston_input_fn, num_epochs=1),
          as_iterable=True)
  def testModelFnScaffoldInTraining(self):
    # A Scaffold init_fn supplied via ModelFnOps must run during fit().
    self.is_init_fn_called = False
    def _init_fn(scaffold, session):
      _, _ = scaffold, session
      self.is_init_fn_called = True
    def _model_fn_scaffold(features, labels, mode):
      _, _ = features, labels
      return model_fn.ModelFnOps(
          mode=mode,
          predictions=constant_op.constant(0.),
          loss=constant_op.constant(0.),
          train_op=variables.get_global_step().assign_add(1),
          scaffold=monitored_session.Scaffold(init_fn=_init_fn))
    est = estimator.Estimator(model_fn=_model_fn_scaffold)
    est.fit(input_fn=boston_input_fn, steps=1)
    self.assertTrue(self.is_init_fn_called)
  def testModelFnScaffoldSaverUsage(self):
    # The Scaffold's custom saver must be used by fit/evaluate/predict/export.
    def _model_fn_scaffold(features, labels, mode):
      _, _ = features, labels
      variables_lib.Variable(1., 'weight')
      real_saver = saver_lib.Saver()
      self.mock_saver = test.mock.Mock(
          wraps=real_saver, saver_def=real_saver.saver_def)
      return model_fn.ModelFnOps(
          mode=mode,
          predictions=constant_op.constant([[1.]]),
          loss=constant_op.constant(0.),
          train_op=variables.get_global_step().assign_add(1),
          scaffold=monitored_session.Scaffold(saver=self.mock_saver))
    def input_fn():
      return {
          'x': constant_op.constant([[1.]]),
      }, constant_op.constant([[1.]])
    est = estimator.Estimator(model_fn=_model_fn_scaffold)
    est.fit(input_fn=input_fn, steps=1)
    self.assertTrue(self.mock_saver.save.called)
    est.evaluate(input_fn=input_fn, steps=1)
    self.assertTrue(self.mock_saver.restore.called)
    est.predict(input_fn=input_fn)
    self.assertTrue(self.mock_saver.restore.called)
    def serving_input_fn():
      serialized_tf_example = array_ops.placeholder(dtype=dtypes.string,
                                                    shape=[None],
                                                    name='input_example_tensor')
      features, labels = input_fn()
      return input_fn_utils.InputFnOps(
          features, labels, {'examples': serialized_tf_example})
    est.export_savedmodel(os.path.join(est.model_dir, 'export'), serving_input_fn)
    self.assertTrue(self.mock_saver.restore.called)
class EstimatorTest(test.TestCase):
def testExperimentIntegration(self):
exp = experiment.Experiment(
estimator=estimator.Estimator(model_fn=linear_model_fn),
train_input_fn=boston_input_fn,
eval_input_fn=boston_input_fn)
exp.test()
def testCheckpointSaverHookSuppressesTheDefaultOne(self):
saver_hook = test.mock.Mock(
spec=basic_session_run_hooks.CheckpointSaverHook)
saver_hook.before_run.return_value = None
est = estimator.Estimator(model_fn=linear_model_fn)
est.fit(input_fn=boston_input_fn, steps=1, monitors=[saver_hook])
# test nothing is saved, due to suppressing default saver
with self.assertRaises(learn.NotFittedError):
est.evaluate(input_fn=boston_input_fn, steps=1)
def testCustomConfig(self):
test_random_seed = 5783452
class TestInput(object):
def __init__(self):
self.random_seed = 0
def config_test_input_fn(self):
self.random_seed = ops.get_default_graph().seed
return constant_op.constant([[1.]]), constant_op.constant([1.])
config = run_config.RunConfig(tf_random_seed=test_random_seed)
test_input = TestInput()
est = estimator.Estimator(model_fn=linear_model_fn, config=config)
est.fit(input_fn=test_input.config_test_input_fn, steps=1)
# If input_fn ran, it will have given us the random seed set on the graph.
self.assertEquals(test_random_seed, test_input.random_seed)
def testRunConfigModelDir(self):
config = run_config.RunConfig(model_dir='test_dir')
est = estimator.Estimator(model_fn=linear_model_fn,
config=config)
self.assertEqual('test_dir', est.config.model_dir)
self.assertEqual('test_dir', est.model_dir)
def testModelDirAndRunConfigModelDir(self):
config = run_config.RunConfig(model_dir='test_dir')
est = estimator.Estimator(model_fn=linear_model_fn,
config=config,
model_dir='test_dir')
self.assertEqual('test_dir', est.config.model_dir)
with self.assertRaisesRegexp(
ValueError,
'model_dir are set both in constructor and RunConfig, '
'but with different'):
estimator.Estimator(model_fn=linear_model_fn,
config=config,
model_dir='different_dir')
def testModelDirIsCopiedToRunConfig(self):
config = run_config.RunConfig()
self.assertIsNone(config.model_dir)
est = estimator.Estimator(model_fn=linear_model_fn,
model_dir='test_dir',
config=config)
self.assertEqual('test_dir', est.config.model_dir)
self.assertEqual('test_dir', est.model_dir)
def testModelDirAsTempDir(self):
with test.mock.patch.object(tempfile, 'mkdtemp', return_value='temp_dir'):
est = estimator.Estimator(model_fn=linear_model_fn)
self.assertEqual('temp_dir', est.config.model_dir)
self.assertEqual('temp_dir', est.model_dir)
def testCheckInputs(self):
est = estimator.SKCompat(estimator.Estimator(model_fn=linear_model_fn))
# Lambdas so we have to different objects to compare
right_features = lambda: np.ones(shape=[7, 8], dtype=np.float32)
right_labels = lambda: np.ones(shape=[7, 10], dtype=np.int32)
est.fit(right_features(), right_labels(), steps=1)
# TODO(wicke): This does not fail for np.int32 because of data_feeder magic.
wrong_type_features = np.ones(shape=[7, 8], dtype=np.int64)
wrong_size_features = np.ones(shape=[7, 10])
wrong_type_labels = np.ones(shape=[7, 10], dtype=np.float32)
wrong_size_labels = np.ones(shape=[7, 11])
est.fit(x=right_features(), y=right_labels(), steps=1)
with self.assertRaises(ValueError):
est.fit(x=wrong_type_features, y=right_labels(), steps=1)
with self.assertRaises(ValueError):
est.fit(x=wrong_size_features, y=right_labels(), steps=1)
with self.assertRaises(ValueError):
est.fit(x=right_features(), y=wrong_type_labels, steps=1)
with self.assertRaises(ValueError):
est.fit(x=right_features(), y=wrong_size_labels, steps=1)
def testBadInput(self):
est = estimator.Estimator(model_fn=linear_model_fn)
self.assertRaisesRegexp(
ValueError,
'Either x or input_fn must be provided.',
est.fit,
x=None,
input_fn=None,
steps=1)
self.assertRaisesRegexp(
ValueError,
'Can not provide both input_fn and x or y',
est.fit,
x='X',
input_fn=iris_input_fn,
steps=1)
self.assertRaisesRegexp(
ValueError,
'Can not provide both input_fn and x or y',
est.fit,
y='Y',
input_fn=iris_input_fn,
steps=1)
self.assertRaisesRegexp(
ValueError,
'Can not provide both input_fn and batch_size',
est.fit,
input_fn=iris_input_fn,
batch_size=100,
steps=1)
self.assertRaisesRegexp(
ValueError,
'Inputs cannot be tensors. Please provide input_fn.',
est.fit,
x=constant_op.constant(1.),
steps=1)
def testUntrained(self):
boston = base.load_boston()
est = estimator.SKCompat(estimator.Estimator(model_fn=linear_model_fn))
with self.assertRaises(learn.NotFittedError):
_ = est.score(x=boston.data, y=boston.target.astype(np.float64))
with self.assertRaises(learn.NotFittedError):
est.predict(x=boston.data)
def testContinueTraining(self):
boston = base.load_boston()
output_dir = tempfile.mkdtemp()
est = estimator.SKCompat(
estimator.Estimator(
model_fn=linear_model_fn, model_dir=output_dir))
float64_labels = boston.target.astype(np.float64)
est.fit(x=boston.data, y=float64_labels, steps=50)
scores = est.score(
x=boston.data,
y=float64_labels,
metrics={'MSE': metric_ops.streaming_mean_squared_error})
del est
# Create another estimator object with the same output dir.
est2 = estimator.SKCompat(
estimator.Estimator(
model_fn=linear_model_fn, model_dir=output_dir))
# Check we can evaluate and predict.
scores2 = est2.score(
x=boston.data,
y=float64_labels,
metrics={'MSE': metric_ops.streaming_mean_squared_error})
self.assertAllClose(scores['MSE'], scores2['MSE'])
predictions = np.array(list(est2.predict(x=boston.data)))
other_score = _sklearn.mean_squared_error(predictions, float64_labels)
self.assertAllClose(scores['MSE'], other_score)
# Check we can keep training.
est2.fit(x=boston.data, y=float64_labels, steps=100)
scores3 = est2.score(
x=boston.data,
y=float64_labels,
metrics={'MSE': metric_ops.streaming_mean_squared_error})
self.assertLess(scores3['MSE'], scores['MSE'])
def test_checkpoint_contains_relative_paths(self):
tmpdir = tempfile.mkdtemp()
est = estimator.Estimator(
model_dir=tmpdir,
model_fn=linear_model_fn_with_model_fn_ops)
est.fit(input_fn=boston_input_fn, steps=5)
checkpoint_file_content = file_io.read_file_to_string(
os.path.join(tmpdir, 'checkpoint'))
ckpt = checkpoint_state_pb2.CheckpointState()
text_format.Merge(checkpoint_file_content, ckpt)
self.assertEqual(ckpt.model_checkpoint_path, 'model.ckpt-5')
self.assertAllEqual(
['model.ckpt-1', 'model.ckpt-5'], ckpt.all_model_checkpoint_paths)
def test_train_save_copy_reload(self):
tmpdir = tempfile.mkdtemp()
model_dir1 = os.path.join(tmpdir, 'model_dir1')
est1 = estimator.Estimator(
model_dir=model_dir1,
model_fn=linear_model_fn_with_model_fn_ops)
est1.fit(input_fn=boston_input_fn, steps=5)
model_dir2 = os.path.join(tmpdir, 'model_dir2')
os.renames(model_dir1, model_dir2)
est2 = estimator.Estimator(
model_dir=model_dir2,
model_fn=linear_model_fn_with_model_fn_ops)
self.assertEqual(5, est2.get_variable_value('global_step'))
est2.fit(input_fn=boston_input_fn, steps=5)
self.assertEqual(10, est2.get_variable_value('global_step'))
def testEstimatorParams(self):
boston = base.load_boston()
est = estimator.SKCompat(
estimator.Estimator(
model_fn=linear_model_params_fn, params={'learning_rate': 0.01}))
est.fit(x=boston.data, y=boston.target, steps=100)
def testHooksNotChanged(self):
est = estimator.Estimator(model_fn=logistic_model_no_mode_fn)
# We pass empty array and expect it to remain empty after calling
# fit and evaluate. Requires inside to copy this array if any hooks were
# added.
my_array = []
est.fit(input_fn=iris_input_fn, steps=100, monitors=my_array)
_ = est.evaluate(input_fn=iris_input_fn, steps=1, hooks=my_array)
self.assertEqual(my_array, [])
def testIrisIterator(self):
iris = base.load_iris()
est = estimator.Estimator(model_fn=logistic_model_no_mode_fn)
x_iter = itertools.islice(iris.data, 100)
y_iter = itertools.islice(iris.target, 100)
estimator.SKCompat(est).fit(x_iter, y_iter, steps=20)
eval_result = est.evaluate(input_fn=iris_input_fn, steps=1)
x_iter_eval = itertools.islice(iris.data, 100)
y_iter_eval = itertools.islice(iris.target, 100)
score_result = estimator.SKCompat(est).score(x_iter_eval, y_iter_eval)
print(score_result)
self.assertItemsEqual(eval_result.keys(), score_result.keys())
self.assertItemsEqual(['global_step', 'loss'], score_result.keys())
predictions = estimator.SKCompat(est).predict(x=iris.data)['class']
self.assertEqual(len(predictions), iris.target.shape[0])
def testIrisIteratorArray(self):
iris = base.load_iris()
est = estimator.Estimator(model_fn=logistic_model_no_mode_fn)
x_iter = itertools.islice(iris.data, 100)
y_iter = (np.array(x) for x in iris.target)
est.fit(x_iter, y_iter, steps=100)
_ = est.evaluate(input_fn=iris_input_fn, steps=1)
_ = six.next(est.predict(x=iris.data))['class']
def testIrisIteratorPlainInt(self):
iris = base.load_iris()
est = estimator.Estimator(model_fn=logistic_model_no_mode_fn)
x_iter = itertools.islice(iris.data, 100)
y_iter = (v for v in iris.target)
est.fit(x_iter, y_iter, steps=100)
_ = est.evaluate(input_fn=iris_input_fn, steps=1)
_ = six.next(est.predict(x=iris.data))['class']
def testIrisTruncatedIterator(self):
iris = base.load_iris()
est = estimator.Estimator(model_fn=logistic_model_no_mode_fn)
x_iter = itertools.islice(iris.data, 50)
y_iter = ([np.int32(v)] for v in iris.target)
est.fit(x_iter, y_iter, steps=100)
def testTrainStepsIsIncremental(self):
est = estimator.Estimator(model_fn=linear_model_fn)
est.fit(input_fn=boston_input_fn, steps=10)
self.assertEqual(10, est.get_variable_value('global_step'))
est.fit(input_fn=boston_input_fn, steps=15)
self.assertEqual(25, est.get_variable_value('global_step'))
def testTrainMaxStepsIsNotIncremental(self):
est = estimator.Estimator(model_fn=linear_model_fn)
est.fit(input_fn=boston_input_fn, max_steps=10)
self.assertEqual(10, est.get_variable_value('global_step'))
est.fit(input_fn=boston_input_fn, max_steps=15)
self.assertEqual(15, est.get_variable_value('global_step'))
def testPredict(self):
est = estimator.Estimator(model_fn=linear_model_fn)
boston = base.load_boston()
est.fit(input_fn=boston_input_fn, steps=1)
output = list(est.predict(x=boston.data, batch_size=10))
self.assertEqual(len(output), boston.target.shape[0])
def testWithModelFnOps(self):
"""Test for model_fn that returns `ModelFnOps`."""
est = estimator.Estimator(model_fn=linear_model_fn_with_model_fn_ops)
boston = base.load_boston()
est.fit(input_fn=boston_input_fn, steps=1)
input_fn = functools.partial(boston_input_fn, num_epochs=1)
scores = est.evaluate(input_fn=input_fn, steps=1)
self.assertIn('loss', scores.keys())
output = list(est.predict(input_fn=input_fn))
self.assertEqual(len(output), boston.target.shape[0])
def testWrongInput(self):
def other_input_fn():
return {
'other': constant_op.constant([0, 0, 0])
}, constant_op.constant([0, 0, 0])
est = estimator.Estimator(model_fn=linear_model_fn)
est.fit(input_fn=boston_input_fn, steps=1)
with self.assertRaises(ValueError):
est.fit(input_fn=other_input_fn, steps=1)
def testMonitorsForFit(self):
est = estimator.Estimator(model_fn=linear_model_fn)
est.fit(input_fn=boston_input_fn,
steps=21,
monitors=[CheckCallsMonitor(expect_calls=21)])
def testHooksForEvaluate(self):
class CheckCallHook(session_run_hook.SessionRunHook):
def __init__(self):
self.run_count = 0
def after_run(self, run_context, run_values):
self.run_count += 1
est = learn.Estimator(model_fn=linear_model_fn)
est.fit(input_fn=boston_input_fn, steps=1)
hook = CheckCallHook()
est.evaluate(input_fn=boston_eval_fn, steps=3, hooks=[hook])
self.assertEqual(3, hook.run_count)
def testSummaryWriting(self):
est = estimator.Estimator(model_fn=linear_model_fn)
est.fit(input_fn=boston_input_fn, steps=200)
est.evaluate(input_fn=boston_input_fn, steps=200)
loss_summary = util_test.simple_values_from_events(
util_test.latest_events(est.model_dir), ['OptimizeLoss/loss'])
self.assertEqual(1, len(loss_summary))
def testSummaryWritingWithSummaryProto(self):
"""An evaluation metric that emits a histogram Summary proto reaches disk."""
def _streaming_mean_squared_error_histogram(predictions,
labels,
weights=None,
metrics_collections=None,
updates_collections=None,
name=None):
# Wraps the streaming MSE metric so its value tensor is reported as a
# histogram summary rather than a plain scalar.
metrics, update_ops = metric_ops.streaming_mean_squared_error(
predictions,
labels,
weights=weights,
metrics_collections=metrics_collections,
updates_collections=updates_collections,
name=name)
return summary.histogram('histogram', metrics), update_ops
est = estimator.Estimator(model_fn=linear_model_fn)
est.fit(input_fn=boston_input_fn, steps=200)
est.evaluate(
input_fn=boston_input_fn,
steps=200,
metrics={'MSE': _streaming_mean_squared_error_histogram})
# Scan the latest eval events for a value tagged 'MSE' and verify that it
# carries histogram ('histo') data rather than a scalar.
events = util_test.latest_events(est.model_dir + '/eval')
output_values = {}
for e in events:
if e.HasField('summary'):
for v in e.summary.value:
output_values[v.tag] = v
self.assertTrue('MSE' in output_values)
self.assertTrue(output_values['MSE'].HasField('histo'))
def testLossInGraphCollection(self):
  """Training registers the loss tensor in the GraphKeys.LOSSES collection."""

  class _LossCollectionRecorder(session_run_hook.SessionRunHook):
    """Snapshots the LOSSES collection when the session begins."""

    def begin(self):
      self.loss_collection = ops.get_collection(ops.GraphKeys.LOSSES)

  recorder = _LossCollectionRecorder()
  model = estimator.Estimator(model_fn=linear_model_fn)
  model.fit(input_fn=boston_input_fn, steps=200, monitors=[recorder])
  self.assertTrue(recorder.loss_collection)
def test_export_returns_exported_dirname(self):
  """export() forwards the directory name reported by the export module."""
  expected = '/path/to/some_dir'
  with test.mock.patch.object(estimator, 'export') as mock_export_module:
    mock_export_module._export_estimator.return_value = expected
    est = estimator.Estimator(model_fn=linear_model_fn)
    actual = est.export('/path/to')
    # assertEquals is a deprecated unittest alias; use assertEqual.
    self.assertEqual(expected, actual)
def test_export_savedmodel(self):
"""export_savedmodel() writes a well-formed SavedModel with assets.

Checks the on-disk layout (saved_model.pb, variables, assets,
assets.extra), the asset file contents, and that the export can be
re-loaded into a fresh graph.
"""
tmpdir = tempfile.mkdtemp()
est, serving_input_fn = _build_estimator_for_export_tests(tmpdir)
# Stage an extra file to be copied into assets.extra at export time.
extra_file_name = os.path.join(
compat.as_bytes(tmpdir), compat.as_bytes('my_extra_file'))
extra_file = gfile.GFile(extra_file_name, mode='w')
extra_file.write(EXTRA_FILE_CONTENT)
extra_file.close()
assets_extra = {'some/sub/directory/my_extra_file': extra_file_name}
export_dir_base = os.path.join(
compat.as_bytes(tmpdir), compat.as_bytes('export'))
export_dir = est.export_savedmodel(
export_dir_base, serving_input_fn, assets_extra=assets_extra)
# Verify the expected SavedModel directory layout.
self.assertTrue(gfile.Exists(export_dir_base))
self.assertTrue(gfile.Exists(export_dir))
self.assertTrue(
gfile.Exists(
os.path.join(
compat.as_bytes(export_dir), compat.as_bytes(
'saved_model.pb'))))
self.assertTrue(
gfile.Exists(
os.path.join(
compat.as_bytes(export_dir), compat.as_bytes('variables'))))
self.assertTrue(
gfile.Exists(
os.path.join(
compat.as_bytes(export_dir),
compat.as_bytes('variables/variables.index'))))
self.assertTrue(
gfile.Exists(
os.path.join(
compat.as_bytes(export_dir),
compat.as_bytes('variables/variables.data-00000-of-00001'))))
self.assertTrue(
gfile.Exists(
os.path.join(
compat.as_bytes(export_dir), compat.as_bytes('assets'))))
self.assertTrue(
gfile.Exists(
os.path.join(
compat.as_bytes(export_dir),
compat.as_bytes('assets/my_vocab_file'))))
# The vocab asset must be copied verbatim.
self.assertEqual(
compat.as_bytes(VOCAB_FILE_CONTENT),
compat.as_bytes(
gfile.GFile(
os.path.join(
compat.as_bytes(export_dir),
compat.as_bytes('assets/my_vocab_file'))).read()))
expected_extra_path = os.path.join(
compat.as_bytes(export_dir),
compat.as_bytes('assets.extra/some/sub/directory/my_extra_file'))
self.assertTrue(
gfile.Exists(
os.path.join(
compat.as_bytes(export_dir), compat.as_bytes('assets.extra'))))
self.assertTrue(gfile.Exists(expected_extra_path))
self.assertEqual(
compat.as_bytes(EXTRA_FILE_CONTENT),
compat.as_bytes(gfile.GFile(expected_extra_path).read()))
expected_vocab_file = os.path.join(
compat.as_bytes(tmpdir), compat.as_bytes('my_vocab_file'))
# Restore, to validate that the export was well-formed.
with ops.Graph().as_default() as graph:
with session_lib.Session(graph=graph) as sess:
loader.load(sess, [tag_constants.SERVING], export_dir)
assets = [
x.eval()
for x in graph.get_collection(ops.GraphKeys.ASSET_FILEPATHS)
]
self.assertItemsEqual([expected_vocab_file], assets)
graph_ops = [x.name for x in graph.get_operations()]
self.assertTrue('input_example_tensor' in graph_ops)
self.assertTrue('ParseExample/ParseExample' in graph_ops)
self.assertTrue('linear/linear/feature/matmul' in graph_ops)
# The input-feature-keys collection must survive the export round trip.
self.assertItemsEqual(
['bogus_lookup', 'feature'],
[compat.as_str_any(x) for x in graph.get_collection(
constants.COLLECTION_DEF_KEY_FOR_INPUT_FEATURE_KEYS)])
# cleanup
gfile.DeleteRecursively(tmpdir)
def test_export_savedmodel_with_resource(self):
"""Exporting a model that owns a resource (lookup table) round-trips."""
tmpdir = tempfile.mkdtemp()
est, serving_input_fn = _build_estimator_for_resource_export_test()
export_dir_base = os.path.join(
compat.as_bytes(tmpdir), compat.as_bytes('export'))
export_dir = est.export_savedmodel(export_dir_base, serving_input_fn)
# Verify the expected SavedModel directory layout.
self.assertTrue(gfile.Exists(export_dir_base))
self.assertTrue(gfile.Exists(export_dir))
self.assertTrue(
gfile.Exists(
os.path.join(
compat.as_bytes(export_dir), compat.as_bytes(
'saved_model.pb'))))
self.assertTrue(
gfile.Exists(
os.path.join(
compat.as_bytes(export_dir), compat.as_bytes('variables'))))
self.assertTrue(
gfile.Exists(
os.path.join(
compat.as_bytes(export_dir),
compat.as_bytes('variables/variables.index'))))
self.assertTrue(
gfile.Exists(
os.path.join(
compat.as_bytes(export_dir),
compat.as_bytes('variables/variables.data-00000-of-00001'))))
# Restore, to validate that the export was well-formed.
with ops.Graph().as_default() as graph:
with session_lib.Session(graph=graph) as sess:
loader.load(sess, [tag_constants.SERVING], export_dir)
graph_ops = [x.name for x in graph.get_operations()]
self.assertTrue('input_example_tensor' in graph_ops)
self.assertTrue('ParseExample/ParseExample' in graph_ops)
# The serving table op is exported; its training-only state is not.
self.assertTrue('LookupTableModel' in graph_ops)
self.assertFalse('LookupTableTrainingState' in graph_ops)
# cleanup
gfile.DeleteRecursively(tmpdir)
def test_export_savedmodel_with_graph_transforms(self):
"""Export with per-tag graph_rewrite_specs.

Exports two MetaGraphDefs: tag_1 untransformed, and (tag_2, tag_3) run
through strip_unused_nodes; verifies the on-disk layout, the assets, and
that stripping removed the dead subgraphs from the second MetaGraphDef
only.
"""
tmpdir = tempfile.mkdtemp()
est, serving_input_fn = _build_estimator_for_export_tests(tmpdir)
# Stage an extra file to be copied into assets.extra at export time.
extra_file_name = os.path.join(
compat.as_bytes(tmpdir), compat.as_bytes('my_extra_file'))
extra_file = gfile.GFile(extra_file_name, mode='w')
extra_file.write(EXTRA_FILE_CONTENT)
extra_file.close()
assets_extra = {'some/sub/directory/my_extra_file': extra_file_name}
export_dir_base = os.path.join(
compat.as_bytes(tmpdir), compat.as_bytes('export'))
export_dir = est.export_savedmodel(
export_dir_base, serving_input_fn, assets_extra=assets_extra,
graph_rewrite_specs=[
estimator.GraphRewriteSpec(['tag_1'], []),
estimator.GraphRewriteSpec(['tag_2', 'tag_3'],
['strip_unused_nodes'])])
# Verify the expected SavedModel directory layout.
self.assertTrue(gfile.Exists(export_dir_base))
self.assertTrue(gfile.Exists(export_dir))
self.assertTrue(
gfile.Exists(
os.path.join(
compat.as_bytes(export_dir), compat.as_bytes(
'saved_model.pb'))))
self.assertTrue(
gfile.Exists(
os.path.join(
compat.as_bytes(export_dir), compat.as_bytes('variables'))))
self.assertTrue(
gfile.Exists(
os.path.join(
compat.as_bytes(export_dir),
compat.as_bytes('variables/variables.index'))))
self.assertTrue(
gfile.Exists(
os.path.join(
compat.as_bytes(export_dir),
compat.as_bytes('variables/variables.data-00000-of-00001'))))
self.assertTrue(
gfile.Exists(
os.path.join(
compat.as_bytes(export_dir), compat.as_bytes('assets'))))
self.assertTrue(
gfile.Exists(
os.path.join(
compat.as_bytes(export_dir),
compat.as_bytes('assets/my_vocab_file'))))
# The vocab asset must be copied verbatim.
self.assertEqual(
compat.as_bytes(VOCAB_FILE_CONTENT),
compat.as_bytes(
gfile.GFile(
os.path.join(
compat.as_bytes(export_dir),
compat.as_bytes('assets/my_vocab_file'))).read()))
expected_extra_path = os.path.join(
compat.as_bytes(export_dir),
compat.as_bytes('assets.extra/some/sub/directory/my_extra_file'))
self.assertTrue(
gfile.Exists(
os.path.join(
compat.as_bytes(export_dir), compat.as_bytes('assets.extra'))))
self.assertTrue(gfile.Exists(expected_extra_path))
self.assertEqual(
compat.as_bytes(EXTRA_FILE_CONTENT),
compat.as_bytes(gfile.GFile(expected_extra_path).read()))
expected_vocab_file = os.path.join(
compat.as_bytes(tmpdir), compat.as_bytes('my_vocab_file'))
# Restore, to validate that the export was well-formed.
# tag_1 is untransformed.
tags = ['tag_1']
with ops.Graph().as_default() as graph:
with session_lib.Session(graph=graph) as sess:
loader.load(sess, tags, export_dir)
assets = [
x.eval()
for x in graph.get_collection(ops.GraphKeys.ASSET_FILEPATHS)
]
self.assertItemsEqual([expected_vocab_file], assets)
graph_ops = [x.name for x in graph.get_operations()]
self.assertTrue('input_example_tensor' in graph_ops)
self.assertTrue('ParseExample/ParseExample' in graph_ops)
self.assertTrue('linear/linear/feature/matmul' in graph_ops)
# Since there were no transforms, both save ops are still present.
self.assertTrue('save/SaveV2/tensor_names' in graph_ops)
self.assertTrue('save_1/SaveV2/tensor_names' in graph_ops)
# Since there were no transforms, the hash table lookup is still there.
self.assertTrue('hash_table_Lookup' in graph_ops)
# Restore, to validate that the export was well-formed.
# tag_2, tag_3 was subjected to strip_unused_nodes.
tags = ['tag_2', 'tag_3']
with ops.Graph().as_default() as graph:
with session_lib.Session(graph=graph) as sess:
loader.load(sess, tags, export_dir)
assets = [
x.eval()
for x in graph.get_collection(ops.GraphKeys.ASSET_FILEPATHS)
]
self.assertItemsEqual([expected_vocab_file], assets)
graph_ops = [x.name for x in graph.get_operations()]
self.assertTrue('input_example_tensor' in graph_ops)
self.assertTrue('ParseExample/ParseExample' in graph_ops)
self.assertTrue('linear/linear/feature/matmul' in graph_ops)
# The Saver used to restore the checkpoint into the export Session
# was not added to the SAVERS collection, so strip_unused_nodes removes
# it. The one explicitly created in export_savedmodel is tracked in
# the MetaGraphDef saver_def field, so that one is retained.
# TODO(soergel): Make Savers sane again. I understand this is all a bit
# nuts but for now the test demonstrates what actually happens.
self.assertFalse('save/SaveV2/tensor_names' in graph_ops)
self.assertTrue('save_1/SaveV2/tensor_names' in graph_ops)
# The fake hash table lookup wasn't connected to anything; stripped.
self.assertFalse('hash_table_Lookup' in graph_ops)
# cleanup
gfile.DeleteRecursively(tmpdir)
class InferRealValuedColumnsTest(test.TestCase):
"""Tests for infer_real_valued_columns_from_input and ..._from_input_fn."""
def testInvalidArgs(self):
with self.assertRaisesRegexp(ValueError, 'x or input_fn must be provided'):
estimator.infer_real_valued_columns_from_input(None)
with self.assertRaisesRegexp(ValueError, 'cannot be tensors'):
estimator.infer_real_valued_columns_from_input(constant_op.constant(1.0))
def _assert_single_feature_column(self, expected_shape, expected_dtype,
feature_columns):
# Asserts there is exactly one unnamed column whose parsing config is a
# FixedLenFeature of the expected shape and dtype.
self.assertEqual(1, len(feature_columns))
feature_column = feature_columns[0]
self.assertEqual('', feature_column.name)
self.assertEqual(
{
'':
parsing_ops.FixedLenFeature(
shape=expected_shape, dtype=expected_dtype)
},
feature_column.config)
def testInt32Input(self):
feature_columns = estimator.infer_real_valued_columns_from_input(
np.ones(
shape=[7, 8], dtype=np.int32))
self._assert_single_feature_column([8], dtypes.int32, feature_columns)
def testInt32InputFn(self):
feature_columns = estimator.infer_real_valued_columns_from_input_fn(
lambda: (array_ops.ones(shape=[7, 8], dtype=dtypes.int32), None))
self._assert_single_feature_column([8], dtypes.int32, feature_columns)
def testInt64Input(self):
feature_columns = estimator.infer_real_valued_columns_from_input(
np.ones(
shape=[7, 8], dtype=np.int64))
self._assert_single_feature_column([8], dtypes.int64, feature_columns)
def testInt64InputFn(self):
feature_columns = estimator.infer_real_valued_columns_from_input_fn(
lambda: (array_ops.ones(shape=[7, 8], dtype=dtypes.int64), None))
self._assert_single_feature_column([8], dtypes.int64, feature_columns)
def testFloat32Input(self):
feature_columns = estimator.infer_real_valued_columns_from_input(
np.ones(
shape=[7, 8], dtype=np.float32))
self._assert_single_feature_column([8], dtypes.float32, feature_columns)
def testFloat32InputFn(self):
feature_columns = estimator.infer_real_valued_columns_from_input_fn(
lambda: (array_ops.ones(shape=[7, 8], dtype=dtypes.float32), None))
self._assert_single_feature_column([8], dtypes.float32, feature_columns)
def testFloat64Input(self):
feature_columns = estimator.infer_real_valued_columns_from_input(
np.ones(
shape=[7, 8], dtype=np.float64))
self._assert_single_feature_column([8], dtypes.float64, feature_columns)
def testFloat64InputFn(self):
feature_columns = estimator.infer_real_valued_columns_from_input_fn(
lambda: (array_ops.ones(shape=[7, 8], dtype=dtypes.float64), None))
self._assert_single_feature_column([8], dtypes.float64, feature_columns)
def testBoolInput(self):
# Non-numeric dtypes must be rejected.
with self.assertRaisesRegexp(
ValueError, 'on integer or non floating types are not supported'):
estimator.infer_real_valued_columns_from_input(
np.array([[False for _ in xrange(8)] for _ in xrange(7)]))
def testBoolInputFn(self):
with self.assertRaisesRegexp(
ValueError, 'on integer or non floating types are not supported'):
# pylint: disable=g-long-lambda
estimator.infer_real_valued_columns_from_input_fn(
lambda: (constant_op.constant(False, shape=[7, 8], dtype=dtypes.bool),
None))
def testStringInput(self):
with self.assertRaisesRegexp(
ValueError, 'on integer or non floating types are not supported'):
# pylint: disable=g-long-lambda
estimator.infer_real_valued_columns_from_input(
np.array([['%d.0' % i for i in xrange(8)] for _ in xrange(7)]))
def testStringInputFn(self):
with self.assertRaisesRegexp(
ValueError, 'on integer or non floating types are not supported'):
# pylint: disable=g-long-lambda
estimator.infer_real_valued_columns_from_input_fn(
lambda: (
constant_op.constant([['%d.0' % i
for i in xrange(8)]
for _ in xrange(7)]),
None))
def testBostonInputFn(self):
feature_columns = estimator.infer_real_valued_columns_from_input_fn(
boston_input_fn)
self._assert_single_feature_column([_BOSTON_INPUT_DIM], dtypes.float64,
feature_columns)
def testIrisInputFn(self):
feature_columns = estimator.infer_real_valued_columns_from_input_fn(
iris_input_fn)
self._assert_single_feature_column([_IRIS_INPUT_DIM], dtypes.float64,
feature_columns)
class ReplicaDeviceSetterTest(test.TestCase):
"""Tests device placement chosen by estimator._get_replica_device_setter."""
def testVariablesAreOnPs(self):
# With a PS in TF_CONFIG, variables go to the PS and ops to the worker.
tf_config = {'cluster': {run_config.TaskType.PS: ['fake_ps_0']}}
with test.mock.patch.dict('os.environ',
{'TF_CONFIG': json.dumps(tf_config)}):
config = run_config.RunConfig()
with ops.device(estimator._get_replica_device_setter(config)):
v = variables_lib.Variable([1, 2])
w = variables_lib.Variable([2, 1])
a = v + w
self.assertDeviceEqual('/job:ps/task:0', v.device)
self.assertDeviceEqual('/job:ps/task:0', v.initializer.device)
self.assertDeviceEqual('/job:ps/task:0', w.device)
self.assertDeviceEqual('/job:ps/task:0', w.initializer.device)
self.assertDeviceEqual('/job:worker', a.device)
def testVariablesAreLocal(self):
# Without a cluster, nothing is pinned to a remote device.
with ops.device(
estimator._get_replica_device_setter(run_config.RunConfig())):
v = variables_lib.Variable([1, 2])
w = variables_lib.Variable([2, 1])
a = v + w
self.assertDeviceEqual('', v.device)
self.assertDeviceEqual('', v.initializer.device)
self.assertDeviceEqual('', w.device)
self.assertDeviceEqual('', w.initializer.device)
self.assertDeviceEqual('', a.device)
def testMutableHashTableIsOnPs(self):
# Stateful resources such as MutableHashTable also belong on the PS.
tf_config = {'cluster': {run_config.TaskType.PS: ['fake_ps_0']}}
with test.mock.patch.dict('os.environ',
{'TF_CONFIG': json.dumps(tf_config)}):
config = run_config.RunConfig()
with ops.device(estimator._get_replica_device_setter(config)):
default_val = constant_op.constant([-1, -1], dtypes.int64)
table = lookup.MutableHashTable(dtypes.string, dtypes.int64,
default_val)
input_string = constant_op.constant(['brain', 'salad', 'tank'])
output = table.lookup(input_string)
self.assertDeviceEqual('/job:ps/task:0', table._table_ref.device)
self.assertDeviceEqual('/job:ps/task:0', output.device)
def testMutableHashTableIsLocal(self):
with ops.device(
estimator._get_replica_device_setter(run_config.RunConfig())):
default_val = constant_op.constant([-1, -1], dtypes.int64)
table = lookup.MutableHashTable(dtypes.string, dtypes.int64,
default_val)
input_string = constant_op.constant(['brain', 'salad', 'tank'])
output = table.lookup(input_string)
self.assertDeviceEqual('', table._table_ref.device)
self.assertDeviceEqual('', output.device)
def testTaskIsSetOnWorkerWhenJobNameIsSet(self):
# When the task section names this process worker 3, ops are placed on
# /job:worker/task:3 while variables still go to the PS.
tf_config = {
'cluster': {
run_config.TaskType.PS: ['fake_ps_0']
},
'task': {
'type': run_config.TaskType.WORKER,
'index': 3
}
}
with test.mock.patch.dict('os.environ',
{'TF_CONFIG': json.dumps(tf_config)}):
config = run_config.RunConfig()
with ops.device(estimator._get_replica_device_setter(config)):
v = variables_lib.Variable([1, 2])
w = variables_lib.Variable([2, 1])
a = v + w
self.assertDeviceEqual('/job:ps/task:0', v.device)
self.assertDeviceEqual('/job:ps/task:0', v.initializer.device)
self.assertDeviceEqual('/job:ps/task:0', w.device)
self.assertDeviceEqual('/job:ps/task:0', w.initializer.device)
self.assertDeviceEqual('/job:worker/task:3', a.device)
# Standard test-runner entry point.
if __name__ == '__main__':
test.main()
| apache-2.0 |
annarev/tensorflow | tensorflow/python/keras/combinations_test.py | 11 | 5741 | # Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Keras combinations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import unittest
from absl.testing import parameterized
from tensorflow.python import tf2
from tensorflow.python.eager import context
from tensorflow.python.keras import combinations
from tensorflow.python.keras import models as keras_models
from tensorflow.python.keras import testing_utils
from tensorflow.python.platform import test
class CombinationsTest(test.TestCase):
"""Tests for the Keras test-combination generators.

Each test defines a throwaway parameterized.TestCase, expands it with
combinations.generate, and checks which (mode, run_eagerly, model_type)
variants were produced. Expectations differ between TF1 and TF2 because
graph mode is only generated when tf2 is disabled.
"""
def test_run_all_keras_modes(self):
test_params = []
class ExampleTest(parameterized.TestCase):
def runTest(self):
pass
@combinations.generate(combinations.keras_mode_combinations())
def testBody(self):
mode = "eager" if context.executing_eagerly() else "graph"
should_run_eagerly = testing_utils.should_run_eagerly()
test_params.append((mode, should_run_eagerly))
e = ExampleTest()
# Call the expanded methods directly; the graph variant only exists
# when tf2 is disabled.
if not tf2.enabled():
e.testBody_test_mode_graph_runeagerly_False()
e.testBody_test_mode_eager_runeagerly_True()
e.testBody_test_mode_eager_runeagerly_False()
if not tf2.enabled():
self.assertLen(test_params, 3)
self.assertAllEqual(test_params, [
("graph", False),
("eager", True),
("eager", False),
])
# Running the suite executes every variant once more.
ts = unittest.makeSuite(ExampleTest)
res = unittest.TestResult()
ts.run(res)
self.assertLen(test_params, 6)
else:
self.assertLen(test_params, 2)
self.assertAllEqual(test_params, [
("eager", True),
("eager", False),
])
ts = unittest.makeSuite(ExampleTest)
res = unittest.TestResult()
ts.run(res)
self.assertLen(test_params, 4)
def test_generate_keras_mode_eager_only(self):
result = combinations.keras_mode_combinations(mode=["eager"])
self.assertLen(result, 2)
self.assertEqual(result[0], {"mode": "eager", "run_eagerly": True})
self.assertEqual(result[1], {"mode": "eager", "run_eagerly": False})
def test_generate_keras_mode_skip_run_eagerly(self):
result = combinations.keras_mode_combinations(run_eagerly=[False])
if tf2.enabled():
self.assertLen(result, 1)
self.assertEqual(result[0], {"mode": "eager", "run_eagerly": False})
else:
self.assertLen(result, 2)
self.assertEqual(result[0], {"mode": "eager", "run_eagerly": False})
self.assertEqual(result[1], {"mode": "graph", "run_eagerly": False})
def test_run_all_keras_model_types(self):
model_types = []
models = []
class ExampleTest(parameterized.TestCase):
def runTest(self):
pass
@combinations.generate(combinations.keras_model_type_combinations())
def testBody(self):
model_types.append(testing_utils.get_model_type())
models.append(testing_utils.get_small_mlp(1, 4, input_dim=3))
e = ExampleTest()
e.testBody_test_modeltype_functional()
e.testBody_test_modeltype_subclass()
e.testBody_test_modeltype_sequential()
self.assertLen(model_types, 3)
self.assertAllEqual(model_types, [
"functional",
"subclass",
"sequential"
])
# Validate that the models are what they should be
self.assertTrue(models[0]._is_graph_network)
self.assertFalse(models[1]._is_graph_network)
self.assertNotIsInstance(models[0], keras_models.Sequential)
self.assertNotIsInstance(models[1], keras_models.Sequential)
self.assertIsInstance(models[2], keras_models.Sequential)
ts = unittest.makeSuite(ExampleTest)
res = unittest.TestResult()
ts.run(res)
self.assertLen(model_types, 6)
def test_combine_combinations(self):
# times() must produce the cross product of mode and model-type axes.
test_cases = []
@combinations.generate(combinations.times(
combinations.keras_mode_combinations(),
combinations.keras_model_type_combinations()))
class ExampleTest(parameterized.TestCase):
def runTest(self):
pass
@parameterized.named_parameters(dict(testcase_name="_arg",
arg=True))
def testBody(self, arg):
del arg
mode = "eager" if context.executing_eagerly() else "graph"
should_run_eagerly = testing_utils.should_run_eagerly()
test_cases.append((mode, should_run_eagerly,
testing_utils.get_model_type()))
ts = unittest.makeSuite(ExampleTest)
res = unittest.TestResult()
ts.run(res)
expected_combinations = [
("eager", False, "functional"),
("eager", False, "sequential"),
("eager", False, "subclass"),
("eager", True, "functional"),
("eager", True, "sequential"),
("eager", True, "subclass"),
]
if not tf2.enabled():
expected_combinations.extend([
("graph", False, "functional"),
("graph", False, "sequential"),
("graph", False, "subclass"),
])
self.assertAllEqual(sorted(test_cases), expected_combinations)
# Standard test-runner entry point.
if __name__ == "__main__":
test.main()
| apache-2.0 |
kalahbrown/HueBigSQL | desktop/core/ext-py/Django-1.6.10/tests/urlpatterns_reverse/views.py | 64 | 1511 | from functools import partial, update_wrapper
from django.http import HttpResponse
from django.views.generic import RedirectView
from django.core.urlresolvers import reverse_lazy
from django.contrib.auth.decorators import user_passes_test
def empty_view(request, *args, **kwargs):
    """Return a blank response regardless of the captured URL arguments."""
    response = HttpResponse('')
    return response
def kwargs_view(request, arg1=1, arg2=2):
    """Blank-response view with keyword arguments carrying defaults."""
    response = HttpResponse('')
    return response
def absolute_kwargs_view(request, arg1=1, arg2=2):
    """Blank-response view referenced by absolute path in the URLconf."""
    response = HttpResponse('')
    return response
def defaults_view(request, arg1, arg2):
    """Stub view used to exercise URLconf default-argument resolution."""
def nested_view(request):
    """Stub view referenced from a nested (included) URLconf."""
def erroneous_view(request):
# Intentionally imports a module that does not exist so tests can assert
# that an ImportError raised inside a view propagates to the caller.
import non_existent
def pass_resolver_match_view(request, *args, **kwargs):
    """Echo back the ResolverMatch so tests can inspect how the URL resolved."""
    result = HttpResponse('')
    result.resolver_match = request.resolver_match
    return result
# Deliberately a plain string: used to verify that non-callable view
# references are rejected by the URL resolver.
uncallable = "Can I be a view? Pleeeease?"
class ViewClass(object):
    """Callable object: instances can be wired into a URLconf as views."""

    def __call__(self, request, *args, **kwargs):
        response = HttpResponse('')
        return response
# Module-level instance of ViewClass, referenced directly from URLconfs.
view_class_instance = ViewClass()
class LazyRedirectView(RedirectView):
# reverse_lazy defers URL resolution until the redirect is actually issued,
# so this module can be imported before the URLconf is loaded.
url = reverse_lazy('named-lazy-url-redirected-to')
# Anonymous users are redirected to the lazily-reversed login page by the
# decorator; authenticated users get the greeting.
@user_passes_test(lambda u: u.is_authenticated(), login_url=reverse_lazy('some-login-page'))
def login_required_view(request):
return HttpResponse('Hello you')
def bad_view(request, *args, **kwargs):
    """Always fail, so tests can assert that view exceptions propagate."""
    raise ValueError("I don't think I'm getting good value for this view")
# Partial-view variants: the wrapped one copies empty_view's metadata via
# update_wrapper, the plain partial does not.
empty_view_partial = partial(empty_view, template_name="template.html")
empty_view_wrapped = update_wrapper(
partial(empty_view, template_name="template.html"), empty_view,
)
| apache-2.0 |
bwrsandman/OpenUpgrade | addons/purchase/res_config.py | 357 | 6198 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (C) 2004-2012 OpenERP S.A. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
class purchase_config_settings(osv.osv_memory):
"""Transient wizard exposing purchase configuration toggles in Settings.

By res.config.settings convention, 'module_*' booleans (un)install the
named module when applied and 'group_*' booleans grant the implied
group — presumably to the default employee group; confirm against the
res.config.settings framework.
"""
_name = 'purchase.config.settings'
_inherit = 'res.config.settings'
_columns = {
'default_invoice_method': fields.selection(
[('manual', 'Based on purchase order lines'),
('picking', 'Based on incoming shipments'),
('order', 'Pre-generate draft invoices based on purchase orders'),
], 'Default invoicing control method', required=True, default_model='purchase.order'),
'group_purchase_pricelist':fields.boolean("Manage pricelist per supplier",
implied_group='product.group_purchase_pricelist',
help='Allows to manage different prices based on rules per category of Supplier.\n'
'Example: 10% for retailers, promotion of 5 EUR on this product, etc.'),
'group_uom':fields.boolean("Manage different units of measure for products",
implied_group='product.group_uom',
help="""Allows you to select and maintain different units of measure for products."""),
'group_costing_method':fields.boolean("Use 'Real Price' or 'Average' costing methods.",
implied_group='stock_account.group_inventory_valuation',
help="""Allows you to compute product cost price based on average cost."""),
'module_warning': fields.boolean("Alerts by products or supplier",
help='Allow to configure notification on products and trigger them when a user wants to purchase a given product or a given supplier.\n'
'Example: Product: this product is deprecated, do not purchase more than 5.\n'
'Supplier: don\'t forget to ask for an express delivery.'),
'module_purchase_double_validation': fields.boolean("Force two levels of approvals",
help='Provide a double validation mechanism for purchases exceeding minimum amount.\n'
'-This installs the module purchase_double_validation.'),
'module_purchase_requisition': fields.boolean("Manage calls for bids",
help="""Calls for bids are used when you want to generate requests for quotations to several suppliers for a given set of products.
You can configure per product if you directly do a Request for Quotation
to one supplier or if you want a Call for Bids to compare offers from several suppliers."""),
'group_advance_purchase_requisition': fields.boolean("Choose from several bids in a call for bids",
implied_group='purchase.group_advance_bidding',
help="""In the process of a public bidding, you can compare the bid lines and choose for each requested product from which bid you
buy which quantity"""),
'module_purchase_analytic_plans': fields.boolean('Use multiple analytic accounts on purchase orders',
help='Allows the user to maintain several analysis plans. These let you split lines on a purchase order between several accounts and analytic plans.\n'
'-This installs the module purchase_analytic_plans.'),
'group_analytic_account_for_purchases': fields.boolean('Analytic accounting for purchases',
implied_group='purchase.group_analytic_accounting',
help="Allows you to specify an analytic account on purchase orders."),
'module_stock_dropshipping': fields.boolean("Manage dropshipping",
help='\nCreates the dropship route and add more complex tests'
'-This installs the module stock_dropshipping.'),
}
_defaults = {
'default_invoice_method': 'order',
}
def onchange_purchase_analytic_plans(self, cr, uid, ids, module_purchase_analytic_plans, context=None):
""" change group_analytic_account_for_purchases following module_purchase_analytic_plans """
# Enabling analytic plans implies analytic accounting on purchases;
# disabling it leaves the group flag untouched.
if not module_purchase_analytic_plans:
return {}
return {'value': {'group_analytic_account_for_purchases': module_purchase_analytic_plans}}
class account_config_settings(osv.osv_memory):
"""Extends the accounting settings wizard with the purchase-analytic
fields, mirroring purchase_config_settings so the same toggles appear on
the accounting settings page as well."""
_inherit = 'account.config.settings'
_columns = {
'module_purchase_analytic_plans': fields.boolean('Use multiple analytic accounts on orders',
help='Allows the user to maintain several analysis plans. These let you split lines on a purchase order between several accounts and analytic plans.\n'
'-This installs the module purchase_analytic_plans.'),
'group_analytic_account_for_purchases': fields.boolean('Analytic accounting for purchases',
implied_group='purchase.group_analytic_accounting',
help="Allows you to specify an analytic account on purchase orders."),
}
def onchange_purchase_analytic_plans(self, cr, uid, ids, module_purchase_analytic_plans, context=None):
""" change group_analytic_account_for_purchases following module_purchase_analytic_plans """
# Same rule as in purchase_config_settings: plans imply the group.
if not module_purchase_analytic_plans:
return {}
return {'value': {'group_analytic_account_for_purchases': module_purchase_analytic_plans}}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
dato-code/tutorials | strata-sj-2016/time-series/interactive_plot.py | 2 | 2657 |
import matplotlib.pyplot as _plt
from matplotlib.widgets import Button
_plt.style.use('ggplot')
## Plot an interactive version.
class LineDrawer(object):
    """Interactive anomaly-threshold picker.

    Clicking one of the three score axes sets a horizontal threshold on
    that axis, highlights the rows whose score meets it on the count plot,
    and moves the vertical guide lines to the clicked x position.
    """

    def __init__(self, scores, guide_lines, threshold_lines):
        self.guide_lines = guide_lines
        self.threshold_lines = threshold_lines
        self.figure = self.guide_lines[0].figure
        self.scores = scores
        # Start with an empty anomaly set; red markers live on the first
        # (count) axes and are refreshed on every click.
        self.anoms = self.scores[:0]
        self.anom_plot = self.figure.axes[0].plot(
            self.anoms['time'], self.anoms['count'],
            color='red', lw=0, marker='o', markersize=10, alpha=0.7)

    def connect(self):
        """Start listening for mouse-press events on the figure canvas."""
        self.cid_press = self.figure.canvas.mpl_connect(
            'button_press_event', self.on_press)

    def disconnect(self):
        """Stop listening for mouse-press events."""
        self.figure.canvas.mpl_disconnect(self.cid_press)

    def on_press(self, event):
        """Update thresholds, anomalies and guides from a mouse press."""
        # Determine which of the three score axes was clicked, if any.
        clicked = None
        for idx in range(3):
            if event.inaxes == self.figure.axes[idx]:
                clicked = idx
                break
        if clicked is None:
            return

        # Raise the threshold on the clicked axes; flatten the other two.
        for idx in range(3):
            level = (event.ydata, event.ydata) if idx == clicked else (0., 0.)
            self.threshold_lines[idx].set_ydata(level)

        ## Print the anomalies from the selected horizontal threshold.
        score_col = self.scores.value_col_names[clicked]
        self.anoms = self.scores[self.scores[score_col] >= event.ydata]

        ## Replot the anomalies on the first axes.
        self.anom_plot[0].set_data((list(self.anoms['time']),
                                    list(self.anoms['count'])))

        ## Re-position the vertical guide lines and redraw the figure.
        for line in self.guide_lines:
            line.set_xdata((event.xdata, event.xdata))
        self.figure.canvas.draw()
| apache-2.0 |
mathjazz/pontoon | pontoon/base/tests/models/test_locale.py | 3 | 6906 | from unittest.mock import patch
import pytest
from pontoon.base.models import ProjectLocale
from pontoon.test.factories import (
EntityFactory,
LocaleFactory,
ResourceFactory,
SubpageFactory,
TranslatedResourceFactory,
)
@pytest.fixture
def locale_c():
    """Extra locale, distinct from the shared locale_a/locale_b fixtures."""
    return LocaleFactory(code="nv", name="Na'vi")
@pytest.mark.django_db
def test_locale_latest_activity_with_latest(translation_a):
    """With no project given, a locale that has a latest translation
    reports that translation's activity."""
    tr_locale = translation_a.locale
    assert tr_locale.get_latest_activity() == translation_a.latest_activity
@pytest.mark.django_db
def test_locale_latest_activity_without_latest(locale_b):
    """A locale without a latest translation (and no project given)
    reports no activity at all."""
    assert locale_b.get_latest_activity() is None
@pytest.mark.django_db
def test_locale_latest_activity_with_project(locale_a, project_a):
    """
    When a project is given, the call is delegated to
    ProjectLocale.get_latest_activity with (locale, project).
    """
    with patch.object(ProjectLocale, "get_latest_activity") as mock_latest:
        mock_latest.return_value = "latest"
        result = locale_a.get_latest_activity(project=project_a)
    assert result == "latest"
    assert mock_latest.call_args[0] == (locale_a, project_a)
@pytest.mark.django_db
def test_locale_translators_group(locale_a, locale_b, user_a):
    """
    Tests if user has permission to translate locales after assigment.
    """
    perm = "base.can_translate_locale"

    def check(can_a, can_b):
        # The global (object-less) permission is never granted.
        assert user_a.has_perm(perm) is False
        assert user_a.has_perm(perm, locale_a) is can_a
        assert user_a.has_perm(perm, locale_b) is can_b

    check(False, False)
    user_a.groups.add(locale_b.translators_group)
    check(False, True)
    user_a.groups.add(locale_a.translators_group)
    check(True, True)
@pytest.mark.django_db
def test_locale_managers_group(locale_a, locale_b, user_a):
    """
    Tests if user has permission to manage and translate locales after
    assigment.
    """
    perms = ("base.can_translate_locale", "base.can_manage_locale")

    def check(can_a, can_b):
        # Membership in a managers_group grants both translate and
        # manage permissions for that locale, but never the global perm.
        for perm in perms:
            assert user_a.has_perm(perm) is False
            assert user_a.has_perm(perm, locale_a) is can_a
            assert user_a.has_perm(perm, locale_b) is can_b

    check(False, False)
    user_a.groups.add(locale_b.managers_group)
    check(False, True)
    user_a.groups.add(locale_a.managers_group)
    check(True, True)
@pytest.mark.django_db
def test_locale_parts_stats_no_page_one_resource(locale_parts):
    """
    Return resource paths and stats if no subpage and one resource defined.
    """
    locale, _, entity = locale_parts
    details = locale.parts_stats(entity.resource.project)
    assert len(details) == 2
    first = details[0]
    assert first["title"] == entity.resource.path
    assert first["unreviewed_strings"] == 0
@pytest.mark.django_db
def test_locale_parts_stats_no_page_multiple_resources(locale_parts):
    """
    Return resource paths and stats for locales resources are available for.
    """
    first_locale, second_locale, entity = locale_parts
    project = entity.resource.project
    other_path = "/other/path.po"
    other_resource = ResourceFactory.create(
        total_strings=1, project=project, path=other_path,
    )
    EntityFactory.create(resource=other_resource, string="Entity Y")
    for locale in (first_locale, second_locale):
        TranslatedResourceFactory.create(resource=other_resource, locale=locale)

    # Results are sorted by title.
    details_first = first_locale.parts_stats(project)
    expected_titles = sorted([entity.resource.path, other_path])
    assert [item["title"] for item in details_first][:2] == expected_titles
    assert details_first[0]["unreviewed_strings"] == 0
    assert details_first[1]["unreviewed_strings"] == 0

    details_second = second_locale.parts_stats(project)
    assert len(details_second) == 2
    assert details_second[0]["title"] == other_path
    assert details_second[0]["unreviewed_strings"] == 0
@pytest.mark.django_db
def test_locale_parts_stats_pages_not_tied_to_resources(locale_parts):
    """
    Return subpage name and stats.
    """
    locale, _, entity = locale_parts
    project = entity.resource.project
    SubpageFactory.create(project=project, name="Subpage")
    top_entry = locale.parts_stats(project)[0]
    assert top_entry["title"] == "Subpage"
    assert top_entry["unreviewed_strings"] == 0
@pytest.mark.django_db
def test_locale_parts_stats_pages_tied_to_resources(locale_parts):
    """
    Return subpage name and stats for locales resources are available for.
    """
    locale_one, locale_two, entity = locale_parts
    project = entity.resource.project
    resource = ResourceFactory.create(project=project, path="/other/path.po",)
    EntityFactory.create(resource=resource, string="Entity X")
    for locale in (locale_one, locale_two):
        TranslatedResourceFactory.create(resource=resource, locale=locale)
    # Both subpages reference the same resource; creation order matters
    # only for titles, which parts_stats returns sorted by name.
    for page_name in ("Subpage", "Other Subpage"):
        page = SubpageFactory.create(project=project, name=page_name,)
        page.resources.add(resource)

    details_one = locale_one.parts_stats(project)
    details_two = locale_two.parts_stats(project)
    assert details_one[0]["title"] == "Other Subpage"
    assert details_one[0]["unreviewed_strings"] == 0
    assert details_one[1]["title"] == "Subpage"
    assert details_one[1]["unreviewed_strings"] == 0
    assert details_two[0]["title"] == "Other Subpage"
    assert details_two[0]["unreviewed_strings"] == 0
| bsd-3-clause |
infobloxopen/neutron | neutron/tests/functional/agent/test_ovs_flows.py | 2 | 4774 | # Copyright (c) 2015 Mirantis, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.cmd.sanity import checks
from neutron.plugins.openvswitch.agent import ovs_neutron_agent as ovsagt
from neutron.tests.common import machine_fixtures
from neutron.tests.common import net_helpers
from neutron.tests.functional.agent import test_ovs_lib
from neutron.tests.functional import base
class ARPSpoofTestCase(test_ovs_lib.OVSBridgeTestBase,
                       base.BaseSudoTestCase):
    """Functional tests for the OVS agent's ARP spoofing protection.

    Two namespaces are attached to the same OVS bridge; anti-spoofing
    flows are installed for chosen addresses and pings are used to
    verify which traffic is allowed through or blocked.
    """

    def setUp(self):
        # The anti-spoofing flows match on ARP header fields; skip the
        # whole class when the local OVS build lacks that capability.
        if not checks.arp_header_match_supported():
            self.skipTest("ARP header matching not supported")
        # NOTE(kevinbenton): it would be way cooler to use scapy for
        # these but scapy requires the python process to be running as
        # root to bind to the ports.
        super(ARPSpoofTestCase, self).setUp()
        self.src_addr = '192.168.0.1'
        self.dst_addr = '192.168.0.2'
        self.src_namespace = self.useFixture(
            net_helpers.NamespaceFixture()).name
        self.dst_namespace = self.useFixture(
            net_helpers.NamespaceFixture()).name
        # Pinger sends from the source namespace towards a target address.
        self.pinger = machine_fixtures.Pinger(
            self.src_namespace, max_attempts=2)
        self.src_p = self.useFixture(
            net_helpers.OVSPortFixture(self.br, self.src_namespace)).port
        self.dst_p = self.useFixture(
            net_helpers.OVSPortFixture(self.br, self.dst_namespace)).port
        # wait to add IPs until after anti-spoof rules to ensure ARP doesn't
        # happen before

    def test_arp_spoof_doesnt_block_normal_traffic(self):
        # Each port is protected for exactly the address it owns, so
        # ordinary traffic must still pass.
        self._setup_arp_spoof_for_port(self.src_p.name, [self.src_addr])
        self._setup_arp_spoof_for_port(self.dst_p.name, [self.dst_addr])
        self.src_p.addr.add('%s/24' % self.src_addr)
        self.dst_p.addr.add('%s/24' % self.dst_addr)
        self.pinger.assert_ping(self.dst_addr)

    def test_arp_spoof_doesnt_block_ipv6(self):
        # ARP is IPv4-only; the protection flows must not interfere with
        # IPv6 connectivity.
        self.src_addr = '2000::1'
        self.dst_addr = '2000::2'
        self._setup_arp_spoof_for_port(self.src_p.name, [self.src_addr])
        self._setup_arp_spoof_for_port(self.dst_p.name, [self.dst_addr])
        self.src_p.addr.add('%s/64' % self.src_addr)
        self.dst_p.addr.add('%s/64' % self.dst_addr)
        # IPv6 addresses seem to take longer to initialize
        self.pinger._max_attempts = 4
        self.pinger.assert_ping(self.dst_addr)

    def test_arp_spoof_blocks_response(self):
        # this will prevent the destination from responding to the ARP
        # request for it's own address
        self._setup_arp_spoof_for_port(self.dst_p.name, ['192.168.0.3'])
        self.src_p.addr.add('%s/24' % self.src_addr)
        self.dst_p.addr.add('%s/24' % self.dst_addr)
        self.pinger.assert_no_ping(self.dst_addr)

    def test_arp_spoof_allowed_address_pairs(self):
        # An address configured as an allowed address pair must pass even
        # though it is not the port's fixed IP.
        self._setup_arp_spoof_for_port(self.dst_p.name, ['192.168.0.3',
                                                         self.dst_addr])
        self.src_p.addr.add('%s/24' % self.src_addr)
        self.dst_p.addr.add('%s/24' % self.dst_addr)
        self.pinger.assert_ping(self.dst_addr)

    def test_arp_spoof_disable_port_security(self):
        # block first and then disable port security to make sure old rules
        # are cleared
        self._setup_arp_spoof_for_port(self.dst_p.name, ['192.168.0.3'])
        self._setup_arp_spoof_for_port(self.dst_p.name, ['192.168.0.3'],
                                       psec=False)
        self.src_p.addr.add('%s/24' % self.src_addr)
        self.dst_p.addr.add('%s/24' % self.dst_addr)
        self.pinger.assert_ping(self.dst_addr)

    def _setup_arp_spoof_for_port(self, port, addrs, psec=True):
        # Build a minimal stand-in for the agent's VifPort plus the port
        # details dict, then install the protection flows on the bridge.
        of_port_map = self.br.get_vif_port_to_ofport_map()

        class VifPort(object):
            ofport = of_port_map[port]
            port_name = port

        # The last address is used as the fixed IP; any preceding ones
        # become allowed address pairs. NOTE(review): pop() mutates the
        # caller's addrs list -- intentional given current call sites.
        ip_addr = addrs.pop()
        details = {'port_security_enabled': psec,
                   'fixed_ips': [{'ip_address': ip_addr}],
                   'allowed_address_pairs': [
                       dict(ip_address=ip) for ip in addrs]}
        ovsagt.OVSNeutronAgent.setup_arp_spoofing_protection(
            self.br, VifPort(), details)
| apache-2.0 |
apikler/VideoStore | site/userprofile/common.py | 1 | 10437 | import os
import tempfile
import Image
import subprocess
from django.shortcuts import render, redirect
from django.forms.formsets import formset_factory
from django.template import RequestContext, loader
from django.template.loader import render_to_string
from django.contrib import messages
from django.contrib.auth import login, logout, authenticate
from django.contrib.auth.models import User
from django.core.files import File
from django.core.urlresolvers import reverse
from django.conf import settings
from userprofile.forms import ImageUploadForm, ProducerProfileForm, ProducerProfileForm, ProducerPaymentForm, ForgotPasswordForm, ResetPasswordForm, ChangePasswordForm, ChangeEmailForm
from userprofile.models import Producer, UserImage, PasswordReset
import userprofile.utils
import videostore.utils
import payment.utils
def picture_upload_page(request, template, redirect_to):
    """Render and handle the multi-image upload form for the current user.

    GET: show up to ``max_pictures - existing`` upload fields.
    POST: validate the formset, strip EXIF data from each uploaded image,
    store the image plus a 200x200 JPEG thumbnail, then redirect.

    Fixes over the previous version: temporary files (including the
    ``_original`` backup exiftool leaves behind) are always removed, and
    all file handles are closed via ``with`` blocks instead of leaking.
    """
    numpics = userprofile.utils.get_images(request.user).count()

    # This should be a global setting...
    max_pictures = 10
    num_fields = min(max_pictures - numpics, 10)
    if num_fields <= 0:
        # User already has the maximum number of pictures; render the
        # page without an upload formset.
        return render(request, template, {})

    UploadFormSet = formset_factory(ImageUploadForm, extra=num_fields)
    if request.method == 'POST':
        formset = UploadFormSet(request.POST, request.FILES)
        if formset.is_valid():
            for form in formset:
                # Skip if there is no picture uploaded here
                try:
                    uploaded = form.cleaned_data['image']
                except KeyError:
                    continue

                # Make sure we haven't hit the max number of images yet
                if userprofile.utils.get_images(request.user).count() >= max_pictures:
                    break

                _save_user_image(request.user, uploaded)

            return redirect(redirect_to)
    else:
        formset = UploadFormSet()

    return render(request, template, {
        'formset': formset,
    })


def _save_user_image(user, uploaded_image):
    """Persist one uploaded image for *user*: strip EXIF metadata, then
    store the image and a 200x200 JPEG thumbnail on the UserImage model.
    Temporary files are removed even if saving fails."""
    # First create the model so it gets an id
    pic = UserImage(user=user)
    pic.save()

    # Write the upload to a temp location so exiftool can rewrite it.
    (name, ext) = os.path.splitext(uploaded_image.name)
    tempname = videostore.utils.randomstring() + ext
    temp_image_path = os.path.join(tempfile.gettempdir(), tempname)
    try:
        with open(temp_image_path, 'wb+') as destination:
            for chunk in uploaded_image.chunks():
                destination.write(chunk)

        # Strip all EXIF data in place (may contain e.g. GPS coordinates).
        with open(os.devnull, 'wb') as devnull:
            subprocess.call(['exiftool', '-all=', temp_image_path],
                            stdout=devnull, stderr=devnull)

        # Now save the actual image, which uses the id
        with open(temp_image_path, 'rb') as temp_image:
            pic.image.save(tempname, File(temp_image))
    finally:
        _remove_quietly(temp_image_path)
        # exiftool keeps a "<file>_original" backup; clean it up too.
        _remove_quietly(temp_image_path + '_original')

    # Create the thumbnail from the stored (EXIF-stripped) image.
    pil_image = Image.open(os.path.join(settings.MEDIA_ROOT, str(pic.image)))
    pil_image.thumbnail((200, 200), Image.ANTIALIAS)

    basename = os.path.basename(str(pic.image))
    (name, ext) = os.path.splitext(basename)
    thumbname = name + '.jpg'
    temp_thumb_path = os.path.join(tempfile.gettempdir(), thumbname)
    try:
        pil_image.save(temp_thumb_path, 'JPEG', quality=90)
        with open(temp_thumb_path, 'rb') as temp_thumb:
            pic.thumbnail.save(thumbname, File(temp_thumb))
    finally:
        _remove_quietly(temp_thumb_path)


def _remove_quietly(path):
    """Delete *path*, ignoring the error if it does not exist."""
    try:
        os.remove(path)
    except OSError:
        pass
def process_pictures(request, pics):
    """Delete every picture from *pics* whose id was checked in the
    submitted form (the 'delete' POST list)."""
    if 'delete' not in request.POST:
        return
    selected_ids = request.POST.getlist('delete')
    for picture in pics.filter(id__in=selected_ids):
        # Remove the stored files first, then the database row.
        picture.image.delete()
        picture.thumbnail.delete()
        picture.delete()
def process_producer_registration(request, response, form):
    """ Registers a producer based on the RegistrationForm argument.
    Assumes the form has already been validated, and is valid.
    """
    username = form.cleaned_data['username']
    password = form.cleaned_data['password1']
    email = form.cleaned_data['email']
    # Create the auth user and an (initially unapproved) producer profile.
    user = User.objects.create_user(username, email, password)
    user.save()
    p = Producer(
        user=user,
        approved=False,
    )
    p.save()

    # Log the new user in right away.
    user = authenticate(username=username, password=password)
    login(request, user)

    # If this producer was referred, set the referred_by field and give them
    # the referral bonus.
    referred_by = userprofile.utils.referring_user(request)
    if referred_by:
        p.referred_by = referred_by
        p.save()
        payment.utils.transaction(
            user=user,
            amount=settings.REFERRAL_PRODUCER_START_CREDIT,
            note="Referral bonus",
            triggered_by=referred_by,
        )
    # Delete the referral cookie, as it is no longer needed.
    response.delete_cookie('refcode')

    # Welcome email for producers
    videostore.utils.email(
        'Welcome',
        render_to_string('userprofile/welcome_email.txt', {
            'username': username,
            'uploadurl': settings.SITEURL + reverse('video:upload'),
            'sitename': settings.SITENAME,
        }),
        user.email,
    )
    # Alert to admin to approve the account.
    videostore.utils.email(
        'Account approval - %s' % username,
        render_to_string('userprofile/approval_needed_email.txt', {
            'username': username,
        }),
        settings.ADMINEMAIL,
    )

    # Session expires on browser close unless "remember me" was checked.
    if not form.cleaned_data['rememberme']:
        request.session.set_expiry(0)
def pictures_html(request, template_name):
    """Render the user's picture-management panel.

    Returns a dict with 'html' (rendered template) and 'error' (bool)
    for the ajax wrapper views.
    """
    had_error = False
    pics = UserImage.objects.filter(user=request.user).exclude(image='').exclude(thumbnail='')
    if request.method == 'POST':
        process_pictures(request=request, pics=pics)
        # process_pictures reports problems via the messages framework.
        if len(messages.get_messages(request)):
            had_error = True

    context = RequestContext(request, {
        'pics': pics,
        'ajaxurl': 'userprofile:ajax_pictures',
    })
    return {
        'error': had_error,
        'html': loader.get_template(template_name).render(context),
    }
def profile_info_html(request, template_name):
    """Render/process the producer profile form.

    Returns a dict with 'html' (rendered template) and 'error' (bool).
    """
    user = request.user
    producer = user.producer
    had_error = False
    initial = {
        'firstname': user.first_name,
        'lastname': user.last_name,
        'email': producer.email,
        'website': producer.website,
        'background': producer.background,
        'business': producer.business,
    }
    if request.method != 'POST':
        form = ProducerProfileForm(initial=initial)
    else:
        form = ProducerProfileForm(request.POST, initial=initial)
        if not form.is_valid():
            had_error = True
        else:
            data = form.cleaned_data
            user.first_name = data['firstname']
            user.last_name = data['lastname']
            user.save()
            producer.email = data['email']
            producer.website = data['website']
            producer.background = data['background']
            producer.business = data['business']
            producer.save()

    context = RequestContext(request, {
        'form': form,
        'ajaxurl': 'userprofile:ajax_info',
    })
    return {
        'html': loader.get_template(template_name).render(context),
        'error': had_error,
    }
def forgot_password_html(request, template_name):
    """Render/process the 'forgot password' form.

    On a valid POST, creates a PasswordReset row with a fresh unique key
    and emails the reset link to the user. Returns {'html', 'error'}.
    """
    had_error = False
    if request.method != 'POST':
        form = ForgotPasswordForm()
    else:
        form = ForgotPasswordForm(request.POST)
        if not form.is_valid():
            had_error = True
        else:
            user = User.objects.get(username=form.cleaned_data['username'])

            # Generate a key that is not already in use.
            key = videostore.utils.randomstring()
            while PasswordReset.objects.filter(keystring=key):
                key = videostore.utils.randomstring()
            PasswordReset(user=user, keystring=key).save()

            videostore.utils.email(
                'Password reset for %s' % user.username,
                render_to_string('userprofile/password_reset_email.txt', {
                    'username': user.username,
                    'sitename': settings.SITENAME,
                    'siteurl': settings.SITEURL,
                    'reseturl': settings.SITEURL + reverse('reset_password', kwargs={'key': key}),
                }),
                user.email,
            )

    context = RequestContext(request, {
        'form': form,
        'ajaxurl': 'ajax_forgot_password',
    })
    return {
        'html': loader.get_template(template_name).render(context),
        'error': had_error,
    }
def reset_password_html(request, template_name, key=''):
    """Render/process the password-reset form reached via an emailed key.

    Returns a dict with 'html' (rendered template) and 'error' (bool);
    the template also receives 'validkey' so it can show an error page
    for stale or unknown keys.
    """
    error = False
    validkey = True
    initial = {
        'key': key,
    }
    if request.method == 'POST':
        # On POST the key is round-tripped through a hidden form field.
        initial['key'] = request.POST.get('key')
        form = ResetPasswordForm(request.POST, initial=initial)
        if form.is_valid():
            # Change the password
            user = form.cleaned_data['user']
            user.set_password(form.cleaned_data['password1'])
            user.save()
            # Mark all keys for this user as inactive
            PasswordReset.objects.filter(user=user).update(active=False)
        else:
            error = True
    else:
        form = ResetPasswordForm(initial=initial)
        # NOTE(review): key validity appears to be re-checked only on GET
        # (the POST path relies on form validation) -- confirm this
        # matches the original intent.
        if not userprofile.utils.resetkey_to_user(key):
            validkey = False
    template = loader.get_template(template_name)
    context = RequestContext(request, {
        'form': form,
        'ajaxurl': 'ajax_reset_password',
        'validkey': validkey,
    })
    return {
        'html': template.render(context),
        'error': error,
    }
def change_password_html(request, template_name):
    """ Similar to the above reset_password_html except without all the key logic.
    """
    had_error = False
    if request.method != 'POST':
        form = ChangePasswordForm(request.user)
    else:
        form = ChangePasswordForm(request.user, request.POST)
        if form.is_valid():
            # Change the password
            request.user.set_password(form.cleaned_data['password1'])
            request.user.save()
        else:
            had_error = True

    context = RequestContext(request, {
        'form': form,
        'ajaxurl': 'userprofile:ajax_change_password',
    })
    return {
        'html': loader.get_template(template_name).render(context),
        'error': had_error,
    }
def change_email_html(request, template_name):
    """Render/process the change-email form; returns {'html', 'error'}."""
    had_error = False
    initial = {'email': request.user.email}
    if request.method != 'POST':
        form = ChangeEmailForm(initial=initial)
    else:
        form = ChangeEmailForm(request.POST, initial=initial)
        if form.is_valid():
            request.user.email = form.cleaned_data['email']
            request.user.save()
        else:
            had_error = True

    context = RequestContext(request, {
        'form': form,
        'ajaxurl': 'userprofile:ajax_change_email',
    })
    return {
        'html': loader.get_template(template_name).render(context),
        'error': had_error,
    }
def payment_page(request, template_name):
    """Render/process the producer's PayPal payment-details form.

    Returns a dict with 'html' (rendered template) and 'error' (bool).
    """
    had_error = False
    producer = request.user.producer
    initial = {'paypal': producer.paypal}
    if request.method != 'POST':
        form = ProducerPaymentForm(initial=initial)
    else:
        form = ProducerPaymentForm(request.POST, initial=initial)
        # Only approved producers may change their payment details.
        if form.is_valid() and producer.approved:
            producer.paypal = form.cleaned_data['paypal']
            producer.save()
        else:
            had_error = True

    context = RequestContext(request, {
        'form': form,
        'ajaxurl': 'userprofile:ajax_payment',
    })
    return {
        'html': loader.get_template(template_name).render(context),
        'error': had_error,
    }
| bsd-2-clause |
OmniLayer/omnicore | test/functional/test_framework/script_util.py | 12 | 1326 | #!/usr/bin/env python3
# Copyright (c) 2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Useful Script constants and utils."""
from test_framework.script import CScript
# To prevent a "tx-size-small" policy rule error, a transaction has to have a
# non-witness size of at least 82 bytes (MIN_STANDARD_TX_NONWITNESS_SIZE in
# src/policy/policy.h). Considering a Tx with the smallest possible single
# input (blank, empty scriptSig), and with an output omitting the scriptPubKey,
# we get to a minimum size of 60 bytes:
#
# Tx Skeleton: 4 [Version] + 1 [InCount] + 1 [OutCount] + 4 [LockTime] = 10 bytes
# Blank Input: 32 [PrevTxHash] + 4 [Index] + 1 [scriptSigLen] + 4 [SeqNo] = 41 bytes
# Output: 8 [Amount] + 1 [scriptPubKeyLen] = 9 bytes
#
# Hence, the scriptPubKey of the single output has to have a size of at
# least 22 bytes, which corresponds to the size of a P2WPKH scriptPubKey.
# The following script constant consists of a single push of 21 bytes of 'a':
# <PUSH_21> <21-bytes of 'a'>
# resulting in a 22-byte size. It should be used whenever (small) fake
# scriptPubKeys are needed, to guarantee that the minimum transaction size is
# met.
DUMMY_P2WPKH_SCRIPT = CScript([b'a' * 21])  # 1-byte push opcode + 21-byte payload = 22 bytes
| mit |
be-cloud-be/horizon-addons | partner-contact/base_location/__init__.py | 30 | 1135 | # -*- coding: utf-8 -*-
##############################################################################
#
# Author: Nicolas Bessi. Copyright Camptocamp SA
# Contributor: Pedro Manuel Baeza <pedro.baeza@serviciosbaeza.com>
# Ignacio Ibeas <ignacio@acysos.com>
# Alejandro Santana <alejandrosantana@anubia.es>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import models
| agpl-3.0 |
MobinRanjbar/hue | desktop/core/ext-py/Pygments-1.3.1/pygments/formatters/other.py | 75 | 3857 | # -*- coding: utf-8 -*-
"""
pygments.formatters.other
~~~~~~~~~~~~~~~~~~~~~~~~~
Other formatters: NullFormatter, RawTokenFormatter.
:copyright: Copyright 2006-2010 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.formatter import Formatter
from pygments.util import OptionError, get_choice_opt, b
from pygments.token import Token
from pygments.console import colorize
__all__ = ['NullFormatter', 'RawTokenFormatter']
class NullFormatter(Formatter):
    """
    Output the text unchanged without any formatting.
    """
    name = 'Text only'
    aliases = ['text', 'null']
    filenames = ['*.txt']

    def format(self, tokensource, outfile):
        # Token types are irrelevant here: only the text is written,
        # encoded first if an output encoding was configured.
        encoding = self.encoding
        if encoding:
            for _, text in tokensource:
                outfile.write(text.encode(encoding))
        else:
            for _, text in tokensource:
                outfile.write(text)
class RawTokenFormatter(Formatter):
    r"""
    Format tokens as a raw representation for storing token streams.

    The format is ``tokentype<TAB>repr(tokenstring)\n``. The output can later
    be converted to a token stream with the `RawTokenLexer`, described in the
    `lexer list <lexers.txt>`_.

    Only two options are accepted:

    `compress`
        If set to ``'gz'`` or ``'bz2'``, compress the output with the given
        compression algorithm after encoding (default: ``''``).
    `error_color`
        If set to a color name, highlight error tokens using that color.  If
        set but with no value, defaults to ``'red'``.

        *New in Pygments 0.11.*
    """
    name = 'Raw tokens'
    aliases = ['raw', 'tokens']
    filenames = ['*.raw']

    unicodeoutput = False

    def __init__(self, **options):
        Formatter.__init__(self, **options)
        # The raw format fixes its own encoding so the stream can be read
        # back deterministically by RawTokenLexer.
        if self.encoding:
            raise OptionError('the raw formatter does not support the '
                              'encoding option')
        self.encoding = 'ascii'  # let pygments.format() do the right thing
        self.compress = get_choice_opt(options, 'compress',
                                       ['', 'none', 'gz', 'bz2'], '')
        self.error_color = options.get('error_color', None)
        # ``error_color=True`` means the option was given without a value.
        if self.error_color is True:
            self.error_color = 'red'
        if self.error_color is not None:
            try:
                # Validate the color name early so a misconfiguration fails
                # at construction time rather than mid-formatting.
                colorize(self.error_color, '')
            except KeyError:
                raise ValueError("Invalid color %r specified" %
                                 self.error_color)

    def format(self, tokensource, outfile):
        """Write one ``tokentype<TAB>repr(value)`` line per token.

        *outfile* must be opened in binary mode; output is optionally
        gzip- or bz2-compressed according to the ``compress`` option.
        """
        try:
            outfile.write(b(''))
        except TypeError:
            raise TypeError('The raw tokens formatter needs a binary '
                            'output file')
        # Pick a (write, flush) pair according to the compression mode so
        # the emit loop below stays identical for all three cases.
        if self.compress == 'gz':
            import gzip
            outfile = gzip.GzipFile('', 'wb', 9, outfile)
            def write(text):
                outfile.write(text.encode())
            flush = outfile.flush
        elif self.compress == 'bz2':
            import bz2
            compressor = bz2.BZ2Compressor(9)
            def write(text):
                outfile.write(compressor.compress(text.encode()))
            def flush():
                outfile.write(compressor.flush())
                outfile.flush()
        else:
            def write(text):
                outfile.write(text.encode())
            flush = outfile.flush

        # (Removed two unused leftover locals, ``lasttype``/``lastval``,
        # that were never read.)
        if self.error_color:
            for ttype, value in tokensource:
                line = "%s\t%r\n" % (ttype, value)
                if ttype is Token.Error:
                    write(colorize(self.error_color, line))
                else:
                    write(line)
        else:
            for ttype, value in tokensource:
                write("%s\t%r\n" % (ttype, value))
        flush()
| apache-2.0 |
MalloyPower/parsing-python | front-end/testsuite-python-lib/Python-3.6.0/Lib/runpy.py | 21 | 11959 | """runpy.py - locating and running Python code using the module namespace
Provides support for locating and running Python scripts using the Python
module namespace instead of the native filesystem.
This allows Python code to play nicely with non-filesystem based PEP 302
importers when locating support scripts as well as when importing modules.
"""
# Written by Nick Coghlan <ncoghlan at gmail.com>
# to implement PEP 338 (Executing Modules as Scripts)
import sys
import importlib.machinery # importlib first so we can test #15386 via -m
import importlib.util
import types
from pkgutil import read_code, get_importer
__all__ = [
"run_module", "run_path",
]
class _TempModule(object):
    """Temporarily replace a module in sys.modules with an empty namespace"""

    def __init__(self, mod_name):
        self.mod_name = mod_name
        self.module = types.ModuleType(mod_name)
        self._saved_module = []

    def __enter__(self):
        name = self.mod_name
        if name in sys.modules:
            # Remember the real module so __exit__ can put it back.
            self._saved_module.append(sys.modules[name])
        sys.modules[name] = self.module
        return self

    def __exit__(self, *args):
        if self._saved_module:
            sys.modules[self.mod_name] = self._saved_module.pop()
        else:
            del sys.modules[self.mod_name]
class _ModifiedArgv0(object):
    """Temporarily swap sys.argv[0] for a given value (not re-entrant)."""

    def __init__(self, value):
        self.value = value
        # Per-instance sentinel marks "nothing saved yet".
        self._saved_value = self._sentinel = object()

    def __enter__(self):
        if self._saved_value is not self._sentinel:
            raise RuntimeError("Already preserving saved value")
        self._saved_value = sys.argv[0]
        sys.argv[0] = self.value

    def __exit__(self, *args):
        self.value = self._sentinel
        sys.argv[0] = self._saved_value
# TODO: Replace these helpers with importlib._bootstrap_external functions.
def _run_code(code, run_globals, init_globals=None,
              mod_name=None, mod_spec=None,
              pkg_name=None, script_name=None):
    """Helper to run code in nominated namespace"""
    if init_globals is not None:
        run_globals.update(init_globals)
    if mod_spec is None:
        loader, fname, cached = None, script_name, None
    else:
        loader = mod_spec.loader
        fname = mod_spec.origin
        cached = mod_spec.cached
        if pkg_name is None:
            pkg_name = mod_spec.parent
    # Install the standard module-level dunder attributes before execution.
    special_globals = {
        '__name__': mod_name,
        '__file__': fname,
        '__cached__': cached,
        '__doc__': None,
        '__loader__': loader,
        '__package__': pkg_name,
        '__spec__': mod_spec,
    }
    run_globals.update(special_globals)
    exec(code, run_globals)
    return run_globals
def _run_module_code(code, init_globals=None,
                     mod_name=None, mod_spec=None,
                     pkg_name=None, script_name=None):
    """Helper to run code in new namespace with sys modified"""
    if mod_spec is None:
        fname = script_name
    else:
        fname = mod_spec.origin
    with _TempModule(mod_name) as temp_mod, _ModifiedArgv0(fname):
        mod_globals = temp_mod.module.__dict__
        _run_code(code, mod_globals, init_globals,
                  mod_name, mod_spec, pkg_name, script_name)
    # Return a copy of the globals: the temporary module may be cleared
    # once it goes away, but the copy stays valid for the caller.
    return mod_globals.copy()
# Helper to get the full name, spec and code for a module
def _get_module_details(mod_name, error=ImportError):
    """Return ``(mod_name, spec, code)`` for *mod_name*.

    Raises *error* (default ImportError) for relative names, missing
    modules, namespace packages, and modules without retrievable code.
    A package name is resolved to its ``__main__`` submodule.
    """
    if mod_name.startswith("."):
        raise error("Relative module names not supported")
    pkg_name, _, _ = mod_name.rpartition(".")
    if pkg_name:
        # Try importing the parent to avoid catching initialization errors
        try:
            __import__(pkg_name)
        except ImportError as e:
            # If the parent or higher ancestor package is missing, let the
            # error be raised by find_spec() below and then be caught. But do
            # not allow other errors to be caught.
            if e.name is None or (e.name != pkg_name and
                    not pkg_name.startswith(e.name + ".")):
                raise
        # Warn if the module has already been imported under its normal name
        existing = sys.modules.get(mod_name)
        if existing is not None and not hasattr(existing, "__path__"):
            from warnings import warn
            msg = "{mod_name!r} found in sys.modules after import of " \
                "package {pkg_name!r}, but prior to execution of " \
                "{mod_name!r}; this may result in unpredictable " \
                "behaviour".format(mod_name=mod_name, pkg_name=pkg_name)
            warn(RuntimeWarning(msg))

    try:
        spec = importlib.util.find_spec(mod_name)
    except (ImportError, AttributeError, TypeError, ValueError) as ex:
        # This hack fixes an impedance mismatch between pkgutil and
        # importlib, where the latter raises other errors for cases where
        # pkgutil previously raised ImportError
        msg = "Error while finding module specification for {!r} ({}: {})"
        raise error(msg.format(mod_name, type(ex).__name__, ex)) from ex
    if spec is None:
        raise error("No module named %s" % mod_name)
    if spec.submodule_search_locations is not None:
        # mod_name is a package: execute its __main__ submodule instead.
        if mod_name == "__main__" or mod_name.endswith(".__main__"):
            raise error("Cannot use package as __main__ module")
        try:
            pkg_main_name = mod_name + ".__main__"
            return _get_module_details(pkg_main_name, error)
        except error as e:
            if mod_name not in sys.modules:
                raise  # No module loaded; being a package is irrelevant
            raise error(("%s; %r is a package and cannot " +
                         "be directly executed") % (e, mod_name))
    loader = spec.loader
    if loader is None:
        raise error("%r is a namespace package and cannot be executed"
                    % mod_name)
    try:
        code = loader.get_code(mod_name)
    except ImportError as e:
        raise error(format(e)) from e
    if code is None:
        raise error("No code object available for %s" % mod_name)
    return mod_name, spec, code
class _Error(Exception):
    """Error that _run_module_as_main() should report without a traceback"""
    # Raised for user-facing failures (e.g. a bad module name with -m);
    # caught in _run_module_as_main and converted into sys.exit(msg).
    # XXX ncoghlan: Should this be documented and made public?
    # (Current thoughts: don't repeat the mistake that lead to its
    #  creation when run_module() no longer met the needs of
    #  mainmodule.c, but couldn't be changed because it was public)
def _run_module_as_main(mod_name, alter_argv=True):
    """Runs the designated module in the __main__ namespace

    Note that the executed module will have full access to the
    __main__ namespace. If this is not desirable, the run_module()
    function should be used to run the module code in a fresh namespace.

    At the very least, these variables in __main__ will be overwritten:
        __name__
        __file__
        __cached__
        __loader__
        __package__
    """
    try:
        if alter_argv or mod_name != "__main__":  # i.e. -m switch
            mod_name, mod_spec, code = _get_module_details(mod_name, _Error)
        else:  # i.e. directory or zipfile execution
            mod_name, mod_spec, code = _get_main_module_details(_Error)
    except _Error as exc:
        # User-facing failure: report it without a traceback.
        msg = "%s: %s" % (sys.executable, exc)
        sys.exit(msg)
    # Execute directly in the existing __main__ namespace (not a copy).
    main_globals = sys.modules["__main__"].__dict__
    if alter_argv:
        sys.argv[0] = mod_spec.origin
    return _run_code(code, main_globals, None,
                     "__main__", mod_spec)
def run_module(mod_name, init_globals=None,
               run_name=None, alter_sys=False):
    """Execute a module's code without importing it

    Returns the resulting top level namespace dictionary
    """
    mod_name, mod_spec, code = _get_module_details(mod_name)
    run_name = mod_name if run_name is None else run_name
    if not alter_sys:
        # Leave the sys module alone
        return _run_code(code, {}, init_globals, run_name, mod_spec)
    return _run_module_code(code, init_globals, run_name, mod_spec)
def _get_main_module_details(error=ImportError):
    """Locate and return module details for a top-level __main__.

    Gives a nicer error message when attempting to execute a zipfile or
    directory by invoking __main__.py.  The standard __main__ module is
    temporarily removed from sys.modules so that its preexisting
    __loader__ entry cannot interfere with the lookup; it is always
    restored before returning.
    """
    main_name = "__main__"
    preserved = sys.modules.pop(main_name)
    try:
        return _get_module_details(main_name)
    except ImportError as exc:
        if main_name not in str(exc):
            raise
        raise error("can't find %r module in %r" %
                    (main_name, sys.path[0])) from exc
    finally:
        sys.modules[main_name] = preserved
def _get_code_from_file(run_name, fname):
# Check for a compiled file first
with open(fname, "rb") as f:
code = read_code(f)
if code is None:
# That didn't work, so try it as normal source code
with open(fname, "rb") as f:
code = compile(f.read(), fname, 'exec')
return code, fname
def run_path(path_name, init_globals=None, run_name=None):
    """Execute code located at the specified filesystem location
    Returns the resulting top level namespace dictionary
    The file path may refer directly to a Python script (i.e.
    one that could be directly executed with execfile) or else
    it may refer to a zipfile or directory containing a top
    level __main__.py script.
    """
    if run_name is None:
        run_name = "<run_path>"
    pkg_name = run_name.rpartition(".")[0]
    importer = get_importer(path_name)
    # Trying to avoid importing imp so as to not consume the deprecation warning.
    is_NullImporter = False
    if type(importer).__module__ == 'imp':
        if type(importer).__name__ == 'NullImporter':
            is_NullImporter = True
    if isinstance(importer, type(None)) or is_NullImporter:
        # Not a valid sys.path entry, so run the code directly
        # execfile() doesn't help as we want to allow compiled files
        code, fname = _get_code_from_file(run_name, path_name)
        return _run_module_code(code, init_globals, run_name,
                                pkg_name=pkg_name, script_name=fname)
    else:
        # Finder is defined for path, so add it to
        # the start of sys.path
        sys.path.insert(0, path_name)
        try:
            # Here's where things are a little different from the run_module
            # case. There, we only had to replace the module in sys while the
            # code was running and doing so was somewhat optional. Here, we
            # have no choice and we have to remove it even while we read the
            # code. If we don't do this, a __loader__ attribute in the
            # existing __main__ module may prevent location of the new module.
            mod_name, mod_spec, code = _get_main_module_details()
            # Run under a temporary module object and a patched sys.argv[0];
            # copy() the namespace because the temp module is torn down on exit.
            with _TempModule(run_name) as temp_module, \
                    _ModifiedArgv0(path_name):
                mod_globals = temp_module.module.__dict__
                return _run_code(code, mod_globals, init_globals,
                                 run_name, mod_spec, pkg_name).copy()
        finally:
            try:
                # Undo the sys.path insertion; the executed code may already
                # have removed the entry itself, hence the ValueError guard.
                sys.path.remove(path_name)
            except ValueError:
                pass
if __name__ == "__main__":
    # Run the module specified as the next command line argument
    if len(sys.argv) < 2:
        print("No module specified for execution", file=sys.stderr)
    else:
        del sys.argv[0]  # Make the requested module sys.argv[0]
        _run_module_as_main(sys.argv[0])
| mit |
kartoza/geonode | geonode/qgis_server/tests/test_views.py | 2 | 27300 | # -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2016 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
import StringIO
import json
import os
import urlparse
import zipfile
from imghdr import what
import requests
from lxml import etree
import gisdata
from django.conf import settings
from django.contrib.staticfiles.templatetags import staticfiles
from django.core.management import call_command
from django.core.urlresolvers import reverse
from django.test import LiveServerTestCase, TestCase
from geonode import qgis_server
from geonode.decorators import on_ogc_backend
from geonode.layers.utils import file_upload
from geonode.maps.models import Map
from geonode.qgis_server.helpers import wms_get_capabilities_url, style_list
class DefaultViewsTest(TestCase):
    """Checks the template context variables injected by qgis_server."""

    def setUp(self):
        call_command('loaddata', 'people_data', verbosity=0)

    @on_ogc_backend(qgis_server.BACKEND_PACKAGE)
    def test_default_context(self):
        """Test default context provided by qgis_server."""
        response = self.client.get('/')
        context = response.context
        # Necessary context keys to ensure compatibility with views.
        # Some views need these to drive their javascript logic.
        expected_keys = (
            'UPLOADER_URL',
            'MAPFISH_PRINT_ENABLED',
            'PRINT_NG_ENABLED',
            'GEONODE_SECURITY_ENABLED',
            'GEOGIG_ENABLED',
            'TIME_ENABLED',
            'MOSAIC_ENABLED',
        )
        for key in expected_keys:
            self.assertIn(key, context)
class QGISServerViewsTest(LiveServerTestCase):
    """End-to-end exercise of the qgis_server layer and map views.

    Uploads real sample data from ``gisdata`` and drives the HTTP views
    against the live test server.
    """

    def setUp(self):
        call_command('loaddata', 'people_data', verbosity=0)

    @on_ogc_backend(qgis_server.BACKEND_PACKAGE)
    def test_ogc_specific_layer(self):
        """Test we can use QGIS Server API for a layer.
        For now, we are just checking we can call these views without any
        exceptions. We should improve this test by checking the result.
        """
        filename = os.path.join(gisdata.GOOD_DATA, 'raster/test_grid.tif')
        uploaded = file_upload(filename)
        filename = os.path.join(
            gisdata.GOOD_DATA,
            'vector/san_andres_y_providencia_administrative.shp')
        vector_layer = file_upload(filename)
        params = {'layername': uploaded.name}
        # Zip
        response = self.client.get(
            reverse('qgis_server:download-zip', kwargs=params))
        self.assertEqual(response.status_code, 200)
        # Bind both objects *before* entering the try block: previously they
        # were created inside it, so a failure in ZipFile() made the finally
        # clause raise NameError on the unbound names, masking the real error.
        f = StringIO.StringIO(response.content)
        zipped_file = zipfile.ZipFile(f, 'r')
        try:
            for one_file in zipped_file.namelist():
                # We shouldn't get any QGIS project
                self.assertFalse(one_file.endswith('.qgs'))
            self.assertIsNone(zipped_file.testzip())
        finally:
            zipped_file.close()
            f.close()
        # Legend
        response = self.client.get(
            reverse('qgis_server:legend', kwargs=params))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.get('Content-Type'), 'image/png')
        self.assertEqual(what('', h=response.content), 'png')
        # Tile
        coordinates = {'z': '11', 'x': '1576', 'y': '1054'}
        coordinates.update(params)
        response = self.client.get(
            reverse('qgis_server:tile', kwargs=coordinates))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.get('Content-Type'), 'image/png')
        self.assertEqual(what('', h=response.content), 'png')
        # Tile 404
        response = self.client.get(
            reverse('qgis_server:tile', kwargs=params))
        self.assertEqual(response.status_code, 404)
        self.assertEqual(
            response.get('Content-Type'), 'text/html; charset=utf-8')
        # Geotiff
        response = self.client.get(
            reverse('qgis_server:geotiff', kwargs=params))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.get('Content-Type'), 'image/tiff')
        self.assertEqual(what('', h=response.content), 'tiff')
        # Layer is already on the database
        # checking the Link
        links = uploaded.link_set.download().filter(
            name__in=settings.DOWNLOAD_FORMATS_RASTER)
        # checks signals.py for the hardcoded names in QLR and QGS
        qlr_link = links.get(name='QGIS layer file (.qlr)')
        self.assertIn("download-qlr", qlr_link.url)
        qgs_link = links.get(name='QGIS project file (.qgs)')
        self.assertIn("download-qgs", qgs_link.url)
        # QLR
        response = self.client.get(
            reverse('qgis_server:download-qlr', kwargs=params))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(
            response.get('Content-Type'),
            'application/x-qgis-layer-definition')
        # check file name's extension
        file_name = response.get('Content-Disposition').split('filename=')
        file_ext = file_name[1].split('.')
        self.assertEqual(file_ext[1], "qlr")
        # QGS
        response = self.client.get(
            reverse('qgis_server:download-qgs', kwargs=params))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(
            response.get('Content-Type'),
            'application/x-qgis-project')
        # check file name's extension
        file_name = response.get('Content-Disposition').split('filename=')
        file_ext = file_name[1].split('.')
        self.assertEqual(file_ext[1], "qgs")
        # Geotiff view must 404 on a vector layer
        response = self.client.get(
            reverse('qgis_server:geotiff', kwargs={
                'layername': vector_layer.name
            }))
        self.assertEqual(response.status_code, 404)
        # QML Styles
        # Request list of styles
        response = self.client.get(
            reverse('qgis_server:download-qml', kwargs=params))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.get('Content-Type'), 'application/json')
        # Should return a default style list
        actual_result = json.loads(response.content)
        actual_result = [s['name'] for s in actual_result]
        expected_result = ['default']
        self.assertEqual(set(expected_result), set(actual_result))
        # Get single styles
        response = self.client.get(
            reverse('qgis_server:download-qml', kwargs={
                'layername': params['layername'],
                'style_name': 'default'
            }))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.get('Content-Type'), 'text/xml')
        # Set thumbnail from viewed bbox; GET without a bbox is a bad request
        response = self.client.get(
            reverse('qgis_server:set-thumbnail', kwargs=params))
        self.assertEqual(response.status_code, 400)
        data = {
            'bbox': '-5.54025,96.9406,-5.2820,97.1250'
        }
        response = self.client.post(
            reverse('qgis_server:set-thumbnail', kwargs=params),
            data=data)
        # Anonymous user doesn't have permission
        self.assertEqual(response.status_code, 403)
        # Should log in
        self.client.login(username='admin', password='admin')
        response = self.client.post(
            reverse('qgis_server:set-thumbnail', kwargs=params),
            data=data)
        self.assertEqual(response.status_code, 200)
        retval = json.loads(response.content)
        expected_retval = {
            'success': True
        }
        self.assertEqual(retval, expected_retval)
        # OGC Server specific for THE layer
        query_string = {
            'SERVICE': 'WMS',
            'VERSION': '1.3.0',
            'REQUEST': 'GetLegendGraphics',
            'FORMAT': 'image/png',
            'LAYERS': uploaded.name,
        }
        response = self.client.get(
            reverse('qgis_server:layer-request', kwargs=params), query_string)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.get('Content-Type'), 'image/png')
        self.assertEqual(what('', h=response.content), 'png')
        # OGC Server for the Geonode instance
        # GetLegendGraphics is a shortcut when using the main OGC server.
        query_string = {
            'SERVICE': 'WMS',
            'VERSION': '1.3.0',
            'REQUEST': 'GetLegendGraphics',
            'FORMAT': 'image/png',
            'LAYERS': uploaded.name,
        }
        response = self.client.get(
            reverse('qgis_server:request'), query_string)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.get('Content-Type'), 'image/png')
        self.assertEqual(what('', h=response.content), 'png')
        # WMS GetCapabilities
        query_string = {
            'SERVICE': 'WMS',
            'VERSION': '1.3.0',
            'REQUEST': 'GetCapabilities'
        }
        response = self.client.get(
            reverse('qgis_server:request'), query_string)
        self.assertEqual(response.status_code, 200, response.content)
        self.assertEqual(
            response.content, 'GetCapabilities is not supported yet.')
        query_string['LAYERS'] = uploaded.name
        response = self.client.get(
            reverse('qgis_server:request'), query_string)
        get_capabilities_content = response.content
        # Check xml content
        self.assertEqual(response.status_code, 200, response.content)
        root = etree.fromstring(response.content)
        layer_xml = root.xpath(
            'wms:Capability/wms:Layer/wms:Layer/wms:Name',
            namespaces={'wms': 'http://www.opengis.net/wms'})
        self.assertEqual(len(layer_xml), 1)
        self.assertEqual(layer_xml[0].text, uploaded.name)
        # GetLegendGraphic request returned must be valid
        layer_xml = root.xpath(
            'wms:Capability/wms:Layer/'
            'wms:Layer/wms:Style/wms:LegendURL/wms:OnlineResource',
            namespaces={
                'xlink': 'http://www.w3.org/1999/xlink',
                'wms': 'http://www.opengis.net/wms'
            })
        legend_url = layer_xml[0].attrib[
            '{http://www.w3.org/1999/xlink}href']
        response = self.client.get(legend_url)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.get('Content-Type'), 'image/png')
        self.assertEqual(what('', h=response.content), 'png')
        # Check get capabilities using helper returns the same thing
        response = requests.get(wms_get_capabilities_url(
            uploaded, internal=False))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(get_capabilities_content, response.content)
        # WMS GetMap
        query_string = {
            'SERVICE': 'WMS',
            'VERSION': '1.3.0',
            'REQUEST': 'GetMap',
            'FORMAT': 'image/png',
            'LAYERS': uploaded.name,
            'HEIGHT': 250,
            'WIDTH': 250,
            'SRS': 'EPSG:4326',
            'BBOX': '-5.54025,96.9406,-5.2820,97.1250',
        }
        response = self.client.get(
            reverse('qgis_server:request'), query_string)
        self.assertEqual(response.status_code, 200, response.content)
        self.assertEqual(
            response.get('Content-Type'), 'image/png', response.content)
        self.assertEqual(what('', h=response.content), 'png')
        # End of the test, we should remove every files related to the test.
        uploaded.delete()
        vector_layer.delete()

    @on_ogc_backend(qgis_server.BACKEND_PACKAGE)
    def test_download_map_qlr(self):
        """Test download QLR file for a map"""
        # 2 layers to be added to the map
        filename = os.path.join(
            gisdata.GOOD_DATA, 'raster/relief_san_andres.tif')
        layer1 = file_upload(filename)
        filename = os.path.join(
            gisdata.GOOD_DATA,
            'vector/san_andres_y_providencia_administrative.shp')
        layer2 = file_upload(filename)
        # construct json request for new map
        json_payload = InitialSetup.generate_initial_map(layer1, layer2)
        self.client.login(username='admin', password='admin')
        response = self.client.post(
            reverse('new_map_json'),
            data=json.dumps(json_payload),
            content_type='application/json')
        # map is successfully saved
        self.assertEqual(response.status_code, 200)
        map_id = json.loads(response.content).get('id')
        map = Map.objects.get(id=map_id)
        # check that QLR is added to the links
        links = map.link_set.download()
        map_qlr_link = links.get(name='Download QLR Layer file')
        self.assertIn('qlr', map_qlr_link.url)
        # QLR
        response = self.client.get(
            reverse('map_download_qlr', kwargs={'mapid': map_id}))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(
            response.get('Content-Type'),
            'application/x-qgis-layer-definition')
        # cleanup
        map.delete()
        layer1.delete()
        layer2.delete()

    @on_ogc_backend(qgis_server.BACKEND_PACKAGE)
    def test_map_json(self):
        """Updating a map via map_json removes layers as requested."""
        # 2 layers to be added to the map
        filename = os.path.join(
            gisdata.GOOD_DATA, 'raster/relief_san_andres.tif')
        layer1 = file_upload(filename)
        filename = os.path.join(
            gisdata.GOOD_DATA,
            'vector/san_andres_y_providencia_administrative.shp')
        layer2 = file_upload(filename)
        json_payload = InitialSetup.generate_initial_map(layer1, layer2)
        # First, create a map with two layers
        # Need to log in for saving a map
        self.client.login(username='admin', password='admin')
        result_new_map = self.client.post(
            reverse('new_map_json'),
            json.dumps(json_payload),
            content_type='application/json')
        # the new map is successfully saved
        self.assertEqual(result_new_map.status_code, 200)
        map_id = json.loads(result_new_map.content).get('id')
        # try to remove one layer
        layers = json_payload['map']['layers']
        before_remove = len(layers)
        after_remove = before_remove - 1
        layer = layers[0]
        layers.remove(layer)
        # check if the layer is eliminated from the map
        result_update_map = self.client.post(
            reverse('map_json', kwargs={'mapid': map_id}),
            data=json.dumps(json_payload),
            content_type='application/json')
        # successfully updated
        self.assertEqual(result_update_map.status_code, 200)
        # the number of layers on the map decrease by 1
        self.assertEqual(
            len(result_update_map.context_data['map'].layers),
            after_remove)
        # clean up
        map = Map.objects.get(id=map_id)
        map.delete()
        layer1.delete()
        layer2.delete()
class QGISServerStyleManagerTest(LiveServerTestCase):
    """Exercises the QML style management views (list/add/delete/default)."""

    def setUp(self):
        call_command('loaddata', 'people_data', verbosity=0)
    def data_path(self, path):
        # Resolve a path relative to the test-data directory of this app.
        project_root = os.path.abspath(settings.PROJECT_ROOT)
        return os.path.join(
            project_root, 'qgis_server/tests/data', path)
    @on_ogc_backend(qgis_server.BACKEND_PACKAGE)
    def test_list_style(self):
        """Test querying list of styles from QGIS Server."""
        filename = os.path.join(gisdata.GOOD_DATA, 'raster/test_grid.tif')
        layer = file_upload(filename)
        """:type: geonode.layers.models.Layer"""
        actual_list_style = style_list(layer, internal=False)
        expected_list_style = ['default']
        # There will be a default style
        if actual_list_style:
            self.assertEqual(
                set(expected_list_style),
                set([style.name for style in actual_list_style]))
        style_list_url = reverse(
            'qgis_server:download-qml',
            kwargs={
                'layername': layer.name
            })
        response = self.client.get(style_list_url)
        self.assertEqual(response.status_code, 200)
        actual_list_style = json.loads(response.content)
        # There will be a default style
        self.assertEqual(
            set(expected_list_style),
            set([style['name'] for style in actual_list_style]))
        layer.delete()
    @on_ogc_backend(qgis_server.BACKEND_PACKAGE)
    def test_add_delete_style(self):
        """Test add new style using qgis_server views."""
        filename = os.path.join(gisdata.GOOD_DATA, 'raster/test_grid.tif')
        layer = file_upload(filename)
        """:type: geonode.layers.models.Layer"""
        self.client.login(username='admin', password='admin')
        qml_path = self.data_path('test_grid.qml')
        add_style_url = reverse(
            'qgis_server:upload-qml',
            kwargs={
                'layername': layer.name})
        with open(qml_path) as file_handle:
            form_data = {
                'name': 'new_style',
                'title': 'New Style',
                'qml': file_handle
            }
            response = self.client.post(
                add_style_url,
                data=form_data)
        # 201: style resource created
        self.assertEqual(response.status_code, 201)
        actual_list_style = style_list(layer, internal=False)
        if actual_list_style:
            expected_list_style = ['default', 'new_style']
            self.assertEqual(
                set(expected_list_style),
                set([style.name for style in actual_list_style]))
        # Test delete request
        delete_style_url = reverse(
            'qgis_server:remove-qml',
            kwargs={
                'layername': layer.name,
                'style_name': 'default'})
        response = self.client.delete(delete_style_url)
        self.assertEqual(response.status_code, 200)
        actual_list_style = style_list(layer, internal=False)
        if actual_list_style:
            expected_list_style = ['new_style']
            self.assertEqual(
                set(expected_list_style),
                set([style.name for style in actual_list_style]))
        # Check new default: after deleting 'default', the remaining style
        # should have been promoted.
        default_style_url = reverse(
            'qgis_server:default-qml',
            kwargs={
                'layername': layer.name})
        response = self.client.get(default_style_url)
        self.assertEqual(response.status_code, 200)
        expected_default_style_retval = {
            'name': 'new_style',
        }
        actual_default_style_retval = json.loads(response.content)
        # Python 2 file: dict.iteritems() is intentional here.
        for key, value in expected_default_style_retval.iteritems():
            self.assertEqual(actual_default_style_retval[key], value)
        layer.delete()
class ThumbnailGenerationTest(LiveServerTestCase):
    """Checks that thumbnail links are created for uploaded layers and maps."""

    def setUp(self):
        call_command('loaddata', 'people_data', verbosity=0)
    @on_ogc_backend(qgis_server.BACKEND_PACKAGE)
    def test_thumbnail_links(self):
        """Test that thumbnail links were created after upload."""
        filename = os.path.join(gisdata.GOOD_DATA, 'raster/test_grid.tif')
        layer = file_upload(filename)
        """:type: geonode.layers.models.Layer"""
        # check that we have remote thumbnail
        remote_thumbnail_link = layer.link_set.get(
            name__icontains='remote thumbnail')
        self.assertTrue(remote_thumbnail_link.url)
        # thumbnail won't generate because remote thumbnail uses public
        # address
        remote_thumbnail_url = remote_thumbnail_link.url
        # Replace url's basename, we want to access it using django client
        parse_result = urlparse.urlsplit(remote_thumbnail_url)
        remote_thumbnail_url = urlparse.urlunsplit(
            ('', '', parse_result.path, parse_result.query, ''))
        response = self.client.get(remote_thumbnail_url)
        thumbnail_dir = os.path.join(settings.MEDIA_ROOT, 'thumbs')
        thumbnail_path = os.path.join(thumbnail_dir, 'layer-thumb.png')
        layer.save_thumbnail(thumbnail_path, response.content)
        # Check thumbnail created
        self.assertTrue(os.path.exists(thumbnail_path))
        self.assertEqual(what(thumbnail_path), 'png')
        # Check that now we have thumbnail
        self.assertTrue(layer.has_thumbnail())
        missing_thumbnail_url = staticfiles.static(settings.MISSING_THUMBNAIL)
        self.assertTrue(layer.get_thumbnail_url() != missing_thumbnail_url)
        thumbnail_links = layer.link_set.filter(name__icontains='thumbnail')
        self.assertTrue(len(thumbnail_links) > 0)
        link_names = ['remote thumbnail', 'thumbnail']
        for link in thumbnail_links:
            self.assertIn(link.name.lower(), link_names)
        # cleanup
        layer.delete()
    @on_ogc_backend(qgis_server.BACKEND_PACKAGE)
    def test_map_thumbnail(self):
        """Creating map will create thumbnail."""
        filename = os.path.join(
            gisdata.GOOD_DATA, 'raster/relief_san_andres.tif')
        layer1 = file_upload(filename)
        filename = os.path.join(
            gisdata.GOOD_DATA,
            'vector/san_andres_y_providencia_administrative.shp')
        layer2 = file_upload(filename)
        """:type: geonode.layers.models.Layer"""
        # construct json request for new map
        json_payload = InitialSetup.generate_initial_map(layer1, layer2)
        self.client.login(username='admin', password='admin')
        response = self.client.post(
            reverse('new_map_json'),
            json.dumps(json_payload),
            content_type='application/json')
        self.assertEqual(response.status_code, 200)
        map_id = json.loads(response.content).get('id')
        map = Map.objects.get(id=map_id)
        # check that we have remote thumbnail
        remote_thumbnail_link = map.link_set.filter(
            name__icontains='remote thumbnail').first()
        self.assertTrue(remote_thumbnail_link.url)
        # thumbnail won't generate because remote thumbnail uses public
        # address
        remote_thumbnail_url = remote_thumbnail_link.url
        # Replace url's basename, we want to access it using django client
        parse_result = urlparse.urlsplit(remote_thumbnail_url)
        remote_thumbnail_url = urlparse.urlunsplit(
            ('', '', parse_result.path, parse_result.query, ''))
        response = self.client.get(remote_thumbnail_url)
        thumbnail_dir = os.path.join(settings.MEDIA_ROOT, 'thumbs')
        thumbnail_path = os.path.join(thumbnail_dir, 'map-thumb.png')
        map.save_thumbnail(thumbnail_path, response.content)
        # Check thumbnail created
        self.assertTrue(os.path.exists(thumbnail_path))
        self.assertEqual(what(thumbnail_path), 'png')
        # Check that now we have thumbnail
        self.assertTrue(map.has_thumbnail())
        missing_thumbnail_url = staticfiles.static(settings.MISSING_THUMBNAIL)
        self.assertTrue(map.get_thumbnail_url() != missing_thumbnail_url)
        thumbnail_links = map.link_set.filter(name__icontains='thumbnail')
        self.assertTrue(len(thumbnail_links) > 0)
        link_names = ['remote thumbnail', 'thumbnail']
        for link in thumbnail_links:
            self.assertIn(link.name.lower(), link_names)
        # cleanup
        map.delete()
        layer1.delete()
        layer2.delete()
class InitialSetup():
    """Test fixture helpers for building map-creation payloads."""

    @classmethod
    def generate_initial_map(cls, layer1, layer2):
        """Build the JSON payload for the new_map_json view.

        The map contains two background tile layers plus the two uploaded
        layers passed in (layer1 raster, layer2 vector, per the callers).
        """
        # construct json request for new map
        json_payload = {
            "sources": {
                "source_OpenMapSurfer Roads": {
                    "url": "http://korona.geog.uni-heidelberg.de/tiles"
                           "/roads/x={x}&y={y}&z={z}"
                },
                "source_OpenStreetMap": {
                    "url": "http://{s}.tile.osm.org/{z}/{x}/{y}.png"
                },
                "source_san_andres_y_providencia_administrative": {
                    "url": "http://geonode.dev/qgis-server/tiles"
                           "/san_andres_y_providencia_administrative/"
                           "{z}/{x}/{y}.png"
                },
                "source_relief_san_andres": {
                    "url": "http://geonode.dev/qgis-server/tiles"
                           "/relief_san_andres/{z}/{x}/{y}.png"
                }
            },
            "about": {
                "title": "San Andreas",
                "abstract": "San Andreas sample map"
            },
            "map": {
                "center": [12.91890657418042, -81.298828125],
                "zoom": 6,
                "projection": "",
                "layers": [
                    {
                        "name": "OpenMapSurfer_Roads",
                        "title": "OpenMapSurfer Roads",
                        "visibility": True,
                        "url": "http://korona.geog.uni-heidelberg.de/tiles/"
                               "roads/x={x}&y={y}&z={z}",
                        "group": "background",
                        "source": "source_OpenMapSurfer Roads"
                    },
                    {
                        "name": "osm",
                        "title": "OpenStreetMap",
                        "visibility": False,
                        "url": "http://{s}.tile.osm.org/{z}/{x}/{y}.png",
                        "group": "background",
                        "source": "source_OpenStreetMap"
                    },
                    {
                        "name": layer2.alternate,
                        "title": layer2.name,
                        "visibility": True,
                        "url": "http://geonode.dev/qgis-server/tiles"
                               "/san_andres_y_providencia_administrative/"
                               "{z}/{x}/{y}.png",
                        "source": "source_"
                                  "san_andres_y_providencia_administrative"
                    },
                    {
                        "name": layer1.alternate,
                        "title": layer1.name,
                        "visibility": True,
                        "url": "http://geonode.dev/qgis-server/tiles"
                               "/relief_san_andres/{z}/{x}/{y}.png",
                        "source": "source_relief_san_andres"
                    }
                ]
            }
        }
        return json_payload
| gpl-3.0 |
jmr0/servo | tests/wpt/web-platform-tests/tools/py/testing/path/test_svnurl.py | 218 | 3524 | import py
from py._path.svnurl import InfoSvnCommand
import datetime
import time
from svntestbase import CommonSvnTests
def pytest_funcarg__path1(request):
    """Legacy funcarg: an svnurl path pointing at the repowc1 repository."""
    # Only the repository URL is needed here; repo and wc are unpacked but unused.
    repo, repourl, wc = request.getfuncargvalue("repowc1")
    return py.path.svnurl(repourl)
class TestSvnURLCommandPath(CommonSvnTests):
    """svnurl-specific behaviour on top of the shared CommonSvnTests suite."""

    @py.test.mark.xfail
    def test_load(self, path1):
        super(TestSvnURLCommandPath, self).test_load(path1)
    # the following two work on jython but not in local/svnwc
    def test_listdir(self, path1):
        super(TestSvnURLCommandPath, self).test_listdir(path1)
    def test_visit_ignore(self, path1):
        super(TestSvnURLCommandPath, self).test_visit_ignore(path1)
    def test_svnurl_needs_arg(self, path1):
        py.test.raises(TypeError, "py.path.svnurl()")
    def test_svnurl_does_not_accept_None_either(self, path1):
        py.test.raises(Exception, "py.path.svnurl(None)")
    def test_svnurl_characters_simple(self, path1):
        py.path.svnurl("svn+ssh://hello/world")
    def test_svnurl_characters_at_user(self, path1):
        py.path.svnurl("http://user@host.com/some/dir")
    def test_svnurl_characters_at_path(self, path1):
        # '@' is only legal in the authority part, not in the path
        py.test.raises(ValueError, 'py.path.svnurl("http://host.com/foo@bar")')
    def test_svnurl_characters_colon_port(self, path1):
        py.path.svnurl("http://host.com:8080/some/dir")
    def test_svnurl_characters_tilde_end(self, path1):
        py.path.svnurl("http://host.com/some/file~")
    @py.test.mark.xfail("sys.platform == 'win32'")
    def test_svnurl_characters_colon_path(self, path1):
        # colons are allowed on win32, because they're part of the drive
        # part of an absolute path... however, they shouldn't be allowed in
        # other parts, I think
        py.test.raises(ValueError, 'py.path.svnurl("http://host.com/foo:bar")')
    def test_export(self, path1, tmpdir):
        tmpdir = tmpdir.join("empty")
        p = path1.export(tmpdir)
        assert p == tmpdir  # XXX should return None
        n1 = [x.basename for x in tmpdir.listdir()]
        n2 = [x.basename for x in path1.listdir()]
        n1.sort()
        n2.sort()
        assert n1 == n2
        # an export (unlike a checkout) must not contain svn metadata
        assert not p.join('.svn').check()
        rev = path1.mkdir("newdir")
        tmpdir.remove()
        assert not tmpdir.check()
        # exporting at rev=1 must reproduce the pre-mkdir content only
        path1.new(rev=1).export(tmpdir)
        for p in tmpdir.listdir():
            assert p.basename in n2
class TestSvnInfoCommand:
    """Parsing of ``svn ls -v`` style listing lines by InfoSvnCommand."""

    def test_svn_1_2(self):
        line = " 2256 hpk 165 Nov 24 17:55 __init__.py"
        info = InfoSvnCommand(line)
        # NOTE(review): 'now' is assigned but never used below — candidate
        # for removal; kept as-is here.
        now = datetime.datetime.now()
        assert info.last_author == 'hpk'
        assert info.created_rev == 2256
        assert info.kind == 'file'
        # we don't check for the year (2006), because that depends
        # on the clock correctly being setup
        assert time.gmtime(info.mtime)[1:6] == (11, 24, 17, 55, 0)
        assert info.size == 165
        assert info.time == info.mtime * 1000000
    def test_svn_1_3(self):
        line =" 4784 hpk 2 Jun 01 2004 __init__.py"
        info = InfoSvnCommand(line)
        assert info.last_author == 'hpk'
        assert info.kind == 'file'
    def test_svn_1_3_b(self):
        # trailing slash in the listing marks a directory entry
        line =" 74 autoadmi Oct 06 23:59 plonesolutions.com/"
        info = InfoSvnCommand(line)
        assert info.last_author == 'autoadmi'
        assert info.kind == 'dir'
def test_badchars():
    # URL with both '@' in the path and a trailing ':' must be rejected
    py.test.raises(ValueError, "py.path.svnurl('http://host/tmp/@@@:')")
| mpl-2.0 |
abzaloid/maps | django-project/lib/python2.7/site-packages/django/core/files/uploadhandler.py | 102 | 6899 | """
Base file upload handler classes, and the built-in concrete subclasses
"""
from __future__ import unicode_literals
from io import BytesIO
from django.conf import settings
from django.core.files.uploadedfile import (
InMemoryUploadedFile, TemporaryUploadedFile,
)
from django.utils.encoding import python_2_unicode_compatible
from django.utils.module_loading import import_string
# Public API of this module (consumed by ``from ... import *`` and docs).
__all__ = [
    'UploadFileException', 'StopUpload', 'SkipFile', 'FileUploadHandler',
    'TemporaryFileUploadHandler', 'MemoryFileUploadHandler', 'load_handler',
    'StopFutureHandlers'
]
class UploadFileException(Exception):
    """
    Any error having to do with uploading files.

    Base class for the more specific upload control-flow exceptions below.
    """
    pass
@python_2_unicode_compatible
class StopUpload(UploadFileException):
    """
    Raised when an upload must abort.
    """
    def __init__(self, connection_reset=False):
        """
        When ``connection_reset`` is ``True``, Django will halt the upload
        without consuming the rest of it. This will cause the browser to
        show a "connection reset" error.
        """
        self.connection_reset = connection_reset

    def __str__(self):
        return ('StopUpload: Halt current upload.'
                if self.connection_reset
                else 'StopUpload: Consume request data, then halt.')
class SkipFile(UploadFileException):
    """
    This exception is raised by an upload handler that wants to skip a given file.
    """
    # Raised from new_file()/receive_data_chunk(); the current file is dropped.
    pass
class StopFutureHandlers(UploadFileException):
    """
    Upload handlers that have handled a file and do not want future handlers to
    run should raise this exception instead of returning None.
    """
    pass
class FileUploadHandler(object):
    """
    Base class for streaming upload handlers.

    Subclasses must implement receive_data_chunk() and file_complete();
    the remaining hooks are optional.
    """
    chunk_size = 64 * 2 ** 10  # : The default chunk size is 64 KB.
    def __init__(self, request=None):
        # Per-file metadata; (re)populated by new_file() for each upload.
        self.file_name = None
        self.content_type = None
        self.content_length = None
        self.charset = None
        self.content_type_extra = None
        self.request = request
    def handle_raw_input(self, input_data, META, content_length, boundary, encoding=None):
        """
        Handle the raw input from the client.
        Parameters:
            :input_data:
                An object that supports reading via .read().
            :META:
                ``request.META``.
            :content_length:
                The (integer) value of the Content-Length header from the
                client.
            :boundary: The boundary from the Content-Type header. Be sure to
                prepend two '--'.
        """
        pass
    def new_file(self, field_name, file_name, content_type, content_length, charset=None, content_type_extra=None):
        """
        Signal that a new file has been started.
        Warning: As with any data from the client, you should not trust
        content_length (and sometimes won't even get it).
        """
        self.field_name = field_name
        self.file_name = file_name
        self.content_type = content_type
        self.content_length = content_length
        self.charset = charset
        self.content_type_extra = content_type_extra
    def receive_data_chunk(self, raw_data, start):
        """
        Receive data from the streamed upload parser. ``start`` is the position
        in the file of the chunk.
        """
        raise NotImplementedError('subclasses of FileUploadHandler must provide a receive_data_chunk() method')
    def file_complete(self, file_size):
        """
        Signal that a file has completed. File size corresponds to the actual
        size accumulated by all the chunks.
        Subclasses should return a valid ``UploadedFile`` object.
        """
        raise NotImplementedError('subclasses of FileUploadHandler must provide a file_complete() method')
    def upload_complete(self):
        """
        Signal that the upload is complete. Subclasses should perform cleanup
        that is necessary for this handler.
        """
        pass
class TemporaryFileUploadHandler(FileUploadHandler):
    """
    Upload handler that streams data into a temporary file.
    """
    # The inherited FileUploadHandler.__init__ is sufficient; no override needed.

    def new_file(self, file_name, *args, **kwargs):
        """
        Create the file object to append to as data is coming in.
        """
        super(TemporaryFileUploadHandler, self).new_file(
            file_name, *args, **kwargs)
        self.file = TemporaryUploadedFile(
            self.file_name, self.content_type, 0, self.charset,
            self.content_type_extra)

    def receive_data_chunk(self, raw_data, start):
        # Append each incoming chunk to the temp file.
        self.file.write(raw_data)

    def file_complete(self, file_size):
        # Rewind and record the final size before handing the file back.
        self.file.seek(0)
        self.file.size = file_size
        return self.file
class MemoryFileUploadHandler(FileUploadHandler):
    """
    File upload handler to stream uploads into memory (used for small files).
    """
    def handle_raw_input(self, input_data, META, content_length, boundary, encoding=None):
        """
        Use the content_length to signal whether or not this handler should be in use.
        """
        # Check the content-length header to see if we should
        # If the post is too large, we cannot use the Memory handler.
        # NOTE(review): self.activated is only defined here, so new_file()
        # assumes handle_raw_input() always runs first in the upload flow —
        # confirm against the caller before reordering anything.
        if content_length > settings.FILE_UPLOAD_MAX_MEMORY_SIZE:
            self.activated = False
        else:
            self.activated = True
    def new_file(self, *args, **kwargs):
        super(MemoryFileUploadHandler, self).new_file(*args, **kwargs)
        if self.activated:
            self.file = BytesIO()
            # This handler owns the file now; skip the remaining handlers.
            raise StopFutureHandlers()
    def receive_data_chunk(self, raw_data, start):
        """
        Add the data to the BytesIO file.
        """
        if self.activated:
            self.file.write(raw_data)
        else:
            # Returning the chunk passes it on to the next handler.
            return raw_data
    def file_complete(self, file_size):
        """
        Return a file object if we're activated.
        """
        if not self.activated:
            return
        self.file.seek(0)
        return InMemoryUploadedFile(
            file=self.file,
            field_name=self.field_name,
            name=self.file_name,
            content_type=self.content_type,
            size=file_size,
            charset=self.charset,
            content_type_extra=self.content_type_extra
        )
def load_handler(path, *args, **kwargs):
    """Instantiate the upload handler named by the dotted *path*.

    Any extra positional/keyword arguments are forwarded to the handler's
    constructor, e.g.::

        >>> from django.http import HttpRequest
        >>> request = HttpRequest()
        >>> load_handler('django.core.files.uploadhandler.TemporaryFileUploadHandler', request)
        <TemporaryFileUploadHandler object at 0x...>
    """
    handler_class = import_string(path)
    return handler_class(*args, **kwargs)
| mit |
hal0x2328/neo-python | neo/bin/prompt.py | 1 | 15089 | #!/usr/bin/env python3
import argparse
import datetime
import os
import traceback
import asyncio
import termios
import sys
from prompt_toolkit.completion import WordCompleter
from prompt_toolkit.history import FileHistory
from prompt_toolkit.shortcuts import print_formatted_text, PromptSession
from prompt_toolkit.formatted_text import FormattedText
from prompt_toolkit.application import get_app as prompt_toolkit_get_app
from neo import __version__
from neo.Core.Blockchain import Blockchain
from neo.Storage.Implementation.DBFactory import getBlockchainDB
from neo.Implementations.Notifications.NotificationDB import NotificationDB
from neo.Prompt.Commands.Wallet import CommandWallet
from neo.Prompt.Commands.Show import CommandShow
from neo.Prompt.Commands.Search import CommandSearch
from neo.Prompt.Commands.Config import CommandConfig
from neo.Prompt.Commands.SC import CommandSC
from neo.Prompt.PromptData import PromptData
from neo.Prompt.InputParser import InputParser
from neo.Settings import settings, PrivnetConnectionError
from neo.UserPreferences import preferences
from neo.logging import log_manager
from neo.Prompt.PromptPrinter import prompt_print, token_style
from neo.Network.nodemanager import NodeManager
import neo.Storage.Implementation.DBFactory as DBFactory
logger = log_manager.getLogger()
from prompt_toolkit.eventloop import use_asyncio_event_loop
from neo.Network.p2pservice import NetworkService
from contextlib import suppress
class PromptFileHistory(FileHistory):
    """File-backed prompt history that redacts sensitive key material
    (WIF/NEP2 private keys) before commands are persisted to disk."""

    def append(self, string):
        """Record *string* in memory and append it to the history file."""
        entry = self.redact_command(string)
        if len(entry) == 0:
            return
        self.strings.append(entry)
        # Persist using the prompt_toolkit on-disk history format:
        # a timestamp comment followed by one '+' prefixed line per row.
        with open(self.filename, 'ab') as f:
            def write(t):
                f.write(t.encode('utf-8'))
            write('\n# %s\n' % datetime.datetime.now())
            for line in entry.split('\n'):
                write('+%s\n' % line)

    def redact_command(self, string):
        """Replace key material in import/export commands with a placeholder."""
        if len(string) == 0:
            return string
        sensitive = ('import wif', 'export wif', 'import nep2', 'export nep2')
        match = next((comm for comm in sensitive if comm in string), None)
        if match is None:
            return string
        # Only redact when an actual key argument follows the command words,
        # not when the argument was left empty.
        if len(match + " ") < len(string):
            # example: import wif 5HueCGU8 --> import wif <wif>
            return match + " <" + match.split(" ")[1] + ">"
        return string
class PromptInterface:
    """Interactive neo-python REPL: reads user input, dispatches it to the
    registered command groups, and renders sync progress in the toolbar."""
    prompt_completer = None
    history = None
    # Main loop flag; cleared by quit()/'exit'.
    go_on = True
    wallet_loop_deferred = None
    Wallet = None
    # Wallet addresses, aliases and token symbols gathered for completion.
    _known_things = []
    _commands = [
        CommandWallet(), CommandShow(), CommandSearch(), CommandConfig(), CommandSC()
    ]
    _command_descs = [desc for c in _commands for desc in c.command_descs_with_sub_commands()]
    # Maps the top-level command word to its command object.
    commands = {command.command_desc().command: command for command in _commands}
    start_height = None
    start_dt = None
    prompt_session = None
    def __init__(self, history_filename=None):
        # Register this instance globally so command implementations can
        # reach the prompt through PromptData.
        PromptData.Prompt = self
        if history_filename:
            PromptInterface.history = PromptFileHistory(history_filename)
        self.input_parser = InputParser()
        self.start_height = Blockchain.Default().Height
        self.start_dt = datetime.datetime.utcnow()
    def get_bottom_toolbar(self, cli=None):
        """Build the toolbar string: network name plus wallet/block/header
        heights. Errors are swallowed and an empty list is returned instead
        (NOTE(review): a string would be more consistent here)."""
        out = []
        try:
            if PromptData.Wallet is None:
                return "[%s] Progress: 0/%s/%s" % (settings.net_name,
                                                   str(Blockchain.Default().Height),
                                                   str(Blockchain.Default().HeaderHeight))
            else:
                return "[%s] Progress: %s/%s/%s" % (settings.net_name, str(PromptData.Wallet._current_height),
                                                    str(Blockchain.Default().Height),
                                                    str(Blockchain.Default().HeaderHeight))
        except Exception as e:
            pass
        return out
    def get_completer(self):
        """Return a WordCompleter over all command words plus any known
        wallet addresses, aliases and token symbols."""
        standard_completions = list({word for d in self._command_descs for word in d.command.split()})  # Use a set to ensure unicity of words
        standard_completions += ['quit', 'help', 'exit']
        if PromptData.Wallet:
            for addr in PromptData.Wallet.Addresses:
                if addr not in self._known_things:
                    self._known_things.append(addr)
            for alias in PromptData.Wallet.NamedAddr:
                if alias.Title not in self._known_things:
                    self._known_things.append(alias.Title)
            for tkn in PromptData.Wallet.GetTokens().values():
                if tkn.symbol not in self._known_things:
                    self._known_things.append(tkn.symbol)
        all_completions = standard_completions + self._known_things
        PromptInterface.prompt_completer = WordCompleter(all_completions)
        return PromptInterface.prompt_completer
    def quit(self):
        """Stop the main loop, close any open wallet and exit the process."""
        print('Shutting down. This may take a bit...')
        self.go_on = False
        PromptData.close_wallet()
        raise SystemExit
    def help(self):
        """Print one summary line per registered command group."""
        prompt_print(f"\nCommands:")
        for command_group in sorted(self.commands.keys()):
            command = self.commands[command_group]
            prompt_print(f" {command_group:<15} - {command.command_desc().short_help}")
        prompt_print(f"\nRun 'COMMAND help' for more information on a command.")
    def on_looperror(self, err):
        # Callback used for DB loop errors; only logs at debug level.
        logger.debug("On DB loop error! %s " % err)
    async def run(self):
        """Main interactive loop: wait for the node manager, then repeatedly
        prompt for input and dispatch commands until quit."""
        nodemgr = NodeManager()
        # Don't present the prompt until the network layer is up.
        while not nodemgr.running:
            await asyncio.sleep(0.1)
        tokens = [("class:neo", 'NEO'), ("class:default", ' cli. Type '),
                  ("class:command", '\'help\' '), ("class:default", 'to get started')]
        print_formatted_text(FormattedText(tokens), style=token_style)
        print('\n')
        session = PromptSession("neo> ",
                                completer=self.get_completer(),
                                history=self.history,
                                bottom_toolbar=self.get_bottom_toolbar,
                                style=token_style,
                                refresh_interval=3,
                                )
        self.prompt_session = session
        result = ""
        while self.go_on:
            # with patch_stdout():
            try:
                result = await session.prompt(async_=True)
            except EOFError:
                # Control-D pressed: quit
                return self.quit()
            except KeyboardInterrupt:
                # Control-C pressed: pause for user input
                # temporarily mute stdout during user input
                # components like `network` set at DEBUG level will spam through the console
                # making it impractical to input user data
                log_manager.mute_stdio()
                print('Logging output muted during user input...')
                try:
                    result = await session.prompt(async_=True)
                except Exception as e:
                    logger.error("Exception handling input: %s " % e)
                # and re-enable stdio
                log_manager.unmute_stdio()
            except Exception as e:
                logger.error("Exception handling input: %s " % e)
            try:
                # Dispatch: registered command groups first, then the
                # built-in quit/exit/help handling.
                command, arguments = self.input_parser.parse_input(result)
                if command is not None and len(command) > 0:
                    command = command.lower()
                    if command in self.commands:
                        cmd = self.commands[command]
                        if len(arguments) > 0 and arguments[-1] == 'help':
                            cmd.handle_help(arguments)
                        else:
                            cmd.execute(arguments)
                    else:
                        if command == 'quit' or command == 'exit':
                            self.quit()
                        elif command == 'help':
                            self.help()
                        elif command is None:
                            print("Please specify a command")
                        else:
                            print("Command '%s' not found" % command)
            except Exception as e:
                print("Could not execute command: %s" % e)
                traceback.print_stack()
                traceback.print_exc()
def main():
    """Parse CLI arguments, configure network/datadir/logging, then run the
    interactive prompt and the P2P service on a shared asyncio loop."""
    parser = argparse.ArgumentParser()
    # Network group
    group = parser.add_mutually_exclusive_group()
    group.add_argument("-m", "--mainnet", action="store_true", default=False,
                       help="Use MainNet instead of the default TestNet")
    group.add_argument("-p", "--privnet", nargs="?", metavar="host", const=True, default=False,
                       help="Use a private net instead of the default TestNet, optionally using a custom host (default: 127.0.0.1)")
    group.add_argument("--coznet", action="store_true", default=False,
                       help="Use the CoZ network instead of the default TestNet")
    group.add_argument("-u", "--unittest", nargs="?", metavar="host", const=True, default=False,
                       help="Use a private net instead of the default TestNet, optionally using a custom host (default: 127.0.0.1)")
    group.add_argument("-c", "--config", action="store", help="Use a specific config file")
    # Theme
    parser.add_argument("-t", "--set-default-theme", dest="theme",
                        choices=["dark", "light"],
                        help="Set the default theme to be loaded from the config file. Default: 'dark'")
    # Verbose
    parser.add_argument("-v", "--verbose", action="store_true", default=False,
                        help="Show smart-contract events by default")
    # Where to store stuff
    parser.add_argument("--datadir", action="store",
                        help="Absolute path to use for database directories")
    # peers
    parser.add_argument("--minpeers", action="store", type=int, choices=range(1, 10 + 1), metavar="[1-10]",
                        help="Min peers to use for P2P Joining")
    parser.add_argument("--maxpeers", action="store", type=int, default=5, choices=range(1, 10 + 1), metavar="[1-10]",
                        help="Max peers to use for P2P Joining")
    # Show the neo-python version
    parser.add_argument("--version", action="version",
                        version="neo-python v{version}".format(version=__version__))
    args = parser.parse_args()
    # Setting the datadir must come before setting the network, else the wrong path is checked at net setup.
    if args.datadir:
        settings.set_data_dir(args.datadir)
    # Setup depending on command line arguments. By default, the testnet settings are already loaded.
    if args.config:
        settings.setup(args.config)
    elif args.mainnet:
        settings.setup_mainnet()
    elif args.privnet:
        try:
            settings.setup_privnet(args.privnet)
        except PrivnetConnectionError as e:
            logger.error(str(e))
            return
    elif args.coznet:
        settings.setup_coznet()
    elif args.unittest:
        settings.setup_unittest_net()
    # Logfile settings & setup
    logfile_fn = os.path.join(settings.DATA_DIR_PATH, 'prompt.log')
    logfile_max_bytes = 5e7  # 50 MB
    logfile_backup_count = 3  # 3 logfiles history
    settings.set_logfile(logfile_fn, logfile_max_bytes, logfile_backup_count)
    if args.theme:
        preferences.set_theme(args.theme)
    if args.verbose:
        settings.set_log_smart_contract_events(True)
    def set_min_peers(num_peers) -> bool:
        # Returns False (after printing a hint) when the value is rejected.
        try:
            settings.set_min_peers(num_peers)
            print("Minpeers set to ", num_peers)
            return True
        except ValueError:
            print("Please supply a positive integer for minpeers")
            return False
    def set_max_peers(num_peers) -> bool:
        # Returns False (after printing a hint) when the value is rejected.
        try:
            settings.set_max_peers(num_peers)
            print("Maxpeers set to ", num_peers)
            return True
        except ValueError:
            print("Please supply a positive integer for maxpeers")
            return False
    minpeers = args.minpeers
    maxpeers = args.maxpeers
    # Validate and apply the peer bounds. Note maxpeers defaults to 5, so
    # with no flags the last branch still applies the default maximum.
    if minpeers and maxpeers:
        if minpeers > maxpeers:
            print("minpeers setting cannot be bigger than maxpeers setting")
            return
        if not set_min_peers(minpeers) or not set_max_peers(maxpeers):
            return
    elif minpeers:
        if not set_min_peers(minpeers):
            return
        if minpeers > settings.CONNECTED_PEER_MAX:
            if not set_max_peers(minpeers):
                return
    elif maxpeers:
        if not set_max_peers(maxpeers):
            return
        if maxpeers < settings.CONNECTED_PEER_MIN:
            if not set_min_peers(maxpeers):
                return
    loop = asyncio.get_event_loop()
    # put prompt_toolkit on top of asyncio to avoid blocking
    use_asyncio_event_loop()
    # Instantiate the blockchain and subscribe to notifications
    blockchain = Blockchain(DBFactory.getBlockchainDB(settings.chain_leveldb_path))
    Blockchain.RegisterBlockchain(blockchain)
    # Try to set up a notification db
    if NotificationDB.instance():
        NotificationDB.instance().start()
    # Start the prompt interface
    fn_prompt_history = os.path.join(settings.DATA_DIR_PATH, '.prompt.py.history')
    cli = PromptInterface(fn_prompt_history)
    cli_task = loop.create_task(cli.run())
    p2p = NetworkService()
    loop.create_task(p2p.start())
    async def shutdown():
        # Cancel every outstanding task and await the cancellations.
        all_tasks = asyncio.all_tasks()
        for task in all_tasks:
            task.cancel()
            with suppress(asyncio.CancelledError):
                await task
    # prompt_toolkit hack for not cleaning up see: https://github.com/prompt-toolkit/python-prompt-toolkit/issues/787
    old_attrs = termios.tcgetattr(sys.stdin)
    try:
        loop.run_forever()
    except SystemExit:
        pass
    finally:
        # Teardown order matters: stop the prompt app, drain the prompt
        # task's exception, shut down P2P, cancel remaining tasks, then
        # close the loop.
        with suppress(asyncio.InvalidStateError):
            app = prompt_toolkit_get_app()
            if app.is_running:
                app.exit()
        with suppress((SystemExit, Exception)):
            cli_task.exception()
        loop.run_until_complete(p2p.shutdown())
        loop.run_until_complete(shutdown())
        loop.run_until_complete(loop.shutdown_asyncgens())
        loop.stop()
        loop.close()
    # Run things
    # After the reactor is stopped, gracefully shutdown the database.
    NotificationDB.close()
    Blockchain.Default().Dispose()
    # clean up prompt_toolkit mess, see above
    termios.tcsetattr(sys.stdin, termios.TCSANOW, old_attrs)
if __name__ == "__main__":
    # Script entry point.
    main()
| mit |
laszlocsomor/tensorflow | tensorflow/contrib/keras/api/keras/callbacks/__init__.py | 73 | 1815 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras callback classes."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.keras._impl.keras.callbacks import BaseLogger
from tensorflow.python.keras._impl.keras.callbacks import Callback
from tensorflow.python.keras._impl.keras.callbacks import CSVLogger
from tensorflow.python.keras._impl.keras.callbacks import EarlyStopping
from tensorflow.python.keras._impl.keras.callbacks import History
from tensorflow.python.keras._impl.keras.callbacks import LambdaCallback
from tensorflow.python.keras._impl.keras.callbacks import LearningRateScheduler
from tensorflow.python.keras._impl.keras.callbacks import ModelCheckpoint
from tensorflow.python.keras._impl.keras.callbacks import ProgbarLogger
from tensorflow.python.keras._impl.keras.callbacks import ReduceLROnPlateau
from tensorflow.python.keras._impl.keras.callbacks import RemoteMonitor
from tensorflow.python.keras._impl.keras.callbacks import TensorBoard
from tensorflow.python.keras._impl.keras.callbacks import TerminateOnNaN
# Drop the __future__ names so they are not re-exported as part of this
# module's public API.
del absolute_import
del division
del print_function
| apache-2.0 |
grimfang/panda3d | direct/src/stdpy/threading2.py | 3 | 26986 | """ This module reimplements Python's native threading module using Panda
threading constructs. It's designed as a drop-in replacement for the
threading module for code that works with Panda; it is necessary because
in some compilation models, Panda's threading constructs are
incompatible with the OS-provided threads used by Python's thread
module.
Unlike threading.py, this module is a more explicit implementation of
Python's threading model, designed to more precisely emulate Python's
standard threading semantics. In fact, this is a bald-face copy of
Python's threading module from Python 2.5, with a few lines at the top
to import Panda's thread reimplementation instead of the system thread
module, and so it is therefore layered on top of Panda's thread
implementation. """
import sys as _sys
from direct.stdpy import thread
from direct.stdpy.thread import stack_size, _newname, _local as local
from panda3d import core
_sleep = core.Thread.sleep
from time import time as _time
from traceback import format_exc as _format_exc
# Rename some stuff so "from threading import *" is safe
__all__ = ['activeCount', 'Condition', 'currentThread', 'enumerate', 'Event',
'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', 'Thread',
'Timer', 'setprofile', 'settrace', 'local', 'stack_size']
_start_new_thread = thread.start_new_thread
_allocate_lock = thread.allocate_lock
_get_ident = thread.get_ident
ThreadError = thread.error
del thread
# Debug support (adapted from ihooks.py).
# All the major classes here derive from _Verbose. We force that to
# be a new-style class so that all the major classes here are new-style.
# This helps debugging (type(instance) is more revealing for instances
# of new-style classes).
_VERBOSE = False
if __debug__:
    class _Verbose(object):
        # Mixin giving subclasses an optional per-instance debug printer.
        def __init__(self, verbose=None):
            if verbose is None:
                verbose = _VERBOSE
            self.__verbose = verbose
        def _note(self, format, *args):
            # Write a debug line to stderr, tagged with the calling
            # thread's name, when verbose mode is enabled.
            if self.__verbose:
                format = format % args
                format = "%s: %s\n" % (
                    currentThread().getName(), format)
                _sys.stderr.write(format)
else:
    # Disable this when using "python -O"
    class _Verbose(object):
        def __init__(self, verbose=None):
            pass
        def _note(self, *args):
            pass
# Support for profile and trace hooks
_profile_hook = None  # installed into each new thread via sys.setprofile (see Thread)
_trace_hook = None    # installed into each new thread via sys.settrace (see Thread)
def setprofile(func):
    """Register *func* as the profile hook for threads started here."""
    global _profile_hook
    _profile_hook = func
def settrace(func):
    """Register *func* as the trace hook for threads started here."""
    global _trace_hook
    _trace_hook = func
# Synchronization classes
# Lock is the thread module's primitive, non-reentrant lock.
Lock = _allocate_lock
def RLock(*args, **kwargs):
    """Factory returning a new reentrant lock (see _RLock)."""
    return _RLock(*args, **kwargs)
class _RLock(_Verbose):
    """Reentrant lock: may be acquired repeatedly by the owning thread and
    must be released once per acquire."""
    def __init__(self, verbose=None):
        _Verbose.__init__(self, verbose)
        self.__block = _allocate_lock()  # underlying non-reentrant lock
        self.__owner = None              # Thread currently holding the lock
        self.__count = 0                 # recursion depth of the owner
    def __repr__(self):
        return "<%s(%s, %d)>" % (
            self.__class__.__name__,
            self.__owner and self.__owner.getName(),
            self.__count)
    def acquire(self, blocking=1):
        """Acquire the lock, recursively if this thread already owns it.
        Returns a true value on success, false on a failed non-blocking
        attempt."""
        me = currentThread()
        if self.__owner is me:
            self.__count = self.__count + 1
            if __debug__:
                self._note("%s.acquire(%s): recursive success", self, blocking)
            return 1
        rc = self.__block.acquire(blocking)
        if rc:
            self.__owner = me
            self.__count = 1
            if __debug__:
                self._note("%s.acquire(%s): initial success", self, blocking)
        else:
            if __debug__:
                self._note("%s.acquire(%s): failure", self, blocking)
        return rc
    __enter__ = acquire
    def release(self):
        """Release one level of the lock; frees it when the count hits 0."""
        me = currentThread()
        assert self.__owner is me, "release() of un-acquire()d lock"
        self.__count = count = self.__count - 1
        if not count:
            self.__owner = None
            self.__block.release()
            if __debug__:
                self._note("%s.release(): final release", self)
        else:
            if __debug__:
                self._note("%s.release(): non-final release", self)
    def __exit__(self, t, v, tb):
        self.release()
    # Internal methods used by condition variables
    def _acquire_restore(self, state):
        # Re-acquire and restore the (count, owner) saved by _release_save().
        self.__block.acquire()
        self.__count, self.__owner = state
        if __debug__:
            self._note("%s._acquire_restore()", self)
    def _release_save(self):
        # Fully release the lock regardless of recursion depth and return
        # the state needed to restore it later.
        if __debug__:
            self._note("%s._release_save()", self)
        count = self.__count
        self.__count = 0
        owner = self.__owner
        self.__owner = None
        self.__block.release()
        return (count, owner)
    def _is_owned(self):
        return self.__owner is currentThread()
def Condition(*args, **kwargs):
    """Factory returning a new condition variable (see _Condition)."""
    return _Condition(*args, **kwargs)
class _Condition(_Verbose):
    """Condition variable bound to a lock; supports wait()/notify()/
    notifyAll(). All of wait/notify require the caller to hold the lock."""
    def __init__(self, lock=None, verbose=None):
        _Verbose.__init__(self, verbose)
        if lock is None:
            lock = RLock()
        self.__lock = lock
        # Export the lock's acquire() and release() methods
        self.acquire = lock.acquire
        self.release = lock.release
        # If the lock defines _release_save() and/or _acquire_restore(),
        # these override the default implementations (which just call
        # release() and acquire() on the lock). Ditto for _is_owned().
        try:
            self._release_save = lock._release_save
        except AttributeError:
            pass
        try:
            self._acquire_restore = lock._acquire_restore
        except AttributeError:
            pass
        try:
            self._is_owned = lock._is_owned
        except AttributeError:
            pass
        self.__waiters = []  # one private lock per currently waiting thread
    def __enter__(self):
        return self.__lock.__enter__()
    def __exit__(self, *args):
        return self.__lock.__exit__(*args)
    def __repr__(self):
        return "<Condition(%s, %d)>" % (self.__lock, len(self.__waiters))
    def _release_save(self):
        self.__lock.release()  # No state to save
    def _acquire_restore(self, x):
        self.__lock.acquire()  # Ignore saved state
    def _is_owned(self):
        # Return True if lock is owned by currentThread.
        # This method is called only if __lock doesn't have _is_owned().
        if self.__lock.acquire(0):
            self.__lock.release()
            return False
        else:
            return True
    def wait(self, timeout=None):
        """Release the lock, block until notified or timed out, then
        re-acquire the lock before returning."""
        assert self._is_owned(), "wait() of un-acquire()d lock"
        waiter = _allocate_lock()
        waiter.acquire()
        self.__waiters.append(waiter)
        saved_state = self._release_save()
        try:  # restore state no matter what (e.g., KeyboardInterrupt)
            if timeout is None:
                waiter.acquire()
                if __debug__:
                    self._note("%s.wait(): got it", self)
            else:
                # Balancing act: We can't afford a pure busy loop, so we
                # have to sleep; but if we sleep the whole timeout time,
                # we'll be unresponsive. The scheme here sleeps very
                # little at first, longer as time goes on, but never longer
                # than 20 times per second (or the timeout time remaining).
                endtime = _time() + timeout
                delay = 0.0005  # 500 us -> initial delay of 1 ms
                while True:
                    gotit = waiter.acquire(0)
                    if gotit:
                        break
                    remaining = endtime - _time()
                    if remaining <= 0:
                        break
                    delay = min(delay * 2, remaining, .05)
                    _sleep(delay)
                if not gotit:
                    if __debug__:
                        self._note("%s.wait(%s): timed out", self, timeout)
                    # Timed out: remove our waiter lock unless a notifier
                    # got to it first.
                    try:
                        self.__waiters.remove(waiter)
                    except ValueError:
                        pass
                else:
                    if __debug__:
                        self._note("%s.wait(%s): got it", self, timeout)
        finally:
            self._acquire_restore(saved_state)
    def notify(self, n=1):
        """Wake up to *n* waiting threads by releasing their waiter locks."""
        assert self._is_owned(), "notify() of un-acquire()d lock"
        __waiters = self.__waiters
        waiters = __waiters[:n]
        if not waiters:
            if __debug__:
                self._note("%s.notify(): no waiters", self)
            return
        self._note("%s.notify(): notifying %d waiter%s", self, n,
                   n!=1 and "s" or "")
        for waiter in waiters:
            waiter.release()
            try:
                __waiters.remove(waiter)
            except ValueError:
                pass
    def notifyAll(self):
        """Wake every waiting thread."""
        self.notify(len(self.__waiters))
def Semaphore(*args, **kwargs):
    """Factory returning a new counting semaphore (see _Semaphore)."""
    return _Semaphore(*args, **kwargs)
class _Semaphore(_Verbose):
    """Counting semaphore with no upper bound on release()."""
    # After Tim Peters' semaphore class, but not quite the same (no maximum)
    def __init__(self, value=1, verbose=None):
        assert value >= 0, "Semaphore initial value must be >= 0"
        _Verbose.__init__(self, verbose)
        self.__cond = Condition(Lock())
        self.__value = value
    def acquire(self, blocking=1):
        """Decrement the counter, waiting while it is zero (if *blocking*).
        Returns True on success, False on a failed non-blocking attempt.
        Note the while/else: the else runs only when the loop exits because
        the counter became non-zero, not via the non-blocking break."""
        rc = False
        self.__cond.acquire()
        while self.__value == 0:
            if not blocking:
                break
            if __debug__:
                self._note("%s.acquire(%s): blocked waiting, value=%s",
                           self, blocking, self.__value)
            self.__cond.wait()
        else:
            self.__value = self.__value - 1
            if __debug__:
                self._note("%s.acquire: success, value=%s",
                           self, self.__value)
            rc = True
        self.__cond.release()
        return rc
    __enter__ = acquire
    def release(self):
        """Increment the counter and wake one blocked acquirer."""
        self.__cond.acquire()
        self.__value = self.__value + 1
        if __debug__:
            self._note("%s.release: success, value=%s",
                       self, self.__value)
        self.__cond.notify()
        self.__cond.release()
    def __exit__(self, t, v, tb):
        self.release()
def BoundedSemaphore(*args, **kwargs):
    """Factory returning a new bounded semaphore (see _BoundedSemaphore)."""
    return _BoundedSemaphore(*args, **kwargs)
class _BoundedSemaphore(_Semaphore):
    """Semaphore that checks that # releases is <= # acquires"""
    def __init__(self, value=1, verbose=None):
        _Semaphore.__init__(self, value, verbose)
        self._initial_value = value
    def release(self):
        """Release the semaphore, raising ValueError past the initial value."""
        # NOTE(review): this bound check reads the counter without holding
        # the internal condition's lock, so concurrent releases could in
        # theory slip past the bound — confirm whether that matters here.
        if self._Semaphore__value >= self._initial_value:
            raise ValueError("Semaphore released too many times")
        return _Semaphore.release(self)
def Event(*args, **kwargs):
    """Factory returning a new event object (see _Event)."""
    return _Event(*args, **kwargs)
class _Event(_Verbose):
    """A boolean flag threads can set, clear and wait on.

    set() wakes every waiter; wait() returns immediately while the flag
    is true.
    """
    # After Tim Peters' event class (without is_posted())
    def __init__(self, verbose=None):
        _Verbose.__init__(self, verbose)
        self.__cond = Condition(Lock())
        self.__flag = False
    def isSet(self):
        """Return True iff the internal flag is currently set."""
        return self.__flag
    def set(self):
        """Set the flag and wake every thread blocked in wait()."""
        with self.__cond:
            self.__flag = True
            self.__cond.notifyAll()
    def clear(self):
        """Reset the flag so subsequent wait() calls block again."""
        with self.__cond:
            self.__flag = False
    def wait(self, timeout=None):
        """Block until the flag is set, or until *timeout* seconds pass."""
        with self.__cond:
            if not self.__flag:
                self.__cond.wait(timeout)
# Active thread administration
_active_limbo_lock = _allocate_lock()  # guards both _active and _limbo
_active = {}  # maps thread id to Thread object
_limbo = {}   # threads created via start() but not yet fully bootstrapped
# Main class for threads
class Thread(_Verbose):
    """A thread of control, mirroring Python 2.5's threading.Thread API on
    top of the replacement thread primitives imported above."""
    __initialized = False
    # Need to store a reference to sys.exc_info for printing
    # out exceptions when a thread tries to use a global var. during interp.
    # shutdown and thus raises an exception about trying to perform some
    # operation on/with a NoneType
    __exc_info = _sys.exc_info
    def __init__(self, group=None, target=None, name=None,
                 args=(), kwargs=None, verbose=None):
        assert group is None, "group argument must be None for now"
        _Verbose.__init__(self, verbose)
        if kwargs is None:
            kwargs = {}
        self.__target = target
        self.__name = str(name or _newname())
        self.__args = args
        self.__kwargs = kwargs
        # Daemon status is inherited from the creating thread by default.
        self.__daemonic = self._set_daemon()
        self.__started = False
        self.__stopped = False
        # Condition used by join() to wait for termination.
        self.__block = Condition(Lock())
        self.__initialized = True
        # sys.stderr is not stored in the class like
        # sys.exc_info since it can be changed between instances
        self.__stderr = _sys.stderr
    def _set_daemon(self):
        # Overridden in _MainThread and _DummyThread
        return currentThread().isDaemon()
    def __repr__(self):
        assert self.__initialized, "Thread.__init__() was not called"
        status = "initial"
        if self.__started:
            status = "started"
        if self.__stopped:
            status = "stopped"
        if self.__daemonic:
            status = status + " daemon"
        return "<%s(%s, %s)>" % (self.__class__.__name__, self.__name, status)
    def start(self):
        """Register the thread in _limbo and launch __bootstrap() in a new
        OS-level thread."""
        assert self.__initialized, "Thread.__init__() not called"
        assert not self.__started, "thread already started"
        if __debug__:
            self._note("%s.start(): starting thread", self)
        _active_limbo_lock.acquire()
        _limbo[self] = self
        _active_limbo_lock.release()
        _start_new_thread(self.__bootstrap, ())
        self.__started = True
        _sleep(0.000001)  # 1 usec, to let the thread run (Solaris hack)
    def run(self):
        """Default behaviour: invoke the target passed to the constructor."""
        if self.__target:
            self.__target(*self.__args, **self.__kwargs)
    def __bootstrap(self):
        # Runs in the new thread: move this object from _limbo to _active,
        # install trace/profile hooks, call run(), report any exception,
        # then mark stopped and deregister.
        try:
            self.__started = True
            _active_limbo_lock.acquire()
            _active[_get_ident()] = self
            del _limbo[self]
            _active_limbo_lock.release()
            if __debug__:
                self._note("%s.__bootstrap(): thread started", self)
            if _trace_hook:
                self._note("%s.__bootstrap(): registering trace hook", self)
                _sys.settrace(_trace_hook)
            if _profile_hook:
                self._note("%s.__bootstrap(): registering profile hook", self)
                _sys.setprofile(_profile_hook)
            try:
                self.run()
            except SystemExit:
                if __debug__:
                    self._note("%s.__bootstrap(): raised SystemExit", self)
            except:
                if __debug__:
                    self._note("%s.__bootstrap(): unhandled exception", self)
                # If sys.stderr is no more (most likely from interpreter
                # shutdown) use self.__stderr. Otherwise still use sys (as in
                # _sys) in case sys.stderr was redefined since the creation of
                # self.
                if _sys:
                    _sys.stderr.write("Exception in thread %s:\n%s\n" %
                                      (self.getName(), _format_exc()))
                else:
                    # Do the best job possible w/o a huge amt. of code to
                    # approximate a traceback (code ideas from
                    # Lib/traceback.py)
                    exc_type, exc_value, exc_tb = self.__exc_info()
                    try:
                        self.__stderr.write("Exception in thread " + self.getName() +
                                            " (most likely raised during interpreter shutdown):\n")
                        self.__stderr.write("Traceback (most recent call last):\n")
                        while exc_tb:
                            self.__stderr.write(' File "%s", line %s, in %s\n' %
                                                (exc_tb.tb_frame.f_code.co_filename,
                                                 exc_tb.tb_lineno,
                                                 exc_tb.tb_frame.f_code.co_name))
                            exc_tb = exc_tb.tb_next
                        self.__stderr.write("%s: %s\n" % (exc_type, exc_value))
                    # Make sure that exc_tb gets deleted since it is a memory
                    # hog; deleting everything else is just for thoroughness
                    finally:
                        del exc_type, exc_value, exc_tb
            else:
                if __debug__:
                    self._note("%s.__bootstrap(): normal return", self)
        finally:
            self.__stop()
            try:
                self.__delete()
            except:
                pass
    def __stop(self):
        # Mark the thread stopped and wake any join()ers.
        self.__block.acquire()
        self.__stopped = True
        self.__block.notifyAll()
        self.__block.release()
    def __delete(self):
        "Remove current thread from the dict of currently running threads."
        # Notes about running with dummy_thread:
        #
        # Must take care to not raise an exception if dummy_thread is being
        # used (and thus this module is being used as an instance of
        # dummy_threading). dummy_thread.get_ident() always returns -1 since
        # there is only one thread if dummy_thread is being used. Thus
        # len(_active) is always <= 1 here, and any Thread instance created
        # overwrites the (if any) thread currently registered in _active.
        #
        # An instance of _MainThread is always created by 'threading'. This
        # gets overwritten the instant an instance of Thread is created; both
        # threads return -1 from dummy_thread.get_ident() and thus have the
        # same key in the dict. So when the _MainThread instance created by
        # 'threading' tries to clean itself up when atexit calls this method
        # it gets a KeyError if another Thread instance was created.
        #
        # This all means that KeyError from trying to delete something from
        # _active if dummy_threading is being used is a red herring. But
        # since it isn't if dummy_threading is *not* being used then don't
        # hide the exception.
        _active_limbo_lock.acquire()
        try:
            try:
                del _active[_get_ident()]
            except KeyError:
                if 'dummy_threading' not in _sys.modules:
                    raise
        finally:
            _active_limbo_lock.release()
    def join(self, timeout=None):
        """Wait until the thread terminates, or until *timeout* seconds
        have elapsed."""
        assert self.__initialized, "Thread.__init__() not called"
        assert self.__started, "cannot join thread before it is started"
        assert self is not currentThread(), "cannot join current thread"
        if __debug__:
            if not self.__stopped:
                self._note("%s.join(): waiting until thread stops", self)
        self.__block.acquire()
        try:
            if timeout is None:
                while not self.__stopped:
                    self.__block.wait()
                if __debug__:
                    self._note("%s.join(): thread stopped", self)
            else:
                deadline = _time() + timeout
                while not self.__stopped:
                    delay = deadline - _time()
                    if delay <= 0:
                        if __debug__:
                            self._note("%s.join(): timed out", self)
                        break
                    self.__block.wait(delay)
                else:
                    if __debug__:
                        self._note("%s.join(): thread stopped", self)
        finally:
            self.__block.release()
    def getName(self):
        assert self.__initialized, "Thread.__init__() not called"
        return self.__name
    def setName(self, name):
        assert self.__initialized, "Thread.__init__() not called"
        self.__name = str(name)
    def isAlive(self):
        # Alive means started and not yet stopped.
        assert self.__initialized, "Thread.__init__() not called"
        return self.__started and not self.__stopped
    def isDaemon(self):
        assert self.__initialized, "Thread.__init__() not called"
        return self.__daemonic
    def setDaemon(self, daemonic):
        assert self.__initialized, "Thread.__init__() not called"
        assert not self.__started, "cannot set daemon status of active thread"
        self.__daemonic = daemonic
# The timer class was contributed by Itamar Shtull-Trauring
def Timer(*args, **kwargs):
    """Factory returning a new timer thread (see _Timer)."""
    return _Timer(*args, **kwargs)
class _Timer(Thread):
    """Call a function after a specified number of seconds:

    t = Timer(30.0, f, args=[], kwargs={})
    t.start()
    t.cancel() # stop the timer's action if it's still waiting
    """
    def __init__(self, interval, function, args=None, kwargs=None):
        """interval: seconds to wait before firing; function: callable to
        invoke; args/kwargs: arguments forwarded to *function*.

        The defaults were previously the mutable literals [] and {}, which
        Python evaluates once and shares across every _Timer instance —
        one timer mutating them would affect all others. Use None
        sentinels instead (explicit [] / {} arguments still work).
        """
        Thread.__init__(self)
        self.interval = interval
        self.function = function
        self.args = args if args is not None else []
        self.kwargs = kwargs if kwargs is not None else {}
        self.finished = Event()  # set by cancel(), or after the timer fires
    def cancel(self):
        """Stop the timer if it hasn't finished yet"""
        self.finished.set()
    def run(self):
        # Wait up to `interval`; fire only if cancel() didn't set the flag.
        self.finished.wait(self.interval)
        if not self.finished.isSet():
            self.function(*self.args, **self.kwargs)
        self.finished.set()
# Special thread class to represent the main thread
# This is garbage collected through an exit handler
class _MainThread(Thread):
    """Thread object representing the interpreter's main thread; registers
    itself in _active at construction."""
    def __init__(self):
        Thread.__init__(self, name="MainThread")
        self._Thread__started = True
        _active_limbo_lock.acquire()
        _active[_get_ident()] = self
        _active_limbo_lock.release()
    def _set_daemon(self):
        # The main thread is never a daemon.
        return False
    def _exitfunc(self):
        # Invoked at interpreter exit (bound as module-level _shutdown):
        # wait for all non-daemon threads to finish, then deregister.
        self._Thread__stop()
        t = _pickSomeNonDaemonThread()
        if t:
            if __debug__:
                self._note("%s: waiting for other threads", self)
        while t:
            t.join()
            t = _pickSomeNonDaemonThread()
        if __debug__:
            self._note("%s: exiting", self)
        self._Thread__delete()
def _pickSomeNonDaemonThread():
    """Return an arbitrary live, non-daemon thread, or None if none remain."""
    for candidate in enumerate():
        if candidate.isAlive() and not candidate.isDaemon():
            return candidate
    return None
# Dummy thread class to represent threads not started here.
# These aren't garbage collected when they die, nor can they be waited for.
# If they invoke anything in threading.py that calls currentThread(), they
# leave an entry in the _active dict forever after.
# Their purpose is to return *something* from currentThread().
# They are marked as daemon threads so we won't wait for them
# when we exit (conform previous semantics).
class _DummyThread(Thread):
    """Placeholder thread object for threads not started through this module.

    Instances report themselves as daemonic so interpreter shutdown does not
    wait on them, and they can never be join()ed.
    """

    def __init__(self):
        Thread.__init__(self, name=_newname("Dummy-%d"))
        # Thread.__block consumes an OS-level locking primitive, which
        # can never be used by a _DummyThread.  Since a _DummyThread
        # instance is immortal, that's bad, so release this resource.
        del self._Thread__block
        # Mark as already started: start() will never be called on it.
        self._Thread__started = True
        _active_limbo_lock.acquire()
        _active[_get_ident()] = self
        _active_limbo_lock.release()

    def _set_daemon(self):
        # Always daemonic -- see the class docstring.
        return True

    def join(self, timeout=None):
        assert False, "cannot join a dummy thread"
# Global API functions
def currentThread():
    """Return the Thread object for the calling thread.

    Threads that were not created through this module get a fresh
    _DummyThread stand-in instead.
    """
    ident = _get_ident()
    try:
        return _active[ident]
    except KeyError:
        # Not registered in _active: thread was started outside this module.
        return _DummyThread()
def activeCount():
    """Return the number of Thread objects currently alive (active + limbo)."""
    _active_limbo_lock.acquire()
    try:
        return len(_active) + len(_limbo)
    finally:
        _active_limbo_lock.release()
def enumerate():
    """Return a list of all Thread objects currently alive.

    NOTE: deliberately shadows the builtin enumerate() inside this module.
    """
    _active_limbo_lock.acquire()
    try:
        return list(_active.values()) + list(_limbo.values())
    finally:
        _active_limbo_lock.release()
#from thread import stack_size
# Create the main thread object,
# and make it available for the interpreter
# (Py_Main) as threading._shutdown.
# Instantiating _MainThread registers the already-running main thread in
# _active; the bound _exitfunc is the hook the interpreter (Py_Main) calls
# at shutdown as threading._shutdown.
_shutdown = _MainThread()._exitfunc
# get thread-local implementation, either from the thread
# module, or from the python fallback
## try:
## from thread import _local as local
## except ImportError:
## from _threading_local import local
# Self-test code
if __debug__:
    def _test():
        """Smoke-test the module: several producer threads and one consumer
        exchange items through a monitor-style bounded queue."""
        from collections import deque

        class BoundedQueue(_Verbose):
            # One re-entrant lock guards the queue; separate reader/writer
            # condition variables avoid waking the wrong side.
            def __init__(self, limit):
                _Verbose.__init__(self)
                self.mon = RLock()
                self.rc = Condition(self.mon)
                self.wc = Condition(self.mon)
                self.limit = limit
                self.queue = deque()
            def put(self, item):
                # Block while full (writers wait on wc), then wake a reader.
                self.mon.acquire()
                while len(self.queue) >= self.limit:
                    self._note("put(%s): queue full", item)
                    self.wc.wait()
                self.queue.append(item)
                self._note("put(%s): appended, length now %d",
                           item, len(self.queue))
                self.rc.notify()
                self.mon.release()
            def get(self):
                # Block while empty (readers wait on rc), then wake a writer.
                self.mon.acquire()
                while not self.queue:
                    self._note("get(): queue empty")
                    self.rc.wait()
                item = self.queue.popleft()
                self._note("get(): got %s, %d left", item, len(self.queue))
                self.wc.notify()
                self.mon.release()
                return item

        class ProducerThread(Thread):
            # Pushes `quota` labelled items into the queue with tiny sleeps.
            def __init__(self, queue, quota):
                Thread.__init__(self, name="Producer")
                self.queue = queue
                self.quota = quota
            def run(self):
                from random import random
                counter = 0
                while counter < self.quota:
                    counter = counter + 1
                    self.queue.put("%s.%d" % (self.getName(), counter))
                    _sleep(random() * 0.00001)

        class ConsumerThread(Thread):
            # Pops exactly `count` items and prints each one.
            def __init__(self, queue, count):
                Thread.__init__(self, name="Consumer")
                self.queue = queue
                self.count = count
            def run(self):
                while self.count > 0:
                    item = self.queue.get()
                    print(item)
                    self.count = self.count - 1

        NP = 3  # number of producer threads
        QL = 4  # bounded queue capacity
        NI = 5  # items produced per producer
        Q = BoundedQueue(QL)
        P = []
        for i in range(NP):
            t = ProducerThread(Q, NI)
            t.setName("Producer-%d" % (i+1))
            P.append(t)
        C = ConsumerThread(Q, NI*NP)
        for t in P:
            t.start()
            _sleep(0.000001)
        C.start()
        for t in P:
            t.join()
        C.join()
    if __name__ == '__main__':
        _test()
| bsd-3-clause |
vyscond/cocos | test/test_schedule.py | 6 | 1347 | from __future__ import division, print_function, unicode_literals
# This code is so you can run the samples without installing the package
import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
#
testinfo = "s, t 2, s, t 4, s, q"
tags = "schedule, position"
import cocos
from cocos.director import director
from cocos.sprite import Sprite
import pyglet
import random
from math import sin, cos
class TestLayer(cocos.layer.Layer):
    """Layer whose single grossini sprite circles the screen centre."""

    def __init__(self):
        super(TestLayer, self).__init__()
        self.sprite = Sprite('grossini.png')
        self.add(self.sprite)
        width, height = director.get_window_size()
        self.radius = height / 3.0
        self._elapsed = 0.0
        # Update the position every frame, and once immediately so the
        # sprite starts on the circle instead of at the origin.
        self.schedule(self.change_sprite_pos)
        self.change_sprite_pos(0.0)

    def change_sprite_pos(self, dt):
        """Advance the accumulated time and move the sprite along its circle."""
        self._elapsed += dt
        width, height = director.get_window_size()
        angle = self._elapsed * 1.5
        self.sprite.position = (width // 2 + self.radius * cos(angle),
                                height // 2 + self.radius * sin(angle))
# Human-readable summary printed by main() before the demo starts.
description = """
Grossini sprite will circle around the center of the screen
"""
def main():
    """Announce the demo, initialise the director and run the scene."""
    print(description)
    director.init()
    layer = TestLayer()
    scene = cocos.scene.Scene(layer)
    director.run(scene)
# Allow this test module to be run directly as a script.
if __name__ == '__main__':
    main()
| bsd-3-clause |
pombredanne/MOG | nova/tests/virt/libvirt/test_fakelibvirt.py | 7 | 15519 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2010 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova import test
from lxml import etree
import nova.tests.virt.libvirt.fakelibvirt as libvirt
def get_vm_xml(name="testname", uuid=None, source_type='file',
               interface_type='bridge'):
    """Build a libvirt domain XML document for the fakelibvirt tests.

    :param name: domain name inserted into the <name> element
    :param uuid: optional UUID; when given, a <uuid> element is emitted
    :param source_type: attribute name used on the disk <source> element
    :param interface_type: network interface type ('bridge' or 'network')
    :returns: the domain definition as an XML string
    """
    # Only emit a <uuid> element when a uuid was actually supplied.
    uuid_tag = '<uuid>%s</uuid>' % (uuid,) if uuid else ''
    substitutions = {'name': name,
                     'uuid_tag': uuid_tag,
                     'source_type': source_type,
                     'interface_type': interface_type}
    return '''<domain type='kvm'>
<name>%(name)s</name>
%(uuid_tag)s
<memory>128000</memory>
<vcpu>1</vcpu>
<os>
<type>hvm</type>
<kernel>/somekernel</kernel>
<cmdline>root=/dev/sda</cmdline>
<boot dev='hd'/>
</os>
<features>
<acpi/>
</features>
<devices>
<disk type='file' device='disk'>
<driver name='qemu' type='qcow2'/>
<source %(source_type)s='/somefile'/>
<target dev='vda' bus='virtio'/>
</disk>
<interface type='%(interface_type)s'>
<mac address='05:26:3e:31:28:1f'/>
<source %(interface_type)s='br100'/>
</interface>
<input type='mouse' bus='ps2'/>
<graphics type='vnc' port='5901' autoport='yes' keymap='en-us'/>
<graphics type='spice' port='5901' autoport='yes' keymap='en-us'/>
</devices>
</domain>''' % substitutions
class FakeLibvirtTests(test.NoDBTestCase):
    """Exercise the fake libvirt module used by the Nova unit tests.

    Each test drives nova.tests.virt.libvirt.fakelibvirt through the same
    entry points the real libvirt bindings expose (openAuth, defineXML,
    createXML, lookups, NWFilter handling, compareCPU) and checks that the
    fake behaves like libvirt would.

    CONSISTENCY FIX: the original mixed assertEqual with the deprecated
    ``assertEquals`` alias; all assertions now use assertEqual.
    """

    def setUp(self):
        super(FakeLibvirtTests, self).setUp()
        # Reset fakelibvirt's module-level registries so every test starts
        # from a pristine fake hypervisor.
        libvirt._reset()

    def get_openAuth_curry_func(self, readOnly=False):
        """Return a one-argument connect function with credentials bound.

        :param readOnly: open the connection with VIR_CONNECT_RO when True
        """
        def fake_cb(credlist):
            return 0
        creds = [[libvirt.VIR_CRED_AUTHNAME,
                  libvirt.VIR_CRED_NOECHOPROMPT],
                 fake_cb,
                 None]
        flags = 0
        if readOnly:
            flags = libvirt.VIR_CONNECT_RO
        return lambda uri: libvirt.openAuth(uri, creds, flags)

    def test_openAuth_accepts_None_uri_by_default(self):
        conn_method = self.get_openAuth_curry_func()
        conn = conn_method(None)
        self.assertNotEqual(conn, None, "Connecting to fake libvirt failed")

    def test_openAuth_can_refuse_None_uri(self):
        conn_method = self.get_openAuth_curry_func()
        libvirt.allow_default_uri_connection = False
        self.assertRaises(ValueError, conn_method, None)

    def test_openAuth_refuses_invalid_URI(self):
        conn_method = self.get_openAuth_curry_func()
        self.assertRaises(libvirt.libvirtError, conn_method, 'blah')

    def test_getInfo(self):
        """getInfo() must report plausible host hardware characteristics."""
        conn_method = self.get_openAuth_curry_func(readOnly=True)
        res = conn_method(None).getInfo()
        self.assertIn(res[0], ('i686', 'x86_64'))
        self.assertTrue(1024 <= res[1] <= 16384,
                        "Memory unusually high or low.")
        self.assertTrue(1 <= res[2] <= 32,
                        "Active CPU count unusually high or low.")
        self.assertTrue(800 <= res[3] <= 4500,
                        "CPU speed unusually high or low.")
        self.assertTrue(res[2] <= (res[5] * res[6]),
                        "More active CPUs than num_sockets*cores_per_socket")

    def test_createXML_detects_invalid_xml(self):
        self._test_XML_func_detects_invalid_xml('createXML', [0])

    def test_defineXML_detects_invalid_xml(self):
        self._test_XML_func_detects_invalid_xml('defineXML', [])

    def _test_XML_func_detects_invalid_xml(self, xmlfunc_name, args):
        """Common check that malformed XML raises a proper libvirtError."""
        conn = self.get_openAuth_curry_func()('qemu:///system')
        try:
            getattr(conn, xmlfunc_name)("this is not valid </xml>", *args)
        except libvirt.libvirtError as e:
            self.assertEqual(e.get_error_code(), libvirt.VIR_ERR_XML_DETAIL)
            self.assertEqual(e.get_error_domain(), libvirt.VIR_FROM_DOMAIN)
            return
        raise self.failureException("Invalid XML didn't raise libvirtError")

    def test_defineXML_defines_domain(self):
        conn = self.get_openAuth_curry_func()('qemu:///system')
        conn.defineXML(get_vm_xml())
        dom = conn.lookupByName('testname')
        self.assertEqual('testname', dom.name())
        self.assertEqual(0, dom.isActive())
        dom.undefine()
        self.assertRaises(libvirt.libvirtError,
                          conn.lookupByName,
                          'testname')

    def test_blockStats(self):
        conn = self.get_openAuth_curry_func()('qemu:///system')
        conn.createXML(get_vm_xml(), 0)
        dom = conn.lookupByName('testname')
        blockstats = dom.blockStats('vda')
        self.assertEqual(len(blockstats), 5)
        for x in blockstats:
            # NOTE: Python 2 only -- ``long`` does not exist on Python 3.
            self.assertTrue(type(x) in [int, long])

    def test_attach_detach(self):
        conn = self.get_openAuth_curry_func()('qemu:///system')
        conn.createXML(get_vm_xml(), 0)
        dom = conn.lookupByName('testname')
        xml = '''<disk type='block'>
<driver name='qemu' type='raw'/>
<source dev='/dev/nbd0'/>
<target dev='/dev/vdc' bus='virtio'/>
</disk>'''
        self.assertTrue(dom.attachDevice(xml))
        self.assertTrue(dom.detachDevice(xml))

    def test_info(self):
        conn = self.get_openAuth_curry_func()('qemu:///system')
        conn.createXML(get_vm_xml(), 0)
        dom = conn.lookupByName('testname')
        info = dom.info()
        self.assertEqual(info[0], libvirt.VIR_DOMAIN_RUNNING)
        self.assertEqual(info[1], 128000)
        self.assertTrue(info[2] <= 128000)
        self.assertEqual(info[3], 1)
        self.assertTrue(type(info[4]) in [int, long])

    def test_createXML_runs_domain(self):
        conn = self.get_openAuth_curry_func()('qemu:///system')
        conn.createXML(get_vm_xml(), 0)
        dom = conn.lookupByName('testname')
        self.assertEqual('testname', dom.name())
        self.assertEqual(1, dom.isActive())
        dom.destroy()
        try:
            dom = conn.lookupByName('testname')
        except libvirt.libvirtError as e:
            self.assertEqual(e.get_error_code(), libvirt.VIR_ERR_NO_DOMAIN)
            self.assertEqual(e.get_error_domain(), libvirt.VIR_FROM_QEMU)
            return
        self.fail("lookupByName succeeded for destroyed non-defined VM")

    def test_defineXML_remembers_uuid(self):
        conn = self.get_openAuth_curry_func()('qemu:///system')
        uuid = 'b21f957d-a72f-4b93-b5a5-45b1161abb02'
        conn.defineXML(get_vm_xml(uuid=uuid))
        dom = conn.lookupByName('testname')
        self.assertEqual(dom.UUIDString(), uuid)

    def test_createWithFlags(self):
        conn = self.get_openAuth_curry_func()('qemu:///system')
        conn.defineXML(get_vm_xml())
        dom = conn.lookupByName('testname')
        self.assertFalse(dom.isActive(), 'Defined domain was running.')
        dom.createWithFlags(0)
        self.assertTrue(dom.isActive(),
                        'Domain wasn\'t running after createWithFlags')

    def test_managedSave(self):
        conn = self.get_openAuth_curry_func()('qemu:///system')
        conn.defineXML(get_vm_xml())
        dom = conn.lookupByName('testname')
        self.assertFalse(dom.isActive(), 'Defined domain was running.')
        dom.createWithFlags(0)
        self.assertEqual(dom.hasManagedSaveImage(0), 0)
        dom.managedSave(0)
        self.assertEqual(dom.hasManagedSaveImage(0), 1)
        dom.managedSaveRemove(0)
        self.assertEqual(dom.hasManagedSaveImage(0), 0)

    def test_listDomainsId_and_lookupById(self):
        conn = self.get_openAuth_curry_func()('qemu:///system')
        self.assertEqual(conn.listDomainsID(), [])
        conn.defineXML(get_vm_xml())
        dom = conn.lookupByName('testname')
        dom.createWithFlags(0)
        self.assertEqual(len(conn.listDomainsID()), 1)
        dom_id = conn.listDomainsID()[0]
        self.assertEqual(conn.lookupByID(dom_id), dom)
        dom_id = conn.listDomainsID()[0]
        try:
            conn.lookupByID(dom_id + 1)
        except libvirt.libvirtError as e:
            self.assertEqual(e.get_error_code(), libvirt.VIR_ERR_NO_DOMAIN)
            self.assertEqual(e.get_error_domain(), libvirt.VIR_FROM_QEMU)
            return
        raise self.failureException("Looking up an invalid domain ID didn't "
                                    "raise libvirtError")

    def test_define_and_retrieve(self):
        conn = self.get_openAuth_curry_func()('qemu:///system')
        self.assertEqual(conn.listDomainsID(), [])
        conn.defineXML(get_vm_xml())
        dom = conn.lookupByName('testname')
        xml = dom.XMLDesc(0)
        # Will raise if the emitted XML is not well-formed.
        etree.fromstring(xml)

    def _test_accepts_source_type(self, source_type):
        """A disk <source> defined with any attribute name must round-trip."""
        conn = self.get_openAuth_curry_func()('qemu:///system')
        self.assertEqual(conn.listDomainsID(), [])
        conn.defineXML(get_vm_xml(source_type=source_type))
        dom = conn.lookupByName('testname')
        xml = dom.XMLDesc(0)
        tree = etree.fromstring(xml)
        elem = tree.find('./devices/disk/source')
        self.assertEqual(elem.get('file'), '/somefile')

    def test_accepts_source_dev(self):
        self._test_accepts_source_type('dev')

    def test_accepts_source_path(self):
        self._test_accepts_source_type('path')

    def test_network_type_bridge_sticks(self):
        self._test_network_type_sticks('bridge')

    def test_network_type_network_sticks(self):
        self._test_network_type_sticks('network')

    def _test_network_type_sticks(self, network_type):
        """Interface type given at define time must survive an XML round-trip."""
        conn = self.get_openAuth_curry_func()('qemu:///system')
        self.assertEqual(conn.listDomainsID(), [])
        conn.defineXML(get_vm_xml(interface_type=network_type))
        dom = conn.lookupByName('testname')
        xml = dom.XMLDesc(0)
        tree = etree.fromstring(xml)
        elem = tree.find('./devices/interface')
        self.assertEqual(elem.get('type'), network_type)
        elem = elem.find('./source')
        self.assertEqual(elem.get(network_type), 'br100')

    def test_getType(self):
        conn = self.get_openAuth_curry_func()('qemu:///system')
        self.assertEqual(conn.getType(), 'QEMU')

    def test_getVersion(self):
        conn = self.get_openAuth_curry_func()('qemu:///system')
        self.assertTrue(type(conn.getVersion()) is int)

    def test_getCapabilities(self):
        conn = self.get_openAuth_curry_func()('qemu:///system')
        # Will raise if the capabilities document is not well-formed XML.
        etree.fromstring(conn.getCapabilities())

    def test_nwfilter_define_undefine(self):
        conn = self.get_openAuth_curry_func()('qemu:///system')
        # Will raise an exception if it's not valid XML
        xml = '''<filter name='nova-instance-instance-789' chain='root'>
<uuid>946878c6-3ad3-82b2-87f3-c709f3807f58</uuid>
</filter>'''
        conn.nwfilterDefineXML(xml)
        nwfilter = conn.nwfilterLookupByName('nova-instance-instance-789')
        nwfilter.undefine()
        try:
            conn.nwfilterLookupByName('nova-instance-instance-789320334')
        except libvirt.libvirtError as e:
            self.assertEqual(e.get_error_code(), libvirt.VIR_ERR_NO_NWFILTER)
            self.assertEqual(e.get_error_domain(), libvirt.VIR_FROM_NWFILTER)
            return
        raise self.failureException("Invalid NWFilter name didn't"
                                    " raise libvirtError")

    def test_compareCPU_compatible(self):
        conn = self.get_openAuth_curry_func()('qemu:///system')
        xml = '''<cpu>
<arch>%s</arch>
<model>%s</model>
<vendor>%s</vendor>
<topology sockets="%d" cores="%d" threads="%d"/>
</cpu>''' % (libvirt.node_arch,
             libvirt.node_cpu_model,
             libvirt.node_cpu_vendor,
             libvirt.node_sockets,
             libvirt.node_cores,
             libvirt.node_threads)
        self.assertEqual(conn.compareCPU(xml, 0),
                         libvirt.VIR_CPU_COMPARE_IDENTICAL)

    def test_compareCPU_incompatible_vendor(self):
        conn = self.get_openAuth_curry_func()('qemu:///system')
        xml = '''<cpu>
<arch>%s</arch>
<model>%s</model>
<vendor>%s</vendor>
<topology sockets="%d" cores="%d" threads="%d"/>
</cpu>''' % (libvirt.node_arch,
             libvirt.node_cpu_model,
             "AnotherVendor",
             libvirt.node_sockets,
             libvirt.node_cores,
             libvirt.node_threads)
        self.assertEqual(conn.compareCPU(xml, 0),
                         libvirt.VIR_CPU_COMPARE_INCOMPATIBLE)

    def test_compareCPU_incompatible_arch(self):
        conn = self.get_openAuth_curry_func()('qemu:///system')
        xml = '''<cpu>
<arch>%s</arch>
<model>%s</model>
<vendor>%s</vendor>
<topology sockets="%d" cores="%d" threads="%d"/>
</cpu>''' % ('not-a-valid-arch',
             libvirt.node_cpu_model,
             libvirt.node_cpu_vendor,
             libvirt.node_sockets,
             libvirt.node_cores,
             libvirt.node_threads)
        self.assertEqual(conn.compareCPU(xml, 0),
                         libvirt.VIR_CPU_COMPARE_INCOMPATIBLE)

    def test_compareCPU_incompatible_model(self):
        conn = self.get_openAuth_curry_func()('qemu:///system')
        xml = '''<cpu>
<arch>%s</arch>
<model>%s</model>
<vendor>%s</vendor>
<topology sockets="%d" cores="%d" threads="%d"/>
</cpu>''' % (libvirt.node_arch,
             "AnotherModel",
             libvirt.node_cpu_vendor,
             libvirt.node_sockets,
             libvirt.node_cores,
             libvirt.node_threads)
        self.assertEqual(conn.compareCPU(xml, 0),
                         libvirt.VIR_CPU_COMPARE_INCOMPATIBLE)

    def test_compareCPU_compatible_unspecified_model(self):
        conn = self.get_openAuth_curry_func()('qemu:///system')
        xml = '''<cpu>
<arch>%s</arch>
<vendor>%s</vendor>
<topology sockets="%d" cores="%d" threads="%d"/>
</cpu>''' % (libvirt.node_arch,
             libvirt.node_cpu_vendor,
             libvirt.node_sockets,
             libvirt.node_cores,
             libvirt.node_threads)
        self.assertEqual(conn.compareCPU(xml, 0),
                         libvirt.VIR_CPU_COMPARE_IDENTICAL)
| apache-2.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.