repo_name
stringlengths
5
100
path
stringlengths
4
294
copies
stringclasses
990 values
size
stringlengths
4
7
content
stringlengths
666
1M
license
stringclasses
15 values
Gato-X/NotYourData
gameowfication/input.py
1
3257
""" The MIT License (MIT) Copyright (c) 2015 Guillermo Romero Franco (AKA Gato) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
""" import pygame class Input: def __init__(self): self._rot = 0 self._fwd = 0 self._back = 0 self._left = 0 self._right = 0 self._jump = 0 self._shoot = 0 self._actions ={ pygame.K_LEFT:self._updateLeft, pygame.K_a:self._updateLeft, pygame.K_RIGHT:self._updateRight, pygame.K_d:self._updateRight, pygame.K_UP:self._updateFwd, pygame.K_w:self._updateFwd, pygame.K_DOWN:self._updateBack, pygame.K_s:self._updateBack, pygame.K_SPACE:self._updateJump, pygame.K_RETURN:self._updateShoot, pygame.K_KP_ENTER:self._updateShoot } self._should_quit = False def shouldQuit(self): return self._should_quit def wantJump(self): return self._jump def wantShoot(self): return self._shoot def fwdMotion(self): return self._fwd - self._back def rotMotion(self): return self._left - self._right def process(self): self._jump=0 self._shoot=0 events = pygame.event.get() for event in events: if event.type == pygame.KEYDOWN: try: act = self._actions[event.key] act(1) except: pass elif event.type == pygame.KEYUP: try: act = self._actions[event.key] act(0) except: pass elif event.type == pygame.QUIT: self._should_quit = True #--------- def _updateFwd(self, press): self._fwd = 1 if press else 0 def _updateBack(self, press): self._back = 1 if press else 0 def _updateLeft(self, press): self._left = 1 if press else 0 def _updateRight(self, press): self._right = 1 if press else 0 def _updateJump(self, press): if press == 1: self._jump = 1 def _updateShoot(self, press): if press == 1: self._shoot = 1
mit
ndtran/l10n-switzerland
__unported__/l10n_ch_scan_bvr/__openerp__.py
4
2015
# -*- coding: utf-8 -*- ############################################################################## # # Author: Nicolas Bessi, Vincent Renaville # Copyright 2012 Camptocamp SA # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## {"name": "Switzerland - Scan ESR/BVR to create invoices", "description": """ Scan ESR/BVR to create supplier invoices ======================================== This module works with C-channel or other OCR scanner. It helps you to create an invoice directly from the ESR/BVR Code. Find the menu entry called "Scan BVR" under Accounting -> Supplier. It open a popup from which you can scan the ESR/BVR number. It'll recognize the needed information and create an invoice for the right supplier. If you have completed the field "Default product supplier invoice" on the concerned supplier, it'll create a line with the proper amount and the given product. It currently supports BVR and BVR+ """, "version": "1.0", "author": "Camptocamp,Odoo Community Association (OCA)", "category": "Generic Modules/Others", "website": "http://www.camptocamp.com", "license": "AGPL-3", "depends": ["l10n_ch", "l10n_ch_payment_slip"], "data": ["wizard/scan_bvr_view.xml", "partner_view.xml", "bank_view.xml"], 'installable': False }
agpl-3.0
sunqb/oa_qian
flask/Lib/site-packages/jinja2/testsuite/__init__.py
404
4641
# -*- coding: utf-8 -*- """ jinja2.testsuite ~~~~~~~~~~~~~~~~ All the unittests of Jinja2. These tests can be executed by either running run-tests.py using multiple Python versions at the same time. :copyright: (c) 2010 by the Jinja Team. :license: BSD, see LICENSE for more details. """ import os import re import sys import unittest from traceback import format_exception from jinja2 import loaders from jinja2._compat import PY2 here = os.path.dirname(os.path.abspath(__file__)) dict_loader = loaders.DictLoader({ 'justdict.html': 'FOO' }) package_loader = loaders.PackageLoader('jinja2.testsuite.res', 'templates') filesystem_loader = loaders.FileSystemLoader(here + '/res/templates') function_loader = loaders.FunctionLoader({'justfunction.html': 'FOO'}.get) choice_loader = loaders.ChoiceLoader([dict_loader, package_loader]) prefix_loader = loaders.PrefixLoader({ 'a': filesystem_loader, 'b': dict_loader }) class JinjaTestCase(unittest.TestCase): ### use only these methods for testing. If you need standard ### unittest method, wrap them! def setup(self): pass def teardown(self): pass def setUp(self): self.setup() def tearDown(self): self.teardown() def assert_equal(self, a, b): return self.assertEqual(a, b) def assert_raises(self, *args, **kwargs): return self.assertRaises(*args, **kwargs) def assert_traceback_matches(self, callback, expected_tb): try: callback() except Exception as e: tb = format_exception(*sys.exc_info()) if re.search(expected_tb.strip(), ''.join(tb)) is None: raise self.fail('Traceback did not match:\n\n%s\nexpected:\n%s' % (''.join(tb), expected_tb)) else: self.fail('Expected exception') def find_all_tests(suite): """Yields all the tests and their names from a given suite.""" suites = [suite] while suites: s = suites.pop() try: suites.extend(s) except TypeError: yield s, '%s.%s.%s' % ( s.__class__.__module__, s.__class__.__name__, s._testMethodName ) class BetterLoader(unittest.TestLoader): """A nicer loader that solves two problems. 
First of all we are setting up tests from different sources and we're doing this programmatically which breaks the default loading logic so this is required anyways. Secondly this loader has a nicer interpolation for test names than the default one so you can just do ``run-tests.py ViewTestCase`` and it will work. """ def getRootSuite(self): return suite() def loadTestsFromName(self, name, module=None): root = self.getRootSuite() if name == 'suite': return root all_tests = [] for testcase, testname in find_all_tests(root): if testname == name or \ testname.endswith('.' + name) or \ ('.' + name + '.') in testname or \ testname.startswith(name + '.'): all_tests.append(testcase) if not all_tests: raise LookupError('could not find test case for "%s"' % name) if len(all_tests) == 1: return all_tests[0] rv = unittest.TestSuite() for test in all_tests: rv.addTest(test) return rv def suite(): from jinja2.testsuite import ext, filters, tests, core_tags, \ loader, inheritance, imports, lexnparse, security, api, \ regression, debug, utils, bytecode_cache, doctests suite = unittest.TestSuite() suite.addTest(ext.suite()) suite.addTest(filters.suite()) suite.addTest(tests.suite()) suite.addTest(core_tags.suite()) suite.addTest(loader.suite()) suite.addTest(inheritance.suite()) suite.addTest(imports.suite()) suite.addTest(lexnparse.suite()) suite.addTest(security.suite()) suite.addTest(api.suite()) suite.addTest(regression.suite()) suite.addTest(debug.suite()) suite.addTest(utils.suite()) suite.addTest(bytecode_cache.suite()) # doctests will not run on python 3 currently. Too many issues # with that, do not test that on that platform. if PY2: suite.addTest(doctests.suite()) return suite def main(): """Runs the testsuite as command line application.""" try: unittest.main(testLoader=BetterLoader(), defaultTest='suite') except Exception as e: print('Error: %s' % e)
apache-2.0
glennrub/micropython
tests/basics/syntaxerror.py
16
2900
# test syntax errors try: exec except NameError: print("SKIP") raise SystemExit def test_syntax(code): try: exec(code) print("no SyntaxError") except IndentationError: print("IndentationError") except SyntaxError: print("SyntaxError") # non-newline after line-continuation character (lexer error) test_syntax("a \\a\n") # dedent mismatch (lexer error) test_syntax("def f():\n a\n a\n") # unclosed string (lexer error) test_syntax("'abc") # invalid (lexer error) test_syntax("!") test_syntax("$") test_syntax("`") # bad indentation (lexer error) test_syntax(" a\n") # malformed integer literal (parser error) test_syntax("123z") # input doesn't match the grammar (parser error) test_syntax('1 or 2 or') test_syntax('{1:') # can't assign to literals test_syntax("1 = 2") test_syntax("'' = 1") test_syntax("{} = 1") # can't assign to comprehension test_syntax("(i for i in a) = 1") # can't assign to function test_syntax("f() = 1") # can't assign to power test_syntax("f**2 = 1") # can't assign to power of composite test_syntax("f[0]**2 = 1") # can't have *x on RHS test_syntax("x = *x") # can't have multiple *x on LHS test_syntax("*a, *b = c") # can't do augmented assignment to tuple test_syntax("a, b += c") test_syntax("(a, b) += c") # can't do augmented assignment to list test_syntax("[a, b] += c") # non-default argument can't follow default argument test_syntax("def f(a=1, b): pass") # can't delete these things test_syntax("del f()") test_syntax("del f[0]**2") test_syntax("del (a for a in a)") # must be in a "loop" test_syntax("break") test_syntax("continue") # must be in a function test_syntax("yield") test_syntax("nonlocal a") test_syntax("await 1") # error on uPy, warning on CPy #test_syntax("def f():\n a = 1\n global a") # default except must be last test_syntax("try:\n a\nexcept:\n pass\nexcept:\n pass") # LHS of keywords must be id's test_syntax("f(1=2)") # non-keyword after keyword test_syntax("f(a=1, 2)") # doesn't error on uPy but should #test_syntax("f(1, i for i in 
i)") # all elements of dict/set must be pairs or singles test_syntax("{1:2, 3}") test_syntax("{1, 2:3}") # can't mix non-bytes with bytes when concatenating test_syntax("'abc' b'def'") # can't reuse same name for argument test_syntax("def f(a, a): pass") # nonlocal must exist in outer function/class scope test_syntax("def f():\n def g():\n nonlocal a") # param can't be redefined as global test_syntax('def f(x):\n global x') # param can't be redefined as nonlocal test_syntax('def f(x):\n nonlocal x') # can define variable to be both nonlocal and global test_syntax('def f():\n nonlocal x\n global x') # can't have multiple *'s test_syntax('def f(x, *a, *):\n pass') test_syntax('lambda x, *a, *: 1') # **kw must be last test_syntax('def f(x, *a, **kw, r):\n pass') test_syntax('lambda x, *a, **kw, r: 1')
mit
CCI-MOC/nova
nova/tests/unit/api/openstack/compute/test_extensions.py
27
8620
# Copyright 2013 IBM Corp. # Copyright 2014 NEC Corporation. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from oslo_config import cfg import stevedore import webob.exc from nova.api import openstack from nova.api.openstack import compute from nova.api.openstack.compute import extension_info from nova.api.openstack import extensions from nova import exception from nova import test CONF = cfg.CONF class fake_bad_extension(object): name = "fake_bad_extension" alias = "fake-bad" class fake_stevedore_enabled_extensions(object): def __init__(self, namespace, check_func, invoke_on_load=False, invoke_args=(), invoke_kwds=None): self.extensions = [] def map(self, func, *args, **kwds): pass def __iter__(self): return iter(self.extensions) class fake_loaded_extension_info(object): def __init__(self): self.extensions = {} def register_extension(self, ext): self.extensions[ext] = ext return True def get_extensions(self): return {'core1': None, 'core2': None, 'noncore1': None} class ExtensionLoadingTestCase(test.NoDBTestCase): def _set_v21_core(self, core_extensions): openstack.API_V21_CORE_EXTENSIONS = core_extensions def test_extensions_loaded(self): app = compute.APIRouterV21() self.assertIn('servers', app._loaded_extension_info.extensions) def test_check_bad_extension(self): loaded_ext_info = extension_info.LoadedExtensionInfo() self.assertFalse(loaded_ext_info._check_extension(fake_bad_extension)) def test_extensions_blacklist(self): app = 
compute.APIRouterV21() self.assertIn('os-hosts', app._loaded_extension_info.extensions) CONF.set_override('extensions_blacklist', ['os-hosts'], 'osapi_v21') app = compute.APIRouterV21() self.assertNotIn('os-hosts', app._loaded_extension_info.extensions) @mock.patch('nova.api.openstack.APIRouterV21._register_resources_list') def test_extensions_inherit(self, mock_register): app = compute.APIRouterV21() self.assertIn('servers', app._loaded_extension_info.extensions) self.assertIn('os-volumes', app._loaded_extension_info.extensions) mock_register.assert_called_with(mock.ANY, mock.ANY) ext_no_inherits = mock_register.call_args_list[0][0][0] ext_has_inherits = mock_register.call_args_list[1][0][0] # os-volumes inherits from servers name_list = [ext.obj.alias for ext in ext_has_inherits] self.assertIn('os-volumes', name_list) name_list = [ext.obj.alias for ext in ext_no_inherits] self.assertIn('servers', name_list) def test_extensions_whitelist_accept(self): # NOTE(maurosr): just to avoid to get an exception raised for not # loading all core api. v21_core = openstack.API_V21_CORE_EXTENSIONS openstack.API_V21_CORE_EXTENSIONS = set(['servers']) self.addCleanup(self._set_v21_core, v21_core) app = compute.APIRouterV21() self.assertIn('os-hosts', app._loaded_extension_info.extensions) CONF.set_override('extensions_whitelist', ['servers', 'os-hosts'], 'osapi_v21') app = compute.APIRouterV21() self.assertIn('os-hosts', app._loaded_extension_info.extensions) def test_extensions_whitelist_block(self): # NOTE(maurosr): just to avoid to get an exception raised for not # loading all core api. 
v21_core = openstack.API_V21_CORE_EXTENSIONS openstack.API_V21_CORE_EXTENSIONS = set(['servers']) self.addCleanup(self._set_v21_core, v21_core) app = compute.APIRouterV21() self.assertIn('os-hosts', app._loaded_extension_info.extensions) CONF.set_override('extensions_whitelist', ['servers'], 'osapi_v21') app = compute.APIRouterV21() self.assertNotIn('os-hosts', app._loaded_extension_info.extensions) def test_blacklist_overrides_whitelist(self): # NOTE(maurosr): just to avoid to get an exception raised for not # loading all core api. v21_core = openstack.API_V21_CORE_EXTENSIONS openstack.API_V21_CORE_EXTENSIONS = set(['servers']) self.addCleanup(self._set_v21_core, v21_core) app = compute.APIRouterV21() self.assertIn('os-hosts', app._loaded_extension_info.extensions) CONF.set_override('extensions_whitelist', ['servers', 'os-hosts'], 'osapi_v21') CONF.set_override('extensions_blacklist', ['os-hosts'], 'osapi_v21') app = compute.APIRouterV21() self.assertNotIn('os-hosts', app._loaded_extension_info.extensions) self.assertIn('servers', app._loaded_extension_info.extensions) self.assertEqual(1, len(app._loaded_extension_info.extensions)) def test_get_missing_core_extensions(self): v21_core = openstack.API_V21_CORE_EXTENSIONS openstack.API_V21_CORE_EXTENSIONS = set(['core1', 'core2']) self.addCleanup(self._set_v21_core, v21_core) self.assertEqual(0, len( compute.APIRouterV21.get_missing_core_extensions( ['core1', 'core2', 'noncore1']))) missing_core = compute.APIRouterV21.get_missing_core_extensions( ['core1']) self.assertEqual(1, len(missing_core)) self.assertIn('core2', missing_core) missing_core = compute.APIRouterV21.get_missing_core_extensions([]) self.assertEqual(2, len(missing_core)) self.assertIn('core1', missing_core) self.assertIn('core2', missing_core) missing_core = compute.APIRouterV21.get_missing_core_extensions( ['noncore1']) self.assertEqual(2, len(missing_core)) self.assertIn('core1', missing_core) self.assertIn('core2', missing_core) def 
test_core_extensions_present(self): self.stubs.Set(stevedore.enabled, 'EnabledExtensionManager', fake_stevedore_enabled_extensions) self.stubs.Set(extension_info, 'LoadedExtensionInfo', fake_loaded_extension_info) v21_core = openstack.API_V21_CORE_EXTENSIONS openstack.API_V21_CORE_EXTENSIONS = set(['core1', 'core2']) self.addCleanup(self._set_v21_core, v21_core) # if no core API extensions are missing then an exception will # not be raised when creating an instance of compute.APIRouterV21 compute.APIRouterV21() def test_core_extensions_missing(self): self.stubs.Set(stevedore.enabled, 'EnabledExtensionManager', fake_stevedore_enabled_extensions) self.stubs.Set(extension_info, 'LoadedExtensionInfo', fake_loaded_extension_info) self.assertRaises(exception.CoreAPIMissing, compute.APIRouterV21) def test_extensions_expected_error(self): @extensions.expected_errors(404) def fake_func(): raise webob.exc.HTTPNotFound() self.assertRaises(webob.exc.HTTPNotFound, fake_func) def test_extensions_expected_error_from_list(self): @extensions.expected_errors((404, 403)) def fake_func(): raise webob.exc.HTTPNotFound() self.assertRaises(webob.exc.HTTPNotFound, fake_func) def test_extensions_unexpected_error(self): @extensions.expected_errors(404) def fake_func(): raise webob.exc.HTTPConflict() self.assertRaises(webob.exc.HTTPInternalServerError, fake_func) def test_extensions_unexpected_error_from_list(self): @extensions.expected_errors((404, 413)) def fake_func(): raise webob.exc.HTTPConflict() self.assertRaises(webob.exc.HTTPInternalServerError, fake_func) def test_extensions_unexpected_policy_not_authorized_error(self): @extensions.expected_errors(404) def fake_func(): raise exception.PolicyNotAuthorized(action="foo") self.assertRaises(exception.PolicyNotAuthorized, fake_func)
apache-2.0
michaelBenin/django-shop
shop/util/fields.py
5
1163
# -*- coding: utf-8 -*- from decimal import Decimal from django.db.models.fields import DecimalField class CurrencyField(DecimalField): """ A CurrencyField is simply a subclass of DecimalField with a fixed format: max_digits = 30, decimal_places=10, and defaults to 0.00 """ def __init__(self, **kwargs): defaults = { 'max_digits': 30, 'decimal_places': 2, 'default': Decimal('0.0') } defaults.update(kwargs) super(CurrencyField, self).__init__(**defaults) def south_field_triple(self): # pragma: no cover """ Returns a suitable description of this field for South. This is excluded from coverage reports since it is pretty much a piece of South itself, and does not influence program behavior at all in case we don't use South. """ # We'll just introspect the _actual_ field. from south.modelsinspector import introspector field_class = "django.db.models.fields.DecimalField" args, kwargs = introspector(self) # That's our definition! return (field_class, args, kwargs)
bsd-3-clause
Huskerboy/startbootstrap-freelancer
freelancer_env/Lib/site-packages/pip/_vendor/requests/packages/chardet/big5freq.py
3133
82594
######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Communicator client code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### # Big5 frequency table # by Taiwan's Mandarin Promotion Council # <http://www.edu.tw:81/mandr/> # # 128 --> 0.42261 # 256 --> 0.57851 # 512 --> 0.74851 # 1024 --> 0.89384 # 2048 --> 0.97583 # # Ideal Distribution Ratio = 0.74851/(1-0.74851) =2.98 # Random Distribution Ration = 512/(5401-512)=0.105 # # Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75 #Char to FreqOrder table BIG5_TABLE_SIZE = 5376 Big5CharToFreqOrder = ( 1,1801,1506, 255,1431, 198, 9, 82, 6,5008, 177, 202,3681,1256,2821, 110, # 16 3814, 33,3274, 261, 76, 44,2114, 16,2946,2187,1176, 659,3971, 26,3451,2653, # 32 1198,3972,3350,4202, 410,2215, 302, 590, 361,1964, 8, 204, 58,4510,5009,1932, # 48 63,5010,5011, 317,1614, 75, 222, 159,4203,2417,1480,5012,3555,3091, 224,2822, # 64 3682, 
3, 10,3973,1471, 29,2787,1135,2866,1940, 873, 130,3275,1123, 312,5013, # 80 4511,2052, 507, 252, 682,5014, 142,1915, 124, 206,2947, 34,3556,3204, 64, 604, # 96 5015,2501,1977,1978, 155,1991, 645, 641,1606,5016,3452, 337, 72, 406,5017, 80, # 112 630, 238,3205,1509, 263, 939,1092,2654, 756,1440,1094,3453, 449, 69,2987, 591, # 128 179,2096, 471, 115,2035,1844, 60, 50,2988, 134, 806,1869, 734,2036,3454, 180, # 144 995,1607, 156, 537,2907, 688,5018, 319,1305, 779,2145, 514,2379, 298,4512, 359, # 160 2502, 90,2716,1338, 663, 11, 906,1099,2553, 20,2441, 182, 532,1716,5019, 732, # 176 1376,4204,1311,1420,3206, 25,2317,1056, 113, 399, 382,1950, 242,3455,2474, 529, # 192 3276, 475,1447,3683,5020, 117, 21, 656, 810,1297,2300,2334,3557,5021, 126,4205, # 208 706, 456, 150, 613,4513, 71,1118,2037,4206, 145,3092, 85, 835, 486,2115,1246, # 224 1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,5022,2128,2359, 347,3815, 221, # 240 3558,3135,5023,1956,1153,4207, 83, 296,1199,3093, 192, 624, 93,5024, 822,1898, # 256 2823,3136, 795,2065, 991,1554,1542,1592, 27, 43,2867, 859, 139,1456, 860,4514, # 272 437, 712,3974, 164,2397,3137, 695, 211,3037,2097, 195,3975,1608,3559,3560,3684, # 288 3976, 234, 811,2989,2098,3977,2233,1441,3561,1615,2380, 668,2077,1638, 305, 228, # 304 1664,4515, 467, 415,5025, 262,2099,1593, 239, 108, 300, 200,1033, 512,1247,2078, # 320 5026,5027,2176,3207,3685,2682, 593, 845,1062,3277, 88,1723,2038,3978,1951, 212, # 336 266, 152, 149, 468,1899,4208,4516, 77, 187,5028,3038, 37, 5,2990,5029,3979, # 352 5030,5031, 39,2524,4517,2908,3208,2079, 55, 148, 74,4518, 545, 483,1474,1029, # 368 1665, 217,1870,1531,3138,1104,2655,4209, 24, 172,3562, 900,3980,3563,3564,4519, # 384 32,1408,2824,1312, 329, 487,2360,2251,2717, 784,2683, 4,3039,3351,1427,1789, # 400 188, 109, 499,5032,3686,1717,1790, 888,1217,3040,4520,5033,3565,5034,3352,1520, # 416 3687,3981, 196,1034, 775,5035,5036, 929,1816, 249, 439, 38,5037,1063,5038, 794, # 432 3982,1435,2301, 46, 178,3278,2066,5039,2381,5040, 
214,1709,4521, 804, 35, 707, # 448 324,3688,1601,2554, 140, 459,4210,5041,5042,1365, 839, 272, 978,2262,2580,3456, # 464 2129,1363,3689,1423, 697, 100,3094, 48, 70,1231, 495,3139,2196,5043,1294,5044, # 480 2080, 462, 586,1042,3279, 853, 256, 988, 185,2382,3457,1698, 434,1084,5045,3458, # 496 314,2625,2788,4522,2335,2336, 569,2285, 637,1817,2525, 757,1162,1879,1616,3459, # 512 287,1577,2116, 768,4523,1671,2868,3566,2526,1321,3816, 909,2418,5046,4211, 933, # 528 3817,4212,2053,2361,1222,4524, 765,2419,1322, 786,4525,5047,1920,1462,1677,2909, # 544 1699,5048,4526,1424,2442,3140,3690,2600,3353,1775,1941,3460,3983,4213, 309,1369, # 560 1130,2825, 364,2234,1653,1299,3984,3567,3985,3986,2656, 525,1085,3041, 902,2001, # 576 1475, 964,4527, 421,1845,1415,1057,2286, 940,1364,3141, 376,4528,4529,1381, 7, # 592 2527, 983,2383, 336,1710,2684,1846, 321,3461, 559,1131,3042,2752,1809,1132,1313, # 608 265,1481,1858,5049, 352,1203,2826,3280, 167,1089, 420,2827, 776, 792,1724,3568, # 624 4214,2443,3281,5050,4215,5051, 446, 229, 333,2753, 901,3818,1200,1557,4530,2657, # 640 1921, 395,2754,2685,3819,4216,1836, 125, 916,3209,2626,4531,5052,5053,3820,5054, # 656 5055,5056,4532,3142,3691,1133,2555,1757,3462,1510,2318,1409,3569,5057,2146, 438, # 672 2601,2910,2384,3354,1068, 958,3043, 461, 311,2869,2686,4217,1916,3210,4218,1979, # 688 383, 750,2755,2627,4219, 274, 539, 385,1278,1442,5058,1154,1965, 384, 561, 210, # 704 98,1295,2556,3570,5059,1711,2420,1482,3463,3987,2911,1257, 129,5060,3821, 642, # 720 523,2789,2790,2658,5061, 141,2235,1333, 68, 176, 441, 876, 907,4220, 603,2602, # 736 710, 171,3464, 404, 549, 18,3143,2398,1410,3692,1666,5062,3571,4533,2912,4534, # 752 5063,2991, 368,5064, 146, 366, 99, 871,3693,1543, 748, 807,1586,1185, 22,2263, # 768 379,3822,3211,5065,3212, 505,1942,2628,1992,1382,2319,5066, 380,2362, 218, 702, # 784 1818,1248,3465,3044,3572,3355,3282,5067,2992,3694, 930,3283,3823,5068, 59,5069, # 800 585, 601,4221, 497,3466,1112,1314,4535,1802,5070,1223,1472,2177,5071, 
749,1837, # 816 690,1900,3824,1773,3988,1476, 429,1043,1791,2236,2117, 917,4222, 447,1086,1629, # 832 5072, 556,5073,5074,2021,1654, 844,1090, 105, 550, 966,1758,2828,1008,1783, 686, # 848 1095,5075,2287, 793,1602,5076,3573,2603,4536,4223,2948,2302,4537,3825, 980,2503, # 864 544, 353, 527,4538, 908,2687,2913,5077, 381,2629,1943,1348,5078,1341,1252, 560, # 880 3095,5079,3467,2870,5080,2054, 973, 886,2081, 143,4539,5081,5082, 157,3989, 496, # 896 4224, 57, 840, 540,2039,4540,4541,3468,2118,1445, 970,2264,1748,1966,2082,4225, # 912 3144,1234,1776,3284,2829,3695, 773,1206,2130,1066,2040,1326,3990,1738,1725,4226, # 928 279,3145, 51,1544,2604, 423,1578,2131,2067, 173,4542,1880,5083,5084,1583, 264, # 944 610,3696,4543,2444, 280, 154,5085,5086,5087,1739, 338,1282,3096, 693,2871,1411, # 960 1074,3826,2445,5088,4544,5089,5090,1240, 952,2399,5091,2914,1538,2688, 685,1483, # 976 4227,2475,1436, 953,4228,2055,4545, 671,2400, 79,4229,2446,3285, 608, 567,2689, # 992 3469,4230,4231,1691, 393,1261,1792,2401,5092,4546,5093,5094,5095,5096,1383,1672, # 1008 3827,3213,1464, 522,1119, 661,1150, 216, 675,4547,3991,1432,3574, 609,4548,2690, # 1024 2402,5097,5098,5099,4232,3045, 0,5100,2476, 315, 231,2447, 301,3356,4549,2385, # 1040 5101, 233,4233,3697,1819,4550,4551,5102, 96,1777,1315,2083,5103, 257,5104,1810, # 1056 3698,2718,1139,1820,4234,2022,1124,2164,2791,1778,2659,5105,3097, 363,1655,3214, # 1072 5106,2993,5107,5108,5109,3992,1567,3993, 718, 103,3215, 849,1443, 341,3357,2949, # 1088 1484,5110,1712, 127, 67, 339,4235,2403, 679,1412, 821,5111,5112, 834, 738, 351, # 1104 2994,2147, 846, 235,1497,1881, 418,1993,3828,2719, 186,1100,2148,2756,3575,1545, # 1120 1355,2950,2872,1377, 583,3994,4236,2581,2995,5113,1298,3699,1078,2557,3700,2363, # 1136 78,3829,3830, 267,1289,2100,2002,1594,4237, 348, 369,1274,2197,2178,1838,4552, # 1152 1821,2830,3701,2757,2288,2003,4553,2951,2758, 144,3358, 882,4554,3995,2759,3470, # 1168 4555,2915,5114,4238,1726, 320,5115,3996,3046, 
788,2996,5116,2831,1774,1327,2873, # 1184 3997,2832,5117,1306,4556,2004,1700,3831,3576,2364,2660, 787,2023, 506, 824,3702, # 1200 534, 323,4557,1044,3359,2024,1901, 946,3471,5118,1779,1500,1678,5119,1882,4558, # 1216 165, 243,4559,3703,2528, 123, 683,4239, 764,4560, 36,3998,1793, 589,2916, 816, # 1232 626,1667,3047,2237,1639,1555,1622,3832,3999,5120,4000,2874,1370,1228,1933, 891, # 1248 2084,2917, 304,4240,5121, 292,2997,2720,3577, 691,2101,4241,1115,4561, 118, 662, # 1264 5122, 611,1156, 854,2386,1316,2875, 2, 386, 515,2918,5123,5124,3286, 868,2238, # 1280 1486, 855,2661, 785,2216,3048,5125,1040,3216,3578,5126,3146, 448,5127,1525,5128, # 1296 2165,4562,5129,3833,5130,4242,2833,3579,3147, 503, 818,4001,3148,1568, 814, 676, # 1312 1444, 306,1749,5131,3834,1416,1030, 197,1428, 805,2834,1501,4563,5132,5133,5134, # 1328 1994,5135,4564,5136,5137,2198, 13,2792,3704,2998,3149,1229,1917,5138,3835,2132, # 1344 5139,4243,4565,2404,3580,5140,2217,1511,1727,1120,5141,5142, 646,3836,2448, 307, # 1360 5143,5144,1595,3217,5145,5146,5147,3705,1113,1356,4002,1465,2529,2530,5148, 519, # 1376 5149, 128,2133, 92,2289,1980,5150,4003,1512, 342,3150,2199,5151,2793,2218,1981, # 1392 3360,4244, 290,1656,1317, 789, 827,2365,5152,3837,4566, 562, 581,4004,5153, 401, # 1408 4567,2252, 94,4568,5154,1399,2794,5155,1463,2025,4569,3218,1944,5156, 828,1105, # 1424 4245,1262,1394,5157,4246, 605,4570,5158,1784,2876,5159,2835, 819,2102, 578,2200, # 1440 2952,5160,1502, 436,3287,4247,3288,2836,4005,2919,3472,3473,5161,2721,2320,5162, # 1456 5163,2337,2068, 23,4571, 193, 826,3838,2103, 699,1630,4248,3098, 390,1794,1064, # 1472 3581,5164,1579,3099,3100,1400,5165,4249,1839,1640,2877,5166,4572,4573, 137,4250, # 1488 598,3101,1967, 780, 104, 974,2953,5167, 278, 899, 253, 402, 572, 504, 493,1339, # 1504 5168,4006,1275,4574,2582,2558,5169,3706,3049,3102,2253, 565,1334,2722, 863, 41, # 1520 5170,5171,4575,5172,1657,2338, 19, 463,2760,4251, 606,5173,2999,3289,1087,2085, # 1536 
1323,2662,3000,5174,1631,1623,1750,4252,2691,5175,2878, 791,2723,2663,2339, 232, # 1552 2421,5176,3001,1498,5177,2664,2630, 755,1366,3707,3290,3151,2026,1609, 119,1918, # 1568 3474, 862,1026,4253,5178,4007,3839,4576,4008,4577,2265,1952,2477,5179,1125, 817, # 1584 4254,4255,4009,1513,1766,2041,1487,4256,3050,3291,2837,3840,3152,5180,5181,1507, # 1600 5182,2692, 733, 40,1632,1106,2879, 345,4257, 841,2531, 230,4578,3002,1847,3292, # 1616 3475,5183,1263, 986,3476,5184, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562, # 1632 4010,4011,2954, 967,2761,2665,1349, 592,2134,1692,3361,3003,1995,4258,1679,4012, # 1648 1902,2188,5185, 739,3708,2724,1296,1290,5186,4259,2201,2202,1922,1563,2605,2559, # 1664 1871,2762,3004,5187, 435,5188, 343,1108, 596, 17,1751,4579,2239,3477,3709,5189, # 1680 4580, 294,3582,2955,1693, 477, 979, 281,2042,3583, 643,2043,3710,2631,2795,2266, # 1696 1031,2340,2135,2303,3584,4581, 367,1249,2560,5190,3585,5191,4582,1283,3362,2005, # 1712 240,1762,3363,4583,4584, 836,1069,3153, 474,5192,2149,2532, 268,3586,5193,3219, # 1728 1521,1284,5194,1658,1546,4260,5195,3587,3588,5196,4261,3364,2693,1685,4262, 961, # 1744 1673,2632, 190,2006,2203,3841,4585,4586,5197, 570,2504,3711,1490,5198,4587,2633, # 1760 3293,1957,4588, 584,1514, 396,1045,1945,5199,4589,1968,2449,5200,5201,4590,4013, # 1776 619,5202,3154,3294, 215,2007,2796,2561,3220,4591,3221,4592, 763,4263,3842,4593, # 1792 5203,5204,1958,1767,2956,3365,3712,1174, 452,1477,4594,3366,3155,5205,2838,1253, # 1808 2387,2189,1091,2290,4264, 492,5206, 638,1169,1825,2136,1752,4014, 648, 926,1021, # 1824 1324,4595, 520,4596, 997, 847,1007, 892,4597,3843,2267,1872,3713,2405,1785,4598, # 1840 1953,2957,3103,3222,1728,4265,2044,3714,4599,2008,1701,3156,1551, 30,2268,4266, # 1856 5207,2027,4600,3589,5208, 501,5209,4267, 594,3478,2166,1822,3590,3479,3591,3223, # 1872 829,2839,4268,5210,1680,3157,1225,4269,5211,3295,4601,4270,3158,2341,5212,4602, # 1888 4271,5213,4015,4016,5214,1848,2388,2606,3367,5215,4603, 374,4017, 
652,4272,4273, # 1904 375,1140, 798,5216,5217,5218,2366,4604,2269, 546,1659, 138,3051,2450,4605,5219, # 1920 2254, 612,1849, 910, 796,3844,1740,1371, 825,3845,3846,5220,2920,2562,5221, 692, # 1936 444,3052,2634, 801,4606,4274,5222,1491, 244,1053,3053,4275,4276, 340,5223,4018, # 1952 1041,3005, 293,1168, 87,1357,5224,1539, 959,5225,2240, 721, 694,4277,3847, 219, # 1968 1478, 644,1417,3368,2666,1413,1401,1335,1389,4019,5226,5227,3006,2367,3159,1826, # 1984 730,1515, 184,2840, 66,4607,5228,1660,2958, 246,3369, 378,1457, 226,3480, 975, # 2000 4020,2959,1264,3592, 674, 696,5229, 163,5230,1141,2422,2167, 713,3593,3370,4608, # 2016 4021,5231,5232,1186, 15,5233,1079,1070,5234,1522,3224,3594, 276,1050,2725, 758, # 2032 1126, 653,2960,3296,5235,2342, 889,3595,4022,3104,3007, 903,1250,4609,4023,3481, # 2048 3596,1342,1681,1718, 766,3297, 286, 89,2961,3715,5236,1713,5237,2607,3371,3008, # 2064 5238,2962,2219,3225,2880,5239,4610,2505,2533, 181, 387,1075,4024, 731,2190,3372, # 2080 5240,3298, 310, 313,3482,2304, 770,4278, 54,3054, 189,4611,3105,3848,4025,5241, # 2096 1230,1617,1850, 355,3597,4279,4612,3373, 111,4280,3716,1350,3160,3483,3055,4281, # 2112 2150,3299,3598,5242,2797,4026,4027,3009, 722,2009,5243,1071, 247,1207,2343,2478, # 2128 1378,4613,2010, 864,1437,1214,4614, 373,3849,1142,2220, 667,4615, 442,2763,2563, # 2144 3850,4028,1969,4282,3300,1840, 837, 170,1107, 934,1336,1883,5244,5245,2119,4283, # 2160 2841, 743,1569,5246,4616,4284, 582,2389,1418,3484,5247,1803,5248, 357,1395,1729, # 2176 3717,3301,2423,1564,2241,5249,3106,3851,1633,4617,1114,2086,4285,1532,5250, 482, # 2192 2451,4618,5251,5252,1492, 833,1466,5253,2726,3599,1641,2842,5254,1526,1272,3718, # 2208 4286,1686,1795, 416,2564,1903,1954,1804,5255,3852,2798,3853,1159,2321,5256,2881, # 2224 4619,1610,1584,3056,2424,2764, 443,3302,1163,3161,5257,5258,4029,5259,4287,2506, # 2240 3057,4620,4030,3162,2104,1647,3600,2011,1873,4288,5260,4289, 431,3485,5261, 250, # 2256 97, 81,4290,5262,1648,1851,1558, 160, 848,5263, 
866, 740,1694,5264,2204,2843, # 2272 3226,4291,4621,3719,1687, 950,2479, 426, 469,3227,3720,3721,4031,5265,5266,1188, # 2288 424,1996, 861,3601,4292,3854,2205,2694, 168,1235,3602,4293,5267,2087,1674,4622, # 2304 3374,3303, 220,2565,1009,5268,3855, 670,3010, 332,1208, 717,5269,5270,3603,2452, # 2320 4032,3375,5271, 513,5272,1209,2882,3376,3163,4623,1080,5273,5274,5275,5276,2534, # 2336 3722,3604, 815,1587,4033,4034,5277,3605,3486,3856,1254,4624,1328,3058,1390,4035, # 2352 1741,4036,3857,4037,5278, 236,3858,2453,3304,5279,5280,3723,3859,1273,3860,4625, # 2368 5281, 308,5282,4626, 245,4627,1852,2480,1307,2583, 430, 715,2137,2454,5283, 270, # 2384 199,2883,4038,5284,3606,2727,1753, 761,1754, 725,1661,1841,4628,3487,3724,5285, # 2400 5286, 587, 14,3305, 227,2608, 326, 480,2270, 943,2765,3607, 291, 650,1884,5287, # 2416 1702,1226, 102,1547, 62,3488, 904,4629,3489,1164,4294,5288,5289,1224,1548,2766, # 2432 391, 498,1493,5290,1386,1419,5291,2056,1177,4630, 813, 880,1081,2368, 566,1145, # 2448 4631,2291,1001,1035,2566,2609,2242, 394,1286,5292,5293,2069,5294, 86,1494,1730, # 2464 4039, 491,1588, 745, 897,2963, 843,3377,4040,2767,2884,3306,1768, 998,2221,2070, # 2480 397,1827,1195,1970,3725,3011,3378, 284,5295,3861,2507,2138,2120,1904,5296,4041, # 2496 2151,4042,4295,1036,3490,1905, 114,2567,4296, 209,1527,5297,5298,2964,2844,2635, # 2512 2390,2728,3164, 812,2568,5299,3307,5300,1559, 737,1885,3726,1210, 885, 28,2695, # 2528 3608,3862,5301,4297,1004,1780,4632,5302, 346,1982,2222,2696,4633,3863,1742, 797, # 2544 1642,4043,1934,1072,1384,2152, 896,4044,3308,3727,3228,2885,3609,5303,2569,1959, # 2560 4634,2455,1786,5304,5305,5306,4045,4298,1005,1308,3728,4299,2729,4635,4636,1528, # 2576 2610, 161,1178,4300,1983, 987,4637,1101,4301, 631,4046,1157,3229,2425,1343,1241, # 2592 1016,2243,2570, 372, 877,2344,2508,1160, 555,1935, 911,4047,5307, 466,1170, 169, # 2608 1051,2921,2697,3729,2481,3012,1182,2012,2571,1251,2636,5308, 992,2345,3491,1540, # 2624 
2730,1201,2071,2406,1997,2482,5309,4638, 528,1923,2191,1503,1874,1570,2369,3379, # 2640 3309,5310, 557,1073,5311,1828,3492,2088,2271,3165,3059,3107, 767,3108,2799,4639, # 2656 1006,4302,4640,2346,1267,2179,3730,3230, 778,4048,3231,2731,1597,2667,5312,4641, # 2672 5313,3493,5314,5315,5316,3310,2698,1433,3311, 131, 95,1504,4049, 723,4303,3166, # 2688 1842,3610,2768,2192,4050,2028,2105,3731,5317,3013,4051,1218,5318,3380,3232,4052, # 2704 4304,2584, 248,1634,3864, 912,5319,2845,3732,3060,3865, 654, 53,5320,3014,5321, # 2720 1688,4642, 777,3494,1032,4053,1425,5322, 191, 820,2121,2846, 971,4643, 931,3233, # 2736 135, 664, 783,3866,1998, 772,2922,1936,4054,3867,4644,2923,3234, 282,2732, 640, # 2752 1372,3495,1127, 922, 325,3381,5323,5324, 711,2045,5325,5326,4055,2223,2800,1937, # 2768 4056,3382,2224,2255,3868,2305,5327,4645,3869,1258,3312,4057,3235,2139,2965,4058, # 2784 4059,5328,2225, 258,3236,4646, 101,1227,5329,3313,1755,5330,1391,3314,5331,2924, # 2800 2057, 893,5332,5333,5334,1402,4305,2347,5335,5336,3237,3611,5337,5338, 878,1325, # 2816 1781,2801,4647, 259,1385,2585, 744,1183,2272,4648,5339,4060,2509,5340, 684,1024, # 2832 4306,5341, 472,3612,3496,1165,3315,4061,4062, 322,2153, 881, 455,1695,1152,1340, # 2848 660, 554,2154,4649,1058,4650,4307, 830,1065,3383,4063,4651,1924,5342,1703,1919, # 2864 5343, 932,2273, 122,5344,4652, 947, 677,5345,3870,2637, 297,1906,1925,2274,4653, # 2880 2322,3316,5346,5347,4308,5348,4309, 84,4310, 112, 989,5349, 547,1059,4064, 701, # 2896 3613,1019,5350,4311,5351,3497, 942, 639, 457,2306,2456, 993,2966, 407, 851, 494, # 2912 4654,3384, 927,5352,1237,5353,2426,3385, 573,4312, 680, 921,2925,1279,1875, 285, # 2928 790,1448,1984, 719,2168,5354,5355,4655,4065,4066,1649,5356,1541, 563,5357,1077, # 2944 5358,3386,3061,3498, 511,3015,4067,4068,3733,4069,1268,2572,3387,3238,4656,4657, # 2960 5359, 535,1048,1276,1189,2926,2029,3167,1438,1373,2847,2967,1134,2013,5360,4313, # 2976 1238,2586,3109,1259,5361, 
700,5362,2968,3168,3734,4314,5363,4315,1146,1876,1907, # 2992 4658,2611,4070, 781,2427, 132,1589, 203, 147, 273,2802,2407, 898,1787,2155,4071, # 3008 4072,5364,3871,2803,5365,5366,4659,4660,5367,3239,5368,1635,3872, 965,5369,1805, # 3024 2699,1516,3614,1121,1082,1329,3317,4073,1449,3873, 65,1128,2848,2927,2769,1590, # 3040 3874,5370,5371, 12,2668, 45, 976,2587,3169,4661, 517,2535,1013,1037,3240,5372, # 3056 3875,2849,5373,3876,5374,3499,5375,2612, 614,1999,2323,3877,3110,2733,2638,5376, # 3072 2588,4316, 599,1269,5377,1811,3735,5378,2700,3111, 759,1060, 489,1806,3388,3318, # 3088 1358,5379,5380,2391,1387,1215,2639,2256, 490,5381,5382,4317,1759,2392,2348,5383, # 3104 4662,3878,1908,4074,2640,1807,3241,4663,3500,3319,2770,2349, 874,5384,5385,3501, # 3120 3736,1859, 91,2928,3737,3062,3879,4664,5386,3170,4075,2669,5387,3502,1202,1403, # 3136 3880,2969,2536,1517,2510,4665,3503,2511,5388,4666,5389,2701,1886,1495,1731,4076, # 3152 2370,4667,5390,2030,5391,5392,4077,2702,1216, 237,2589,4318,2324,4078,3881,4668, # 3168 4669,2703,3615,3504, 445,4670,5393,5394,5395,5396,2771, 61,4079,3738,1823,4080, # 3184 5397, 687,2046, 935, 925, 405,2670, 703,1096,1860,2734,4671,4081,1877,1367,2704, # 3200 3389, 918,2106,1782,2483, 334,3320,1611,1093,4672, 564,3171,3505,3739,3390, 945, # 3216 2641,2058,4673,5398,1926, 872,4319,5399,3506,2705,3112, 349,4320,3740,4082,4674, # 3232 3882,4321,3741,2156,4083,4675,4676,4322,4677,2408,2047, 782,4084, 400, 251,4323, # 3248 1624,5400,5401, 277,3742, 299,1265, 476,1191,3883,2122,4324,4325,1109, 205,5402, # 3264 2590,1000,2157,3616,1861,5403,5404,5405,4678,5406,4679,2573, 107,2484,2158,4085, # 3280 3507,3172,5407,1533, 541,1301, 158, 753,4326,2886,3617,5408,1696, 370,1088,4327, # 3296 4680,3618, 579, 327, 440, 162,2244, 269,1938,1374,3508, 968,3063, 56,1396,3113, # 3312 2107,3321,3391,5409,1927,2159,4681,3016,5410,3619,5411,5412,3743,4682,2485,5413, # 3328 2804,5414,1650,4683,5415,2613,5416,5417,4086,2671,3392,1149,3393,4087,3884,4088, # 3344 
5418,1076, 49,5419, 951,3242,3322,3323, 450,2850, 920,5420,1812,2805,2371,4328, # 3360 1909,1138,2372,3885,3509,5421,3243,4684,1910,1147,1518,2428,4685,3886,5422,4686, # 3376 2393,2614, 260,1796,3244,5423,5424,3887,3324, 708,5425,3620,1704,5426,3621,1351, # 3392 1618,3394,3017,1887, 944,4329,3395,4330,3064,3396,4331,5427,3744, 422, 413,1714, # 3408 3325, 500,2059,2350,4332,2486,5428,1344,1911, 954,5429,1668,5430,5431,4089,2409, # 3424 4333,3622,3888,4334,5432,2307,1318,2512,3114, 133,3115,2887,4687, 629, 31,2851, # 3440 2706,3889,4688, 850, 949,4689,4090,2970,1732,2089,4335,1496,1853,5433,4091, 620, # 3456 3245, 981,1242,3745,3397,1619,3746,1643,3326,2140,2457,1971,1719,3510,2169,5434, # 3472 3246,5435,5436,3398,1829,5437,1277,4690,1565,2048,5438,1636,3623,3116,5439, 869, # 3488 2852, 655,3890,3891,3117,4092,3018,3892,1310,3624,4691,5440,5441,5442,1733, 558, # 3504 4692,3747, 335,1549,3065,1756,4336,3748,1946,3511,1830,1291,1192, 470,2735,2108, # 3520 2806, 913,1054,4093,5443,1027,5444,3066,4094,4693, 982,2672,3399,3173,3512,3247, # 3536 3248,1947,2807,5445, 571,4694,5446,1831,5447,3625,2591,1523,2429,5448,2090, 984, # 3552 4695,3749,1960,5449,3750, 852, 923,2808,3513,3751, 969,1519, 999,2049,2325,1705, # 3568 5450,3118, 615,1662, 151, 597,4095,2410,2326,1049, 275,4696,3752,4337, 568,3753, # 3584 3626,2487,4338,3754,5451,2430,2275, 409,3249,5452,1566,2888,3514,1002, 769,2853, # 3600 194,2091,3174,3755,2226,3327,4339, 628,1505,5453,5454,1763,2180,3019,4096, 521, # 3616 1161,2592,1788,2206,2411,4697,4097,1625,4340,4341, 412, 42,3119, 464,5455,2642, # 3632 4698,3400,1760,1571,2889,3515,2537,1219,2207,3893,2643,2141,2373,4699,4700,3328, # 3648 1651,3401,3627,5456,5457,3628,2488,3516,5458,3756,5459,5460,2276,2092, 460,5461, # 3664 4701,5462,3020, 962, 588,3629, 289,3250,2644,1116, 52,5463,3067,1797,5464,5465, # 3680 5466,1467,5467,1598,1143,3757,4342,1985,1734,1067,4702,1280,3402, 465,4703,1572, # 3696 
510,5468,1928,2245,1813,1644,3630,5469,4704,3758,5470,5471,2673,1573,1534,5472, # 3712 5473, 536,1808,1761,3517,3894,3175,2645,5474,5475,5476,4705,3518,2929,1912,2809, # 3728 5477,3329,1122, 377,3251,5478, 360,5479,5480,4343,1529, 551,5481,2060,3759,1769, # 3744 2431,5482,2930,4344,3330,3120,2327,2109,2031,4706,1404, 136,1468,1479, 672,1171, # 3760 3252,2308, 271,3176,5483,2772,5484,2050, 678,2736, 865,1948,4707,5485,2014,4098, # 3776 2971,5486,2737,2227,1397,3068,3760,4708,4709,1735,2931,3403,3631,5487,3895, 509, # 3792 2854,2458,2890,3896,5488,5489,3177,3178,4710,4345,2538,4711,2309,1166,1010, 552, # 3808 681,1888,5490,5491,2972,2973,4099,1287,1596,1862,3179, 358, 453, 736, 175, 478, # 3824 1117, 905,1167,1097,5492,1854,1530,5493,1706,5494,2181,3519,2292,3761,3520,3632, # 3840 4346,2093,4347,5495,3404,1193,2489,4348,1458,2193,2208,1863,1889,1421,3331,2932, # 3856 3069,2182,3521, 595,2123,5496,4100,5497,5498,4349,1707,2646, 223,3762,1359, 751, # 3872 3121, 183,3522,5499,2810,3021, 419,2374, 633, 704,3897,2394, 241,5500,5501,5502, # 3888 838,3022,3763,2277,2773,2459,3898,1939,2051,4101,1309,3122,2246,1181,5503,1136, # 3904 2209,3899,2375,1446,4350,2310,4712,5504,5505,4351,1055,2615, 484,3764,5506,4102, # 3920 625,4352,2278,3405,1499,4353,4103,5507,4104,4354,3253,2279,2280,3523,5508,5509, # 3936 2774, 808,2616,3765,3406,4105,4355,3123,2539, 526,3407,3900,4356, 955,5510,1620, # 3952 4357,2647,2432,5511,1429,3766,1669,1832, 994, 928,5512,3633,1260,5513,5514,5515, # 3968 1949,2293, 741,2933,1626,4358,2738,2460, 867,1184, 362,3408,1392,5516,5517,4106, # 3984 4359,1770,1736,3254,2934,4713,4714,1929,2707,1459,1158,5518,3070,3409,2891,1292, # 4000 1930,2513,2855,3767,1986,1187,2072,2015,2617,4360,5519,2574,2514,2170,3768,2490, # 4016 3332,5520,3769,4715,5521,5522, 666,1003,3023,1022,3634,4361,5523,4716,1814,2257, # 4032 574,3901,1603, 295,1535, 705,3902,4362, 283, 858, 417,5524,5525,3255,4717,4718, # 4048 3071,1220,1890,1046,2281,2461,4107,1393,1599, 689,2575, 
388,4363,5526,2491, 802, # 4064 5527,2811,3903,2061,1405,2258,5528,4719,3904,2110,1052,1345,3256,1585,5529, 809, # 4080 5530,5531,5532, 575,2739,3524, 956,1552,1469,1144,2328,5533,2329,1560,2462,3635, # 4096 3257,4108, 616,2210,4364,3180,2183,2294,5534,1833,5535,3525,4720,5536,1319,3770, # 4112 3771,1211,3636,1023,3258,1293,2812,5537,5538,5539,3905, 607,2311,3906, 762,2892, # 4128 1439,4365,1360,4721,1485,3072,5540,4722,1038,4366,1450,2062,2648,4367,1379,4723, # 4144 2593,5541,5542,4368,1352,1414,2330,2935,1172,5543,5544,3907,3908,4724,1798,1451, # 4160 5545,5546,5547,5548,2936,4109,4110,2492,2351, 411,4111,4112,3637,3333,3124,4725, # 4176 1561,2674,1452,4113,1375,5549,5550, 47,2974, 316,5551,1406,1591,2937,3181,5552, # 4192 1025,2142,3125,3182, 354,2740, 884,2228,4369,2412, 508,3772, 726,3638, 996,2433, # 4208 3639, 729,5553, 392,2194,1453,4114,4726,3773,5554,5555,2463,3640,2618,1675,2813, # 4224 919,2352,2975,2353,1270,4727,4115, 73,5556,5557, 647,5558,3259,2856,2259,1550, # 4240 1346,3024,5559,1332, 883,3526,5560,5561,5562,5563,3334,2775,5564,1212, 831,1347, # 4256 4370,4728,2331,3909,1864,3073, 720,3910,4729,4730,3911,5565,4371,5566,5567,4731, # 4272 5568,5569,1799,4732,3774,2619,4733,3641,1645,2376,4734,5570,2938, 669,2211,2675, # 4288 2434,5571,2893,5572,5573,1028,3260,5574,4372,2413,5575,2260,1353,5576,5577,4735, # 4304 3183, 518,5578,4116,5579,4373,1961,5580,2143,4374,5581,5582,3025,2354,2355,3912, # 4320 516,1834,1454,4117,2708,4375,4736,2229,2620,1972,1129,3642,5583,2776,5584,2976, # 4336 1422, 577,1470,3026,1524,3410,5585,5586, 432,4376,3074,3527,5587,2594,1455,2515, # 4352 2230,1973,1175,5588,1020,2741,4118,3528,4737,5589,2742,5590,1743,1361,3075,3529, # 4368 2649,4119,4377,4738,2295, 895, 924,4378,2171, 331,2247,3076, 166,1627,3077,1098, # 4384 5591,1232,2894,2231,3411,4739, 657, 403,1196,2377, 542,3775,3412,1600,4379,3530, # 4400 5592,4740,2777,3261, 576, 530,1362,4741,4742,2540,2676,3776,4120,5593, 842,3913, # 4416 5594,2814,2032,1014,4121, 
213,2709,3413, 665, 621,4380,5595,3777,2939,2435,5596, # 4432 2436,3335,3643,3414,4743,4381,2541,4382,4744,3644,1682,4383,3531,1380,5597, 724, # 4448 2282, 600,1670,5598,1337,1233,4745,3126,2248,5599,1621,4746,5600, 651,4384,5601, # 4464 1612,4385,2621,5602,2857,5603,2743,2312,3078,5604, 716,2464,3079, 174,1255,2710, # 4480 4122,3645, 548,1320,1398, 728,4123,1574,5605,1891,1197,3080,4124,5606,3081,3082, # 4496 3778,3646,3779, 747,5607, 635,4386,4747,5608,5609,5610,4387,5611,5612,4748,5613, # 4512 3415,4749,2437, 451,5614,3780,2542,2073,4388,2744,4389,4125,5615,1764,4750,5616, # 4528 4390, 350,4751,2283,2395,2493,5617,4391,4126,2249,1434,4127, 488,4752, 458,4392, # 4544 4128,3781, 771,1330,2396,3914,2576,3184,2160,2414,1553,2677,3185,4393,5618,2494, # 4560 2895,2622,1720,2711,4394,3416,4753,5619,2543,4395,5620,3262,4396,2778,5621,2016, # 4576 2745,5622,1155,1017,3782,3915,5623,3336,2313, 201,1865,4397,1430,5624,4129,5625, # 4592 5626,5627,5628,5629,4398,1604,5630, 414,1866, 371,2595,4754,4755,3532,2017,3127, # 4608 4756,1708, 960,4399, 887, 389,2172,1536,1663,1721,5631,2232,4130,2356,2940,1580, # 4624 5632,5633,1744,4757,2544,4758,4759,5634,4760,5635,2074,5636,4761,3647,3417,2896, # 4640 4400,5637,4401,2650,3418,2815, 673,2712,2465, 709,3533,4131,3648,4402,5638,1148, # 4656 502, 634,5639,5640,1204,4762,3649,1575,4763,2623,3783,5641,3784,3128, 948,3263, # 4672 121,1745,3916,1110,5642,4403,3083,2516,3027,4132,3785,1151,1771,3917,1488,4133, # 4688 1987,5643,2438,3534,5644,5645,2094,5646,4404,3918,1213,1407,2816, 531,2746,2545, # 4704 3264,1011,1537,4764,2779,4405,3129,1061,5647,3786,3787,1867,2897,5648,2018, 120, # 4720 4406,4407,2063,3650,3265,2314,3919,2678,3419,1955,4765,4134,5649,3535,1047,2713, # 4736 1266,5650,1368,4766,2858, 649,3420,3920,2546,2747,1102,2859,2679,5651,5652,2000, # 4752 5653,1111,3651,2977,5654,2495,3921,3652,2817,1855,3421,3788,5655,5656,3422,2415, # 4768 2898,3337,3266,3653,5657,2577,5658,3654,2818,4135,1460, 856,5659,3655,5660,2899, # 4784 
2978,5661,2900,3922,5662,4408, 632,2517, 875,3923,1697,3924,2296,5663,5664,4767, # 4800 3028,1239, 580,4768,4409,5665, 914, 936,2075,1190,4136,1039,2124,5666,5667,5668, # 4816 5669,3423,1473,5670,1354,4410,3925,4769,2173,3084,4137, 915,3338,4411,4412,3339, # 4832 1605,1835,5671,2748, 398,3656,4413,3926,4138, 328,1913,2860,4139,3927,1331,4414, # 4848 3029, 937,4415,5672,3657,4140,4141,3424,2161,4770,3425, 524, 742, 538,3085,1012, # 4864 5673,5674,3928,2466,5675, 658,1103, 225,3929,5676,5677,4771,5678,4772,5679,3267, # 4880 1243,5680,4142, 963,2250,4773,5681,2714,3658,3186,5682,5683,2596,2332,5684,4774, # 4896 5685,5686,5687,3536, 957,3426,2547,2033,1931,2941,2467, 870,2019,3659,1746,2780, # 4912 2781,2439,2468,5688,3930,5689,3789,3130,3790,3537,3427,3791,5690,1179,3086,5691, # 4928 3187,2378,4416,3792,2548,3188,3131,2749,4143,5692,3428,1556,2549,2297, 977,2901, # 4944 2034,4144,1205,3429,5693,1765,3430,3189,2125,1271, 714,1689,4775,3538,5694,2333, # 4960 3931, 533,4417,3660,2184, 617,5695,2469,3340,3539,2315,5696,5697,3190,5698,5699, # 4976 3932,1988, 618, 427,2651,3540,3431,5700,5701,1244,1690,5702,2819,4418,4776,5703, # 4992 3541,4777,5704,2284,1576, 473,3661,4419,3432, 972,5705,3662,5706,3087,5707,5708, # 5008 4778,4779,5709,3793,4145,4146,5710, 153,4780, 356,5711,1892,2902,4420,2144, 408, # 5024 803,2357,5712,3933,5713,4421,1646,2578,2518,4781,4782,3934,5714,3935,4422,5715, # 5040 2416,3433, 752,5716,5717,1962,3341,2979,5718, 746,3030,2470,4783,4423,3794, 698, # 5056 4784,1893,4424,3663,2550,4785,3664,3936,5719,3191,3434,5720,1824,1302,4147,2715, # 5072 3937,1974,4425,5721,4426,3192, 823,1303,1288,1236,2861,3542,4148,3435, 774,3938, # 5088 5722,1581,4786,1304,2862,3939,4787,5723,2440,2162,1083,3268,4427,4149,4428, 344, # 5104 1173, 288,2316, 454,1683,5724,5725,1461,4788,4150,2597,5726,5727,4789, 985, 894, # 5120 5728,3436,3193,5729,1914,2942,3795,1989,5730,2111,1975,5731,4151,5732,2579,1194, # 5136 425,5733,4790,3194,1245,3796,4429,5734,5735,2863,5736, 
636,4791,1856,3940, 760, # 5152 1800,5737,4430,2212,1508,4792,4152,1894,1684,2298,5738,5739,4793,4431,4432,2213, # 5168 479,5740,5741, 832,5742,4153,2496,5743,2980,2497,3797, 990,3132, 627,1815,2652, # 5184 4433,1582,4434,2126,2112,3543,4794,5744, 799,4435,3195,5745,4795,2113,1737,3031, # 5200 1018, 543, 754,4436,3342,1676,4796,4797,4154,4798,1489,5746,3544,5747,2624,2903, # 5216 4155,5748,5749,2981,5750,5751,5752,5753,3196,4799,4800,2185,1722,5754,3269,3270, # 5232 1843,3665,1715, 481, 365,1976,1857,5755,5756,1963,2498,4801,5757,2127,3666,3271, # 5248 433,1895,2064,2076,5758, 602,2750,5759,5760,5761,5762,5763,3032,1628,3437,5764, # 5264 3197,4802,4156,2904,4803,2519,5765,2551,2782,5766,5767,5768,3343,4804,2905,5769, # 5280 4805,5770,2864,4806,4807,1221,2982,4157,2520,5771,5772,5773,1868,1990,5774,5775, # 5296 5776,1896,5777,5778,4808,1897,4158, 318,5779,2095,4159,4437,5780,5781, 485,5782, # 5312 938,3941, 553,2680, 116,5783,3942,3667,5784,3545,2681,2783,3438,3344,2820,5785, # 5328 3668,2943,4160,1747,2944,2983,5786,5787, 207,5788,4809,5789,4810,2521,5790,3033, # 5344 890,3669,3943,5791,1878,3798,3439,5792,2186,2358,3440,1652,5793,5794,5795, 941, # 5360 2299, 208,3546,4161,2020, 330,4438,3944,2906,2499,3799,4439,4811,5796,5797,5798, # 5376 #last 512 #Everything below is of no interest for detection purpose 2522,1613,4812,5799,3345,3945,2523,5800,4162,5801,1637,4163,2471,4813,3946,5802, # 5392 2500,3034,3800,5803,5804,2195,4814,5805,2163,5806,5807,5808,5809,5810,5811,5812, # 5408 5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824,5825,5826,5827,5828, # 5424 5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840,5841,5842,5843,5844, # 5440 5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856,5857,5858,5859,5860, # 5456 5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872,5873,5874,5875,5876, # 5472 5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888,5889,5890,5891,5892, # 5488 
5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904,5905,5906,5907,5908, # 5504 5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920,5921,5922,5923,5924, # 5520 5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936,5937,5938,5939,5940, # 5536 5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952,5953,5954,5955,5956, # 5552 5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968,5969,5970,5971,5972, # 5568 5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984,5985,5986,5987,5988, # 5584 5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000,6001,6002,6003,6004, # 5600 6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016,6017,6018,6019,6020, # 5616 6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032,6033,6034,6035,6036, # 5632 6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048,6049,6050,6051,6052, # 5648 6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064,6065,6066,6067,6068, # 5664 6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080,6081,6082,6083,6084, # 5680 6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096,6097,6098,6099,6100, # 5696 6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112,6113,6114,6115,6116, # 5712 6117,6118,6119,6120,6121,6122,6123,6124,6125,6126,6127,6128,6129,6130,6131,6132, # 5728 6133,6134,6135,6136,6137,6138,6139,6140,6141,6142,6143,6144,6145,6146,6147,6148, # 5744 6149,6150,6151,6152,6153,6154,6155,6156,6157,6158,6159,6160,6161,6162,6163,6164, # 5760 6165,6166,6167,6168,6169,6170,6171,6172,6173,6174,6175,6176,6177,6178,6179,6180, # 5776 6181,6182,6183,6184,6185,6186,6187,6188,6189,6190,6191,6192,6193,6194,6195,6196, # 5792 6197,6198,6199,6200,6201,6202,6203,6204,6205,6206,6207,6208,6209,6210,6211,6212, # 5808 6213,6214,6215,6216,6217,6218,6219,6220,6221,6222,6223,3670,6224,6225,6226,6227, # 5824 6228,6229,6230,6231,6232,6233,6234,6235,6236,6237,6238,6239,6240,6241,6242,6243, # 5840 
6244,6245,6246,6247,6248,6249,6250,6251,6252,6253,6254,6255,6256,6257,6258,6259, # 5856 6260,6261,6262,6263,6264,6265,6266,6267,6268,6269,6270,6271,6272,6273,6274,6275, # 5872 6276,6277,6278,6279,6280,6281,6282,6283,6284,6285,4815,6286,6287,6288,6289,6290, # 5888 6291,6292,4816,6293,6294,6295,6296,6297,6298,6299,6300,6301,6302,6303,6304,6305, # 5904 6306,6307,6308,6309,6310,6311,4817,4818,6312,6313,6314,6315,6316,6317,6318,4819, # 5920 6319,6320,6321,6322,6323,6324,6325,6326,6327,6328,6329,6330,6331,6332,6333,6334, # 5936 6335,6336,6337,4820,6338,6339,6340,6341,6342,6343,6344,6345,6346,6347,6348,6349, # 5952 6350,6351,6352,6353,6354,6355,6356,6357,6358,6359,6360,6361,6362,6363,6364,6365, # 5968 6366,6367,6368,6369,6370,6371,6372,6373,6374,6375,6376,6377,6378,6379,6380,6381, # 5984 6382,6383,6384,6385,6386,6387,6388,6389,6390,6391,6392,6393,6394,6395,6396,6397, # 6000 6398,6399,6400,6401,6402,6403,6404,6405,6406,6407,6408,6409,6410,3441,6411,6412, # 6016 6413,6414,6415,6416,6417,6418,6419,6420,6421,6422,6423,6424,6425,4440,6426,6427, # 6032 6428,6429,6430,6431,6432,6433,6434,6435,6436,6437,6438,6439,6440,6441,6442,6443, # 6048 6444,6445,6446,6447,6448,6449,6450,6451,6452,6453,6454,4821,6455,6456,6457,6458, # 6064 6459,6460,6461,6462,6463,6464,6465,6466,6467,6468,6469,6470,6471,6472,6473,6474, # 6080 6475,6476,6477,3947,3948,6478,6479,6480,6481,3272,4441,6482,6483,6484,6485,4442, # 6096 6486,6487,6488,6489,6490,6491,6492,6493,6494,6495,6496,4822,6497,6498,6499,6500, # 6112 6501,6502,6503,6504,6505,6506,6507,6508,6509,6510,6511,6512,6513,6514,6515,6516, # 6128 6517,6518,6519,6520,6521,6522,6523,6524,6525,6526,6527,6528,6529,6530,6531,6532, # 6144 6533,6534,6535,6536,6537,6538,6539,6540,6541,6542,6543,6544,6545,6546,6547,6548, # 6160 6549,6550,6551,6552,6553,6554,6555,6556,2784,6557,4823,6558,6559,6560,6561,6562, # 6176 6563,6564,6565,6566,6567,6568,6569,3949,6570,6571,6572,4824,6573,6574,6575,6576, # 6192 
6577,6578,6579,6580,6581,6582,6583,4825,6584,6585,6586,3950,2785,6587,6588,6589, # 6208 6590,6591,6592,6593,6594,6595,6596,6597,6598,6599,6600,6601,6602,6603,6604,6605, # 6224 6606,6607,6608,6609,6610,6611,6612,4826,6613,6614,6615,4827,6616,6617,6618,6619, # 6240 6620,6621,6622,6623,6624,6625,4164,6626,6627,6628,6629,6630,6631,6632,6633,6634, # 6256 3547,6635,4828,6636,6637,6638,6639,6640,6641,6642,3951,2984,6643,6644,6645,6646, # 6272 6647,6648,6649,4165,6650,4829,6651,6652,4830,6653,6654,6655,6656,6657,6658,6659, # 6288 6660,6661,6662,4831,6663,6664,6665,6666,6667,6668,6669,6670,6671,4166,6672,4832, # 6304 3952,6673,6674,6675,6676,4833,6677,6678,6679,4167,6680,6681,6682,3198,6683,6684, # 6320 6685,6686,6687,6688,6689,6690,6691,6692,6693,6694,6695,6696,6697,4834,6698,6699, # 6336 6700,6701,6702,6703,6704,6705,6706,6707,6708,6709,6710,6711,6712,6713,6714,6715, # 6352 6716,6717,6718,6719,6720,6721,6722,6723,6724,6725,6726,6727,6728,6729,6730,6731, # 6368 6732,6733,6734,4443,6735,6736,6737,6738,6739,6740,6741,6742,6743,6744,6745,4444, # 6384 6746,6747,6748,6749,6750,6751,6752,6753,6754,6755,6756,6757,6758,6759,6760,6761, # 6400 6762,6763,6764,6765,6766,6767,6768,6769,6770,6771,6772,6773,6774,6775,6776,6777, # 6416 6778,6779,6780,6781,4168,6782,6783,3442,6784,6785,6786,6787,6788,6789,6790,6791, # 6432 4169,6792,6793,6794,6795,6796,6797,6798,6799,6800,6801,6802,6803,6804,6805,6806, # 6448 6807,6808,6809,6810,6811,4835,6812,6813,6814,4445,6815,6816,4446,6817,6818,6819, # 6464 6820,6821,6822,6823,6824,6825,6826,6827,6828,6829,6830,6831,6832,6833,6834,6835, # 6480 3548,6836,6837,6838,6839,6840,6841,6842,6843,6844,6845,6846,4836,6847,6848,6849, # 6496 6850,6851,6852,6853,6854,3953,6855,6856,6857,6858,6859,6860,6861,6862,6863,6864, # 6512 6865,6866,6867,6868,6869,6870,6871,6872,6873,6874,6875,6876,6877,3199,6878,6879, # 6528 6880,6881,6882,4447,6883,6884,6885,6886,6887,6888,6889,6890,6891,6892,6893,6894, # 6544 
6895,6896,6897,6898,6899,6900,6901,6902,6903,6904,4170,6905,6906,6907,6908,6909, # 6560 6910,6911,6912,6913,6914,6915,6916,6917,6918,6919,6920,6921,6922,6923,6924,6925, # 6576 6926,6927,4837,6928,6929,6930,6931,6932,6933,6934,6935,6936,3346,6937,6938,4838, # 6592 6939,6940,6941,4448,6942,6943,6944,6945,6946,4449,6947,6948,6949,6950,6951,6952, # 6608 6953,6954,6955,6956,6957,6958,6959,6960,6961,6962,6963,6964,6965,6966,6967,6968, # 6624 6969,6970,6971,6972,6973,6974,6975,6976,6977,6978,6979,6980,6981,6982,6983,6984, # 6640 6985,6986,6987,6988,6989,6990,6991,6992,6993,6994,3671,6995,6996,6997,6998,4839, # 6656 6999,7000,7001,7002,3549,7003,7004,7005,7006,7007,7008,7009,7010,7011,7012,7013, # 6672 7014,7015,7016,7017,7018,7019,7020,7021,7022,7023,7024,7025,7026,7027,7028,7029, # 6688 7030,4840,7031,7032,7033,7034,7035,7036,7037,7038,4841,7039,7040,7041,7042,7043, # 6704 7044,7045,7046,7047,7048,7049,7050,7051,7052,7053,7054,7055,7056,7057,7058,7059, # 6720 7060,7061,7062,7063,7064,7065,7066,7067,7068,7069,7070,2985,7071,7072,7073,7074, # 6736 7075,7076,7077,7078,7079,7080,4842,7081,7082,7083,7084,7085,7086,7087,7088,7089, # 6752 7090,7091,7092,7093,7094,7095,7096,7097,7098,7099,7100,7101,7102,7103,7104,7105, # 6768 7106,7107,7108,7109,7110,7111,7112,7113,7114,7115,7116,7117,7118,4450,7119,7120, # 6784 7121,7122,7123,7124,7125,7126,7127,7128,7129,7130,7131,7132,7133,7134,7135,7136, # 6800 7137,7138,7139,7140,7141,7142,7143,4843,7144,7145,7146,7147,7148,7149,7150,7151, # 6816 7152,7153,7154,7155,7156,7157,7158,7159,7160,7161,7162,7163,7164,7165,7166,7167, # 6832 7168,7169,7170,7171,7172,7173,7174,7175,7176,7177,7178,7179,7180,7181,7182,7183, # 6848 7184,7185,7186,7187,7188,4171,4172,7189,7190,7191,7192,7193,7194,7195,7196,7197, # 6864 7198,7199,7200,7201,7202,7203,7204,7205,7206,7207,7208,7209,7210,7211,7212,7213, # 6880 7214,7215,7216,7217,7218,7219,7220,7221,7222,7223,7224,7225,7226,7227,7228,7229, # 6896 
7230,7231,7232,7233,7234,7235,7236,7237,7238,7239,7240,7241,7242,7243,7244,7245, # 6912 7246,7247,7248,7249,7250,7251,7252,7253,7254,7255,7256,7257,7258,7259,7260,7261, # 6928 7262,7263,7264,7265,7266,7267,7268,7269,7270,7271,7272,7273,7274,7275,7276,7277, # 6944 7278,7279,7280,7281,7282,7283,7284,7285,7286,7287,7288,7289,7290,7291,7292,7293, # 6960 7294,7295,7296,4844,7297,7298,7299,7300,7301,7302,7303,7304,7305,7306,7307,7308, # 6976 7309,7310,7311,7312,7313,7314,7315,7316,4451,7317,7318,7319,7320,7321,7322,7323, # 6992 7324,7325,7326,7327,7328,7329,7330,7331,7332,7333,7334,7335,7336,7337,7338,7339, # 7008 7340,7341,7342,7343,7344,7345,7346,7347,7348,7349,7350,7351,7352,7353,4173,7354, # 7024 7355,4845,7356,7357,7358,7359,7360,7361,7362,7363,7364,7365,7366,7367,7368,7369, # 7040 7370,7371,7372,7373,7374,7375,7376,7377,7378,7379,7380,7381,7382,7383,7384,7385, # 7056 7386,7387,7388,4846,7389,7390,7391,7392,7393,7394,7395,7396,7397,7398,7399,7400, # 7072 7401,7402,7403,7404,7405,3672,7406,7407,7408,7409,7410,7411,7412,7413,7414,7415, # 7088 7416,7417,7418,7419,7420,7421,7422,7423,7424,7425,7426,7427,7428,7429,7430,7431, # 7104 7432,7433,7434,7435,7436,7437,7438,7439,7440,7441,7442,7443,7444,7445,7446,7447, # 7120 7448,7449,7450,7451,7452,7453,4452,7454,3200,7455,7456,7457,7458,7459,7460,7461, # 7136 7462,7463,7464,7465,7466,7467,7468,7469,7470,7471,7472,7473,7474,4847,7475,7476, # 7152 7477,3133,7478,7479,7480,7481,7482,7483,7484,7485,7486,7487,7488,7489,7490,7491, # 7168 7492,7493,7494,7495,7496,7497,7498,7499,7500,7501,7502,3347,7503,7504,7505,7506, # 7184 7507,7508,7509,7510,7511,7512,7513,7514,7515,7516,7517,7518,7519,7520,7521,4848, # 7200 7522,7523,7524,7525,7526,7527,7528,7529,7530,7531,7532,7533,7534,7535,7536,7537, # 7216 7538,7539,7540,7541,7542,7543,7544,7545,7546,7547,7548,7549,3801,4849,7550,7551, # 7232 7552,7553,7554,7555,7556,7557,7558,7559,7560,7561,7562,7563,7564,7565,7566,7567, # 7248 
7568,7569,3035,7570,7571,7572,7573,7574,7575,7576,7577,7578,7579,7580,7581,7582, # 7264 7583,7584,7585,7586,7587,7588,7589,7590,7591,7592,7593,7594,7595,7596,7597,7598, # 7280 7599,7600,7601,7602,7603,7604,7605,7606,7607,7608,7609,7610,7611,7612,7613,7614, # 7296 7615,7616,4850,7617,7618,3802,7619,7620,7621,7622,7623,7624,7625,7626,7627,7628, # 7312 7629,7630,7631,7632,4851,7633,7634,7635,7636,7637,7638,7639,7640,7641,7642,7643, # 7328 7644,7645,7646,7647,7648,7649,7650,7651,7652,7653,7654,7655,7656,7657,7658,7659, # 7344 7660,7661,7662,7663,7664,7665,7666,7667,7668,7669,7670,4453,7671,7672,7673,7674, # 7360 7675,7676,7677,7678,7679,7680,7681,7682,7683,7684,7685,7686,7687,7688,7689,7690, # 7376 7691,7692,7693,7694,7695,7696,7697,3443,7698,7699,7700,7701,7702,4454,7703,7704, # 7392 7705,7706,7707,7708,7709,7710,7711,7712,7713,2472,7714,7715,7716,7717,7718,7719, # 7408 7720,7721,7722,7723,7724,7725,7726,7727,7728,7729,7730,7731,3954,7732,7733,7734, # 7424 7735,7736,7737,7738,7739,7740,7741,7742,7743,7744,7745,7746,7747,7748,7749,7750, # 7440 3134,7751,7752,4852,7753,7754,7755,4853,7756,7757,7758,7759,7760,4174,7761,7762, # 7456 7763,7764,7765,7766,7767,7768,7769,7770,7771,7772,7773,7774,7775,7776,7777,7778, # 7472 7779,7780,7781,7782,7783,7784,7785,7786,7787,7788,7789,7790,7791,7792,7793,7794, # 7488 7795,7796,7797,7798,7799,7800,7801,7802,7803,7804,7805,4854,7806,7807,7808,7809, # 7504 7810,7811,7812,7813,7814,7815,7816,7817,7818,7819,7820,7821,7822,7823,7824,7825, # 7520 4855,7826,7827,7828,7829,7830,7831,7832,7833,7834,7835,7836,7837,7838,7839,7840, # 7536 7841,7842,7843,7844,7845,7846,7847,3955,7848,7849,7850,7851,7852,7853,7854,7855, # 7552 7856,7857,7858,7859,7860,3444,7861,7862,7863,7864,7865,7866,7867,7868,7869,7870, # 7568 7871,7872,7873,7874,7875,7876,7877,7878,7879,7880,7881,7882,7883,7884,7885,7886, # 7584 7887,7888,7889,7890,7891,4175,7892,7893,7894,7895,7896,4856,4857,7897,7898,7899, # 7600 
7900,2598,7901,7902,7903,7904,7905,7906,7907,7908,4455,7909,7910,7911,7912,7913, # 7616 7914,3201,7915,7916,7917,7918,7919,7920,7921,4858,7922,7923,7924,7925,7926,7927, # 7632 7928,7929,7930,7931,7932,7933,7934,7935,7936,7937,7938,7939,7940,7941,7942,7943, # 7648 7944,7945,7946,7947,7948,7949,7950,7951,7952,7953,7954,7955,7956,7957,7958,7959, # 7664 7960,7961,7962,7963,7964,7965,7966,7967,7968,7969,7970,7971,7972,7973,7974,7975, # 7680 7976,7977,7978,7979,7980,7981,4859,7982,7983,7984,7985,7986,7987,7988,7989,7990, # 7696 7991,7992,7993,7994,7995,7996,4860,7997,7998,7999,8000,8001,8002,8003,8004,8005, # 7712 8006,8007,8008,8009,8010,8011,8012,8013,8014,8015,8016,4176,8017,8018,8019,8020, # 7728 8021,8022,8023,4861,8024,8025,8026,8027,8028,8029,8030,8031,8032,8033,8034,8035, # 7744 8036,4862,4456,8037,8038,8039,8040,4863,8041,8042,8043,8044,8045,8046,8047,8048, # 7760 8049,8050,8051,8052,8053,8054,8055,8056,8057,8058,8059,8060,8061,8062,8063,8064, # 7776 8065,8066,8067,8068,8069,8070,8071,8072,8073,8074,8075,8076,8077,8078,8079,8080, # 7792 8081,8082,8083,8084,8085,8086,8087,8088,8089,8090,8091,8092,8093,8094,8095,8096, # 7808 8097,8098,8099,4864,4177,8100,8101,8102,8103,8104,8105,8106,8107,8108,8109,8110, # 7824 8111,8112,8113,8114,8115,8116,8117,8118,8119,8120,4178,8121,8122,8123,8124,8125, # 7840 8126,8127,8128,8129,8130,8131,8132,8133,8134,8135,8136,8137,8138,8139,8140,8141, # 7856 8142,8143,8144,8145,4865,4866,8146,8147,8148,8149,8150,8151,8152,8153,8154,8155, # 7872 8156,8157,8158,8159,8160,8161,8162,8163,8164,8165,4179,8166,8167,8168,8169,8170, # 7888 8171,8172,8173,8174,8175,8176,8177,8178,8179,8180,8181,4457,8182,8183,8184,8185, # 7904 8186,8187,8188,8189,8190,8191,8192,8193,8194,8195,8196,8197,8198,8199,8200,8201, # 7920 8202,8203,8204,8205,8206,8207,8208,8209,8210,8211,8212,8213,8214,8215,8216,8217, # 7936 8218,8219,8220,8221,8222,8223,8224,8225,8226,8227,8228,8229,8230,8231,8232,8233, # 7952 
8234,8235,8236,8237,8238,8239,8240,8241,8242,8243,8244,8245,8246,8247,8248,8249, # 7968 8250,8251,8252,8253,8254,8255,8256,3445,8257,8258,8259,8260,8261,8262,4458,8263, # 7984 8264,8265,8266,8267,8268,8269,8270,8271,8272,4459,8273,8274,8275,8276,3550,8277, # 8000 8278,8279,8280,8281,8282,8283,8284,8285,8286,8287,8288,8289,4460,8290,8291,8292, # 8016 8293,8294,8295,8296,8297,8298,8299,8300,8301,8302,8303,8304,8305,8306,8307,4867, # 8032 8308,8309,8310,8311,8312,3551,8313,8314,8315,8316,8317,8318,8319,8320,8321,8322, # 8048 8323,8324,8325,8326,4868,8327,8328,8329,8330,8331,8332,8333,8334,8335,8336,8337, # 8064 8338,8339,8340,8341,8342,8343,8344,8345,8346,8347,8348,8349,8350,8351,8352,8353, # 8080 8354,8355,8356,8357,8358,8359,8360,8361,8362,8363,4869,4461,8364,8365,8366,8367, # 8096 8368,8369,8370,4870,8371,8372,8373,8374,8375,8376,8377,8378,8379,8380,8381,8382, # 8112 8383,8384,8385,8386,8387,8388,8389,8390,8391,8392,8393,8394,8395,8396,8397,8398, # 8128 8399,8400,8401,8402,8403,8404,8405,8406,8407,8408,8409,8410,4871,8411,8412,8413, # 8144 8414,8415,8416,8417,8418,8419,8420,8421,8422,4462,8423,8424,8425,8426,8427,8428, # 8160 8429,8430,8431,8432,8433,2986,8434,8435,8436,8437,8438,8439,8440,8441,8442,8443, # 8176 8444,8445,8446,8447,8448,8449,8450,8451,8452,8453,8454,8455,8456,8457,8458,8459, # 8192 8460,8461,8462,8463,8464,8465,8466,8467,8468,8469,8470,8471,8472,8473,8474,8475, # 8208 8476,8477,8478,4180,8479,8480,8481,8482,8483,8484,8485,8486,8487,8488,8489,8490, # 8224 8491,8492,8493,8494,8495,8496,8497,8498,8499,8500,8501,8502,8503,8504,8505,8506, # 8240 8507,8508,8509,8510,8511,8512,8513,8514,8515,8516,8517,8518,8519,8520,8521,8522, # 8256 8523,8524,8525,8526,8527,8528,8529,8530,8531,8532,8533,8534,8535,8536,8537,8538, # 8272 8539,8540,8541,8542,8543,8544,8545,8546,8547,8548,8549,8550,8551,8552,8553,8554, # 8288 8555,8556,8557,8558,8559,8560,8561,8562,8563,8564,4872,8565,8566,8567,8568,8569, # 8304 
8570,8571,8572,8573,4873,8574,8575,8576,8577,8578,8579,8580,8581,8582,8583,8584, # 8320 8585,8586,8587,8588,8589,8590,8591,8592,8593,8594,8595,8596,8597,8598,8599,8600, # 8336 8601,8602,8603,8604,8605,3803,8606,8607,8608,8609,8610,8611,8612,8613,4874,3804, # 8352 8614,8615,8616,8617,8618,8619,8620,8621,3956,8622,8623,8624,8625,8626,8627,8628, # 8368 8629,8630,8631,8632,8633,8634,8635,8636,8637,8638,2865,8639,8640,8641,8642,8643, # 8384 8644,8645,8646,8647,8648,8649,8650,8651,8652,8653,8654,8655,8656,4463,8657,8658, # 8400 8659,4875,4876,8660,8661,8662,8663,8664,8665,8666,8667,8668,8669,8670,8671,8672, # 8416 8673,8674,8675,8676,8677,8678,8679,8680,8681,4464,8682,8683,8684,8685,8686,8687, # 8432 8688,8689,8690,8691,8692,8693,8694,8695,8696,8697,8698,8699,8700,8701,8702,8703, # 8448 8704,8705,8706,8707,8708,8709,2261,8710,8711,8712,8713,8714,8715,8716,8717,8718, # 8464 8719,8720,8721,8722,8723,8724,8725,8726,8727,8728,8729,8730,8731,8732,8733,4181, # 8480 8734,8735,8736,8737,8738,8739,8740,8741,8742,8743,8744,8745,8746,8747,8748,8749, # 8496 8750,8751,8752,8753,8754,8755,8756,8757,8758,8759,8760,8761,8762,8763,4877,8764, # 8512 8765,8766,8767,8768,8769,8770,8771,8772,8773,8774,8775,8776,8777,8778,8779,8780, # 8528 8781,8782,8783,8784,8785,8786,8787,8788,4878,8789,4879,8790,8791,8792,4880,8793, # 8544 8794,8795,8796,8797,8798,8799,8800,8801,4881,8802,8803,8804,8805,8806,8807,8808, # 8560 8809,8810,8811,8812,8813,8814,8815,3957,8816,8817,8818,8819,8820,8821,8822,8823, # 8576 8824,8825,8826,8827,8828,8829,8830,8831,8832,8833,8834,8835,8836,8837,8838,8839, # 8592 8840,8841,8842,8843,8844,8845,8846,8847,4882,8848,8849,8850,8851,8852,8853,8854, # 8608 8855,8856,8857,8858,8859,8860,8861,8862,8863,8864,8865,8866,8867,8868,8869,8870, # 8624 8871,8872,8873,8874,8875,8876,8877,8878,8879,8880,8881,8882,8883,8884,3202,8885, # 8640 8886,8887,8888,8889,8890,8891,8892,8893,8894,8895,8896,8897,8898,8899,8900,8901, # 8656 
8902,8903,8904,8905,8906,8907,8908,8909,8910,8911,8912,8913,8914,8915,8916,8917, # 8672 8918,8919,8920,8921,8922,8923,8924,4465,8925,8926,8927,8928,8929,8930,8931,8932, # 8688 4883,8933,8934,8935,8936,8937,8938,8939,8940,8941,8942,8943,2214,8944,8945,8946, # 8704 8947,8948,8949,8950,8951,8952,8953,8954,8955,8956,8957,8958,8959,8960,8961,8962, # 8720 8963,8964,8965,4884,8966,8967,8968,8969,8970,8971,8972,8973,8974,8975,8976,8977, # 8736 8978,8979,8980,8981,8982,8983,8984,8985,8986,8987,8988,8989,8990,8991,8992,4885, # 8752 8993,8994,8995,8996,8997,8998,8999,9000,9001,9002,9003,9004,9005,9006,9007,9008, # 8768 9009,9010,9011,9012,9013,9014,9015,9016,9017,9018,9019,9020,9021,4182,9022,9023, # 8784 9024,9025,9026,9027,9028,9029,9030,9031,9032,9033,9034,9035,9036,9037,9038,9039, # 8800 9040,9041,9042,9043,9044,9045,9046,9047,9048,9049,9050,9051,9052,9053,9054,9055, # 8816 9056,9057,9058,9059,9060,9061,9062,9063,4886,9064,9065,9066,9067,9068,9069,4887, # 8832 9070,9071,9072,9073,9074,9075,9076,9077,9078,9079,9080,9081,9082,9083,9084,9085, # 8848 9086,9087,9088,9089,9090,9091,9092,9093,9094,9095,9096,9097,9098,9099,9100,9101, # 8864 9102,9103,9104,9105,9106,9107,9108,9109,9110,9111,9112,9113,9114,9115,9116,9117, # 8880 9118,9119,9120,9121,9122,9123,9124,9125,9126,9127,9128,9129,9130,9131,9132,9133, # 8896 9134,9135,9136,9137,9138,9139,9140,9141,3958,9142,9143,9144,9145,9146,9147,9148, # 8912 9149,9150,9151,4888,9152,9153,9154,9155,9156,9157,9158,9159,9160,9161,9162,9163, # 8928 9164,9165,9166,9167,9168,9169,9170,9171,9172,9173,9174,9175,4889,9176,9177,9178, # 8944 9179,9180,9181,9182,9183,9184,9185,9186,9187,9188,9189,9190,9191,9192,9193,9194, # 8960 9195,9196,9197,9198,9199,9200,9201,9202,9203,4890,9204,9205,9206,9207,9208,9209, # 8976 9210,9211,9212,9213,9214,9215,9216,9217,9218,9219,9220,9221,9222,4466,9223,9224, # 8992 9225,9226,9227,9228,9229,9230,9231,9232,9233,9234,9235,9236,9237,9238,9239,9240, # 9008 
9241,9242,9243,9244,9245,4891,9246,9247,9248,9249,9250,9251,9252,9253,9254,9255, # 9024 9256,9257,4892,9258,9259,9260,9261,4893,4894,9262,9263,9264,9265,9266,9267,9268, # 9040 9269,9270,9271,9272,9273,4467,9274,9275,9276,9277,9278,9279,9280,9281,9282,9283, # 9056 9284,9285,3673,9286,9287,9288,9289,9290,9291,9292,9293,9294,9295,9296,9297,9298, # 9072 9299,9300,9301,9302,9303,9304,9305,9306,9307,9308,9309,9310,9311,9312,9313,9314, # 9088 9315,9316,9317,9318,9319,9320,9321,9322,4895,9323,9324,9325,9326,9327,9328,9329, # 9104 9330,9331,9332,9333,9334,9335,9336,9337,9338,9339,9340,9341,9342,9343,9344,9345, # 9120 9346,9347,4468,9348,9349,9350,9351,9352,9353,9354,9355,9356,9357,9358,9359,9360, # 9136 9361,9362,9363,9364,9365,9366,9367,9368,9369,9370,9371,9372,9373,4896,9374,4469, # 9152 9375,9376,9377,9378,9379,4897,9380,9381,9382,9383,9384,9385,9386,9387,9388,9389, # 9168 9390,9391,9392,9393,9394,9395,9396,9397,9398,9399,9400,9401,9402,9403,9404,9405, # 9184 9406,4470,9407,2751,9408,9409,3674,3552,9410,9411,9412,9413,9414,9415,9416,9417, # 9200 9418,9419,9420,9421,4898,9422,9423,9424,9425,9426,9427,9428,9429,3959,9430,9431, # 9216 9432,9433,9434,9435,9436,4471,9437,9438,9439,9440,9441,9442,9443,9444,9445,9446, # 9232 9447,9448,9449,9450,3348,9451,9452,9453,9454,9455,9456,9457,9458,9459,9460,9461, # 9248 9462,9463,9464,9465,9466,9467,9468,9469,9470,9471,9472,4899,9473,9474,9475,9476, # 9264 9477,4900,9478,9479,9480,9481,9482,9483,9484,9485,9486,9487,9488,3349,9489,9490, # 9280 9491,9492,9493,9494,9495,9496,9497,9498,9499,9500,9501,9502,9503,9504,9505,9506, # 9296 9507,9508,9509,9510,9511,9512,9513,9514,9515,9516,9517,9518,9519,9520,4901,9521, # 9312 9522,9523,9524,9525,9526,4902,9527,9528,9529,9530,9531,9532,9533,9534,9535,9536, # 9328 9537,9538,9539,9540,9541,9542,9543,9544,9545,9546,9547,9548,9549,9550,9551,9552, # 9344 9553,9554,9555,9556,9557,9558,9559,9560,9561,9562,9563,9564,9565,9566,9567,9568, # 9360 
9569,9570,9571,9572,9573,9574,9575,9576,9577,9578,9579,9580,9581,9582,9583,9584, # 9376 3805,9585,9586,9587,9588,9589,9590,9591,9592,9593,9594,9595,9596,9597,9598,9599, # 9392 9600,9601,9602,4903,9603,9604,9605,9606,9607,4904,9608,9609,9610,9611,9612,9613, # 9408 9614,4905,9615,9616,9617,9618,9619,9620,9621,9622,9623,9624,9625,9626,9627,9628, # 9424 9629,9630,9631,9632,4906,9633,9634,9635,9636,9637,9638,9639,9640,9641,9642,9643, # 9440 4907,9644,9645,9646,9647,9648,9649,9650,9651,9652,9653,9654,9655,9656,9657,9658, # 9456 9659,9660,9661,9662,9663,9664,9665,9666,9667,9668,9669,9670,9671,9672,4183,9673, # 9472 9674,9675,9676,9677,4908,9678,9679,9680,9681,4909,9682,9683,9684,9685,9686,9687, # 9488 9688,9689,9690,4910,9691,9692,9693,3675,9694,9695,9696,2945,9697,9698,9699,9700, # 9504 9701,9702,9703,9704,9705,4911,9706,9707,9708,9709,9710,9711,9712,9713,9714,9715, # 9520 9716,9717,9718,9719,9720,9721,9722,9723,9724,9725,9726,9727,9728,9729,9730,9731, # 9536 9732,9733,9734,9735,4912,9736,9737,9738,9739,9740,4913,9741,9742,9743,9744,9745, # 9552 9746,9747,9748,9749,9750,9751,9752,9753,9754,9755,9756,9757,9758,4914,9759,9760, # 9568 9761,9762,9763,9764,9765,9766,9767,9768,9769,9770,9771,9772,9773,9774,9775,9776, # 9584 9777,9778,9779,9780,9781,9782,4915,9783,9784,9785,9786,9787,9788,9789,9790,9791, # 9600 9792,9793,4916,9794,9795,9796,9797,9798,9799,9800,9801,9802,9803,9804,9805,9806, # 9616 9807,9808,9809,9810,9811,9812,9813,9814,9815,9816,9817,9818,9819,9820,9821,9822, # 9632 9823,9824,9825,9826,9827,9828,9829,9830,9831,9832,9833,9834,9835,9836,9837,9838, # 9648 9839,9840,9841,9842,9843,9844,9845,9846,9847,9848,9849,9850,9851,9852,9853,9854, # 9664 9855,9856,9857,9858,9859,9860,9861,9862,9863,9864,9865,9866,9867,9868,4917,9869, # 9680 9870,9871,9872,9873,9874,9875,9876,9877,9878,9879,9880,9881,9882,9883,9884,9885, # 9696 9886,9887,9888,9889,9890,9891,9892,4472,9893,9894,9895,9896,9897,3806,9898,9899, # 9712 
9900,9901,9902,9903,9904,9905,9906,9907,9908,9909,9910,9911,9912,9913,9914,4918, # 9728 9915,9916,9917,4919,9918,9919,9920,9921,4184,9922,9923,9924,9925,9926,9927,9928, # 9744 9929,9930,9931,9932,9933,9934,9935,9936,9937,9938,9939,9940,9941,9942,9943,9944, # 9760 9945,9946,4920,9947,9948,9949,9950,9951,9952,9953,9954,9955,4185,9956,9957,9958, # 9776 9959,9960,9961,9962,9963,9964,9965,4921,9966,9967,9968,4473,9969,9970,9971,9972, # 9792 9973,9974,9975,9976,9977,4474,9978,9979,9980,9981,9982,9983,9984,9985,9986,9987, # 9808 9988,9989,9990,9991,9992,9993,9994,9995,9996,9997,9998,9999,10000,10001,10002,10003, # 9824 10004,10005,10006,10007,10008,10009,10010,10011,10012,10013,10014,10015,10016,10017,10018,10019, # 9840 10020,10021,4922,10022,4923,10023,10024,10025,10026,10027,10028,10029,10030,10031,10032,10033, # 9856 10034,10035,10036,10037,10038,10039,10040,10041,10042,10043,10044,10045,10046,10047,10048,4924, # 9872 10049,10050,10051,10052,10053,10054,10055,10056,10057,10058,10059,10060,10061,10062,10063,10064, # 9888 10065,10066,10067,10068,10069,10070,10071,10072,10073,10074,10075,10076,10077,10078,10079,10080, # 9904 10081,10082,10083,10084,10085,10086,10087,4475,10088,10089,10090,10091,10092,10093,10094,10095, # 9920 10096,10097,4476,10098,10099,10100,10101,10102,10103,10104,10105,10106,10107,10108,10109,10110, # 9936 10111,2174,10112,10113,10114,10115,10116,10117,10118,10119,10120,10121,10122,10123,10124,10125, # 9952 10126,10127,10128,10129,10130,10131,10132,10133,10134,10135,10136,10137,10138,10139,10140,3807, # 9968 4186,4925,10141,10142,10143,10144,10145,10146,10147,4477,4187,10148,10149,10150,10151,10152, # 9984 10153,4188,10154,10155,10156,10157,10158,10159,10160,10161,4926,10162,10163,10164,10165,10166, #10000 10167,10168,10169,10170,10171,10172,10173,10174,10175,10176,10177,10178,10179,10180,10181,10182, #10016 10183,10184,10185,10186,10187,10188,10189,10190,10191,10192,3203,10193,10194,10195,10196,10197, #10032 
10198,10199,10200,4478,10201,10202,10203,10204,4479,10205,10206,10207,10208,10209,10210,10211, #10048 10212,10213,10214,10215,10216,10217,10218,10219,10220,10221,10222,10223,10224,10225,10226,10227, #10064 10228,10229,10230,10231,10232,10233,10234,4927,10235,10236,10237,10238,10239,10240,10241,10242, #10080 10243,10244,10245,10246,10247,10248,10249,10250,10251,10252,10253,10254,10255,10256,10257,10258, #10096 10259,10260,10261,10262,10263,10264,10265,10266,10267,10268,10269,10270,10271,10272,10273,4480, #10112 4928,4929,10274,10275,10276,10277,10278,10279,10280,10281,10282,10283,10284,10285,10286,10287, #10128 10288,10289,10290,10291,10292,10293,10294,10295,10296,10297,10298,10299,10300,10301,10302,10303, #10144 10304,10305,10306,10307,10308,10309,10310,10311,10312,10313,10314,10315,10316,10317,10318,10319, #10160 10320,10321,10322,10323,10324,10325,10326,10327,10328,10329,10330,10331,10332,10333,10334,4930, #10176 10335,10336,10337,10338,10339,10340,10341,10342,4931,10343,10344,10345,10346,10347,10348,10349, #10192 10350,10351,10352,10353,10354,10355,3088,10356,2786,10357,10358,10359,10360,4189,10361,10362, #10208 10363,10364,10365,10366,10367,10368,10369,10370,10371,10372,10373,10374,10375,4932,10376,10377, #10224 10378,10379,10380,10381,10382,10383,10384,10385,10386,10387,10388,10389,10390,10391,10392,4933, #10240 10393,10394,10395,4934,10396,10397,10398,10399,10400,10401,10402,10403,10404,10405,10406,10407, #10256 10408,10409,10410,10411,10412,3446,10413,10414,10415,10416,10417,10418,10419,10420,10421,10422, #10272 10423,4935,10424,10425,10426,10427,10428,10429,10430,4936,10431,10432,10433,10434,10435,10436, #10288 10437,10438,10439,10440,10441,10442,10443,4937,10444,10445,10446,10447,4481,10448,10449,10450, #10304 10451,10452,10453,10454,10455,10456,10457,10458,10459,10460,10461,10462,10463,10464,10465,10466, #10320 10467,10468,10469,10470,10471,10472,10473,10474,10475,10476,10477,10478,10479,10480,10481,10482, #10336 
10483,10484,10485,10486,10487,10488,10489,10490,10491,10492,10493,10494,10495,10496,10497,10498, #10352 10499,10500,10501,10502,10503,10504,10505,4938,10506,10507,10508,10509,10510,2552,10511,10512, #10368 10513,10514,10515,10516,3447,10517,10518,10519,10520,10521,10522,10523,10524,10525,10526,10527, #10384 10528,10529,10530,10531,10532,10533,10534,10535,10536,10537,10538,10539,10540,10541,10542,10543, #10400 4482,10544,4939,10545,10546,10547,10548,10549,10550,10551,10552,10553,10554,10555,10556,10557, #10416 10558,10559,10560,10561,10562,10563,10564,10565,10566,10567,3676,4483,10568,10569,10570,10571, #10432 10572,3448,10573,10574,10575,10576,10577,10578,10579,10580,10581,10582,10583,10584,10585,10586, #10448 10587,10588,10589,10590,10591,10592,10593,10594,10595,10596,10597,10598,10599,10600,10601,10602, #10464 10603,10604,10605,10606,10607,10608,10609,10610,10611,10612,10613,10614,10615,10616,10617,10618, #10480 10619,10620,10621,10622,10623,10624,10625,10626,10627,4484,10628,10629,10630,10631,10632,4940, #10496 10633,10634,10635,10636,10637,10638,10639,10640,10641,10642,10643,10644,10645,10646,10647,10648, #10512 10649,10650,10651,10652,10653,10654,10655,10656,4941,10657,10658,10659,2599,10660,10661,10662, #10528 10663,10664,10665,10666,3089,10667,10668,10669,10670,10671,10672,10673,10674,10675,10676,10677, #10544 10678,10679,10680,4942,10681,10682,10683,10684,10685,10686,10687,10688,10689,10690,10691,10692, #10560 10693,10694,10695,10696,10697,4485,10698,10699,10700,10701,10702,10703,10704,4943,10705,3677, #10576 10706,10707,10708,10709,10710,10711,10712,4944,10713,10714,10715,10716,10717,10718,10719,10720, #10592 10721,10722,10723,10724,10725,10726,10727,10728,4945,10729,10730,10731,10732,10733,10734,10735, #10608 10736,10737,10738,10739,10740,10741,10742,10743,10744,10745,10746,10747,10748,10749,10750,10751, #10624 10752,10753,10754,10755,10756,10757,10758,10759,10760,10761,4946,10762,10763,10764,10765,10766, #10640 
10767,4947,4948,10768,10769,10770,10771,10772,10773,10774,10775,10776,10777,10778,10779,10780, #10656 10781,10782,10783,10784,10785,10786,10787,10788,10789,10790,10791,10792,10793,10794,10795,10796, #10672 10797,10798,10799,10800,10801,10802,10803,10804,10805,10806,10807,10808,10809,10810,10811,10812, #10688 10813,10814,10815,10816,10817,10818,10819,10820,10821,10822,10823,10824,10825,10826,10827,10828, #10704 10829,10830,10831,10832,10833,10834,10835,10836,10837,10838,10839,10840,10841,10842,10843,10844, #10720 10845,10846,10847,10848,10849,10850,10851,10852,10853,10854,10855,10856,10857,10858,10859,10860, #10736 10861,10862,10863,10864,10865,10866,10867,10868,10869,10870,10871,10872,10873,10874,10875,10876, #10752 10877,10878,4486,10879,10880,10881,10882,10883,10884,10885,4949,10886,10887,10888,10889,10890, #10768 10891,10892,10893,10894,10895,10896,10897,10898,10899,10900,10901,10902,10903,10904,10905,10906, #10784 10907,10908,10909,10910,10911,10912,10913,10914,10915,10916,10917,10918,10919,4487,10920,10921, #10800 10922,10923,10924,10925,10926,10927,10928,10929,10930,10931,10932,4950,10933,10934,10935,10936, #10816 10937,10938,10939,10940,10941,10942,10943,10944,10945,10946,10947,10948,10949,4488,10950,10951, #10832 10952,10953,10954,10955,10956,10957,10958,10959,4190,10960,10961,10962,10963,10964,10965,10966, #10848 10967,10968,10969,10970,10971,10972,10973,10974,10975,10976,10977,10978,10979,10980,10981,10982, #10864 10983,10984,10985,10986,10987,10988,10989,10990,10991,10992,10993,10994,10995,10996,10997,10998, #10880 10999,11000,11001,11002,11003,11004,11005,11006,3960,11007,11008,11009,11010,11011,11012,11013, #10896 11014,11015,11016,11017,11018,11019,11020,11021,11022,11023,11024,11025,11026,11027,11028,11029, #10912 11030,11031,11032,4951,11033,11034,11035,11036,11037,11038,11039,11040,11041,11042,11043,11044, #10928 11045,11046,11047,4489,11048,11049,11050,11051,4952,11052,11053,11054,11055,11056,11057,11058, #10944 
4953,11059,11060,11061,11062,11063,11064,11065,11066,11067,11068,11069,11070,11071,4954,11072, #10960 11073,11074,11075,11076,11077,11078,11079,11080,11081,11082,11083,11084,11085,11086,11087,11088, #10976 11089,11090,11091,11092,11093,11094,11095,11096,11097,11098,11099,11100,11101,11102,11103,11104, #10992 11105,11106,11107,11108,11109,11110,11111,11112,11113,11114,11115,3808,11116,11117,11118,11119, #11008 11120,11121,11122,11123,11124,11125,11126,11127,11128,11129,11130,11131,11132,11133,11134,4955, #11024 11135,11136,11137,11138,11139,11140,11141,11142,11143,11144,11145,11146,11147,11148,11149,11150, #11040 11151,11152,11153,11154,11155,11156,11157,11158,11159,11160,11161,4956,11162,11163,11164,11165, #11056 11166,11167,11168,11169,11170,11171,11172,11173,11174,11175,11176,11177,11178,11179,11180,4957, #11072 11181,11182,11183,11184,11185,11186,4958,11187,11188,11189,11190,11191,11192,11193,11194,11195, #11088 11196,11197,11198,11199,11200,3678,11201,11202,11203,11204,11205,11206,4191,11207,11208,11209, #11104 11210,11211,11212,11213,11214,11215,11216,11217,11218,11219,11220,11221,11222,11223,11224,11225, #11120 11226,11227,11228,11229,11230,11231,11232,11233,11234,11235,11236,11237,11238,11239,11240,11241, #11136 11242,11243,11244,11245,11246,11247,11248,11249,11250,11251,4959,11252,11253,11254,11255,11256, #11152 11257,11258,11259,11260,11261,11262,11263,11264,11265,11266,11267,11268,11269,11270,11271,11272, #11168 11273,11274,11275,11276,11277,11278,11279,11280,11281,11282,11283,11284,11285,11286,11287,11288, #11184 11289,11290,11291,11292,11293,11294,11295,11296,11297,11298,11299,11300,11301,11302,11303,11304, #11200 11305,11306,11307,11308,11309,11310,11311,11312,11313,11314,3679,11315,11316,11317,11318,4490, #11216 11319,11320,11321,11322,11323,11324,11325,11326,11327,11328,11329,11330,11331,11332,11333,11334, #11232 11335,11336,11337,11338,11339,11340,11341,11342,11343,11344,11345,11346,11347,4960,11348,11349, #11248 
11350,11351,11352,11353,11354,11355,11356,11357,11358,11359,11360,11361,11362,11363,11364,11365, #11264 11366,11367,11368,11369,11370,11371,11372,11373,11374,11375,11376,11377,3961,4961,11378,11379, #11280 11380,11381,11382,11383,11384,11385,11386,11387,11388,11389,11390,11391,11392,11393,11394,11395, #11296 11396,11397,4192,11398,11399,11400,11401,11402,11403,11404,11405,11406,11407,11408,11409,11410, #11312 11411,4962,11412,11413,11414,11415,11416,11417,11418,11419,11420,11421,11422,11423,11424,11425, #11328 11426,11427,11428,11429,11430,11431,11432,11433,11434,11435,11436,11437,11438,11439,11440,11441, #11344 11442,11443,11444,11445,11446,11447,11448,11449,11450,11451,11452,11453,11454,11455,11456,11457, #11360 11458,11459,11460,11461,11462,11463,11464,11465,11466,11467,11468,11469,4963,11470,11471,4491, #11376 11472,11473,11474,11475,4964,11476,11477,11478,11479,11480,11481,11482,11483,11484,11485,11486, #11392 11487,11488,11489,11490,11491,11492,4965,11493,11494,11495,11496,11497,11498,11499,11500,11501, #11408 11502,11503,11504,11505,11506,11507,11508,11509,11510,11511,11512,11513,11514,11515,11516,11517, #11424 11518,11519,11520,11521,11522,11523,11524,11525,11526,11527,11528,11529,3962,11530,11531,11532, #11440 11533,11534,11535,11536,11537,11538,11539,11540,11541,11542,11543,11544,11545,11546,11547,11548, #11456 11549,11550,11551,11552,11553,11554,11555,11556,11557,11558,11559,11560,11561,11562,11563,11564, #11472 4193,4194,11565,11566,11567,11568,11569,11570,11571,11572,11573,11574,11575,11576,11577,11578, #11488 11579,11580,11581,11582,11583,11584,11585,11586,11587,11588,11589,11590,11591,4966,4195,11592, #11504 11593,11594,11595,11596,11597,11598,11599,11600,11601,11602,11603,11604,3090,11605,11606,11607, #11520 11608,11609,11610,4967,11611,11612,11613,11614,11615,11616,11617,11618,11619,11620,11621,11622, #11536 11623,11624,11625,11626,11627,11628,11629,11630,11631,11632,11633,11634,11635,11636,11637,11638, #11552 
11639,11640,11641,11642,11643,11644,11645,11646,11647,11648,11649,11650,11651,11652,11653,11654, #11568 11655,11656,11657,11658,11659,11660,11661,11662,11663,11664,11665,11666,11667,11668,11669,11670, #11584 11671,11672,11673,11674,4968,11675,11676,11677,11678,11679,11680,11681,11682,11683,11684,11685, #11600 11686,11687,11688,11689,11690,11691,11692,11693,3809,11694,11695,11696,11697,11698,11699,11700, #11616 11701,11702,11703,11704,11705,11706,11707,11708,11709,11710,11711,11712,11713,11714,11715,11716, #11632 11717,11718,3553,11719,11720,11721,11722,11723,11724,11725,11726,11727,11728,11729,11730,4969, #11648 11731,11732,11733,11734,11735,11736,11737,11738,11739,11740,4492,11741,11742,11743,11744,11745, #11664 11746,11747,11748,11749,11750,11751,11752,4970,11753,11754,11755,11756,11757,11758,11759,11760, #11680 11761,11762,11763,11764,11765,11766,11767,11768,11769,11770,11771,11772,11773,11774,11775,11776, #11696 11777,11778,11779,11780,11781,11782,11783,11784,11785,11786,11787,11788,11789,11790,4971,11791, #11712 11792,11793,11794,11795,11796,11797,4972,11798,11799,11800,11801,11802,11803,11804,11805,11806, #11728 11807,11808,11809,11810,4973,11811,11812,11813,11814,11815,11816,11817,11818,11819,11820,11821, #11744 11822,11823,11824,11825,11826,11827,11828,11829,11830,11831,11832,11833,11834,3680,3810,11835, #11760 11836,4974,11837,11838,11839,11840,11841,11842,11843,11844,11845,11846,11847,11848,11849,11850, #11776 11851,11852,11853,11854,11855,11856,11857,11858,11859,11860,11861,11862,11863,11864,11865,11866, #11792 11867,11868,11869,11870,11871,11872,11873,11874,11875,11876,11877,11878,11879,11880,11881,11882, #11808 11883,11884,4493,11885,11886,11887,11888,11889,11890,11891,11892,11893,11894,11895,11896,11897, #11824 11898,11899,11900,11901,11902,11903,11904,11905,11906,11907,11908,11909,11910,11911,11912,11913, #11840 11914,11915,4975,11916,11917,11918,11919,11920,11921,11922,11923,11924,11925,11926,11927,11928, #11856 
11929,11930,11931,11932,11933,11934,11935,11936,11937,11938,11939,11940,11941,11942,11943,11944, #11872 11945,11946,11947,11948,11949,4976,11950,11951,11952,11953,11954,11955,11956,11957,11958,11959, #11888 11960,11961,11962,11963,11964,11965,11966,11967,11968,11969,11970,11971,11972,11973,11974,11975, #11904 11976,11977,11978,11979,11980,11981,11982,11983,11984,11985,11986,11987,4196,11988,11989,11990, #11920 11991,11992,4977,11993,11994,11995,11996,11997,11998,11999,12000,12001,12002,12003,12004,12005, #11936 12006,12007,12008,12009,12010,12011,12012,12013,12014,12015,12016,12017,12018,12019,12020,12021, #11952 12022,12023,12024,12025,12026,12027,12028,12029,12030,12031,12032,12033,12034,12035,12036,12037, #11968 12038,12039,12040,12041,12042,12043,12044,12045,12046,12047,12048,12049,12050,12051,12052,12053, #11984 12054,12055,12056,12057,12058,12059,12060,12061,4978,12062,12063,12064,12065,12066,12067,12068, #12000 12069,12070,12071,12072,12073,12074,12075,12076,12077,12078,12079,12080,12081,12082,12083,12084, #12016 12085,12086,12087,12088,12089,12090,12091,12092,12093,12094,12095,12096,12097,12098,12099,12100, #12032 12101,12102,12103,12104,12105,12106,12107,12108,12109,12110,12111,12112,12113,12114,12115,12116, #12048 12117,12118,12119,12120,12121,12122,12123,4979,12124,12125,12126,12127,12128,4197,12129,12130, #12064 12131,12132,12133,12134,12135,12136,12137,12138,12139,12140,12141,12142,12143,12144,12145,12146, #12080 12147,12148,12149,12150,12151,12152,12153,12154,4980,12155,12156,12157,12158,12159,12160,4494, #12096 12161,12162,12163,12164,3811,12165,12166,12167,12168,12169,4495,12170,12171,4496,12172,12173, #12112 12174,12175,12176,3812,12177,12178,12179,12180,12181,12182,12183,12184,12185,12186,12187,12188, #12128 12189,12190,12191,12192,12193,12194,12195,12196,12197,12198,12199,12200,12201,12202,12203,12204, #12144 12205,12206,12207,12208,12209,12210,12211,12212,12213,12214,12215,12216,12217,12218,12219,12220, #12160 
12221,4981,12222,12223,12224,12225,12226,12227,12228,12229,12230,12231,12232,12233,12234,12235, #12176 4982,12236,12237,12238,12239,12240,12241,12242,12243,12244,12245,4983,12246,12247,12248,12249, #12192 4984,12250,12251,12252,12253,12254,12255,12256,12257,12258,12259,12260,12261,12262,12263,12264, #12208 4985,12265,4497,12266,12267,12268,12269,12270,12271,12272,12273,12274,12275,12276,12277,12278, #12224 12279,12280,12281,12282,12283,12284,12285,12286,12287,4986,12288,12289,12290,12291,12292,12293, #12240 12294,12295,12296,2473,12297,12298,12299,12300,12301,12302,12303,12304,12305,12306,12307,12308, #12256 12309,12310,12311,12312,12313,12314,12315,12316,12317,12318,12319,3963,12320,12321,12322,12323, #12272 12324,12325,12326,12327,12328,12329,12330,12331,12332,4987,12333,12334,12335,12336,12337,12338, #12288 12339,12340,12341,12342,12343,12344,12345,12346,12347,12348,12349,12350,12351,12352,12353,12354, #12304 12355,12356,12357,12358,12359,3964,12360,12361,12362,12363,12364,12365,12366,12367,12368,12369, #12320 12370,3965,12371,12372,12373,12374,12375,12376,12377,12378,12379,12380,12381,12382,12383,12384, #12336 12385,12386,12387,12388,12389,12390,12391,12392,12393,12394,12395,12396,12397,12398,12399,12400, #12352 12401,12402,12403,12404,12405,12406,12407,12408,4988,12409,12410,12411,12412,12413,12414,12415, #12368 12416,12417,12418,12419,12420,12421,12422,12423,12424,12425,12426,12427,12428,12429,12430,12431, #12384 12432,12433,12434,12435,12436,12437,12438,3554,12439,12440,12441,12442,12443,12444,12445,12446, #12400 12447,12448,12449,12450,12451,12452,12453,12454,12455,12456,12457,12458,12459,12460,12461,12462, #12416 12463,12464,4989,12465,12466,12467,12468,12469,12470,12471,12472,12473,12474,12475,12476,12477, #12432 12478,12479,12480,4990,12481,12482,12483,12484,12485,12486,12487,12488,12489,4498,12490,12491, #12448 12492,12493,12494,12495,12496,12497,12498,12499,12500,12501,12502,12503,12504,12505,12506,12507, #12464 
12508,12509,12510,12511,12512,12513,12514,12515,12516,12517,12518,12519,12520,12521,12522,12523, #12480 12524,12525,12526,12527,12528,12529,12530,12531,12532,12533,12534,12535,12536,12537,12538,12539, #12496 12540,12541,12542,12543,12544,12545,12546,12547,12548,12549,12550,12551,4991,12552,12553,12554, #12512 12555,12556,12557,12558,12559,12560,12561,12562,12563,12564,12565,12566,12567,12568,12569,12570, #12528 12571,12572,12573,12574,12575,12576,12577,12578,3036,12579,12580,12581,12582,12583,3966,12584, #12544 12585,12586,12587,12588,12589,12590,12591,12592,12593,12594,12595,12596,12597,12598,12599,12600, #12560 12601,12602,12603,12604,12605,12606,12607,12608,12609,12610,12611,12612,12613,12614,12615,12616, #12576 12617,12618,12619,12620,12621,12622,12623,12624,12625,12626,12627,12628,12629,12630,12631,12632, #12592 12633,12634,12635,12636,12637,12638,12639,12640,12641,12642,12643,12644,12645,12646,4499,12647, #12608 12648,12649,12650,12651,12652,12653,12654,12655,12656,12657,12658,12659,12660,12661,12662,12663, #12624 12664,12665,12666,12667,12668,12669,12670,12671,12672,12673,12674,12675,12676,12677,12678,12679, #12640 12680,12681,12682,12683,12684,12685,12686,12687,12688,12689,12690,12691,12692,12693,12694,12695, #12656 12696,12697,12698,4992,12699,12700,12701,12702,12703,12704,12705,12706,12707,12708,12709,12710, #12672 12711,12712,12713,12714,12715,12716,12717,12718,12719,12720,12721,12722,12723,12724,12725,12726, #12688 12727,12728,12729,12730,12731,12732,12733,12734,12735,12736,12737,12738,12739,12740,12741,12742, #12704 12743,12744,12745,12746,12747,12748,12749,12750,12751,12752,12753,12754,12755,12756,12757,12758, #12720 12759,12760,12761,12762,12763,12764,12765,12766,12767,12768,12769,12770,12771,12772,12773,12774, #12736 12775,12776,12777,12778,4993,2175,12779,12780,12781,12782,12783,12784,12785,12786,4500,12787, #12752 12788,12789,12790,12791,12792,12793,12794,12795,12796,12797,12798,12799,12800,12801,12802,12803, #12768 
12804,12805,12806,12807,12808,12809,12810,12811,12812,12813,12814,12815,12816,12817,12818,12819, #12784 12820,12821,12822,12823,12824,12825,12826,4198,3967,12827,12828,12829,12830,12831,12832,12833, #12800 12834,12835,12836,12837,12838,12839,12840,12841,12842,12843,12844,12845,12846,12847,12848,12849, #12816 12850,12851,12852,12853,12854,12855,12856,12857,12858,12859,12860,12861,4199,12862,12863,12864, #12832 12865,12866,12867,12868,12869,12870,12871,12872,12873,12874,12875,12876,12877,12878,12879,12880, #12848 12881,12882,12883,12884,12885,12886,12887,4501,12888,12889,12890,12891,12892,12893,12894,12895, #12864 12896,12897,12898,12899,12900,12901,12902,12903,12904,12905,12906,12907,12908,12909,12910,12911, #12880 12912,4994,12913,12914,12915,12916,12917,12918,12919,12920,12921,12922,12923,12924,12925,12926, #12896 12927,12928,12929,12930,12931,12932,12933,12934,12935,12936,12937,12938,12939,12940,12941,12942, #12912 12943,12944,12945,12946,12947,12948,12949,12950,12951,12952,12953,12954,12955,12956,1772,12957, #12928 12958,12959,12960,12961,12962,12963,12964,12965,12966,12967,12968,12969,12970,12971,12972,12973, #12944 12974,12975,12976,12977,12978,12979,12980,12981,12982,12983,12984,12985,12986,12987,12988,12989, #12960 12990,12991,12992,12993,12994,12995,12996,12997,4502,12998,4503,12999,13000,13001,13002,13003, #12976 4504,13004,13005,13006,13007,13008,13009,13010,13011,13012,13013,13014,13015,13016,13017,13018, #12992 13019,13020,13021,13022,13023,13024,13025,13026,13027,13028,13029,3449,13030,13031,13032,13033, #13008 13034,13035,13036,13037,13038,13039,13040,13041,13042,13043,13044,13045,13046,13047,13048,13049, #13024 13050,13051,13052,13053,13054,13055,13056,13057,13058,13059,13060,13061,13062,13063,13064,13065, #13040 13066,13067,13068,13069,13070,13071,13072,13073,13074,13075,13076,13077,13078,13079,13080,13081, #13056 13082,13083,13084,13085,13086,13087,13088,13089,13090,13091,13092,13093,13094,13095,13096,13097, #13072 
13098,13099,13100,13101,13102,13103,13104,13105,13106,13107,13108,13109,13110,13111,13112,13113, #13088 13114,13115,13116,13117,13118,3968,13119,4995,13120,13121,13122,13123,13124,13125,13126,13127, #13104 4505,13128,13129,13130,13131,13132,13133,13134,4996,4506,13135,13136,13137,13138,13139,4997, #13120 13140,13141,13142,13143,13144,13145,13146,13147,13148,13149,13150,13151,13152,13153,13154,13155, #13136 13156,13157,13158,13159,4998,13160,13161,13162,13163,13164,13165,13166,13167,13168,13169,13170, #13152 13171,13172,13173,13174,13175,13176,4999,13177,13178,13179,13180,13181,13182,13183,13184,13185, #13168 13186,13187,13188,13189,13190,13191,13192,13193,13194,13195,13196,13197,13198,13199,13200,13201, #13184 13202,13203,13204,13205,13206,5000,13207,13208,13209,13210,13211,13212,13213,13214,13215,13216, #13200 13217,13218,13219,13220,13221,13222,13223,13224,13225,13226,13227,4200,5001,13228,13229,13230, #13216 13231,13232,13233,13234,13235,13236,13237,13238,13239,13240,3969,13241,13242,13243,13244,3970, #13232 13245,13246,13247,13248,13249,13250,13251,13252,13253,13254,13255,13256,13257,13258,13259,13260, #13248 13261,13262,13263,13264,13265,13266,13267,13268,3450,13269,13270,13271,13272,13273,13274,13275, #13264 13276,5002,13277,13278,13279,13280,13281,13282,13283,13284,13285,13286,13287,13288,13289,13290, #13280 13291,13292,13293,13294,13295,13296,13297,13298,13299,13300,13301,13302,3813,13303,13304,13305, #13296 13306,13307,13308,13309,13310,13311,13312,13313,13314,13315,13316,13317,13318,13319,13320,13321, #13312 13322,13323,13324,13325,13326,13327,13328,4507,13329,13330,13331,13332,13333,13334,13335,13336, #13328 13337,13338,13339,13340,13341,5003,13342,13343,13344,13345,13346,13347,13348,13349,13350,13351, #13344 13352,13353,13354,13355,13356,13357,13358,13359,13360,13361,13362,13363,13364,13365,13366,13367, #13360 5004,13368,13369,13370,13371,13372,13373,13374,13375,13376,13377,13378,13379,13380,13381,13382, #13376 
13383,13384,13385,13386,13387,13388,13389,13390,13391,13392,13393,13394,13395,13396,13397,13398, #13392 13399,13400,13401,13402,13403,13404,13405,13406,13407,13408,13409,13410,13411,13412,13413,13414, #13408 13415,13416,13417,13418,13419,13420,13421,13422,13423,13424,13425,13426,13427,13428,13429,13430, #13424 13431,13432,4508,13433,13434,13435,4201,13436,13437,13438,13439,13440,13441,13442,13443,13444, #13440 13445,13446,13447,13448,13449,13450,13451,13452,13453,13454,13455,13456,13457,5005,13458,13459, #13456 13460,13461,13462,13463,13464,13465,13466,13467,13468,13469,13470,4509,13471,13472,13473,13474, #13472 13475,13476,13477,13478,13479,13480,13481,13482,13483,13484,13485,13486,13487,13488,13489,13490, #13488 13491,13492,13493,13494,13495,13496,13497,13498,13499,13500,13501,13502,13503,13504,13505,13506, #13504 13507,13508,13509,13510,13511,13512,13513,13514,13515,13516,13517,13518,13519,13520,13521,13522, #13520 13523,13524,13525,13526,13527,13528,13529,13530,13531,13532,13533,13534,13535,13536,13537,13538, #13536 13539,13540,13541,13542,13543,13544,13545,13546,13547,13548,13549,13550,13551,13552,13553,13554, #13552 13555,13556,13557,13558,13559,13560,13561,13562,13563,13564,13565,13566,13567,13568,13569,13570, #13568 13571,13572,13573,13574,13575,13576,13577,13578,13579,13580,13581,13582,13583,13584,13585,13586, #13584 13587,13588,13589,13590,13591,13592,13593,13594,13595,13596,13597,13598,13599,13600,13601,13602, #13600 13603,13604,13605,13606,13607,13608,13609,13610,13611,13612,13613,13614,13615,13616,13617,13618, #13616 13619,13620,13621,13622,13623,13624,13625,13626,13627,13628,13629,13630,13631,13632,13633,13634, #13632 13635,13636,13637,13638,13639,13640,13641,13642,5006,13643,13644,13645,13646,13647,13648,13649, #13648 13650,13651,5007,13652,13653,13654,13655,13656,13657,13658,13659,13660,13661,13662,13663,13664, #13664 13665,13666,13667,13668,13669,13670,13671,13672,13673,13674,13675,13676,13677,13678,13679,13680, #13680 
13681,13682,13683,13684,13685,13686,13687,13688,13689,13690,13691,13692,13693,13694,13695,13696, #13696 13697,13698,13699,13700,13701,13702,13703,13704,13705,13706,13707,13708,13709,13710,13711,13712, #13712 13713,13714,13715,13716,13717,13718,13719,13720,13721,13722,13723,13724,13725,13726,13727,13728, #13728 13729,13730,13731,13732,13733,13734,13735,13736,13737,13738,13739,13740,13741,13742,13743,13744, #13744 13745,13746,13747,13748,13749,13750,13751,13752,13753,13754,13755,13756,13757,13758,13759,13760, #13760 13761,13762,13763,13764,13765,13766,13767,13768,13769,13770,13771,13772,13773,13774,3273,13775, #13776 13776,13777,13778,13779,13780,13781,13782,13783,13784,13785,13786,13787,13788,13789,13790,13791, #13792 13792,13793,13794,13795,13796,13797,13798,13799,13800,13801,13802,13803,13804,13805,13806,13807, #13808 13808,13809,13810,13811,13812,13813,13814,13815,13816,13817,13818,13819,13820,13821,13822,13823, #13824 13824,13825,13826,13827,13828,13829,13830,13831,13832,13833,13834,13835,13836,13837,13838,13839, #13840 13840,13841,13842,13843,13844,13845,13846,13847,13848,13849,13850,13851,13852,13853,13854,13855, #13856 13856,13857,13858,13859,13860,13861,13862,13863,13864,13865,13866,13867,13868,13869,13870,13871, #13872 13872,13873,13874,13875,13876,13877,13878,13879,13880,13881,13882,13883,13884,13885,13886,13887, #13888 13888,13889,13890,13891,13892,13893,13894,13895,13896,13897,13898,13899,13900,13901,13902,13903, #13904 13904,13905,13906,13907,13908,13909,13910,13911,13912,13913,13914,13915,13916,13917,13918,13919, #13920 13920,13921,13922,13923,13924,13925,13926,13927,13928,13929,13930,13931,13932,13933,13934,13935, #13936 13936,13937,13938,13939,13940,13941,13942,13943,13944,13945,13946,13947,13948,13949,13950,13951, #13952 13952,13953,13954,13955,13956,13957,13958,13959,13960,13961,13962,13963,13964,13965,13966,13967, #13968 13968,13969,13970,13971,13972) #13973 # flake8: noqa
mit
radlws/AWS-ElasticBeanstalk-CLI
eb/macosx/python3/lib/aws/requests/packages/charade/sbcsgroupprober.py
8
3358
######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Universal charset detector code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 2001 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # Shy Shalom - original C code # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### from .charsetgroupprober import CharSetGroupProber from .sbcharsetprober import SingleByteCharSetProber from .langcyrillicmodel import (Win1251CyrillicModel, Koi8rModel, Latin5CyrillicModel, MacCyrillicModel, Ibm866Model, Ibm855Model) from .langgreekmodel import Latin7GreekModel, Win1253GreekModel from .langbulgarianmodel import Latin5BulgarianModel, Win1251BulgarianModel from .langhungarianmodel import Latin2HungarianModel, Win1250HungarianModel from .langthaimodel import TIS620ThaiModel from .langhebrewmodel import Win1255HebrewModel from .hebrewprober import HebrewProber class SBCSGroupProber(CharSetGroupProber): def __init__(self): CharSetGroupProber.__init__(self) self._mProbers = [ SingleByteCharSetProber(Win1251CyrillicModel), SingleByteCharSetProber(Koi8rModel), 
SingleByteCharSetProber(Latin5CyrillicModel), SingleByteCharSetProber(MacCyrillicModel), SingleByteCharSetProber(Ibm866Model), SingleByteCharSetProber(Ibm855Model), SingleByteCharSetProber(Latin7GreekModel), SingleByteCharSetProber(Win1253GreekModel), SingleByteCharSetProber(Latin5BulgarianModel), SingleByteCharSetProber(Win1251BulgarianModel), SingleByteCharSetProber(Latin2HungarianModel), SingleByteCharSetProber(Win1250HungarianModel), SingleByteCharSetProber(TIS620ThaiModel), ] hebrewProber = HebrewProber() logicalHebrewProber = SingleByteCharSetProber(Win1255HebrewModel, False, hebrewProber) visualHebrewProber = SingleByteCharSetProber(Win1255HebrewModel, True, hebrewProber) hebrewProber.set_model_probers(logicalHebrewProber, visualHebrewProber) self._mProbers.extend([hebrewProber, logicalHebrewProber, visualHebrewProber]) self.reset()
apache-2.0
yfdyh000/kuma
kuma/search/renderers.py
5
1930
from rest_framework.renderers import TemplateHTMLRenderer from .filters import get_filters from .models import Index from .store import ref_from_request class ExtendedTemplateHTMLRenderer(TemplateHTMLRenderer): template_name = 'search/results.html' exception_template_names = ['search/down.html'] def resolve_context(self, data, request, response): """ Adds some more data to the template context. """ data['selected_filters'] = get_filters(request.query_params.getlist) data['search_ref'] = ref_from_request(request) data['index'] = Index.objects.get_current() return super(ExtendedTemplateHTMLRenderer, self).resolve_context(data, request, response) def render(self, data, accepted_media_type=None, renderer_context=None): """ Renders data to HTML, using Django's standard template rendering. The template name is determined by (in order of preference): 1. An explicit .template_name set on the response. 2. An explicit .template_name set on this class. 3. The return result of calling view.get_template_names(). FIXME: This is a copy of the render method from upstream to support passing the request to the render method of the template backend. This should be removed/fixed when moving to DRF 3.x. """ renderer_context = renderer_context or {} view = renderer_context['view'] request = renderer_context['request'] response = renderer_context['response'] if response.exception: template = self.get_exception_template(response) else: template_names = self.get_template_names(response, view) template = self.resolve_template(template_names) context = self.resolve_context(data, request, response) return template.render(context, request=request)
mpl-2.0
zoulianmp/moonstone-pyqt
moonstone/bloodstone/importer/serieexporter.py
2
10128
# -*- coding: utf-8 -*- # # Moonstone is platform for processing of medical images (DICOM). # Copyright (C) 2009-2011 by Neppo Tecnologia da Informação LTDA # and Aevum Softwares LTDA # # This file is part of Moonstone. # # Moonstone is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import os import sys import tempfile from PySide import QtCore from ..utils.compact import Zipper from ..utils import compact from ...gui.qt.widget.genericload import GenericProgressBar from ..utils.data import persist_yaml_file, remove_file from ..utils import multiprocessutils from ...utils.strUtils import hashStr from multiprocessing import Process, Value, Pipe, Lock class SerieExporter(QtCore.QObject): def __init__(self, parent, outputFile, series, dicomImages): super(SerieExporter, self).__init__(parent) self._parentWindow = parent self._outputFile = outputFile self._series = self.parseSeries(series) self._dicomImages = dicomImages self._zipper = Zipper() self._messageCount = Value("i", 0) self._statusShared = Value("c", "r") self._lock = Lock() self._text = "Exporting" self._progress = 0 self._loaderBar = GenericProgressBar(self._parentWindow, stopButton = False, cancelButton = True, cancelButtonAction= self.cancel, progressFunction=self.getProgress) self._canceled = False QtCore.QTimer.singleShot(500, self.updateValue) self.status = "running" def parseSeries(self, series): result = [] for serie in 
series: serieDb = {} serieDb["uid"] = serie.uid serieDb["dicomImages"] = serie.dicomImages serieDb["description"] = serie.description serieDb["thickness"] = serie.thickness serieDb["size"] = serie.size serieDb["zSpacing"] = serie.zSpacing study = serie.study serieDb["study"] = study.uid studyDb = {} studyDb["uid"] = study.uid studyDb["modality"] = study.modality studyDb["description"] = study.description studyDb["institution"] = study.institution patient = study.patient studyDb["patient"] = patient.uid patientDb = {} patientDb["uid"] = patient.uid patientDb["name"] = patient.name patientDb["birthdate"] = str(patient.birthdate) patientDb["sex"] = patient.sex patientDb["directory"] = patient.directory serieDb = {"serie": serieDb, "study" : studyDb, "patient" : patientDb} result.append(serieDb) return result def updateValue(self): run = True msg = multiprocessutils.readMessage(self._parentConn, self._messageCount, self._lock) if msg: (txt, st) = msg self._text = txt self.status = st self._loaderBar.updateValue() if self.status != "running": run = False if run and self._statusShared.value == "r" and self.process.is_alive(): QtCore.QTimer.singleShot(500, self.updateValue) else: self._loaderBar.stopProcess() def start(self): # DBUtils().createConnection() # print self._series[0].study # print self._series[0].study.patient p_conn, c_conn = Pipe() self._parentConn = p_conn self.process = Process(target=makeExport, args = (c_conn, self._messageCount, self._lock, self._statusShared, self._outputFile, self._series, self._dicomImages, Zipper.key)) self.process.start() QtCore.QTimer.singleShot(500, self.updateValue) def getProgress(self): return self._progress, self._text def cancel(self): self._canceled = True self._loaderBar.stopProcess() self._zipper.cancel() self._text = "Canceled" def makeExport(conn, messageCount, lock, statusShared, outputFile, series, dicomImages, key): ExporterProcess(conn, messageCount, lock, statusShared, outputFile, series, dicomImages, key) class 
ExporterProcess(): def __init__(self, conn, messageCount, lock, sharedStatus, outputFile, series, dicomImages, key): self._zipper = Zipper() self._outputFile = outputFile self._series = series self._dicomImages = dicomImages self._conn = conn self._messageCount = messageCount self._lock = lock self._sharedStatus = sharedStatus self._text = "Exporting" self._status = "running" self._key = key self.export() def updateStatus(self): multiprocessutils.sendMessage(self._conn, self._messageCount, self._lock, (self._text, self._status)) def export(self): try: seriesDicom = [] self._zipper.start(self._outputFile) seriesInfo = [] for serieDict in self._series: if self._sharedStatus.value == "r": serie = serieDict["serie"] self._text = "Exporting {0}".format(serie["description"]) self.updateStatus() patient = serieDict["patient"] serieDicomFolder = "{0}{1}{2}".format(patient["directory"], os.path.sep, hashStr(serie["uid"])) serieFolder = "{0}{1}".format(serieDicomFolder, hashStr(serie["description"])) serieDbPath = "{0}{1}{2}".format(serieFolder,os.path.sep, "export.db") persist_yaml_file(serieDbPath, serieDict) self._zipper.recursive_zip(serieFolder) remove_file(serieDbPath) if seriesDicom.count(serie["uid"]) == 0: if self._dicomImages: self._zipper.recursive_zip(serieDicomFolder) else: folderImagePath = "{0}{1}{2}".format(serieDicomFolder, os.path.sep, "images") files = os.listdir(folderImagePath) files = sorted(files) file = files[len(files)/2] filePath = "{0}{1}{2}".format(folderImagePath, os.path.sep, file) root = "{0}{1}{2}{3}{4}".format(hashStr(serie["uid"]), os.path.sep, "images", os.path.sep, file) self._zipper.recursive_zip(filePath, root) seriesDicom.append(serie["uid"]) serieDicomOutFolder = hashStr(serie["uid"]) serieOutFolder = "{0}{1}".format(serieDicomOutFolder, hashStr(serie["description"])) seriesInfo.append({"serieFolder" :serieOutFolder, "serieDicomFolder" : serieDicomOutFolder}) if self._sharedStatus.value == "r": seriesInfoFile = 
tempfile.NamedTemporaryFile(delete=False) persist_yaml_file(seriesInfoFile.name, seriesInfo) seriesInfoFilename = compact.encryptFile(seriesInfoFile.name, self._key) self._zipper.recursive_zip(seriesInfoFilename, "seriesInfo") self._zipper.finish() finally: self._status = "finish" self._sharedStatus.value = "f" self.updateStatus() def exportSerie(serie, outputFile): seriesDicom = [] zipper = Zipper() zipper.start(outputFile) seriesInfo = [] study = serie.study patient = study.patient serieDicomFolder = "{0}{1}{2}".format(patient.directory, os.path.sep, hashStr(serie.uid)) serieFolder = "{0}{1}".format(serieDicomFolder, hashStr(serie.description)) serieDbPath = "{0}{1}{2}".format(serieFolder,os.path.sep, "export.db") serieDb = {} serieDb["uid"] = serie.uid serieDb["dicomImages"] = serie.dicomImages serieDb["description"] = serie.description serieDb["thickness"] = serie.thickness serieDb["size"] = serie.size serieDb["zSpacing"] = serie.zSpacing serieDb["study"] = study.uid studyDb = {} studyDb["uid"] = study.uid studyDb["modality"] = study.modality studyDb["description"] = study.description studyDb["institution"] = study.institution studyDb["patient"] = patient.uid patientDb = {} patientDb["uid"] = patient.uid patientDb["name"] = patient.name patientDb["birthdate"] = str(patient.birthdate) patientDb["sex"] = patient.sex patientDb["directory"] = patient.directory serieDict = {"serie": serieDb, "study" : studyDb, "patient" : patientDb} persist_yaml_file(serieDbPath, serieDict) zipper.recursive_zip(serieFolder) remove_file(serieDbPath) if seriesDicom.count(serie.uid) == 0: zipper.recursive_zip(serieDicomFolder) seriesDicom.append(serie.uid) serieDicomOutFolder = hashStr(serie.uid) serieOutFolder = "{0}{1}".format(serieDicomOutFolder, hashStr(serie.description)) seriesInfo.append({"serieFolder" :serieOutFolder, "serieDicomFolder" : serieDicomOutFolder}) seriesInfoFile = tempfile.NamedTemporaryFile(delete=False) persist_yaml_file(seriesInfoFile.name, seriesInfo) 
seriesInfoFilename = compact.encryptFile(seriesInfoFile.name, zipper.key) zipper.recursive_zip(seriesInfoFilename, "seriesInfo") zipper.finish()
lgpl-3.0
rampantpixels/resource_lib
build/ninja/generator.py
9
6489
#!/usr/bin/env python """Ninja build generator""" import argparse import os import pipes import sys import platform import toolchain import syntax class Generator(object): def __init__(self, project, includepaths = [], dependlibs = [], libpaths = [], variables = None): parser = argparse.ArgumentParser(description = 'Ninja build generator') parser.add_argument('-t', '--target', help = 'Target platform', choices = platform.supported_platforms()) parser.add_argument('--host', help = 'Host platform', choices = platform.supported_platforms()) parser.add_argument('--toolchain', help = 'Toolchain to use', choices = toolchain.supported_toolchains()) parser.add_argument('-c', '--config', action = 'append', help = 'Build configuration', choices = ['debug', 'release', 'profile', 'deploy'], default = []) parser.add_argument('-a', '--arch', action = 'append', help = 'Add architecture', choices = toolchain.supported_architectures(), default = []) parser.add_argument('-i', '--includepath', action = 'append', help = 'Add include path', default = []) parser.add_argument('--monolithic', action='store_true', help = 'Build monolithic test suite', default = False) parser.add_argument('--coverage', action='store_true', help = 'Build with code coverage', default = False) parser.add_argument('--subninja', action='store', help = 'Build as subproject (exclude rules and pools) with the given subpath', default = '') parser.add_argument('--buildprefs', action='store', help = 'Read the given build preferences file', default = '') parser.add_argument('--updatebuild', action='store_true', help = 'Update submodule build scripts', default = '') options = parser.parse_args() self.project = project self.target = platform.Platform(options.target) self.host = platform.Platform(options.host) self.subninja = options.subninja archs = options.arch configs = options.config if includepaths is None: includepaths = [] if not options.includepath is None: includepaths += options.includepath buildfile = 
open('build.ninja', 'w') self.writer = syntax.Writer(buildfile) self.writer.variable('ninja_required_version', '1.3') self.writer.newline() self.writer.comment('configure.py arguments') self.writer.variable('configure_args', ' '.join(sys.argv[1:])) self.writer.newline() self.writer.comment('configure options') self.writer.variable('configure_target', self.target.platform) self.writer.variable('configure_host', self.host.platform) env_keys = set(['CC', 'AR', 'LINK', 'CFLAGS', 'ARFLAGS', 'LINKFLAGS']) configure_env = dict((key, os.environ[key]) for key in os.environ if key in env_keys) if configure_env: config_str = ' '.join([key + '=' + pipes.quote(configure_env[key]) for key in configure_env]) self.writer.variable('configure_env', config_str + '$ ') if variables is None: variables = {} if not isinstance(variables, dict): variables = dict(variables) if options.monolithic: variables['monolithic'] = True if options.coverage: variables['coverage'] = True if self.subninja != '': variables['internal_deps'] = True self.toolchain = toolchain.make_toolchain(self.host, self.target, options.toolchain) self.toolchain.buildprefs = options.buildprefs self.toolchain.initialize(project, archs, configs, includepaths, dependlibs, libpaths, variables, self.subninja) self.writer.variable('configure_toolchain', self.toolchain.name()) self.writer.variable('configure_archs', archs) self.writer.variable('configure_configs', configs) self.writer.newline() self.toolchain.write_variables(self.writer) if self.subninja == '': self.toolchain.write_rules(self.writer) def target(self): return self.target def host(self): return self.host def toolchain(self): return self.toolchain def writer(self): return self.writer def is_subninja(self): return self.subninja != '' def lib(self, module, sources, libname = None, basepath = None, configs = None, includepaths = None, variables = None): return self.toolchain.lib(self.writer, module, sources, libname, basepath, configs, includepaths, variables) def 
sharedlib(self, module, sources, libname = None, basepath = None, configs = None, includepaths = None, libpaths = None, implicit_deps = None, dependlibs = None, libs = None, frameworks = None, variables = None): return self.toolchain.sharedlib(self.writer, module, sources, libname, basepath, configs, includepaths, libpaths, implicit_deps, dependlibs, libs, frameworks, variables) def bin(self, module, sources, binname, basepath = None, configs = None, includepaths = None, libpaths = None, implicit_deps = None, dependlibs = None, libs = None, frameworks = None, variables = None): return self.toolchain.bin(self.writer, module, sources, binname, basepath, configs, includepaths, libpaths, implicit_deps, dependlibs, libs, frameworks, variables) def app(self, module, sources, binname, basepath = None, configs = None, includepaths = None, libpaths = None, implicit_deps = None, dependlibs = None, libs = None, frameworks = None, variables = None, resources = None): return self.toolchain.app(self.writer, module, sources, binname, basepath, configs, includepaths, libpaths, implicit_deps, dependlibs, libs, frameworks, variables, resources) def test_includepaths(self): #TODO: This is ugly if self.project == "foundation": return ['test'] foundation_path = os.path.join('..', 'foundation_lib') if not os.path.isfile(os.path.join(foundation_path, 'foundation', 'foundation.h')): foundation_path = os.path.join('..', 'foundation') return ['test', os.path.join(foundation_path, 'test')] def test_monolithic(self): return self.toolchain.is_monolithic()
unlicense
miaecle/deepchem
deepchem/utils/test/test_evaluate.py
1
11874
"""Unit tests for evaluators.""" import deepchem as dc import numpy as np import unittest import sklearn from deepchem.utils.evaluate import Evaluator from deepchem.utils.evaluate import GeneratorEvaluator def test_multiclass_threshold_predictions(): """Check prediction thresholding works correctly.""" # Construct a random class probability matrix y = np.random.rand(10, 5) y_sums = np.sum(y, axis=1) y = y / y_sums[:, None] y_out = dc.metrics.threshold_predictions(y) assert y_out.shape == (10,) assert np.allclose(y_out, np.argmax(y, axis=1)) def test_binary_threshold_predictions(): """Check prediction thresholding works correctly.""" # Construct a random class probability matrix y = np.random.rand(10, 2) y_sums = np.sum(y, axis=1) y = y / y_sums[:, None] y_out = dc.metrics.threshold_predictions(y, threshold=0.3) assert y_out.shape == (10,) assert np.allclose(y_out, np.where(y[:, 1] >= 0.3, np.ones(10), np.zeros(10))) def test_evaluator_dc_metric(): """Test an evaluator on a dataset.""" X = np.random.rand(10, 5) y = np.random.rand(10, 1) dataset = dc.data.NumpyDataset(X, y) model = dc.models.MultitaskRegressor(1, 5) evaluator = Evaluator(model, dataset, []) metric = dc.metrics.Metric(dc.metrics.mae_score) multitask_scores = evaluator.compute_model_performance(metric) assert isinstance(multitask_scores, dict) assert len(multitask_scores) == 1 assert multitask_scores['mae_score'] > 0 def test_multiclass_classification_singletask(): """Test multiclass classification evaluation.""" X = np.random.rand(100, 5) y = np.random.randint(5, size=(100,)) dataset = dc.data.NumpyDataset(X, y) model = dc.models.MultitaskClassifier(1, 5, n_classes=5) evaluator = Evaluator(model, dataset, []) multitask_scores = evaluator.compute_model_performance( dc.metrics.roc_auc_score, n_classes=5) assert len(multitask_scores) == 1 assert multitask_scores["metric-1"] >= 0 def test_sklearn_multiclass_classification_singletask(): """Test multiclass classification evaluation.""" X = 
np.random.rand(100, 5) y = np.random.randint(5, size=(100,)) dataset = dc.data.NumpyDataset(X, y) rf = sklearn.ensemble.RandomForestClassifier(50) model = dc.models.SklearnModel(rf) model.fit(dataset) evaluator = Evaluator(model, dataset, []) multitask_scores = evaluator.compute_model_performance( dc.metrics.roc_auc_score, n_classes=5) assert len(multitask_scores) == 1 assert multitask_scores["metric-1"] >= 0 def test_evaluate_multiclass_classification_singletask(): """Test multiclass classification evaluation.""" X = np.random.rand(100, 5) y = np.random.randint(5, size=(100,)) dataset = dc.data.NumpyDataset(X, y) model = dc.models.MultitaskClassifier(1, 5, n_classes=5) multitask_scores = model.evaluate( dataset, dc.metrics.roc_auc_score, n_classes=5) assert len(multitask_scores) == 1 assert multitask_scores["metric-1"] >= 0 def test_multitask_evaluator(): """Test evaluation of a multitask metric.""" n_tasks = 2 X = np.random.rand(10, 5) y = np.random.rand(10, 2, 1) dataset = dc.data.NumpyDataset(X, y) model = dc.models.MultitaskRegressor(2, 5) evaluator = Evaluator(model, dataset, []) metric = dc.metrics.Metric(dc.metrics.mae_score) multitask_scores, all_task_scores = evaluator.compute_model_performance( metric, per_task_metrics=True) assert isinstance(multitask_scores, dict) assert len(multitask_scores) == 1 assert multitask_scores['mae_score'] > 0 assert isinstance(all_task_scores, dict) assert len(multitask_scores) == 1 def test_model_evaluate_dc_metric(): """Test a model evaluate on a dataset.""" X = np.random.rand(10, 5) y = np.random.rand(10, 1) dataset = dc.data.NumpyDataset(X, y) model = dc.models.MultitaskRegressor(1, 5) metric = dc.metrics.Metric(dc.metrics.mae_score) multitask_scores = model.evaluate(dataset, metric, []) assert isinstance(multitask_scores, dict) assert len(multitask_scores) == 1 assert multitask_scores['mae_score'] > 0 def test_multitask_model_evaluate_sklearn(): """Test evaluation of a multitask metric.""" n_tasks = 2 X = 
np.random.rand(10, 5) y = np.random.rand(10, 2) dataset = dc.data.NumpyDataset(X, y) model = dc.models.MultitaskRegressor(2, 5) evaluator = Evaluator(model, dataset, []) multitask_scores, all_task_scores = evaluator.compute_model_performance( dc.metrics.mean_absolute_error, per_task_metrics=True) assert isinstance(multitask_scores, dict) assert len(multitask_scores) == 1 assert multitask_scores['metric-1'] > 0 assert isinstance(all_task_scores, dict) assert len(multitask_scores) == 1 def test_multitask_model_evaluate(): """Test evaluation of a multitask metric.""" n_tasks = 2 X = np.random.rand(10, 5) y = np.random.rand(10, 2) dataset = dc.data.NumpyDataset(X, y) model = dc.models.MultitaskRegressor(2, 5) multitask_scores, all_task_scores = model.evaluate( dataset, dc.metrics.mean_absolute_error, per_task_metrics=True) assert isinstance(multitask_scores, dict) assert len(multitask_scores) == 1 assert multitask_scores["metric-1"] > 0 assert isinstance(all_task_scores, dict) def test_evaluator_dc_multi_metric(): """Test an evaluator on a dataset.""" X = np.random.rand(10, 5) y = np.random.rand(10, 1) dataset = dc.data.NumpyDataset(X, y) model = dc.models.MultitaskRegressor(1, 5) evaluator = Evaluator(model, dataset, []) metric1 = dc.metrics.Metric(dc.metrics.mae_score, n_tasks=2) metric2 = dc.metrics.Metric(dc.metrics.r2_score, n_tasks=2) multitask_scores = evaluator.compute_model_performance([metric1, metric2]) assert isinstance(multitask_scores, dict) assert len(multitask_scores) == 2 assert multitask_scores['mae_score'] > 0 assert "r2_score" in multitask_scores def test_model_evaluate_dc_multi_metric(): """Test an evaluator on a dataset.""" X = np.random.rand(10, 5) y = np.random.rand(10, 1) dataset = dc.data.NumpyDataset(X, y) model = dc.models.MultitaskRegressor(1, 5) metric1 = dc.metrics.Metric(dc.metrics.mae_score) metric2 = dc.metrics.Metric(dc.metrics.r2_score) multitask_scores = model.evaluate(dataset, [metric1, metric2]) assert isinstance(multitask_scores, 
dict) assert len(multitask_scores) == 2 assert multitask_scores['mae_score'] > 0 assert "r2_score" in multitask_scores def test_generator_evaluator_dc_metric_multitask_single_point(): """Test generator evaluator on a generator.""" X = np.random.rand(10, 5) y = np.random.rand(10, 1) dataset = dc.data.NumpyDataset(X, y) model = dc.models.MultitaskRegressor(1, 5) generator = model.default_generator(dataset, pad_batches=False) evaluator = GeneratorEvaluator(model, generator, []) metric = dc.metrics.Metric(dc.metrics.mae_score) multitask_scores = evaluator.compute_model_performance(metric) assert isinstance(multitask_scores, dict) assert len(multitask_scores) == 1 assert multitask_scores['mae_score'] > 0 assert len(multitask_scores) == 1 def test_evaluator_sklearn_metric(): """Test an evaluator on a dataset.""" X = np.random.rand(10, 5) y = np.random.rand(10, 1) dataset = dc.data.NumpyDataset(X, y) model = dc.models.MultitaskRegressor(1, 5) evaluator = Evaluator(model, dataset, []) multitask_scores = evaluator.compute_model_performance( dc.metrics.mean_absolute_error) assert isinstance(multitask_scores, dict) assert len(multitask_scores) == 1 # Note that since no name as provided, metrics are index by order # given. 
assert multitask_scores['metric-1'] > 0 def test_generator_evaluator_dc_metric_multitask(): """Test generator evaluator on a generator.""" X = np.random.rand(10, 5) y = np.random.rand(10, 1) dataset = dc.data.NumpyDataset(X, y) model = dc.models.MultitaskRegressor(1, 5) generator = model.default_generator(dataset, pad_batches=False) evaluator = GeneratorEvaluator(model, generator, []) metric = dc.metrics.Metric(dc.metrics.mae_score) multitask_scores = evaluator.compute_model_performance(metric) assert isinstance(multitask_scores, dict) assert len(multitask_scores) == 1 assert multitask_scores['mae_score'] > 0 def test_model_evaluate_sklearn_metric(): """Test a model evaluate on a dataset.""" X = np.random.rand(10, 5) y = np.random.rand(10, 1) dataset = dc.data.NumpyDataset(X, y) model = dc.models.MultitaskRegressor(1, 5) multitask_scores = model.evaluate(dataset, dc.metrics.mean_absolute_error) assert isinstance(multitask_scores, dict) assert len(multitask_scores) == 1 # Note that since no name as provided, metrics are index by order # given. assert multitask_scores['metric-1'] > 0 def test_evaluator_sklearn_multi_metric(): """Test an evaluator on a dataset.""" X = np.random.rand(10, 5) y = np.random.rand(10, 1) dataset = dc.data.NumpyDataset(X, y) model = dc.models.MultitaskRegressor(1, 5) evaluator = Evaluator(model, dataset, []) multitask_scores = evaluator.compute_model_performance( [dc.metrics.mean_absolute_error, dc.metrics.r2_score]) assert isinstance(multitask_scores, dict) assert len(multitask_scores.keys()) == 2 # Note that since no name as provided, metrics are index by order # given. 
assert multitask_scores['metric-1'] > 0 assert "metric-2" in multitask_scores def test_model_evaluate_sklearn_multi_metric(): """Test an evaluator on a dataset.""" X = np.random.rand(10, 5) y = np.random.rand(10, 1) dataset = dc.data.NumpyDataset(X, y) model = dc.models.MultitaskRegressor(1, 5) multitask_scores = model.evaluate( dataset, [dc.metrics.mean_absolute_error, dc.metrics.r2_score]) assert isinstance(multitask_scores, dict) assert len(multitask_scores.keys()) == 2 # Note that since no name as provided, metrics are index by order # given. assert multitask_scores['metric-1'] > 0 assert "metric-2" in multitask_scores def test_gc_binary_classification(): """Test multiclass classification evaluation.""" smiles = ["C", "CC"] featurizer = dc.feat.ConvMolFeaturizer() X = featurizer.featurize(smiles) y = np.random.randint(2, size=(len(smiles),)) dataset = dc.data.NumpyDataset(X, y) model = dc.models.GraphConvModel(1, mode="classification") # TODO: Fix this case with correct thresholding evaluator = Evaluator(model, dataset, []) multitask_scores = evaluator.compute_model_performance( dc.metrics.accuracy_score, n_classes=2) assert len(multitask_scores) == 1 assert multitask_scores["metric-1"] >= 0 def test_gc_binary_kappa_classification(): """Test multiclass classification evaluation.""" np.random.seed(1234) smiles = ["C", "CC", "CO", "CCC", "CCCC"] featurizer = dc.feat.ConvMolFeaturizer() X = featurizer.featurize(smiles) y = np.random.randint(2, size=(len(smiles),)) dataset = dc.data.NumpyDataset(X, y) model = dc.models.GraphConvModel(1, mode="classification") # TODO: Fix this case with correct thresholding evaluator = Evaluator(model, dataset, []) multitask_scores = evaluator.compute_model_performance( dc.metrics.kappa_score, n_classes=2) assert len(multitask_scores) == 1 assert multitask_scores["metric-1"] <= 1 assert multitask_scores["metric-1"] >= -1 def test_gc_multiclass_classification(): """Test multiclass classification evaluation.""" np.random.seed(1234) 
smiles = ["C", "CC"] featurizer = dc.feat.ConvMolFeaturizer() X = featurizer.featurize(smiles) y = np.random.randint(5, size=(len(smiles),)) dataset = dc.data.NumpyDataset(X, y) model = dc.models.GraphConvModel(1, mode="classification", n_classes=5) evaluator = Evaluator(model, dataset, []) multitask_scores = evaluator.compute_model_performance( dc.metrics.accuracy_score, n_classes=5) assert len(multitask_scores) == 1 assert multitask_scores["metric-1"] >= 0
mit
ride90/Booktype
lib/booki/editor/south_migrations/0006_auto__add_publishwizzard.py
12
15684
# encoding: utf-8 import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding model 'PublishWizzard' db.create_table('editor_publishwizzard', ( ('wizz_options', self.gf('django.db.models.fields.TextField')(default='')), ('book', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['editor.Book'], null=True)), ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('wizz_type', self.gf('django.db.models.fields.CharField')(max_length=20)), ('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])), )) db.send_create_signal('editor', ['PublishWizzard']) def backwards(self, orm): # Deleting model 'PublishWizzard' db.delete_table('editor_publishwizzard') models = { 'auth.group': { 'Meta': {'object_name': 'Group'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, 'auth.permission': { 'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, 'auth.user': { 'Meta': {'object_name': 'User'}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'groups': 
('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, 'contenttypes.contenttype': { 'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, 'editor.attachment': { 'Meta': {'object_name': 'Attachment'}, 'attachment': ('django.db.models.fields.files.FileField', [], {'max_length': '2500'}), 'book': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['editor.Book']"}), 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'status': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['editor.BookStatus']"}), 'version': 
('django.db.models.fields.related.ForeignKey', [], {'to': "orm['editor.BookVersion']"}) }, 'editor.attributionexclude': { 'Meta': {'object_name': 'AttributionExclude'}, 'book': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['editor.Book']", 'null': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}) }, 'editor.book': { 'Meta': {'object_name': 'Book'}, 'cover': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True'}), 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), 'description': ('django.db.models.fields.TextField', [], {'default': "''"}), 'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['editor.BookiGroup']", 'null': 'True'}), 'hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['editor.Language']", 'null': 'True'}), 'license': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['editor.License']", 'null': 'True'}), 'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}), 'permission': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}), 'published': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), 'status': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'status'", 'null': 'True', 'to': "orm['editor.BookStatus']"}), 'title': ('django.db.models.fields.CharField', [], {'max_length': '2500'}), 'url_title': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '2500'}), 'version': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'version'", 'null': 'True', 'to': "orm['editor.BookVersion']"}) }, 
'editor.bookhistory': { 'Meta': {'object_name': 'BookHistory'}, 'args': ('django.db.models.fields.CharField', [], {'max_length': '2500'}), 'book': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['editor.Book']"}), 'chapter': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['editor.Chapter']", 'null': 'True'}), 'chapter_history': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['editor.ChapterHistory']", 'null': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'kind': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}), 'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}), 'version': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['editor.BookVersion']", 'null': 'True'}) }, 'editor.bookigroup': { 'Meta': {'object_name': 'BookiGroup'}, 'created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), 'description': ('django.db.models.fields.TextField', [], {}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'members': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'members'", 'blank': 'True', 'to': "orm['auth.User']"}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '300'}), 'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}), 'url_name': ('django.db.models.fields.CharField', [], {'max_length': '300'}) }, 'editor.bookipermission': { 'Meta': {'object_name': 'BookiPermission'}, 'book': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['editor.Book']", 'null': 'True'}), 'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['editor.BookiGroup']", 'null': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'permission': 
('django.db.models.fields.SmallIntegerField', [], {}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}) }, 'editor.booknotes': { 'Meta': {'object_name': 'BookNotes'}, 'book': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['editor.Book']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'notes': ('django.db.models.fields.TextField', [], {}) }, 'editor.bookstatus': { 'Meta': {'object_name': 'BookStatus'}, 'book': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['editor.Book']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '30'}), 'weight': ('django.db.models.fields.SmallIntegerField', [], {}) }, 'editor.booktoc': { 'Meta': {'object_name': 'BookToc'}, 'book': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['editor.Book']"}), 'chapter': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['editor.Chapter']", 'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '2500', 'blank': 'True'}), 'typeof': ('django.db.models.fields.SmallIntegerField', [], {}), 'version': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['editor.BookVersion']"}), 'weight': ('django.db.models.fields.IntegerField', [], {}) }, 'editor.bookversion': { 'Meta': {'object_name': 'BookVersion'}, 'book': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['editor.Book']"}), 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), 'description': ('django.db.models.fields.CharField', [], {'max_length': '250', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'major': ('django.db.models.fields.IntegerField', [], {}), 'minor': ('django.db.models.fields.IntegerField', [], 
{}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}) }, 'editor.chapter': { 'Meta': {'object_name': 'Chapter'}, 'book': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['editor.Book']"}), 'content': ('django.db.models.fields.TextField', [], {}), 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}), 'revision': ('django.db.models.fields.IntegerField', [], {'default': '1'}), 'status': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['editor.BookStatus']"}), 'title': ('django.db.models.fields.CharField', [], {'max_length': '2500'}), 'url_title': ('django.db.models.fields.CharField', [], {'max_length': '2500'}), 'version': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['editor.BookVersion']"}) }, 'editor.chapterhistory': { 'Meta': {'object_name': 'ChapterHistory'}, 'chapter': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['editor.Chapter']"}), 'comment': ('django.db.models.fields.CharField', [], {'max_length': '2500', 'blank': 'True'}), 'content': ('django.db.models.fields.TextField', [], {}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), 'revision': ('django.db.models.fields.IntegerField', [], {'default': '1'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}) }, 'editor.info': { 'Meta': {'object_name': 'Info'}, 'book': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['editor.Book']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'kind': ('django.db.models.fields.SmallIntegerField', [], {}), 'name': ('django.db.models.fields.CharField', 
[], {'max_length': '2500', 'db_index': 'True'}), 'value_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), 'value_integer': ('django.db.models.fields.IntegerField', [], {'null': 'True'}), 'value_string': ('django.db.models.fields.CharField', [], {'max_length': '2500', 'null': 'True'}), 'value_text': ('django.db.models.fields.TextField', [], {'null': 'True'}) }, 'editor.language': { 'Meta': {'object_name': 'Language'}, 'abbrevation': ('django.db.models.fields.CharField', [], {'max_length': '10'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, 'editor.license': { 'Meta': {'object_name': 'License'}, 'abbrevation': ('django.db.models.fields.CharField', [], {'max_length': '30'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, 'editor.publishwizzard': { 'Meta': {'object_name': 'PublishWizzard'}, 'book': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['editor.Book']", 'null': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}), 'wizz_options': ('django.db.models.fields.TextField', [], {'default': "''"}), 'wizz_type': ('django.db.models.fields.CharField', [], {'max_length': '20'}) } } complete_apps = ['editor']
agpl-3.0
chaluemwut/fbserver
venv/lib/python2.7/site-packages/scipy/signal/tests/test_savitzky_golay.py
18
9463
from __future__ import division, print_function, absolute_import import numpy as np from numpy.testing import (run_module_suite, assert_allclose, assert_equal, assert_almost_equal, assert_array_equal) from scipy.ndimage import convolve1d from scipy.signal import savgol_coeffs, savgol_filter from scipy.signal._savitzky_golay import _polyder def check_polyder(p, m, expected): dp = _polyder(p, m) assert_array_equal(dp, expected) def test_polyder(): cases = [ ([5], 0, [5]), ([5], 1, [0]), ([3, 2, 1], 0, [3, 2, 1]), ([3, 2, 1], 1, [6, 2]), ([3, 2, 1], 2, [6]), ([3, 2, 1], 3, [0]), ([[3, 2, 1], [5, 6, 7]], 0, [[3, 2, 1], [5, 6, 7]]), ([[3, 2, 1], [5, 6, 7]], 1, [[6, 2], [10, 6]]), ([[3, 2, 1], [5, 6, 7]], 2, [[6], [10]]), ([[3, 2, 1], [5, 6, 7]], 3, [[0], [0]]), ] for p, m, expected in cases: yield check_polyder, np.array(p).T, m, np.array(expected).T #-------------------------------------------------------------------- # savgol_coeffs tests #-------------------------------------------------------------------- def alt_sg_coeffs(window_length, polyorder, pos): """This is an alternative implementation of the SG coefficients. It uses numpy.polyfit and numpy.polyval. The results should be equivalent to those of savgol_coeffs(), but this implementation is slower. window_length should be odd. 
""" if pos is None: pos = window_length // 2 t = np.arange(window_length) unit = (t == pos).astype(int) h = np.polyval(np.polyfit(t, unit, polyorder), t) return h def test_sg_coeffs_trivial(): # Test a trivial case of savgol_coeffs: polyorder = window_length - 1 h = savgol_coeffs(1, 0) assert_allclose(h, [1]) h = savgol_coeffs(3, 2) assert_allclose(h, [0, 1, 0], atol=1e-10) h = savgol_coeffs(5, 4) assert_allclose(h, [0, 0, 1, 0, 0], atol=1e-10) h = savgol_coeffs(5, 4, pos=1) assert_allclose(h, [0, 0, 0, 1, 0], atol=1e-10) h = savgol_coeffs(5, 4, pos=1, use='dot') assert_allclose(h, [0, 1, 0, 0, 0], atol=1e-10) def compare_coeffs_to_alt(window_length, order): # For the given window_length and order, compare the results # of savgol_coeffs and alt_sg_coeffs for pos from 0 to window_length - 1. # Also include pos=None. for pos in [None] + list(range(window_length)): h1 = savgol_coeffs(window_length, order, pos=pos, use='dot') h2 = alt_sg_coeffs(window_length, order, pos=pos) assert_allclose(h1, h2, atol=1e-10, err_msg=("window_length = %d, order = %d, pos = %s" % (window_length, order, pos))) def test_sg_coeffs_compare(): # Compare savgol_coeffs() to alt_sg_coeffs(). for window_length in range(1, 8, 2): for order in range(window_length): yield compare_coeffs_to_alt, window_length, order def test_sg_coeffs_exact(): polyorder = 4 window_length = 9 halflen = window_length // 2 x = np.linspace(0, 21, 43) delta = x[1] - x[0] # The data is a cubic polynomial. We'll use an order 4 # SG filter, so the filtered values should equal the input data # (except within half window_length of the edges). y = 0.5 * x ** 3 - x h = savgol_coeffs(window_length, polyorder) y0 = convolve1d(y, h) assert_allclose(y0[halflen:-halflen], y[halflen:-halflen]) # Check the same input, but use deriv=1. dy is the exact result. 
dy = 1.5 * x ** 2 - 1 h = savgol_coeffs(window_length, polyorder, deriv=1, delta=delta) y1 = convolve1d(y, h) assert_allclose(y1[halflen:-halflen], dy[halflen:-halflen]) # Check the same input, but use deriv=2. d2y is the exact result. d2y = 3.0 * x h = savgol_coeffs(window_length, polyorder, deriv=2, delta=delta) y2 = convolve1d(y, h) assert_allclose(y2[halflen:-halflen], d2y[halflen:-halflen]) def test_sg_coeffs_deriv(): # The data in `x` is a sampled parabola, so using savgol_coeffs with an # order 2 or higher polynomial should give exact results. i = np.array([-2.0, 0.0, 2.0, 4.0, 6.0]) x = i ** 2 / 4 dx = i / 2 d2x = 0.5 * np.ones_like(i) for pos in range(x.size): coeffs0 = savgol_coeffs(5, 3, pos=pos, delta=2.0, use='dot') assert_allclose(coeffs0.dot(x), x[pos], atol=1e-10) coeffs1 = savgol_coeffs(5, 3, pos=pos, delta=2.0, use='dot', deriv=1) assert_allclose(coeffs1.dot(x), dx[pos], atol=1e-10) coeffs2 = savgol_coeffs(5, 3, pos=pos, delta=2.0, use='dot', deriv=2) assert_allclose(coeffs2.dot(x), d2x[pos], atol=1e-10) #-------------------------------------------------------------------- # savgol_filter tests #-------------------------------------------------------------------- def test_sg_filter_trivial(): """ Test some trivial edge cases for savgol_filter().""" x = np.array([1.0]) y = savgol_filter(x, 1, 0) assert_equal(y, [1.0]) # Input is a single value. With a window length of 3 and polyorder 1, # the value in y is from the straight-line fit of (-1,0), (0,3) and # (1, 0) at 0. This is just the average of the three values, hence 1.0. x = np.array([3.0]) y = savgol_filter(x, 3, 1, mode='constant') assert_almost_equal(y, [1.0], decimal=15) x = np.array([3.0]) y = savgol_filter(x, 3, 1, mode='nearest') assert_almost_equal(y, [3.0], decimal=15) x = np.array([1.0] * 3) y = savgol_filter(x, 3, 1, mode='wrap') assert_almost_equal(y, [1.0, 1.0, 1.0], decimal=15) def test_sg_filter_basic(): # Some basic test cases for savgol_filter(). 
x = np.array([1.0, 2.0, 1.0]) y = savgol_filter(x, 3, 1, mode='constant') assert_allclose(y, [1.0, 4.0 / 3, 1.0]) y = savgol_filter(x, 3, 1, mode='mirror') assert_allclose(y, [5.0 / 3, 4.0 / 3, 5.0 / 3]) y = savgol_filter(x, 3, 1, mode='wrap') assert_allclose(y, [4.0 / 3, 4.0 / 3, 4.0 / 3]) def test_sg_filter_2d(): x = np.array([[1.0, 2.0, 1.0], [2.0, 4.0, 2.0]]) expected = np.array([[1.0, 4.0 / 3, 1.0], [2.0, 8.0 / 3, 2.0]]) y = savgol_filter(x, 3, 1, mode='constant') assert_allclose(y, expected) y = savgol_filter(x.T, 3, 1, mode='constant', axis=0) assert_allclose(y, expected.T) def test_sg_filter_interp_edges(): # Another test with low degree polynomial data, for which we can easily # give the exact results. In this test, we use mode='interp', so # savgol_filter should match the exact solution for the entire data set, # including the edges. t = np.linspace(-5, 5, 21) delta = t[1] - t[0] # Polynomial test data. x = np.array([t, 3 * t ** 2, t ** 3 - t]) dx = np.array([np.ones_like(t), 6 * t, 3 * t ** 2 - 1.0]) d2x = np.array([np.zeros_like(t), 6 * np.ones_like(t), 6 * t]) window_length = 7 y = savgol_filter(x, window_length, 3, axis=-1, mode='interp') assert_allclose(y, x, atol=1e-12) y1 = savgol_filter(x, window_length, 3, axis=-1, mode='interp', deriv=1, delta=delta) assert_allclose(y1, dx, atol=1e-12) y2 = savgol_filter(x, window_length, 3, axis=-1, mode='interp', deriv=2, delta=delta) assert_allclose(y2, d2x, atol=1e-12) # Transpose everything, and test again with axis=0. x = x.T dx = dx.T d2x = d2x.T y = savgol_filter(x, window_length, 3, axis=0, mode='interp') assert_allclose(y, x, atol=1e-12) y1 = savgol_filter(x, window_length, 3, axis=0, mode='interp', deriv=1, delta=delta) assert_allclose(y1, dx, atol=1e-12) y2 = savgol_filter(x, window_length, 3, axis=0, mode='interp', deriv=2, delta=delta) assert_allclose(y2, d2x, atol=1e-12) def test_sg_filter_interp_edges_3d(): # Test mode='interp' with a 3-D array. 
t = np.linspace(-5, 5, 21) delta = t[1] - t[0] x1 = np.array([t, -t]) x2 = np.array([t ** 2, 3 * t ** 2 + 5]) x3 = np.array([t ** 3, 2 * t ** 3 + t ** 2 - 0.5 * t]) dx1 = np.array([np.ones_like(t), -np.ones_like(t)]) dx2 = np.array([2 * t, 6 * t]) dx3 = np.array([3 * t ** 2, 6 * t ** 2 + 2 * t - 0.5]) # z has shape (3, 2, 21) z = np.array([x1, x2, x3]) dz = np.array([dx1, dx2, dx3]) y = savgol_filter(z, 7, 3, axis=-1, mode='interp', delta=delta) assert_allclose(y, z, atol=1e-10) dy = savgol_filter(z, 7, 3, axis=-1, mode='interp', deriv=1, delta=delta) assert_allclose(dy, dz, atol=1e-10) # z has shape (3, 21, 2) z = np.array([x1.T, x2.T, x3.T]) dz = np.array([dx1.T, dx2.T, dx3.T]) y = savgol_filter(z, 7, 3, axis=1, mode='interp', delta=delta) assert_allclose(y, z, atol=1e-10) dy = savgol_filter(z, 7, 3, axis=1, mode='interp', deriv=1, delta=delta) assert_allclose(dy, dz, atol=1e-10) # z has shape (21, 3, 2) z = z.swapaxes(0, 1).copy() dz = dz.swapaxes(0, 1).copy() y = savgol_filter(z, 7, 3, axis=0, mode='interp', delta=delta) assert_allclose(y, z, atol=1e-10) dy = savgol_filter(z, 7, 3, axis=0, mode='interp', deriv=1, delta=delta) assert_allclose(dy, dz, atol=1e-10) if __name__ == "__main__": run_module_suite()
apache-2.0
hsaputra/tensorflow
tensorflow/python/data/__init__.py
21
1484
# Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """`tf.data.Dataset` API for input pipelines. See the @{$datasets$Importing Data} Programmer's Guide for an overview. @@Dataset @@Iterator @@FixedLengthRecordDataset @@TextLineDataset @@TFRecordDataset """ from __future__ import absolute_import from __future__ import division from __future__ import print_function # pylint: disable=unused-import from tensorflow.python.data.ops.dataset_ops import Dataset from tensorflow.python.data.ops.iterator_ops import Iterator from tensorflow.python.data.ops.readers import FixedLengthRecordDataset from tensorflow.python.data.ops.readers import TextLineDataset from tensorflow.python.data.ops.readers import TFRecordDataset # pylint: enable=unused-import from tensorflow.python.util.all_util import remove_undocumented remove_undocumented(__name__)
apache-2.0
rg3/youtube-dl
youtube_dl/extractor/bitchute.py
6
4801
# coding: utf-8 from __future__ import unicode_literals import itertools import re from .common import InfoExtractor from ..utils import ( orderedSet, urlencode_postdata, ) class BitChuteIE(InfoExtractor): _VALID_URL = r'https?://(?:www\.)?bitchute\.com/(?:video|embed|torrent/[^/]+)/(?P<id>[^/?#&]+)' _TESTS = [{ 'url': 'https://www.bitchute.com/video/szoMrox2JEI/', 'md5': '66c4a70e6bfc40dcb6be3eb1d74939eb', 'info_dict': { 'id': 'szoMrox2JEI', 'ext': 'mp4', 'title': 'Fuck bitches get money', 'description': 'md5:3f21f6fb5b1d17c3dee9cf6b5fe60b3a', 'thumbnail': r're:^https?://.*\.jpg$', 'uploader': 'Victoria X Rave', }, }, { 'url': 'https://www.bitchute.com/embed/lbb5G1hjPhw/', 'only_matching': True, }, { 'url': 'https://www.bitchute.com/torrent/Zee5BE49045h/szoMrox2JEI.webtorrent', 'only_matching': True, }] def _real_extract(self, url): video_id = self._match_id(url) webpage = self._download_webpage( 'https://www.bitchute.com/video/%s' % video_id, video_id, headers={ 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.57 Safari/537.36', }) title = self._html_search_regex( (r'<[^>]+\bid=["\']video-title[^>]+>([^<]+)', r'<title>([^<]+)'), webpage, 'title', default=None) or self._html_search_meta( 'description', webpage, 'title', default=None) or self._og_search_description(webpage) format_urls = [] for mobj in re.finditer( r'addWebSeed\s*\(\s*(["\'])(?P<url>(?:(?!\1).)+)\1', webpage): format_urls.append(mobj.group('url')) format_urls.extend(re.findall(r'as=(https?://[^&"\']+)', webpage)) formats = [ {'url': format_url} for format_url in orderedSet(format_urls)] self._check_formats(formats, video_id) self._sort_formats(formats) description = self._html_search_regex( r'(?s)<div\b[^>]+\bclass=["\']full hidden[^>]+>(.+?)</div>', webpage, 'description', fatal=False) thumbnail = self._og_search_thumbnail( webpage, default=None) or self._html_search_meta( 'twitter:image:src', webpage, 'thumbnail') uploader = 
self._html_search_regex( r'(?s)<p\b[^>]+\bclass=["\']video-author[^>]+>(.+?)</p>', webpage, 'uploader', fatal=False) return { 'id': video_id, 'title': title, 'description': description, 'thumbnail': thumbnail, 'uploader': uploader, 'formats': formats, } class BitChuteChannelIE(InfoExtractor): _VALID_URL = r'https?://(?:www\.)?bitchute\.com/channel/(?P<id>[^/?#&]+)' _TEST = { 'url': 'https://www.bitchute.com/channel/victoriaxrave/', 'playlist_mincount': 185, 'info_dict': { 'id': 'victoriaxrave', }, } _TOKEN = 'zyG6tQcGPE5swyAEFLqKUwMuMMuF6IO2DZ6ZDQjGfsL0e4dcTLwqkTTul05Jdve7' def _entries(self, channel_id): channel_url = 'https://www.bitchute.com/channel/%s/' % channel_id offset = 0 for page_num in itertools.count(1): data = self._download_json( '%sextend/' % channel_url, channel_id, 'Downloading channel page %d' % page_num, data=urlencode_postdata({ 'csrfmiddlewaretoken': self._TOKEN, 'name': '', 'offset': offset, }), headers={ 'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8', 'Referer': channel_url, 'X-Requested-With': 'XMLHttpRequest', 'Cookie': 'csrftoken=%s' % self._TOKEN, }) if data.get('success') is False: break html = data.get('html') if not html: break video_ids = re.findall( r'class=["\']channel-videos-image-container[^>]+>\s*<a\b[^>]+\bhref=["\']/video/([^"\'/]+)', html) if not video_ids: break offset += len(video_ids) for video_id in video_ids: yield self.url_result( 'https://www.bitchute.com/video/%s' % video_id, ie=BitChuteIE.ie_key(), video_id=video_id) def _real_extract(self, url): channel_id = self._match_id(url) return self.playlist_result( self._entries(channel_id), playlist_id=channel_id)
unlicense
centricular/meson
mesonbuild/backend/xcodebackend.py
1
36523
# Copyright 2014-2016 The Meson development team # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from . import backends from .. import build from .. import mesonlib import uuid, os, sys from ..mesonlib import MesonException class XCodeBackend(backends.Backend): def __init__(self, build): super().__init__(build) self.project_uid = self.environment.coredata.guid.replace('-', '')[:24] self.project_conflist = self.gen_id() self.indent = ' ' self.indent_level = 0 self.xcodetypemap = {'c' : 'sourcecode.c.c', 'a' : 'archive.ar', 'cc': 'sourcecode.cpp.cpp', 'cxx' : 'sourcecode.cpp.cpp', 'cpp' : 'sourcecode.cpp.cpp', 'c++' : 'sourcecode.cpp.cpp', 'm' : 'sourcecode.c.objc', 'mm' : 'sourcecode.cpp.objcpp', 'h' : 'sourcecode.c.h', 'hpp' : 'sourcecode.cpp.h', 'hxx' : 'sourcecode.cpp.h', 'hh' : 'sourcecode.cpp.hh', 'inc' : 'sourcecode.c.h', 'dylib' : 'compiled.mach-o.dylib', 'o' : 'compiled.mach-o.objfile',} self.maingroup_id = self.gen_id() self.all_id = self.gen_id() self.all_buildconf_id = self.gen_id() self.buildtypes = ['debug'] self.test_id = self.gen_id() self.test_command_id = self.gen_id() self.test_buildconf_id = self.gen_id() def gen_id(self): return str(uuid.uuid4()).upper().replace('-', '')[:24] def get_target_dir(self, target): dirname = os.path.join(target.get_subdir(), self.environment.coredata.get_builtin_option('buildtype')) os.makedirs(os.path.join(self.environment.get_build_dir(), dirname), exist_ok=True) return dirname def write_line(self, text): 
self.ofile.write(self.indent*self.indent_level + text) if not text.endswith('\n'): self.ofile.write('\n') def generate(self, interp): self.interpreter = interp test_data = self.serialise_tests()[0] self.generate_filemap() self.generate_buildmap() self.generate_buildstylemap() self.generate_build_phase_map() self.generate_build_configuration_map() self.generate_build_configurationlist_map() self.generate_project_configurations_map() self.generate_buildall_configurations_map() self.generate_test_configurations_map() self.generate_native_target_map() self.generate_source_phase_map() self.generate_target_dependency_map() self.generate_pbxdep_map() self.generate_containerproxy_map() self.proj_dir = os.path.join(self.environment.get_build_dir(), self.build.project_name + '.xcodeproj') os.makedirs(self.proj_dir, exist_ok=True) self.proj_file = os.path.join(self.proj_dir, 'project.pbxproj') with open(self.proj_file, 'w') as self.ofile: self.generate_prefix() self.generate_pbx_aggregate_target() self.generate_pbx_build_file() self.generate_pbx_build_style() self.generate_pbx_container_item_proxy() self.generate_pbx_file_reference() self.generate_pbx_group() self.generate_pbx_native_target() self.generate_pbx_project() self.generate_pbx_shell_build_phase(test_data) self.generate_pbx_sources_build_phase() self.generate_pbx_target_dependency() self.generate_xc_build_configuration() self.generate_xc_configurationList() self.generate_suffix() def get_xcodetype(self, fname): return self.xcodetypemap[fname.split('.')[-1]] def generate_filemap(self): self.filemap = {} # Key is source file relative to src root. 
self.target_filemap = {} for name, t in self.build.targets.items(): for s in t.sources: if isinstance(s, mesonlib.File): s = os.path.join(s.subdir, s.fname) self.filemap[s] = self.gen_id() for o in t.objects: if isinstance(o, str): o = os.path.join(t.subdir, o) self.filemap[o] = self.gen_id() self.target_filemap[name] = self.gen_id() def generate_buildmap(self): self.buildmap = {} for t in self.build.targets.values(): for s in t.sources: s = os.path.join(s.subdir, s.fname) self.buildmap[s] = self.gen_id() for o in t.objects: o = os.path.join(t.subdir, o) if isinstance(o, str): self.buildmap[o] = self.gen_id() def generate_buildstylemap(self): self.buildstylemap = {'debug' : self.gen_id()} def generate_build_phase_map(self): self.buildphasemap = {} for t in self.build.targets: self.buildphasemap[t] = self.gen_id() def generate_build_configuration_map(self): self.buildconfmap = {} for t in self.build.targets: bconfs = {'debug' : self.gen_id()} self.buildconfmap[t] = bconfs def generate_project_configurations_map(self): self.project_configurations = {'debug' : self.gen_id()} def generate_buildall_configurations_map(self): self.buildall_configurations = {'debug' : self.gen_id()} def generate_test_configurations_map(self): self.test_configurations = {'debug' : self.gen_id()} def generate_build_configurationlist_map(self): self.buildconflistmap = {} for t in self.build.targets: self.buildconflistmap[t] = self.gen_id() def generate_native_target_map(self): self.native_targets = {} for t in self.build.targets: self.native_targets[t] = self.gen_id() def generate_target_dependency_map(self): self.target_dependency_map = {} for tname, t in self.build.targets.items(): for target in t.link_targets: self.target_dependency_map[(tname, target.get_basename())] = self.gen_id() def generate_pbxdep_map(self): self.pbx_dep_map = {} for t in self.build.targets: self.pbx_dep_map[t] = self.gen_id() def generate_containerproxy_map(self): self.containerproxy_map = {} for t in 
self.build.targets: self.containerproxy_map[t] = self.gen_id() def generate_source_phase_map(self): self.source_phase = {} for t in self.build.targets: self.source_phase[t] = self.gen_id() def generate_pbx_aggregate_target(self): self.ofile.write('\n/* Begin PBXAggregateTarget section */\n') self.write_line('%s /* ALL_BUILD */ = {' % self.all_id) self.indent_level+=1 self.write_line('isa = PBXAggregateTarget;') self.write_line('buildConfigurationList = %s;' % self.all_buildconf_id) self.write_line('buildPhases = (') self.write_line(');') self.write_line('dependencies = (') self.indent_level+=1 for t in self.build.targets: self.write_line('%s /* PBXTargetDependency */,' % self.pbx_dep_map[t]) self.indent_level-=1 self.write_line(');') self.write_line('name = ALL_BUILD;') self.write_line('productName = ALL_BUILD;') self.indent_level-=1 self.write_line('};') self.write_line('%s /* RUN_TESTS */ = {' % self.test_id) self.indent_level +=1 self.write_line('isa = PBXAggregateTarget;') self.write_line('buildConfigurationList = %s;' % self.test_buildconf_id) self.write_line('buildPhases = (') self.indent_level+=1 self.write_line('%s /* test run command */,' % self.test_command_id) self.indent_level-=1 self.write_line(');') self.write_line('dependencies = (') self.write_line(');') self.write_line('name = RUN_TESTS;') self.write_line('productName = RUN_TESTS;') self.indent_level-=1 self.write_line('};') self.ofile.write('/* End PBXAggregateTarget section */\n') def generate_pbx_build_file(self): self.ofile.write('\n/* Begin PBXBuildFile section */\n') templ = '%s /* %s */ = { isa = PBXBuildFile; fileRef = %s /* %s */; settings = { COMPILER_FLAGS = "%s"; }; };\n' otempl = '%s /* %s */ = { isa = PBXBuildFile; fileRef = %s /* %s */;};\n' for t in self.build.targets.values(): for s in t.sources: if isinstance(s, mesonlib.File): s = s.fname if isinstance(s, str): s = os.path.join(t.subdir, s) idval = self.buildmap[s] fullpath = os.path.join(self.environment.get_source_dir(), s) 
fileref = self.filemap[s] fullpath2 = fullpath compiler_args = '' self.ofile.write(templ % (idval, fullpath, fileref, fullpath2, compiler_args)) for o in t.objects: o = os.path.join(t.subdir, o) idval = self.buildmap[o] fileref = self.filemap[o] fullpath = os.path.join(self.environment.get_source_dir(), o) fullpath2 = fullpath self.ofile.write(otempl % (idval, fullpath, fileref, fullpath2)) self.ofile.write('/* End PBXBuildFile section */\n') def generate_pbx_build_style(self): self.ofile.write('\n/* Begin PBXBuildStyle section */\n') for name, idval in self.buildstylemap.items(): self.write_line('%s /* %s */ = {\n' % (idval, name)) self.indent_level += 1 self.write_line('isa = PBXBuildStyle;\n') self.write_line('buildSettings = {\n') self.indent_level += 1 self.write_line('COPY_PHASE_STRIP = NO;\n') self.indent_level -= 1 self.write_line('};\n') self.write_line('name = "%s";\n' % name) self.indent_level -= 1 self.write_line('};\n') self.ofile.write('/* End PBXBuildStyle section */\n') def generate_pbx_container_item_proxy(self): self.ofile.write('\n/* Begin PBXContainerItemProxy section */\n') for t in self.build.targets: self.write_line('%s /* PBXContainerItemProxy */ = {' % self.containerproxy_map[t]) self.indent_level += 1 self.write_line('isa = PBXContainerItemProxy;') self.write_line('containerPortal = %s /* Project object */;' % self.project_uid) self.write_line('proxyType = 1;') self.write_line('remoteGlobalIDString = %s;' % self.native_targets[t]) self.write_line('remoteInfo = "%s";' % t) self.indent_level-=1 self.write_line('};') self.ofile.write('/* End PBXContainerItemProxy section */\n') def generate_pbx_file_reference(self): self.ofile.write('\n/* Begin PBXFileReference section */\n') src_templ = '%s /* %s */ = { isa = PBXFileReference; explicitFileType = "%s"; fileEncoding = 4; name = "%s"; path = "%s"; sourceTree = SOURCE_ROOT; };\n' for fname, idval in self.filemap.items(): fullpath = os.path.join(self.environment.get_source_dir(), fname) xcodetype 
= self.get_xcodetype(fname) name = os.path.split(fname)[-1] path = fname self.ofile.write(src_templ % (idval, fullpath, xcodetype, name, path)) target_templ = '%s /* %s */ = { isa = PBXFileReference; explicitFileType = "%s"; path = %s; refType = %d; sourceTree = BUILT_PRODUCTS_DIR; };\n' for tname, idval in self.target_filemap.items(): t = self.build.targets[tname] fname = t.get_filename() reftype = 0 if isinstance(t, build.Executable): typestr = 'compiled.mach-o.executable' path = fname elif isinstance(t, build.SharedLibrary): typestr = self.get_xcodetype('dummy.dylib') path = fname else: typestr = self.get_xcodetype(fname) path = '"%s"' % t.get_filename() self.ofile.write(target_templ % (idval, tname, typestr, path, reftype)) self.ofile.write('/* End PBXFileReference section */\n') def generate_pbx_group(self): groupmap = {} target_src_map = {} for t in self.build.targets: groupmap[t] = self.gen_id() target_src_map[t] = self.gen_id() self.ofile.write('\n/* Begin PBXGroup section */\n') sources_id = self.gen_id() resources_id = self.gen_id() products_id = self.gen_id() self.write_line('%s = {' % self.maingroup_id) self.indent_level+=1 self.write_line('isa = PBXGroup;') self.write_line('children = (') self.indent_level+=1 self.write_line('%s /* Sources */,' % sources_id) self.write_line('%s /* Resources */,' % resources_id) self.write_line('%s /* Products */,' % products_id) self.indent_level-=1 self.write_line(');') self.write_line('sourceTree = "<group>";') self.indent_level -= 1 self.write_line('};') # Sources self.write_line('%s /* Sources */ = {' % sources_id) self.indent_level+=1 self.write_line('isa = PBXGroup;') self.write_line('children = (') self.indent_level+=1 for t in self.build.targets: self.write_line('%s /* %s */,' % (groupmap[t], t)) self.indent_level-=1 self.write_line(');') self.write_line('name = Sources;') self.write_line('sourcetree = "<group>";') self.indent_level-=1 self.write_line('};') self.write_line('%s /* Resources */ = {' % 
resources_id) self.indent_level+=1 self.write_line('isa = PBXGroup;') self.write_line('children = (') self.write_line(');') self.write_line('name = Resources;') self.write_line('sourceTree = "<group>";') self.indent_level-=1 self.write_line('};') # Targets for t in self.build.targets: self.write_line('%s /* %s */ = {' % (groupmap[t], t)) self.indent_level+=1 self.write_line('isa = PBXGroup;') self.write_line('children = (') self.indent_level+=1 self.write_line('%s /* Source files */,' % target_src_map[t]) self.indent_level-=1 self.write_line(');') self.write_line('name = "%s";' % t) self.write_line('sourceTree = "<group>";') self.indent_level-=1 self.write_line('};') self.write_line('%s /* Source files */ = {' % target_src_map[t]) self.indent_level+=1 self.write_line('isa = PBXGroup;') self.write_line('children = (') self.indent_level+=1 for s in self.build.targets[t].sources: s = os.path.join(s.subdir, s.fname) if isinstance(s, str): self.write_line('%s /* %s */,' % (self.filemap[s], s)) for o in self.build.targets[t].objects: o = os.path.join(self.build.targets[t].subdir, o) self.write_line('%s /* %s */,' % (self.filemap[o], o)) self.indent_level-=1 self.write_line(');') self.write_line('name = "Source files";') self.write_line('sourceTree = "<group>";') self.indent_level-=1 self.write_line('};') # And finally products self.write_line('%s /* Products */ = {' % products_id) self.indent_level+=1 self.write_line('isa = PBXGroup;') self.write_line('children = (') self.indent_level+=1 for t in self.build.targets: self.write_line('%s /* %s */,' % (self.target_filemap[t], t)) self.indent_level-=1 self.write_line(');') self.write_line('name = Products;') self.write_line('sourceTree = "<group>";') self.indent_level-=1 self.write_line('};') self.ofile.write('/* End PBXGroup section */\n') def generate_pbx_native_target(self): self.ofile.write('\n/* Begin PBXNativeTarget section */\n') for tname, idval in self.native_targets.items(): t = self.build.targets[tname] 
self.write_line('%s /* %s */ = {' % (idval, tname)) self.indent_level+=1 self.write_line('isa = PBXNativeTarget;') self.write_line('buildConfigurationList = %s /* Build configuration list for PBXNativeTarget "%s" */;'\ % (self.buildconflistmap[tname], tname)) self.write_line('buildPhases = (') self.indent_level+=1 self.write_line('%s /* Sources */,' % self.buildphasemap[tname]) self.indent_level-=1 self.write_line(');') self.write_line('buildRules = (') self.write_line(');') self.write_line('dependencies = (') self.indent_level+=1 for lt in self.build.targets[tname].link_targets: # NOT DOCUMENTED, may need to make different links # to same target have different targetdependency item. idval = self.pbx_dep_map[lt.get_id()] self.write_line('%s /* PBXTargetDependency */,' % idval) self.indent_level -=1 self.write_line(");") self.write_line('name = "%s";' % tname) self.write_line('productName = "%s";' % tname) self.write_line('productReference = %s /* %s */;' % (self.target_filemap[tname], tname)) if isinstance(t, build.Executable): typestr = 'com.apple.product-type.tool' elif isinstance(t, build.StaticLibrary): typestr = 'com.apple.product-type.library.static' elif isinstance(t, build.SharedLibrary): typestr = 'com.apple.product-type.library.dynamic' else: raise MesonException('Unknown target type for %s' % tname) self.write_line('productType = "%s";' % typestr) self.indent_level-=1 self.write_line('};') self.ofile.write('/* End PBXNativeTarget section */\n') def generate_pbx_project(self): self.ofile.write('\n/* Begin PBXProject section */\n') self.write_line('%s /* Project object */ = {' % self.project_uid) self.indent_level += 1 self.write_line('isa = PBXProject;') self.write_line('attributes = {') self.indent_level += 1 self.write_line('BuildIndependentTargetsInParallel = YES;') self.indent_level -= 1 self.write_line('};') conftempl = 'buildConfigurationList = %s /* build configuration list for PBXProject "%s"*/;' self.write_line(conftempl % (self.project_conflist, 
self.build.project_name)) self.write_line('buildSettings = {') self.write_line('};') self.write_line('buildStyles = (') self.indent_level += 1 for name, idval in self.buildstylemap.items(): self.write_line('%s /* %s */,' % (idval, name)) self.indent_level -= 1 self.write_line(');') self.write_line('compatibilityVersion = "Xcode 3.2";') self.write_line('hasScannedForEncodings = 0;') self.write_line('mainGroup = %s;' % self.maingroup_id) self.write_line('projectDirPath = "%s";' % self.build_to_src) self.write_line('projectRoot = "";') self.write_line('targets = (') self.indent_level += 1 self.write_line('%s /* ALL_BUILD */,' % self.all_id) self.write_line('%s /* RUN_TESTS */,' % self.test_id) for t in self.build.targets: self.write_line('%s /* %s */,' % (self.native_targets[t], t)) self.indent_level -= 1 self.write_line(');') self.indent_level -= 1 self.write_line('};') self.ofile.write('/* End PBXProject section */\n') def generate_pbx_shell_build_phase(self, test_data): self.ofile.write('\n/* Begin PBXShellScriptBuildPhase section */\n') self.write_line('%s = {' % self.test_command_id) self.indent_level += 1 self.write_line('isa = PBXShellScriptBuildPhase;') self.write_line('buildActionMask = 2147483647;') self.write_line('files = (') self.write_line(');') self.write_line('inputPaths = (') self.write_line(');') self.write_line('outputPaths = (') self.write_line(');') self.write_line('runOnlyForDeploymentPostprocessing = 0;') self.write_line('shellPath = /bin/sh;') script_root = self.environment.get_script_dir() test_script = os.path.join(script_root, 'meson_test.py') cmd = [sys.executable, test_script, test_data, '--wd', self.environment.get_build_dir()] cmdstr = ' '.join(["'%s'" % i for i in cmd]) self.write_line('shellScript = "%s";' % cmdstr) self.write_line('showEnvVarsInLog = 0;') self.indent_level-=1 self.write_line('};') self.ofile.write('/* End PBXShellScriptBuildPhase section */\n') def generate_pbx_sources_build_phase(self): self.ofile.write('\n/* Begin 
PBXSourcesBuildPhase section */\n') for name, phase_id in self.source_phase.items(): self.write_line('%s /* Sources */ = {' % self.buildphasemap[name]) self.indent_level+=1 self.write_line('isa = PBXSourcesBuildPhase;') self.write_line('buildActionMask = 2147483647;') self.write_line('files = (') self.indent_level+=1 for s in self.build.targets[name].sources: s = os.path.join(s.subdir, s.fname) if not self.environment.is_header(s): self.write_line('%s /* %s */,' % (self.buildmap[s], os.path.join(self.environment.get_source_dir(), s))) self.indent_level-=1 self.write_line(');') self.write_line('runOnlyForDeploymentPostprocessing = 0;') self.indent_level-=1 self.write_line('};') self.ofile.write('/* End PBXSourcesBuildPhase section */\n') def generate_pbx_target_dependency(self): self.ofile.write('\n/* Begin PBXTargetDependency section */\n') for t in self.build.targets: idval = self.pbx_dep_map[t] # VERIFY: is this correct? self.write_line('%s /* PBXTargetDependency */ = {' % idval) self.indent_level += 1 self.write_line('isa = PBXTargetDependency;') self.write_line('target = %s /* %s */;' % (self.native_targets[t], t)) self.write_line('targetProxy = %s /* PBXContainerItemProxy */;' % self.containerproxy_map[t]) self.indent_level-=1 self.write_line('};') self.ofile.write('/* End PBXTargetDependency section */\n') def generate_xc_build_configuration(self): self.ofile.write('\n/* Begin XCBuildConfiguration section */\n') # First the setup for the toplevel project. 
for buildtype in self.buildtypes: self.write_line('%s /* %s */ = {' % (self.project_configurations[buildtype], buildtype)) self.indent_level+=1 self.write_line('isa = XCBuildConfiguration;') self.write_line('buildSettings = {') self.indent_level+=1 self.write_line('ARCHS = "$(ARCHS_STANDARD_32_64_BIT)";') self.write_line('ONLY_ACTIVE_ARCH = YES;') self.write_line('SDKROOT = "macosx";') self.write_line('SYMROOT = "%s/build";' % self.environment.get_build_dir()) self.indent_level-=1 self.write_line('};') self.write_line('name = "%s";' % buildtype) self.indent_level-=1 self.write_line('};') # Then the all target. for buildtype in self.buildtypes: self.write_line('%s /* %s */ = {' % (self.buildall_configurations[buildtype], buildtype)) self.indent_level+=1 self.write_line('isa = XCBuildConfiguration;') self.write_line('buildSettings = {') self.indent_level += 1 self.write_line('COMBINE_HIDPI_IMAGES = YES;') self.write_line('GCC_GENERATE_DEBUGGING_SYMBOLS = NO;') self.write_line('GCC_INLINES_ARE_PRIVATE_EXTERN = NO;') self.write_line('GCC_OPTIMIZATION_LEVEL = 0;') self.write_line('GCC_PREPROCESSOR_DEFINITIONS = ("");') self.write_line('GCC_SYMBOLS_PRIVATE_EXTERN = NO;') self.write_line('INSTALL_PATH = "";') self.write_line('OTHER_CFLAGS = " ";') self.write_line('OTHER_LDFLAGS = " ";') self.write_line('OTHER_REZFLAGS = "";') self.write_line('PRODUCT_NAME = ALL_BUILD;') self.write_line('SECTORDER_FLAGS = "";') self.write_line('SYMROOT = "%s";' % self.environment.get_build_dir()) self.write_line('USE_HEADERMAP = NO;') self.write_line('WARNING_CFLAGS = ("-Wmost", "-Wno-four-char-constants", "-Wno-unknown-pragmas", );') self.indent_level-=1 self.write_line('};') self.write_line('name = "%s";' % buildtype) self.indent_level-=1 self.write_line('};') # Then the test target. 
for buildtype in self.buildtypes: self.write_line('%s /* %s */ = {' % (self.test_configurations[buildtype], buildtype)) self.indent_level+=1 self.write_line('isa = XCBuildConfiguration;') self.write_line('buildSettings = {') self.indent_level += 1 self.write_line('COMBINE_HIDPI_IMAGES = YES;') self.write_line('GCC_GENERATE_DEBUGGING_SYMBOLS = NO;') self.write_line('GCC_INLINES_ARE_PRIVATE_EXTERN = NO;') self.write_line('GCC_OPTIMIZATION_LEVEL = 0;') self.write_line('GCC_PREPROCESSOR_DEFINITIONS = ("");') self.write_line('GCC_SYMBOLS_PRIVATE_EXTERN = NO;') self.write_line('INSTALL_PATH = "";') self.write_line('OTHER_CFLAGS = " ";') self.write_line('OTHER_LDFLAGS = " ";') self.write_line('OTHER_REZFLAGS = "";') self.write_line('PRODUCT_NAME = RUN_TESTS;') self.write_line('SECTORDER_FLAGS = "";') self.write_line('SYMROOT = "%s";' % self.environment.get_build_dir()) self.write_line('USE_HEADERMAP = NO;') self.write_line('WARNING_CFLAGS = ("-Wmost", "-Wno-four-char-constants", "-Wno-unknown-pragmas", );') self.indent_level-=1 self.write_line('};') self.write_line('name = "%s";' % buildtype) self.indent_level-=1 self.write_line('};') # Now finally targets. 
langnamemap = {'c' : 'C', 'cpp' : 'CPLUSPLUS', 'objc' : 'OBJC', 'objcpp' : 'OBJCPLUSPLUS'} for target_name, target in self.build.targets.items(): for buildtype in self.buildtypes: dep_libs = [] links_dylib = False headerdirs = [] for d in target.include_dirs: for sd in d.incdirs: cd = os.path.join(d.curdir, sd) headerdirs.append(os.path.join(self.environment.get_source_dir(), cd)) headerdirs.append(os.path.join(self.environment.get_build_dir(), cd)) for l in target.link_targets: abs_path = os.path.join(self.environment.get_build_dir(), l.subdir, buildtype, l.get_filename()) dep_libs.append("'%s'" % abs_path) if isinstance(l, build.SharedLibrary): links_dylib = True if links_dylib: dep_libs = ['-Wl,-search_paths_first', '-Wl,-headerpad_max_install_names'] + dep_libs dylib_version = None if isinstance(target, build.SharedLibrary): ldargs = ['-dynamiclib', '-Wl,-headerpad_max_install_names'] + dep_libs install_path = os.path.join(self.environment.get_build_dir(), target.subdir, buildtype) dylib_version = target.version else: ldargs = dep_libs install_path = '' if dylib_version is not None: product_name = target.get_basename() + '.' 
+ dylib_version else: product_name = target.get_basename() ldargs += target.link_args ldstr = ' '.join(ldargs) valid = self.buildconfmap[target_name][buildtype] langargs = {} for lang in self.environment.coredata.compilers: if lang not in langnamemap: continue gargs = self.build.global_args.get(lang, []) targs = target.get_extra_args(lang) args = gargs + targs if len(args) > 0: langargs[langnamemap[lang]] = args symroot = os.path.join(self.environment.get_build_dir(), target.subdir) self.write_line('%s /* %s */ = {' % (valid, buildtype)) self.indent_level+=1 self.write_line('isa = XCBuildConfiguration;') self.write_line('buildSettings = {') self.indent_level += 1 self.write_line('COMBINE_HIDPI_IMAGES = YES;') if dylib_version is not None: self.write_line('DYLIB_CURRENT_VERSION = "%s";' % dylib_version) self.write_line('EXECUTABLE_PREFIX = "%s";' % target.prefix) if target.suffix == '': suffix = '' else: suffix = '.' + target.suffix self.write_line('EXECUTABLE_SUFFIX = "%s";' % suffix) self.write_line('GCC_GENERATE_DEBUGGING_SYMBOLS = YES;') self.write_line('GCC_INLINES_ARE_PRIVATE_EXTERN = NO;') self.write_line('GCC_OPTIMIZATION_LEVEL = 0;') self.write_line('GCC_PREPROCESSOR_DEFINITIONS = ("");') self.write_line('GCC_SYMBOLS_PRIVATE_EXTERN = NO;') if len(headerdirs) > 0: quotedh = ','.join(['"\\"%s\\""' % i for i in headerdirs]) self.write_line('HEADER_SEARCH_PATHS=(%s);' % quotedh) self.write_line('INSTALL_PATH = "%s";' % install_path) self.write_line('LIBRARY_SEARCH_PATHS = "";') if isinstance(target, build.SharedLibrary): self.write_line('LIBRARY_STYLE = DYNAMIC;') for langname, args in langargs.items(): argstr = ' '.join(args) self.write_line('OTHER_%sFLAGS = "%s";' % (langname, argstr)) self.write_line('OTHER_LDFLAGS = "%s";' % ldstr) self.write_line('OTHER_REZFLAGS = "";') self.write_line('PRODUCT_NAME = %s;' % product_name) self.write_line('SECTORDER_FLAGS = "";') self.write_line('SYMROOT = "%s";' % symroot) self.write_line('USE_HEADERMAP = NO;') 
self.write_line('WARNING_CFLAGS = ("-Wmost", "-Wno-four-char-constants", "-Wno-unknown-pragmas", );') self.indent_level-=1 self.write_line('};') self.write_line('name = "%s";' % buildtype) self.indent_level-=1 self.write_line('};') self.ofile.write('/* End XCBuildConfiguration section */\n') def generate_xc_configurationList(self): self.ofile.write('\n/* Begin XCConfigurationList section */\n') self.write_line('%s /* Build configuration list for PBXProject "%s" */ = {' % (self.project_conflist, self.build.project_name)) self.indent_level+=1 self.write_line('isa = XCConfigurationList;') self.write_line('buildConfigurations = (') self.indent_level+=1 for buildtype in self.buildtypes: self.write_line('%s /* %s */,' % (self.project_configurations[buildtype], buildtype)) self.indent_level-=1 self.write_line(');') self.write_line('defaultConfigurationIsVisible = 0;') self.write_line('defaultConfigurationName = debug;') self.indent_level-=1 self.write_line('};') # Now the all target self.write_line('%s /* Build configuration list for PBXAggregateTarget "ALL_BUILD" */ = {' % self.all_buildconf_id) self.indent_level+=1 self.write_line('isa = XCConfigurationList;') self.write_line('buildConfigurations = (') self.indent_level+=1 for buildtype in self.buildtypes: self.write_line('%s /* %s */,' % (self.buildall_configurations[buildtype], buildtype)) self.indent_level-=1 self.write_line(');') self.write_line('defaultConfigurationIsVisible = 0;') self.write_line('defaultConfigurationName = debug;') self.indent_level-=1 self.write_line('};') # Test target self.write_line('%s /* Build configuration list for PBXAggregateTarget "ALL_BUILD" */ = {' % self.test_buildconf_id) self.indent_level+=1 self.write_line('isa = XCConfigurationList;') self.write_line('buildConfigurations = (') self.indent_level+=1 for buildtype in self.buildtypes: self.write_line('%s /* %s */,' % (self.test_configurations[buildtype], buildtype)) self.indent_level-=1 self.write_line(');') 
self.write_line('defaultConfigurationIsVisible = 0;') self.write_line('defaultConfigurationName = debug;') self.indent_level-=1 self.write_line('};') for target_name in self.build.targets: listid = self.buildconflistmap[target_name] self.write_line('%s /* Build configuration list for PBXNativeTarget "%s" */ = {' % (listid, target_name)) self.indent_level += 1 self.write_line('isa = XCConfigurationList;') self.write_line('buildConfigurations = (') self.indent_level += 1 typestr = 'debug' idval = self.buildconfmap[target_name][typestr] self.write_line('%s /* %s */,' % (idval, typestr)) self.indent_level -= 1 self.write_line(');') self.write_line('defaultConfigurationIsVisible = 0;') self.write_line('defaultConfigurationName = "%s";' % typestr) self.indent_level -= 1 self.write_line('};') self.ofile.write('/* End XCConfigurationList section */\n') def generate_prefix(self): self.ofile.write('// !$*UTF8*$!\n{\n') self.indent_level += 1 self.write_line('archiveVersion = 1;\n') self.write_line('classes = {\n') self.write_line('};\n') self.write_line('objectVersion = 46;\n') self.write_line('objects = {\n') self.indent_level += 1 def generate_suffix(self): self.indent_level -= 1 self.write_line('};\n') self.write_line('rootObject = ' + self.project_uid + ';') self.indent_level -= 1 self.write_line('}\n')
apache-2.0
orion/swift-config
swift/container/sync.py
3
19488
# Copyright (c) 2010-2012 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from gettext import gettext as _ from time import ctime, time from random import random, shuffle from struct import unpack_from from eventlet import sleep, Timeout import swift.common.db from swift.container import server as container_server from swiftclient import ClientException, delete_object, put_object, \ quote from swift.common.direct_client import direct_get_object from swift.common.ring import Ring from swift.common.db import ContainerBroker from swift.common.utils import audit_location_generator, get_logger, \ hash_path, config_true_value, validate_sync_to, whataremyips, FileLikeIter from swift.common.daemon import Daemon from swift.common.http import HTTP_UNAUTHORIZED, HTTP_NOT_FOUND class ContainerSync(Daemon): """ Daemon to sync syncable containers. This is done by scanning the local devices for container databases and checking for x-container-sync-to and x-container-sync-key metadata values. If they exist, newer rows since the last sync will trigger PUTs or DELETEs to the other container. .. note:: Container sync will sync object POSTs only if the proxy server is set to use "object_post_as_copy = true" which is the default. So-called fast object posts, "object_post_as_copy = false" do not update the container listings and therefore can't be detected for synchronization. 
The actual syncing is slightly more complicated to make use of the three (or number-of-replicas) main nodes for a container without each trying to do the exact same work but also without missing work if one node happens to be down. Two sync points are kept per container database. All rows between the two sync points trigger updates. Any rows newer than both sync points cause updates depending on the node's position for the container (primary nodes do one third, etc. depending on the replica count of course). After a sync run, the first sync point is set to the newest ROWID known and the second sync point is set to newest ROWID for which all updates have been sent. An example may help. Assume replica count is 3 and perfectly matching ROWIDs starting at 1. First sync run, database has 6 rows: * SyncPoint1 starts as -1. * SyncPoint2 starts as -1. * No rows between points, so no "all updates" rows. * Six rows newer than SyncPoint1, so a third of the rows are sent by node 1, another third by node 2, remaining third by node 3. * SyncPoint1 is set as 6 (the newest ROWID known). * SyncPoint2 is left as -1 since no "all updates" rows were synced. Next sync run, database has 12 rows: * SyncPoint1 starts as 6. * SyncPoint2 starts as -1. * The rows between -1 and 6 all trigger updates (most of which should short-circuit on the remote end as having already been done). * Six more rows newer than SyncPoint1, so a third of the rows are sent by node 1, another third by node 2, remaining third by node 3. * SyncPoint1 is set as 12 (the newest ROWID known). * SyncPoint2 is set as 6 (the newest "all updates" ROWID). In this way, under normal circumstances each node sends its share of updates each run and just sends a batch of older updates to ensure nothing was missed. :param conf: The dict of configuration values from the [container-sync] section of the container-server.conf :param container_ring: If None, the <swift_dir>/container.ring.gz will be loaded. 
This is overridden by unit tests. :param object_ring: If None, the <swift_dir>/object.ring.gz will be loaded. This is overridden by unit tests. """ def __init__(self, conf, container_ring=None, object_ring=None): #: The dict of configuration values from the [container-sync] section #: of the container-server.conf. self.conf = conf #: Logger to use for container-sync log lines. self.logger = get_logger(conf, log_route='container-sync') #: Path to the local device mount points. self.devices = conf.get('devices', '/srv/node') #: Indicates whether mount points should be verified as actual mount #: points (normally true, false for tests and SAIO). self.mount_check = config_true_value(conf.get('mount_check', 'true')) #: Minimum time between full scans. This is to keep the daemon from #: running wild on near empty systems. self.interval = int(conf.get('interval', 300)) #: Maximum amount of time to spend syncing a container before moving on #: to the next one. If a conatiner sync hasn't finished in this time, #: it'll just be resumed next scan. self.container_time = int(conf.get('container_time', 60)) #: The list of hosts we're allowed to send syncs to. self.allowed_sync_hosts = [ h.strip() for h in conf.get('allowed_sync_hosts', '127.0.0.1').split(',') if h.strip()] self.proxy = conf.get('sync_proxy') #: Number of containers with sync turned on that were successfully #: synced. self.container_syncs = 0 #: Number of successful DELETEs triggered. self.container_deletes = 0 #: Number of successful PUTs triggered. self.container_puts = 0 #: Number of containers that didn't have sync turned on. self.container_skips = 0 #: Number of containers that had a failure of some type. self.container_failures = 0 #: Time of last stats report. self.reported = time() swift_dir = conf.get('swift_dir', '/etc/swift') #: swift.common.ring.Ring for locating containers. self.container_ring = container_ring or Ring(swift_dir, ring_name='container') #: swift.common.ring.Ring for locating objects. 
self.object_ring = object_ring or Ring(swift_dir, ring_name='object') self._myips = whataremyips() self._myport = int(conf.get('bind_port', 6001)) swift.common.db.DB_PREALLOCATION = \ config_true_value(conf.get('db_preallocation', 'f')) def run_forever(self): """ Runs container sync scans until stopped. """ sleep(random() * self.interval) while True: begin = time() all_locs = audit_location_generator(self.devices, container_server.DATADIR, '.db', mount_check=self.mount_check, logger=self.logger) for path, device, partition in all_locs: self.container_sync(path) if time() - self.reported >= 3600: # once an hour self.report() elapsed = time() - begin if elapsed < self.interval: sleep(self.interval - elapsed) def run_once(self): """ Runs a single container sync scan. """ self.logger.info(_('Begin container sync "once" mode')) begin = time() all_locs = audit_location_generator(self.devices, container_server.DATADIR, '.db', mount_check=self.mount_check, logger=self.logger) for path, device, partition in all_locs: self.container_sync(path) if time() - self.reported >= 3600: # once an hour self.report() self.report() elapsed = time() - begin self.logger.info( _('Container sync "once" mode completed: %.02fs'), elapsed) def report(self): """ Writes a report of the stats to the logger and resets the stats for the next report. 
""" self.logger.info( _('Since %(time)s: %(sync)s synced [%(delete)s deletes, %(put)s ' 'puts], %(skip)s skipped, %(fail)s failed'), {'time': ctime(self.reported), 'sync': self.container_syncs, 'delete': self.container_deletes, 'put': self.container_puts, 'skip': self.container_skips, 'fail': self.container_failures}) self.reported = time() self.container_syncs = 0 self.container_deletes = 0 self.container_puts = 0 self.container_skips = 0 self.container_failures = 0 def container_sync(self, path): """ Checks the given path for a container database, determines if syncing is turned on for that database and, if so, sends any updates to the other container. :param path: the path to a container db """ broker = None try: broker = ContainerBroker(path) info = broker.get_info() x, nodes = self.container_ring.get_nodes(info['account'], info['container']) for ordinal, node in enumerate(nodes): if node['ip'] in self._myips and node['port'] == self._myport: break else: return if not broker.is_deleted(): sync_to = None sync_key = None sync_point1 = info['x_container_sync_point1'] sync_point2 = info['x_container_sync_point2'] for key, (value, timestamp) in broker.metadata.iteritems(): if key.lower() == 'x-container-sync-to': sync_to = value elif key.lower() == 'x-container-sync-key': sync_key = value if not sync_to or not sync_key: self.container_skips += 1 self.logger.increment('skips') return sync_to = sync_to.rstrip('/') err = validate_sync_to(sync_to, self.allowed_sync_hosts) if err: self.logger.info( _('ERROR %(db_file)s: %(validate_sync_to_err)s'), {'db_file': broker.db_file, 'validate_sync_to_err': err}) self.container_failures += 1 self.logger.increment('failures') return stop_at = time() + self.container_time next_sync_point = None while time() < stop_at and sync_point2 < sync_point1: rows = broker.get_items_since(sync_point2, 1) if not rows: break row = rows[0] if row['ROWID'] > sync_point1: break key = hash_path(info['account'], info['container'], row['name'], 
raw_digest=True) # This node will only initially sync out one third of the # objects (if 3 replicas, 1/4 if 4, etc.) and will skip # problematic rows as needed in case of faults. # This section will attempt to sync previously skipped # rows in case the previous attempts by any of the nodes # didn't succeed. if not self.container_sync_row(row, sync_to, sync_key, broker, info): if not next_sync_point: next_sync_point = sync_point2 sync_point2 = row['ROWID'] broker.set_x_container_sync_points(None, sync_point2) if next_sync_point: broker.set_x_container_sync_points(None, next_sync_point) while time() < stop_at: rows = broker.get_items_since(sync_point1, 1) if not rows: break row = rows[0] key = hash_path(info['account'], info['container'], row['name'], raw_digest=True) # This node will only initially sync out one third of the # objects (if 3 replicas, 1/4 if 4, etc.). It'll come back # around to the section above and attempt to sync # previously skipped rows in case the other nodes didn't # succeed or in case it failed to do so the first time. if unpack_from('>I', key)[0] % \ len(nodes) == ordinal: self.container_sync_row(row, sync_to, sync_key, broker, info) sync_point1 = row['ROWID'] broker.set_x_container_sync_points(sync_point1, None) self.container_syncs += 1 self.logger.increment('syncs') except (Exception, Timeout) as err: self.container_failures += 1 self.logger.increment('failures') self.logger.exception(_('ERROR Syncing %s'), broker.db_file if broker else path) def container_sync_row(self, row, sync_to, sync_key, broker, info): """ Sends the update the row indicates to the sync_to container. :param row: The updated row in the local database triggering the sync update. :param sync_to: The URL to the remote container. :param sync_key: The X-Container-Sync-Key to use when sending requests to the other container. :param broker: The local container database broker. :param info: The get_info result from the local container database broker. 
:returns: True on success """ try: start_time = time() if row['deleted']: try: delete_object(sync_to, name=row['name'], headers={'x-timestamp': row['created_at'], 'x-container-sync-key': sync_key}, proxy=self.proxy) except ClientException, err: if err.http_status != HTTP_NOT_FOUND: raise self.container_deletes += 1 self.logger.increment('deletes') self.logger.timing_since('deletes.timing', start_time) else: part, nodes = self.object_ring.get_nodes( info['account'], info['container'], row['name']) shuffle(nodes) exc = None looking_for_timestamp = float(row['created_at']) timestamp = -1 headers = body = None for node in nodes: try: these_headers, this_body = direct_get_object( node, part, info['account'], info['container'], row['name'], resp_chunk_size=65536) this_timestamp = float(these_headers['x-timestamp']) if this_timestamp > timestamp: timestamp = this_timestamp headers = these_headers body = this_body except ClientException, err: # If any errors are not 404, make sure we report the # non-404 one. We don't want to mistakenly assume the # object no longer exists just because one says so and # the others errored for some other reason. 
if not exc or exc.http_status == HTTP_NOT_FOUND: exc = err except (Exception, Timeout), err: exc = err if timestamp < looking_for_timestamp: if exc: raise exc raise Exception( _('Unknown exception trying to GET: %(node)r ' '%(account)r %(container)r %(object)r'), {'node': node, 'part': part, 'account': info['account'], 'container': info['container'], 'object': row['name']}) for key in ('date', 'last-modified'): if key in headers: del headers[key] if 'etag' in headers: headers['etag'] = headers['etag'].strip('"') headers['x-timestamp'] = row['created_at'] headers['x-container-sync-key'] = sync_key put_object(sync_to, name=row['name'], headers=headers, contents=FileLikeIter(body), proxy=self.proxy) self.container_puts += 1 self.logger.increment('puts') self.logger.timing_since('puts.timing', start_time) except ClientException, err: if err.http_status == HTTP_UNAUTHORIZED: self.logger.info( _('Unauth %(sync_from)r => %(sync_to)r'), {'sync_from': '%s/%s' % (quote(info['account']), quote(info['container'])), 'sync_to': sync_to}) elif err.http_status == HTTP_NOT_FOUND: self.logger.info( _('Not found %(sync_from)r => %(sync_to)r \ - object %(obj_name)r'), {'sync_from': '%s/%s' % (quote(info['account']), quote(info['container'])), 'sync_to': sync_to, 'obj_name': row['name']}) else: self.logger.exception( _('ERROR Syncing %(db_file)s %(row)s'), {'db_file': broker.db_file, 'row': row}) self.container_failures += 1 self.logger.increment('failures') return False except (Exception, Timeout), err: self.logger.exception( _('ERROR Syncing %(db_file)s %(row)s'), {'db_file': broker.db_file, 'row': row}) self.container_failures += 1 self.logger.increment('failures') return False return True
apache-2.0
arbrandes/edx-platform
openedx/core/lib/license/mixin.py
9
2209
""" License mixin for XBlocks and XModules """ from xblock.core import XBlockMixin from xblock.fields import Scope, String # Make '_' a no-op so we can scrape strings. Using lambda instead of # `django.utils.translation.ugettext_noop` because Django cannot be imported in this file _ = lambda text: text class LicenseMixin(XBlockMixin): """ Mixin that allows an author to indicate a license on the contents of an XBlock. For example, a video could be marked as Creative Commons SA-BY licensed. You can even indicate the license on an entire course. If this mixin is not applied to an XBlock, or if the license field is blank, then the content is subject to whatever legal licensing terms that apply to content by default. For example, in the United States, that content is exclusively owned by the creator of the content by default. Other countries may have similar laws. """ license = String( display_name=_("License"), help=_("A license defines how the contents of this block can be shared and reused."), default=None, scope=Scope.content, ) @classmethod def parse_license_from_xml(cls, definition, node): """ When importing an XBlock from XML, this method will parse the license information out of the XML and attach it to the block. It is defined here so that classes that use this mixin can simply refer to this method, rather than reimplementing it in their XML import functions. """ license = node.get('license', default=None) # pylint: disable=redefined-builtin if license: definition['license'] = license return definition def add_license_to_xml(self, node, default=None): """ When generating XML from an XBlock, this method will add the XBlock's license to the XML representation before it is serialized. It is defined here so that classes that use this mixin can simply refer to this method, rather than reimplementing it in their XML export functions. """ if getattr(self, "license", default): node.set('license', self.license)
agpl-3.0
kaiweifan/neutron
neutron/plugins/ml2/drivers/cisco/mech_cisco_nexus.py
9
8775
# Copyright 2013 OpenStack Foundation # All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ ML2 Mechanism Driver for Cisco Nexus platforms. """ from oslo.config import cfg from neutron.common import constants as n_const from neutron.extensions import portbindings from neutron.openstack.common import excutils from neutron.openstack.common import log as logging from neutron.plugins.ml2 import driver_api as api from neutron.plugins.ml2.drivers.cisco import config as conf from neutron.plugins.ml2.drivers.cisco import credentials_v2 as cred from neutron.plugins.ml2.drivers.cisco import exceptions as excep from neutron.plugins.ml2.drivers.cisco import nexus_db_v2 as nxos_db from neutron.plugins.ml2.drivers.cisco import nexus_network_driver LOG = logging.getLogger(__name__) class CiscoNexusMechanismDriver(api.MechanismDriver): """Cisco Nexus ML2 Mechanism Driver.""" def initialize(self): # Create ML2 device dictionary from ml2_conf.ini entries. conf.ML2MechCiscoConfig() # Extract configuration parameters from the configuration file. 
self._nexus_switches = conf.ML2MechCiscoConfig.nexus_dict LOG.debug(_("nexus_switches found = %s"), self._nexus_switches) self.credentials = {} self.driver = nexus_network_driver.CiscoNexusDriver() # Initialize credential store after database initialization cred.Store.initialize() def _valid_network_segment(self, segment): return (cfg.CONF.ml2_cisco.managed_physical_network is None or cfg.CONF.ml2_cisco.managed_physical_network == segment[api.PHYSICAL_NETWORK]) def _get_vlanid(self, context): segment = context.bound_segment if (segment and segment[api.NETWORK_TYPE] == 'vlan' and self._valid_network_segment(segment)): return context.bound_segment.get(api.SEGMENTATION_ID) def _is_deviceowner_compute(self, port): return port['device_owner'].startswith('compute') def _is_status_active(self, port): return port['status'] == n_const.PORT_STATUS_ACTIVE def _get_credential(self, nexus_ip): """Return credential information for a given Nexus IP address. If credential doesn't exist then also add to local dictionary. """ if nexus_ip not in self.credentials: _nexus_username = cred.Store.get_username(nexus_ip) _nexus_password = cred.Store.get_password(nexus_ip) self.credentials[nexus_ip] = { 'username': _nexus_username, 'password': _nexus_password } return self.credentials[nexus_ip] def _manage_port(self, vlan_name, vlan_id, host, instance): """Called during create and update port events. Create a VLAN in the appropriate switch/port and configure the appropriate interfaces for this VLAN. 
""" # Grab the switch IP and port for this host for switch_ip, attr in self._nexus_switches: if str(attr) == str(host): port_id = self._nexus_switches[switch_ip, attr] break else: raise excep.NexusComputeHostNotConfigured(host=host) # Check if this network is already in the DB vlan_created = False vlan_trunked = False try: nxos_db.get_port_vlan_switch_binding(port_id, vlan_id, switch_ip) except excep.NexusPortBindingNotFound: # Check for vlan/switch binding try: nxos_db.get_nexusvlan_binding(vlan_id, switch_ip) except excep.NexusPortBindingNotFound: # Create vlan and trunk vlan on the port LOG.debug(_("Nexus: create & trunk vlan %s"), vlan_name) self.driver.create_and_trunk_vlan(switch_ip, vlan_id, vlan_name, port_id) vlan_created = True vlan_trunked = True else: # Only trunk vlan on the port LOG.debug(_("Nexus: trunk vlan %s"), vlan_name) self.driver.enable_vlan_on_trunk_int(switch_ip, vlan_id, port_id) vlan_trunked = True try: nxos_db.add_nexusport_binding(port_id, str(vlan_id), switch_ip, instance) except Exception: with excutils.save_and_reraise_exception(): # Add binding failed, roll back any vlan creation/enabling if vlan_created and vlan_trunked: LOG.debug(_("Nexus: delete & untrunk vlan %s"), vlan_name) self.driver.delete_and_untrunk_vlan(switch_ip, vlan_id, port_id) elif vlan_created: LOG.debug(_("Nexus: delete vlan %s"), vlan_name) self.driver.delete_vlan(switch_ip, vlan_id) elif vlan_trunked: LOG.debug(_("Nexus: untrunk vlan %s"), vlan_name) self.driver.disable_vlan_on_trunk_int(switch_ip, vlan_id, port_id) def _invoke_nexus_on_port_event(self, context): vlan_id = self._get_vlanid(context) host_id = context.current.get(portbindings.HOST_ID) if vlan_id and host_id: vlan_name = cfg.CONF.ml2_cisco.vlan_name_prefix + str(vlan_id) instance_id = context.current.get('device_id') self._manage_port(vlan_name, vlan_id, host_id, instance_id) else: LOG.debug(_("Vlan ID %(vlan_id)s or Host ID %(host_id)s missing."), {'vlan_id': vlan_id, 'host_id': host_id}) def 
update_port_postcommit(self, context): """Update port post-database commit event.""" port = context.current if self._is_deviceowner_compute(port) and self._is_status_active(port): self._invoke_nexus_on_port_event(context) def delete_port_precommit(self, context): """Delete port pre-database commit event. Delete port bindings from the database and scan whether the network is still required on the interfaces trunked. """ if not self._is_deviceowner_compute(context.current): return port = context.current device_id = port['device_id'] vlan_id = self._get_vlanid(context) if not vlan_id or not device_id: return # Delete DB row for this port try: row = nxos_db.get_nexusvm_binding(vlan_id, device_id) except excep.NexusPortBindingNotFound: return switch_ip = row.switch_ip nexus_port = None if row.port_id != 'router': nexus_port = row.port_id nxos_db.remove_nexusport_binding(row.port_id, row.vlan_id, row.switch_ip, row.instance_id) # Check for any other bindings with the same vlan_id and switch_ip try: nxos_db.get_nexusvlan_binding(row.vlan_id, row.switch_ip) except excep.NexusPortBindingNotFound: try: # Delete this vlan from this switch if nexus_port: self.driver.disable_vlan_on_trunk_int(switch_ip, row.vlan_id, nexus_port) self.driver.delete_vlan(switch_ip, row.vlan_id) except Exception: # The delete vlan operation on the Nexus failed, # so this delete_port request has failed. For # consistency, roll back the Nexus database to what # it was before this request. with excutils.save_and_reraise_exception(): nxos_db.add_nexusport_binding(row.port_id, row.vlan_id, row.switch_ip, row.instance_id)
apache-2.0
dfalt974/SickRage
lib/subliminal/subtitle.py
8
9007
# -*- coding: utf-8 -*- import codecs import logging import os import chardet import pysrt from .score import get_equivalent_release_groups from .video import Episode, Movie from .utils import sanitize, sanitize_release_group logger = logging.getLogger(__name__) #: Subtitle extensions SUBTITLE_EXTENSIONS = ('.srt', '.sub', '.smi', '.txt', '.ssa', '.ass', '.mpl') class Subtitle(object): """Base class for subtitle. :param language: language of the subtitle. :type language: :class:`~babelfish.language.Language` :param bool hearing_impaired: whether or not the subtitle is hearing impaired. :param page_link: URL of the web page from which the subtitle can be downloaded. :type page_link: str :param encoding: Text encoding of the subtitle. :type encoding: str """ #: Name of the provider that returns that class of subtitle provider_name = '' def __init__(self, language, hearing_impaired=False, page_link=None, encoding=None): #: Language of the subtitle self.language = language #: Whether or not the subtitle is hearing impaired self.hearing_impaired = hearing_impaired #: URL of the web page from which the subtitle can be downloaded self.page_link = page_link #: Content as bytes self.content = None #: Encoding to decode with when accessing :attr:`text` self.encoding = None # validate the encoding if encoding: try: self.encoding = codecs.lookup(encoding).name except (TypeError, LookupError): logger.debug('Unsupported encoding %s', encoding) @property def id(self): """Unique identifier of the subtitle""" raise NotImplementedError @property def text(self): """Content as string If :attr:`encoding` is None, the encoding is guessed with :meth:`guess_encoding` """ if not self.content: return if self.encoding: return self.content.decode(self.encoding, errors='replace') return self.content.decode(self.guess_encoding(), errors='replace') def is_valid(self): """Check if a :attr:`text` is a valid SubRip format. :return: whether or not the subtitle is valid. 
:rtype: bool """ if not self.text: return False try: pysrt.from_string(self.text, error_handling=pysrt.ERROR_RAISE) except pysrt.Error as e: if e.args[0] < 80: return False return True def guess_encoding(self): """Guess encoding using the language, falling back on chardet. :return: the guessed encoding. :rtype: str """ logger.info('Guessing encoding for language %s', self.language) # always try utf-8 first encodings = ['utf-8'] # add language-specific encodings if self.language.alpha3 == 'zho': encodings.extend(['gb18030', 'big5']) elif self.language.alpha3 == 'jpn': encodings.append('shift-jis') elif self.language.alpha3 == 'ara': encodings.append('windows-1256') elif self.language.alpha3 == 'heb': encodings.append('windows-1255') elif self.language.alpha3 == 'tur': encodings.extend(['iso-8859-9', 'windows-1254']) elif self.language.alpha3 == 'pol': # Eastern European Group 1 encodings.extend(['windows-1250']) elif self.language.alpha3 == 'bul': # Eastern European Group 2 encodings.extend(['windows-1251']) else: # Western European (windows-1252) encodings.append('latin-1') # try to decode logger.debug('Trying encodings %r', encodings) for encoding in encodings: try: self.content.decode(encoding) except UnicodeDecodeError: pass else: logger.info('Guessed encoding %s', encoding) return encoding logger.warning('Could not guess encoding from language') # fallback on chardet encoding = chardet.detect(self.content)['encoding'] logger.info('Chardet found encoding %s', encoding) return encoding def get_matches(self, video): """Get the matches against the `video`. :param video: the video to get the matches with. :type video: :class:`~subliminal.video.Video` :return: matches of the subtitle. 
:rtype: set """ raise NotImplementedError def __hash__(self): return hash(self.provider_name + '-' + self.id) def __repr__(self): return '<%s %r [%s]>' % (self.__class__.__name__, self.id, self.language) def get_subtitle_path(video_path, language=None, extension='.srt'): """Get the subtitle path using the `video_path` and `language`. :param str video_path: path to the video. :param language: language of the subtitle to put in the path. :type language: :class:`~babelfish.language.Language` :param str extension: extension of the subtitle. :return: path of the subtitle. :rtype: str """ subtitle_root = os.path.splitext(video_path)[0] if language: subtitle_root += '.' + str(language) return subtitle_root + extension def guess_matches(video, guess, partial=False): """Get matches between a `video` and a `guess`. If a guess is `partial`, the absence information won't be counted as a match. :param video: the video. :type video: :class:`~subliminal.video.Video` :param guess: the guess. :type guess: dict :param bool partial: whether or not the guess is partial. :return: matches between the `video` and the `guess`. 
:rtype: set """ matches = set() if isinstance(video, Episode): # series if video.series and 'title' in guess and sanitize(guess['title']) == sanitize(video.series): matches.add('series') # title if video.title and 'episode_title' in guess and sanitize(guess['episode_title']) == sanitize(video.title): matches.add('title') # season if video.season and 'season' in guess and guess['season'] == video.season: matches.add('season') # episode # Currently we only have single-ep support (guessit returns a multi-ep as a list with int values) # Most providers only support single-ep, so make sure it contains only 1 episode # In case of multi-ep, take the lowest episode (subtitles will normally be available on lowest episode number) if video.episode and 'episode' in guess: episode_guess = guess['episode'] episode = min(episode_guess) if episode_guess and isinstance(episode_guess, list) else episode_guess if episode == video.episode: matches.add('episode') # year if video.year and 'year' in guess and guess['year'] == video.year: matches.add('year') # count "no year" as an information if not partial and video.original_series and 'year' not in guess: matches.add('year') elif isinstance(video, Movie): # year if video.year and 'year' in guess and guess['year'] == video.year: matches.add('year') # title if video.title and 'title' in guess and sanitize(guess['title']) == sanitize(video.title): matches.add('title') # release_group if (video.release_group and 'release_group' in guess and sanitize_release_group(guess['release_group']) in get_equivalent_release_groups(sanitize_release_group(video.release_group))): matches.add('release_group') # resolution if video.resolution and 'screen_size' in guess and guess['screen_size'] == video.resolution: matches.add('resolution') # format # Guessit may return a list for `format`, which indicates a conflict in the guessing. 
# We should match `format` only when it returns single value to avoid false `format` matches if video.format and guess.get('format') and not isinstance(guess['format'], list) \ and guess['format'].lower() == video.format.lower(): matches.add('format') # video_codec if video.video_codec and 'video_codec' in guess and guess['video_codec'] == video.video_codec: matches.add('video_codec') # audio_codec if video.audio_codec and 'audio_codec' in guess and guess['audio_codec'] == video.audio_codec: matches.add('audio_codec') return matches def fix_line_ending(content): """Fix line ending of `content` by changing it to \n. :param bytes content: content of the subtitle. :return: the content with fixed line endings. :rtype: bytes """ return content.replace(b'\r\n', b'\n').replace(b'\r', b'\n')
gpl-3.0
kovach/libsbp
python/sbp/utils.py
8
2405
#!/usr/bin/env python # Copyright (C) 2011-2014 Swift Navigation Inc. # Contact: Bhaskar Mookerji <mookerji@swiftnav.com> # # This source is subject to the license found in the file 'LICENSE' which must # be be distributed together with this source. All other rights reserved. # # THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND, # EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A PARTICULAR PURPOSE. """Shared utility functions. """ EXCLUDE = ['sender', 'msg_type', 'crc', 'length', 'preamble', 'payload'] from construct import Container, Field, OptionalGreedyRange, Rename, StringAdapter def exclude_fields(obj, exclude=EXCLUDE): """ Return dict of object without parent attrs. """ return dict([(k, getattr(obj, k)) for k in obj.__slots__ if k not in exclude]) def walk_json_dict(coll): """ Flatten a parsed SBP object into a dicts and lists, which are compatible for JSON output. Parameters ---------- coll : dict """ if isinstance(coll, dict): return dict((k, walk_json_dict(v)) for (k, v) in coll.iteritems()) elif hasattr(coll, '__iter__'): return [walk_json_dict(seq) for seq in coll] else: return coll def containerize(coll): """Walk attribute fields passed from an SBP message and convert to Containers where appropriate. Needed for Construct proper serialization. Parameters ---------- coll : dict """ if isinstance(coll, Container): [setattr(coll, k, containerize(v)) for (k, v) in coll.iteritems()] return coll elif isinstance(coll, dict): return containerize(Container(**coll)) elif isinstance(coll, list): for j, i in enumerate(coll): if isinstance(i, dict): coll[j] = containerize(Container(**i)) return coll else: return coll def fmt_repr(obj): """Print a orphaned string representation of an object without the clutter of its parent object. 
""" items = ["%s = %r" % (k, v) for k, v in exclude_fields(obj).items()] return "<%s: {%s}>" % (obj.__class__.__name__, ', '.join(items)) # TODO: Once https://github.com/construct/construct/pull/59 is published # to pypi, remove this in favor of construct.GreedyString def greedy_string(name): """ Variable-length string field. """ return Rename(name, StringAdapter(OptionalGreedyRange(Field(None, 1))))
lgpl-3.0
our-city-app/oca-backend
src/rogerthat/utils/generate_phone_number_tools_tables.py
1
2004
# -*- coding: utf-8 -*- # Copyright 2020 Green Valley Belgium NV # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # @@license_version:1.7@@ from phone_number_tools import info1, info2, info3 remove1 = [] remove2 = [] remove3 = [] result = [] for (cname3, ciso3, ccall3) in info3: found2 = [] for (mcc2, ciso2, cname2) in info2: if (ciso3 == ciso2): remove3.append((cname3, ciso3, ccall3)) remove2.append((mcc2, ciso2, cname2)) found2.append((mcc2, ciso2, cname2)) found1 = [] for (cname1, ccall1, cexit1, ctrunk1) in info1: if (ccall3 == ccall1): remove1.append((cname1, ccall1, cexit1, ctrunk1)) found1.append((cname1, ccall1, cexit1, ctrunk1)) if len(found1)>1: # something fishy print '=' * 10 + "INFO1" + "=" * 10 print found1 print (cname3, ciso3, ccall3) if len(found1)==0: # something fishy print 'aaaaargh - ' + ciso3 if len(found2)==0: # something fishy print '=' * 10 + "INFO2" + '=' * 10 print found2 print (cname3, ciso3, ccall3) # cname3, ciso, mcc, ccall, cexit, ctrunk result.append((cname3, ciso3, found2[0][0], ccall3, found1[0][2], found1[0][3])) print '-' * 10 print set(info1).difference(set(remove1)) print '-' * 10 print set(info2).difference(set(remove2)) print '-' * 10 print set(info3).difference(set(remove3)) print '-' * 10 for t in result: print unicode(t) + ','
apache-2.0
tashaxe/Red-DiscordBot
lib/requests/packages/chardet/langhungarianmodel.py
2763
12536
######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Communicator client code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### # 255: Control characters that usually does not exist in any text # 254: Carriage/Return # 253: symbol (punctuation) that does not belong to word # 252: 0 - 9 # Character Mapping Table: Latin2_HungarianCharToOrderMap = ( 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47, 46, 71, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253, 253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8, 23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253, 159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174, 
175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190, 191,192,193,194,195,196,197, 75,198,199,200,201,202,203,204,205, 79,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220, 221, 51, 81,222, 78,223,224,225,226, 44,227,228,229, 61,230,231, 232,233,234, 58,235, 66, 59,236,237,238, 60, 69, 63,239,240,241, 82, 14, 74,242, 70, 80,243, 72,244, 15, 83, 77, 84, 30, 76, 85, 245,246,247, 25, 73, 42, 24,248,249,250, 31, 56, 29,251,252,253, ) win1250HungarianCharToOrderMap = ( 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47, 46, 72, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253, 253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8, 23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253, 161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176, 177,178,179,180, 78,181, 69,182,183,184,185,186,187,188,189,190, 191,192,193,194,195,196,197, 76,198,199,200,201,202,203,204,205, 81,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220, 221, 51, 83,222, 80,223,224,225,226, 44,227,228,229, 61,230,231, 232,233,234, 58,235, 66, 59,236,237,238, 60, 70, 63,239,240,241, 84, 14, 75,242, 71, 82,243, 73,244, 15, 85, 79, 86, 30, 77, 87, 245,246,247, 25, 74, 42, 24,248,249,250, 31, 56, 29,251,252,253, ) # Model Table: # total sequences: 100% # first 512 sequences: 94.7368% # first 1024 sequences:5.2623% # rest sequences: 0.8894% # negative sequences: 0.0009% HungarianLangModel = ( 0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3, 3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,2,3,3,1,1,2,2,2,2,2,1,2, 3,2,2,3,3,3,3,3,2,3,3,3,3,3,3,1,2,3,3,3,3,2,3,3,1,1,3,3,0,1,1,1, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0, 
3,2,1,3,3,3,3,3,2,3,3,3,3,3,1,1,2,3,3,3,3,3,3,3,1,1,3,2,0,1,1,1, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, 3,3,3,3,3,3,3,3,3,3,3,1,1,2,3,3,3,1,3,3,3,3,3,1,3,3,2,2,0,3,2,3, 0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, 3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,3,3,2,3,3,2,2,3,2,3,2,0,3,2,2, 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0, 3,3,3,3,3,3,2,3,3,3,3,3,2,3,3,3,1,2,3,2,2,3,1,2,3,3,2,2,0,3,3,3, 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, 3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,3,2,3,3,3,3,2,3,3,3,3,0,2,3,2, 0,0,0,1,1,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, 3,3,3,3,3,3,3,3,3,3,3,1,1,1,3,3,2,1,3,2,2,3,2,1,3,2,2,1,0,3,3,1, 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, 3,2,2,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,3,2,2,3,1,1,3,2,0,1,1,1, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, 3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,1,3,3,3,3,3,2,2,1,3,3,3,0,1,1,2, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0, 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,2,0,3,2,3, 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,1,0, 3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,1,3,2,2,2,3,1,1,3,3,1,1,0,3,3,2, 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, 3,3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,2,3,3,3,3,3,1,2,3,2,2,0,2,2,2, 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, 3,3,3,2,2,2,3,1,3,3,2,2,1,3,3,3,1,1,3,1,2,3,2,3,2,2,2,1,0,2,2,2, 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, 3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,2,2,3,2,1,0,3,2,0,1,1,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,1,0,3,3,3,3,0,2,3,0,0,2,1,0,1,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,2,2,3,3,2,2,2,2,3,3,0,1,2,3,2,3,2,2,3,2,1,2,0,2,2,2, 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, 
3,3,3,3,3,3,1,2,3,3,3,2,1,2,3,3,2,2,2,3,2,3,3,1,3,3,1,1,0,2,3,2, 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, 3,3,3,1,2,2,2,2,3,3,3,1,1,1,3,3,1,1,3,1,1,3,2,1,2,3,1,1,0,2,2,2, 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, 3,3,3,2,1,2,1,1,3,3,1,1,1,1,3,3,1,1,2,2,1,2,1,1,2,2,1,1,0,2,2,1, 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, 3,3,3,1,1,2,1,1,3,3,1,0,1,1,3,3,2,0,1,1,2,3,1,0,2,2,1,0,0,1,3,2, 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, 3,2,1,3,3,3,3,3,1,2,3,2,3,3,2,1,1,3,2,3,2,1,2,2,0,1,2,1,0,0,1,1, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, 3,3,3,3,2,2,2,2,3,1,2,2,1,1,3,3,0,3,2,1,2,3,2,1,3,3,1,1,0,2,1,3, 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, 3,3,3,2,2,2,3,2,3,3,3,2,1,1,3,3,1,1,1,2,2,3,2,3,2,2,2,1,0,2,2,1, 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, 1,0,0,3,3,3,3,3,0,0,3,3,2,3,0,0,0,2,3,3,1,0,1,2,0,0,1,1,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,1,2,3,3,3,3,3,1,2,3,3,2,2,1,1,0,3,3,2,2,1,2,2,1,0,2,2,0,1,1,1, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,2,2,1,3,1,2,3,3,2,2,1,1,2,2,1,1,1,1,3,2,1,1,1,1,2,1,0,1,2,1, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0, 2,3,3,1,1,1,1,1,3,3,3,0,1,1,3,3,1,1,1,1,1,2,2,0,3,1,1,2,0,2,1,1, 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, 3,1,0,1,2,1,2,2,0,1,2,3,1,2,0,0,0,2,1,1,1,1,1,2,0,0,1,1,0,0,0,0, 1,2,1,2,2,2,1,2,1,2,0,2,0,2,2,1,1,2,1,1,2,1,1,1,0,1,0,0,0,1,1,0, 1,1,1,2,3,2,3,3,0,1,2,2,3,1,0,1,0,2,1,2,2,0,1,1,0,0,1,1,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 1,0,0,3,3,2,2,1,0,0,3,2,3,2,0,0,0,1,1,3,0,0,1,1,0,0,2,1,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,1,1,2,2,3,3,1,0,1,3,2,3,1,1,1,0,1,1,1,1,1,3,1,0,0,2,2,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
3,1,1,1,2,2,2,1,0,1,2,3,3,2,0,0,0,2,1,1,1,2,1,1,1,0,1,1,1,0,0,0, 1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,2,1,1,1,1,1,1,0,1,1,1,0,0,1,1, 3,2,2,1,0,0,1,1,2,2,0,3,0,1,2,1,1,0,0,1,1,1,0,1,1,1,1,0,2,1,1,1, 2,2,1,1,1,2,1,2,1,1,1,1,1,1,1,2,1,1,1,2,3,1,1,1,1,1,1,1,1,1,0,1, 2,3,3,0,1,0,0,0,3,3,1,0,0,1,2,2,1,0,0,0,0,2,0,0,1,1,1,0,2,1,1,1, 2,1,1,1,1,1,1,2,1,1,0,1,1,0,1,1,1,0,1,2,1,1,0,1,1,1,1,1,1,1,0,1, 2,3,3,0,1,0,0,0,2,2,0,0,0,0,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,1,0, 2,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1, 3,2,2,0,1,0,1,0,2,3,2,0,0,1,2,2,1,0,0,1,1,1,0,0,2,1,0,1,2,2,1,1, 2,1,1,1,1,1,1,2,1,1,1,1,1,1,0,2,1,0,1,1,0,1,1,1,0,1,1,2,1,1,0,1, 2,2,2,0,0,1,0,0,2,2,1,1,0,0,2,1,1,0,0,0,1,2,0,0,2,1,0,0,2,1,1,1, 2,1,1,1,1,2,1,2,1,1,1,2,2,1,1,2,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1, 1,2,3,0,0,0,1,0,3,2,1,0,0,1,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,2,1, 1,1,0,0,0,1,0,1,1,1,1,1,2,0,0,1,0,0,0,2,0,0,1,1,1,1,1,1,1,1,0,1, 3,0,0,2,1,2,2,1,0,0,2,1,2,2,0,0,0,2,1,1,1,0,1,1,0,0,1,1,2,0,0,0, 1,2,1,2,2,1,1,2,1,2,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,0,0,1, 1,3,2,0,0,0,1,0,2,2,2,0,0,0,2,2,1,0,0,0,0,3,1,1,1,1,0,0,2,1,1,1, 2,1,0,1,1,1,0,1,1,1,1,1,1,1,0,2,1,0,0,1,0,1,1,0,1,1,1,1,1,1,0,1, 2,3,2,0,0,0,1,0,2,2,0,0,0,0,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,1,0, 2,1,1,1,1,2,1,2,1,2,0,1,1,1,0,2,1,1,1,2,1,1,1,1,0,1,1,1,1,1,0,1, 3,1,1,2,2,2,3,2,1,1,2,2,1,1,0,1,0,2,2,1,1,1,1,1,0,0,1,1,0,1,1,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,2,2,0,0,0,0,0,2,2,0,0,0,0,2,2,1,0,0,0,1,1,0,0,1,2,0,0,2,1,1,1, 2,2,1,1,1,2,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,1,1,0,1,2,1,1,1,0,1, 1,0,0,1,2,3,2,1,0,0,2,0,1,1,0,0,0,1,1,1,1,0,1,1,0,0,1,0,0,0,0,0, 1,2,1,2,1,2,1,1,1,2,0,2,1,1,1,0,1,2,0,0,1,1,1,0,0,0,0,0,0,0,0,0, 2,3,2,0,0,0,0,0,1,1,2,1,0,0,1,1,1,0,0,0,0,2,0,0,1,1,0,0,2,1,1,1, 2,1,1,1,1,1,1,2,1,0,1,1,1,1,0,2,1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1, 1,2,2,0,1,1,1,0,2,2,2,0,0,0,3,2,1,0,0,0,1,1,0,0,1,1,0,1,1,1,0,0, 1,1,0,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,0,0,1,1,1,0,1,0,1, 
2,1,0,2,1,1,2,2,1,1,2,1,1,1,0,0,0,1,1,0,1,1,1,1,0,0,1,1,1,0,0,0, 1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,1,0, 1,2,3,0,0,0,1,0,2,2,0,0,0,0,2,2,0,0,0,0,0,1,0,0,1,0,0,0,2,0,1,0, 2,1,1,1,1,1,0,2,0,0,0,1,2,1,1,1,1,0,1,2,0,1,0,1,0,1,1,1,0,1,0,1, 2,2,2,0,0,0,1,0,2,1,2,0,0,0,1,1,2,0,0,0,0,1,0,0,1,1,0,0,2,1,0,1, 2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1, 1,2,2,0,0,0,1,0,2,2,2,0,0,0,1,1,0,0,0,0,0,1,1,0,2,0,0,1,1,1,0,1, 1,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,0,0,1,1,0,1,0,1,1,1,1,1,0,0,0,1, 1,0,0,1,0,1,2,1,0,0,1,1,1,2,0,0,0,1,1,0,1,0,1,1,0,0,1,0,0,0,0,0, 0,2,1,2,1,1,1,1,1,2,0,2,0,1,1,0,1,2,1,0,1,1,1,0,0,0,0,0,0,1,0,0, 2,1,1,0,1,2,0,0,1,1,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,2,1,0,1, 2,2,1,1,1,1,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,0,1,0,1,1,1,1,1,0,1, 1,2,2,0,0,0,0,0,1,1,0,0,0,0,2,1,0,0,0,0,0,2,0,0,2,2,0,0,2,0,0,1, 2,1,1,1,1,1,1,1,0,1,1,0,1,1,0,1,0,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1, 1,1,2,0,0,3,1,0,2,1,1,1,0,0,1,1,1,0,0,0,1,1,0,0,0,1,0,0,1,0,1,0, 1,2,1,0,1,1,1,2,1,1,0,1,1,1,1,1,0,0,0,1,1,1,1,1,0,1,0,0,0,1,0,0, 2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,2,0,0,0, 2,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,1,0,1, 2,1,1,1,2,1,1,1,0,1,1,2,1,0,0,0,0,1,1,1,1,0,1,0,0,0,0,1,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 1,1,0,1,1,1,1,1,0,0,1,1,2,1,0,0,0,1,1,0,0,0,1,1,0,0,1,0,1,0,0,0, 1,2,1,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0, 2,0,0,0,1,1,1,1,0,0,1,1,0,0,0,0,0,1,1,1,2,0,0,1,0,0,1,0,1,0,0,0, 0,1,1,1,1,1,1,1,1,2,0,1,1,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, 1,0,0,1,1,1,1,1,0,0,2,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0, 0,1,1,1,1,1,1,0,1,1,0,1,0,1,1,0,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,0, 1,0,0,1,1,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, 0,1,1,1,1,1,0,0,1,1,0,1,0,1,0,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, 0,0,0,1,0,0,0,0,0,0,1,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,1,1,1,0,1,0,0,1,1,0,1,0,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, 
2,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,0,1,0,0,1,0,1,0,1,1,1,0,0,1,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 1,0,0,1,1,1,1,0,0,0,1,1,1,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0, 0,1,1,1,1,1,1,0,1,1,0,1,0,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0, ) Latin2HungarianModel = { 'charToOrderMap': Latin2_HungarianCharToOrderMap, 'precedenceMatrix': HungarianLangModel, 'mTypicalPositiveRatio': 0.947368, 'keepEnglishLetter': True, 'charsetName': "ISO-8859-2" } Win1250HungarianModel = { 'charToOrderMap': win1250HungarianCharToOrderMap, 'precedenceMatrix': HungarianLangModel, 'mTypicalPositiveRatio': 0.947368, 'keepEnglishLetter': True, 'charsetName': "windows-1250" } # flake8: noqa
gpl-3.0
Ardesco/selenium
py/test/selenium/webdriver/firefox/ff_takes_full_page_screenshots_tests.py
11
1222
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

import base64

# Every PNG file starts with this fixed 8-byte signature (PNG specification,
# section 5.2).  We check it directly instead of calling ``imghdr.what()``:
# the ``imghdr`` module was deprecated in Python 3.11 (PEP 594) and removed
# in Python 3.13, so relying on it breaks these tests on modern interpreters.
PNG_SIGNATURE = b'\x89PNG\r\n\x1a\n'


def test_get_full_page_screenshot_as_base64(driver, pages):
    """Full-page screenshot returned as base64 must decode to PNG bytes."""
    pages.load("simpleTest.html")
    result = base64.b64decode(driver.get_full_page_screenshot_as_base64())
    assert result.startswith(PNG_SIGNATURE)


def test_get_full_page_screenshot_as_png(driver, pages):
    """Full-page screenshot returned as raw bytes must be a PNG image."""
    pages.load("simpleTest.html")
    result = driver.get_full_page_screenshot_as_png()
    assert result.startswith(PNG_SIGNATURE)
apache-2.0
winndows/cinder
cinder/api/openstack/__init__.py
13
4900
# Copyright (c) 2013 OpenStack Foundation
#
# All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

"""
WSGI middleware for OpenStack API controllers.
"""

from oslo_log import log as logging
import routes

from cinder.api.openstack import wsgi
from cinder.i18n import _, _LW
from cinder.wsgi import common as base_wsgi


LOG = logging.getLogger(__name__)


class APIMapper(routes.Mapper):
    """Mapper that tolerates empty URLs and constrains route formats."""

    def routematch(self, url=None, environ=None):
        # Match the empty URL explicitly: routes.Mapper does not handle it.
        # BUG FIX: the original used ``url is ""``, an *identity* comparison
        # against a string literal.  It only worked through CPython string
        # interning and raises SyntaxWarning on modern interpreters; equality
        # is the correct test.
        if url == "":
            result = self._match("", environ)
            return result[0], result[1]
        return routes.Mapper.routematch(self, url, environ)

    def connect(self, *args, **kwargs):
        # NOTE(inhye): Default the format part of a route to only accept json
        # and xml so it doesn't eat all characters after a '.'
        # in the url.
        kwargs.setdefault('requirements', {})
        if not kwargs['requirements'].get('format'):
            kwargs['requirements']['format'] = 'json|xml'
        return routes.Mapper.connect(self, *args, **kwargs)


class ProjectMapper(APIMapper):
    """APIMapper that prefixes every route with the caller's project id."""

    def resource(self, member_name, collection_name, **kwargs):
        # Top-level resources live under '{project_id}/'; nested resources
        # additionally carry their parent's collection/member path segment.
        if 'parent_resource' not in kwargs:
            kwargs['path_prefix'] = '{project_id}/'
        else:
            parent_resource = kwargs['parent_resource']
            p_collection = parent_resource['collection_name']
            p_member = parent_resource['member_name']
            kwargs['path_prefix'] = '{project_id}/%s/:%s_id' % (p_collection,
                                                                p_member)
        routes.Mapper.resource(self,
                               member_name,
                               collection_name,
                               **kwargs)


class APIRouter(base_wsgi.Router):
    """Routes requests on the API to the appropriate controller and method."""

    ExtensionManager = None  # override in subclasses

    @classmethod
    def factory(cls, global_config, **local_config):
        """Simple paste factory, :class:`cinder.wsgi.Router` doesn't have."""
        return cls()

    def __init__(self, ext_mgr=None):
        if ext_mgr is None:
            if self.ExtensionManager:
                ext_mgr = self.ExtensionManager()
            else:
                raise Exception(_("Must specify an ExtensionManager class"))

        mapper = ProjectMapper()
        self.resources = {}
        self._setup_routes(mapper, ext_mgr)
        self._setup_ext_routes(mapper, ext_mgr)
        self._setup_extensions(ext_mgr)
        super(APIRouter, self).__init__(mapper)

    def _setup_ext_routes(self, mapper, ext_mgr):
        # Register a wsgi.Resource and mapper routes for every resource
        # contributed by a loaded extension.
        for resource in ext_mgr.get_resources():
            LOG.debug('Extended resource: %s', resource.collection)

            wsgi_resource = wsgi.Resource(resource.controller)
            self.resources[resource.collection] = wsgi_resource

            kargs = dict(
                controller=wsgi_resource,
                collection=resource.collection_actions,
                member=resource.member_actions)

            if resource.parent:
                kargs['parent_resource'] = resource.parent

            mapper.resource(resource.collection, resource.collection, **kargs)

            if resource.custom_routes_fn:
                resource.custom_routes_fn(mapper, wsgi_resource)

    def _setup_extensions(self, ext_mgr):
        # Attach controller extensions (extra actions/response extensions)
        # to already-registered resources; unknown collections are skipped
        # with a warning rather than failing router construction.
        for extension in ext_mgr.get_controller_extensions():
            collection = extension.collection
            controller = extension.controller

            if collection not in self.resources:
                LOG.warning(_LW('Extension %(ext_name)s: Cannot extend '
                                'resource %(collection)s: No such resource'),
                            {'ext_name': extension.extension.name,
                             'collection': collection})
                continue

            LOG.debug('Extension %(ext_name)s extending resource: '
                      '%(collection)s',
                      {'ext_name': extension.extension.name,
                       'collection': collection})

            resource = self.resources[collection]
            resource.register_actions(controller)
            resource.register_extensions(controller)

    def _setup_routes(self, mapper, ext_mgr):
        # Subclasses register their core (non-extension) routes here.
        raise NotImplementedError
apache-2.0
dgarros/ansible
lib/ansible/plugins/strategy/__init__.py
6
43367
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # Make coding more python3-ish from __future__ import (absolute_import, division, print_function) __metaclass__ = type import os import threading import time from collections import deque from multiprocessing import Lock from jinja2.exceptions import UndefinedError from ansible import constants as C from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVariable from ansible.executor import action_write_locks from ansible.executor.process.worker import WorkerProcess from ansible.executor.task_result import TaskResult from ansible.inventory.host import Host from ansible.module_utils.six.moves import queue as Queue from ansible.module_utils.six import iteritems, string_types from ansible.module_utils._text import to_text from ansible.playbook.helpers import load_list_of_blocks from ansible.playbook.included_file import IncludedFile from ansible.playbook.task_include import TaskInclude from ansible.playbook.role_include import IncludeRole from ansible.plugins import action_loader, connection_loader, filter_loader, lookup_loader, module_loader, test_loader from ansible.template import Templar from ansible.utils.vars import combine_vars from ansible.vars.manager import strip_internal_keys try: from __main__ import display except ImportError: from 
ansible.utils.display import Display display = Display() __all__ = ['StrategyBase'] class StrategySentinel: pass # TODO: this should probably be in the plugins/__init__.py, with # a smarter mechanism to set all of the attributes based on # the loaders created there class SharedPluginLoaderObj: ''' A simple object to make pass the various plugin loaders to the forked processes over the queue easier ''' def __init__(self): self.action_loader = action_loader self.connection_loader = connection_loader self.filter_loader = filter_loader self.test_loader = test_loader self.lookup_loader = lookup_loader self.module_loader = module_loader _sentinel = StrategySentinel() def results_thread_main(strategy): while True: try: result = strategy._final_q.get() if isinstance(result, StrategySentinel): break else: strategy._results_lock.acquire() strategy._results.append(result) strategy._results_lock.release() except (IOError, EOFError): break except Queue.Empty: pass class StrategyBase: ''' This is the base class for strategy plugins, which contains some common code useful to all strategies like running handlers, cleanup actions, etc. ''' def __init__(self, tqm): self._tqm = tqm self._inventory = tqm.get_inventory() self._workers = tqm.get_workers() self._notified_handlers = tqm._notified_handlers self._listening_handlers = tqm._listening_handlers self._variable_manager = tqm.get_variable_manager() self._loader = tqm.get_loader() self._final_q = tqm._final_q self._step = getattr(tqm._options, 'step', False) self._diff = getattr(tqm._options, 'diff', False) # Backwards compat: self._display isn't really needed, just import the global display and use that. 
self._display = display # internal counters self._pending_results = 0 self._cur_worker = 0 # this dictionary is used to keep track of hosts that have # outstanding tasks still in queue self._blocked_hosts = dict() self._results = deque() self._results_lock = threading.Condition(threading.Lock()) # create the result processing thread for reading results in the background self._results_thread = threading.Thread(target=results_thread_main, args=(self,)) self._results_thread.daemon = True self._results_thread.start() def cleanup(self): self._final_q.put(_sentinel) self._results_thread.join() def run(self, iterator, play_context, result=0): # execute one more pass through the iterator without peeking, to # make sure that all of the hosts are advanced to their final task. # This should be safe, as everything should be ITERATING_COMPLETE by # this point, though the strategy may not advance the hosts itself. [iterator.get_next_task_for_host(host) for host in self._inventory.get_hosts(iterator._play.hosts) if host.name not in self._tqm._unreachable_hosts] # save the failed/unreachable hosts, as the run_handlers() # method will clear that information during its execution failed_hosts = iterator.get_failed_hosts() unreachable_hosts = self._tqm._unreachable_hosts.keys() display.debug("running handlers") handler_result = self.run_handlers(iterator, play_context) if isinstance(handler_result, bool) and not handler_result: result |= self._tqm.RUN_ERROR elif not handler_result: result |= handler_result # now update with the hosts (if any) that failed or were # unreachable during the handler execution phase failed_hosts = set(failed_hosts).union(iterator.get_failed_hosts()) unreachable_hosts = set(unreachable_hosts).union(self._tqm._unreachable_hosts.keys()) # return the appropriate code, depending on the status hosts after the run if not isinstance(result, bool) and result != self._tqm.RUN_OK: return result elif len(unreachable_hosts) > 0: return self._tqm.RUN_UNREACHABLE_HOSTS 
elif len(failed_hosts) > 0: return self._tqm.RUN_FAILED_HOSTS else: return self._tqm.RUN_OK def get_hosts_remaining(self, play): return [host for host in self._inventory.get_hosts(play.hosts) if host.name not in self._tqm._failed_hosts and host.name not in self._tqm._unreachable_hosts] def get_failed_hosts(self, play): return [host for host in self._inventory.get_hosts(play.hosts) if host.name in self._tqm._failed_hosts] def add_tqm_variables(self, vars, play): ''' Base class method to add extra variables/information to the list of task vars sent through the executor engine regarding the task queue manager state. ''' vars['ansible_current_hosts'] = [h.name for h in self.get_hosts_remaining(play)] vars['ansible_failed_hosts'] = [h.name for h in self.get_failed_hosts(play)] def _queue_task(self, host, task, task_vars, play_context): ''' handles queueing the task up to be sent to a worker ''' display.debug("entering _queue_task() for %s/%s" % (host.name, task.action)) # Add a write lock for tasks. # Maybe this should be added somewhere further up the call stack but # this is the earliest in the code where we have task (1) extracted # into its own variable and (2) there's only a single code path # leading to the module being run. This is called by three # functions: __init__.py::_do_handler_run(), linear.py::run(), and # free.py::run() so we'd have to add to all three to do it there. # The next common higher level is __init__.py::run() and that has # tasks inside of play_iterator so we'd have to extract them to do it # there. 
if task.action not in action_write_locks.action_write_locks: display.debug('Creating lock for %s' % task.action) action_write_locks.action_write_locks[task.action] = Lock() # and then queue the new task try: # create a dummy object with plugin loaders set as an easier # way to share them with the forked processes shared_loader_obj = SharedPluginLoaderObj() queued = False starting_worker = self._cur_worker while True: (worker_prc, rslt_q) = self._workers[self._cur_worker] if worker_prc is None or not worker_prc.is_alive(): worker_prc = WorkerProcess(self._final_q, task_vars, host, task, play_context, self._loader, self._variable_manager, shared_loader_obj) self._workers[self._cur_worker][0] = worker_prc worker_prc.start() display.debug("worker is %d (out of %d available)" % (self._cur_worker + 1, len(self._workers))) queued = True self._cur_worker += 1 if self._cur_worker >= len(self._workers): self._cur_worker = 0 if queued: break elif self._cur_worker == starting_worker: time.sleep(0.0001) self._pending_results += 1 except (EOFError, IOError, AssertionError) as e: # most likely an abort display.debug("got an error while queuing: %s" % e) return display.debug("exiting _queue_task() for %s/%s" % (host.name, task.action)) def get_task_hosts(self, iterator, task_host, task): if task.run_once: host_list = [host for host in self._inventory.get_hosts(iterator._play.hosts) if host.name not in self._tqm._unreachable_hosts] else: host_list = [task_host] return host_list def get_delegated_hosts(self, result, task): host_name = result.get('_ansible_delegated_vars', {}).get('ansible_host', None) if host_name is not None: actual_host = self._inventory.get_host(host_name) if actual_host is None: actual_host = Host(name=host_name) else: actual_host = Host(name=task.delegate_to) return [actual_host] def _process_pending_results(self, iterator, one_pass=False, max_passes=None): ''' Reads results off the final queue and takes appropriate action based on the result (executing 
callbacks, updating state, etc.). ''' ret_results = [] def get_original_host(host_name): # FIXME: this should not need x2 _inventory host_name = to_text(host_name) if host_name in self._inventory.hosts: return self._inventory.hosts[host_name] else: return self._inventory.get_host(host_name) def search_handler_blocks_by_name(handler_name, handler_blocks): for handler_block in handler_blocks: for handler_task in handler_block.block: if handler_task.name: handler_vars = self._variable_manager.get_vars(play=iterator._play, task=handler_task) templar = Templar(loader=self._loader, variables=handler_vars) try: # first we check with the full result of get_name(), which may # include the role name (if the handler is from a role). If that # is not found, we resort to the simple name field, which doesn't # have anything extra added to it. target_handler_name = templar.template(handler_task.name) if target_handler_name == handler_name: return handler_task else: target_handler_name = templar.template(handler_task.get_name()) if target_handler_name == handler_name: return handler_task except (UndefinedError, AnsibleUndefinedVariable): # We skip this handler due to the fact that it may be using # a variable in the name that was conditionally included via # set_fact or some other method, and we don't want to error # out unnecessarily continue return None def search_handler_blocks_by_uuid(handler_uuid, handler_blocks): for handler_block in handler_blocks: for handler_task in handler_block.block: if handler_uuid == handler_task._uuid: return handler_task return None def parent_handler_match(target_handler, handler_name): if target_handler: if isinstance(target_handler, (TaskInclude, IncludeRole)): try: handler_vars = self._variable_manager.get_vars(play=iterator._play, task=target_handler) templar = Templar(loader=self._loader, variables=handler_vars) target_handler_name = templar.template(target_handler.name) if target_handler_name == handler_name: return True else: 
target_handler_name = templar.template(target_handler.get_name()) if target_handler_name == handler_name: return True except (UndefinedError, AnsibleUndefinedVariable): pass return parent_handler_match(target_handler._parent, handler_name) else: return False cur_pass = 0 while True: try: self._results_lock.acquire() task_result = self._results.pop() except IndexError: break finally: self._results_lock.release() # get the original host and task. We then assign them to the TaskResult for use in callbacks/etc. original_host = get_original_host(task_result._host) found_task = iterator.get_original_task(original_host, task_result._task) original_task = found_task.copy(exclude_parent=True, exclude_tasks=True) original_task._parent = found_task._parent original_task.from_attrs(task_result._task_fields) task_result._host = original_host task_result._task = original_task # get the correct loop var for use later if original_task.loop_control: loop_var = original_task.loop_control.loop_var or 'item' else: loop_var = 'item' # send callbacks for 'non final' results if '_ansible_retry' in task_result._result: self._tqm.send_callback('v2_runner_retry', task_result) continue elif '_ansible_item_result' in task_result._result: if task_result.is_failed() or task_result.is_unreachable(): self._tqm.send_callback('v2_runner_item_on_failed', task_result) elif task_result.is_skipped(): self._tqm.send_callback('v2_runner_item_on_skipped', task_result) else: if 'diff' in task_result._result: if self._diff: self._tqm.send_callback('v2_on_file_diff', task_result) self._tqm.send_callback('v2_runner_item_on_ok', task_result) continue if original_task.register: host_list = self.get_task_hosts(iterator, original_host, original_task) clean_copy = strip_internal_keys(task_result._result) if 'invocation' in clean_copy: del clean_copy['invocation'] for target_host in host_list: self._variable_manager.set_nonpersistent_facts(target_host, {original_task.register: clean_copy}) # all host status 
messages contain 2 entries: (msg, task_result) role_ran = False if task_result.is_failed(): role_ran = True ignore_errors = original_task.ignore_errors if not ignore_errors: display.debug("marking %s as failed" % original_host.name) if original_task.run_once: # if we're using run_once, we have to fail every host here for h in self._inventory.get_hosts(iterator._play.hosts): if h.name not in self._tqm._unreachable_hosts: state, _ = iterator.get_next_task_for_host(h, peek=True) iterator.mark_host_failed(h) state, new_task = iterator.get_next_task_for_host(h, peek=True) else: iterator.mark_host_failed(original_host) # increment the failed count for this host self._tqm._stats.increment('failures', original_host.name) # grab the current state and if we're iterating on the rescue portion # of a block then we save the failed task in a special var for use # within the rescue/always state, _ = iterator.get_next_task_for_host(original_host, peek=True) if iterator.is_failed(original_host) and state and state.run_state == iterator.ITERATING_COMPLETE: self._tqm._failed_hosts[original_host.name] = True if state and state.run_state == iterator.ITERATING_RESCUE: self._variable_manager.set_nonpersistent_facts( original_host, dict( ansible_failed_task=original_task.serialize(), ansible_failed_result=task_result._result, ), ) else: self._tqm._stats.increment('ok', original_host.name) if 'changed' in task_result._result and task_result._result['changed']: self._tqm._stats.increment('changed', original_host.name) self._tqm.send_callback('v2_runner_on_failed', task_result, ignore_errors=ignore_errors) elif task_result.is_unreachable(): self._tqm._unreachable_hosts[original_host.name] = True iterator._play._removed_hosts.append(original_host.name) self._tqm._stats.increment('dark', original_host.name) self._tqm.send_callback('v2_runner_on_unreachable', task_result) elif task_result.is_skipped(): self._tqm._stats.increment('skipped', original_host.name) 
self._tqm.send_callback('v2_runner_on_skipped', task_result) else: role_ran = True if original_task.loop: # this task had a loop, and has more than one result, so # loop over all of them instead of a single result result_items = task_result._result.get('results', []) else: result_items = [task_result._result] for result_item in result_items: if '_ansible_notify' in result_item: if task_result.is_changed(): # The shared dictionary for notified handlers is a proxy, which # does not detect when sub-objects within the proxy are modified. # So, per the docs, we reassign the list so the proxy picks up and # notifies all other threads for handler_name in result_item['_ansible_notify']: found = False # Find the handler using the above helper. First we look up the # dependency chain of the current task (if it's from a role), otherwise # we just look through the list of handlers in the current play/all # roles and use the first one that matches the notify name target_handler = search_handler_blocks_by_name(handler_name, iterator._play.handlers) if target_handler is not None: found = True if original_host not in self._notified_handlers[target_handler._uuid]: self._notified_handlers[target_handler._uuid].append(original_host) # FIXME: should this be a callback? 
display.vv("NOTIFIED HANDLER %s" % (handler_name,)) else: # As there may be more than one handler with the notified name as the # parent, so we just keep track of whether or not we found one at all for target_handler_uuid in self._notified_handlers: target_handler = search_handler_blocks_by_uuid(target_handler_uuid, iterator._play.handlers) if target_handler and parent_handler_match(target_handler, handler_name): found = True if original_host not in self._notified_handlers[target_handler._uuid]: self._notified_handlers[target_handler._uuid].append(original_host) display.vv("NOTIFIED HANDLER %s" % (target_handler.get_name(),)) if handler_name in self._listening_handlers: for listening_handler_uuid in self._listening_handlers[handler_name]: listening_handler = search_handler_blocks_by_uuid(listening_handler_uuid, iterator._play.handlers) if listening_handler is not None: found = True else: continue if original_host not in self._notified_handlers[listening_handler._uuid]: self._notified_handlers[listening_handler._uuid].append(original_host) display.vv("NOTIFIED HANDLER %s" % (listening_handler.get_name(),)) # and if none were found, then we raise an error if not found: msg = ("The requested handler '%s' was not found in either the main handlers list nor in the listening " "handlers list" % handler_name) if C.ERROR_ON_MISSING_HANDLER: raise AnsibleError(msg) else: display.warning(msg) if 'add_host' in result_item: # this task added a new host (add_host module) new_host_info = result_item.get('add_host', dict()) self._add_host(new_host_info, iterator) elif 'add_group' in result_item: # this task added a new group (group_by module) self._add_group(original_host, result_item) if 'ansible_facts' in result_item: # if delegated fact and we are delegating facts, we need to change target host for them if original_task.delegate_to is not None and original_task.delegate_facts: host_list = self.get_delegated_hosts(result_item, original_task) else: host_list = 
self.get_task_hosts(iterator, original_host, original_task) if original_task.action == 'include_vars': for (var_name, var_value) in iteritems(result_item['ansible_facts']): # find the host we're actually referring too here, which may # be a host that is not really in inventory at all for target_host in host_list: self._variable_manager.set_host_variable(target_host, var_name, var_value) else: for target_host in host_list: if original_task.action == 'set_fact': self._variable_manager.set_nonpersistent_facts(target_host, result_item['ansible_facts'].copy()) else: self._variable_manager.set_host_facts(target_host, result_item['ansible_facts'].copy()) if 'ansible_stats' in result_item and 'data' in result_item['ansible_stats'] and result_item['ansible_stats']['data']: if 'per_host' not in result_item['ansible_stats'] or result_item['ansible_stats']['per_host']: host_list = self.get_task_hosts(iterator, original_host, original_task) else: host_list = [None] data = result_item['ansible_stats']['data'] aggregate = 'aggregate' in result_item['ansible_stats'] and result_item['ansible_stats']['aggregate'] for myhost in host_list: for k in data.keys(): if aggregate: self._tqm._stats.update_custom_stats(k, data[k], myhost) else: self._tqm._stats.set_custom_stats(k, data[k], myhost) if 'diff' in task_result._result: if self._diff: self._tqm.send_callback('v2_on_file_diff', task_result) if not isinstance(original_task, TaskInclude): self._tqm._stats.increment('ok', original_host.name) if 'changed' in task_result._result and task_result._result['changed']: self._tqm._stats.increment('changed', original_host.name) # finally, send the ok for this task self._tqm.send_callback('v2_runner_on_ok', task_result) self._pending_results -= 1 if original_host.name in self._blocked_hosts: del self._blocked_hosts[original_host.name] # If this is a role task, mark the parent role as being run (if # the task was ok or failed, but not skipped or unreachable) if original_task._role is not None and 
role_ran:  # TODO: and original_task.action != 'include_role':?
            # NOTE(review): this is the tail of _process_pending_results; the
            # method's beginning lies outside this chunk, so the indentation of
            # this fragment is best-effort — confirm against the full file.
            # lookup the role in the ROLE_CACHE to make sure we're dealing
            # with the correct object and mark it as executed
            for (entry, role_obj) in iteritems(iterator._play.ROLE_CACHE[original_task._role._role_name]):
                if role_obj._uuid == original_task._role._uuid:
                    role_obj._had_task_run[original_host.name] = True

            ret_results.append(task_result)

        if one_pass or max_passes is not None and (cur_pass + 1) >= max_passes:
            break

        cur_pass += 1

    return ret_results

def _wait_on_pending_results(self, iterator):
    '''
    Wait for the shared counter to drop to zero, using a short sleep
    between checks to ensure we don't spin lock

    :param iterator: the play iterator, forwarded to _process_pending_results.
    :returns: list of TaskResult objects collected while draining the queue.
    :raises AnsibleError: if a worker process is found in a dead state.
    '''

    ret_results = []

    display.debug("waiting for pending results...")
    while self._pending_results > 0 and not self._tqm._terminated:

        # a dead worker means its queued task will never report back; bail out
        if self._tqm.has_dead_workers():
            raise AnsibleError("A worker was found in a dead state")

        results = self._process_pending_results(iterator)
        ret_results.extend(results)

        # only sleep when there is still something outstanding, so we don't
        # add latency on the final pass
        if self._pending_results > 0:
            time.sleep(C.DEFAULT_INTERNAL_POLL_INTERVAL)

    display.debug("no more pending results, returning what we have")

    return ret_results

def _add_host(self, host_info, iterator):
    '''
    Helper function to add a new host to inventory based on a task result.

    :param host_info: dict as produced by an add_host task result; keys used
        here are 'host_name', 'host_vars' and 'groups'.
    :param iterator: play iterator (currently unused in the body).
    '''

    if host_info:
        host_name = host_info.get('host_name')

        # Check if host in inventory, add if not
        if host_name not in self._inventory.hosts:
            self._inventory.add_host(host_name, 'all')
        new_host = self._inventory.hosts.get(host_name)

        # Set/update the vars for this host
        new_host.vars = combine_vars(new_host.get_vars(), host_info.get('host_vars', dict()))

        # create any requested groups and attach the new host to them
        new_groups = host_info.get('groups', [])
        for group_name in new_groups:
            if group_name not in self._inventory.groups:
                self._inventory.add_group(group_name)
            new_group = self._inventory.groups[group_name]
            new_group.add_host(self._inventory.hosts[host_name])

        # clear pattern caching completely since it's unpredictable what patterns may have referenced the group
        self._inventory.clear_pattern_cache()

        # reconcile inventory, ensures inventory rules are followed
        self._inventory.reconcile_inventory()

def _add_group(self, host, result_item):
    '''
    Helper function to add a group (if it does not exist), and to assign the
    specified host to that group.

    :param host: executor-side host copy; the real inventory host is looked up
        by name below.
    :param result_item: task result dict; 'add_group' carries the group name.
    :returns: True if inventory was modified in any way, False otherwise.
    '''

    changed = False

    # the host here is from the executor side, which means it was a
    # serialized/cloned copy and we'll need to look up the proper
    # host object from the master inventory
    real_host = self._inventory.hosts[host.name]

    group_name = result_item.get('add_group')
    if group_name not in self._inventory.groups:
        # create the new group and add it to inventory
        self._inventory.add_group(group_name)
        changed = True

    group = self._inventory.groups[group_name]
    if real_host.name not in group.get_hosts():
        group.add_host(real_host)
        changed = True

    if group_name not in host.get_groups():
        real_host.add_group(group)
        changed = True

    if changed:
        # group membership changed, so cached patterns may be stale
        self._inventory.clear_pattern_cache()
        self._inventory.reconcile_inventory()

    return changed

def _load_included_file(self, included_file, iterator, is_handler=False):
    '''
    Loads an included YAML file of tasks, applying the optional set of variables.

    :param included_file: IncludedFile object describing the file, its task
        and the hosts that included it.
    :param iterator: the play iterator, used as parent play for the blocks.
    :param is_handler: True when the include comes from a handler context.
    :returns: list of parsed blocks, or [] on load failure (hosts are marked
        failed and callbacks sent in that case).
    '''

    display.debug("loading included file: %s" % included_file._filename)
    try:
        data = self._loader.load_from_file(included_file._filename)
        if data is None:
            return []
        elif not isinstance(data, list):
            raise AnsibleError("included task files must contain a list of tasks")

        ti_copy = included_file._task.copy()
        temp_vars = ti_copy.vars.copy()
        temp_vars.update(included_file._args)
        # pop tags out of the include args, if they were specified there, and assign
        # them to the include. If the include already had tags specified, we raise an
        # error so that users know not to specify them both ways
        tags = included_file._task.vars.pop('tags', [])
        if isinstance(tags, string_types):
            tags = tags.split(',')
        if len(tags) > 0:
            if len(included_file._task.tags) > 0:
                raise AnsibleParserError("Include tasks should not specify tags in more than one way (both via args and directly on the task). "
                                         "Mixing tag specify styles is prohibited for whole import hierarchy, not only for single import statement",
                                         obj=included_file._task._ds)
            display.deprecated("You should not specify tags in the include parameters. All tags should be specified using the task-level option")
            included_file._task.tags = tags

        ti_copy.vars = temp_vars

        block_list = load_list_of_blocks(
            data,
            play=iterator._play,
            parent_block=None,
            task_include=ti_copy,
            role=included_file._task._role,
            use_handlers=is_handler,
            loader=self._loader,
            variable_manager=self._variable_manager,
        )

        # since we skip incrementing the stats when the task result is
        # first processed, we do so now for each host in the list
        for host in included_file._hosts:
            self._tqm._stats.increment('ok', host.name)

    except AnsibleError as e:
        # mark all of the hosts including this file as failed, send callbacks,
        # and increment the stats for this host
        for host in included_file._hosts:
            tr = TaskResult(host=host, task=included_file._task, return_data=dict(failed=True, reason=to_text(e)))
            iterator.mark_host_failed(host)
            self._tqm._failed_hosts[host.name] = True
            self._tqm._stats.increment('failures', host.name)
            self._tqm.send_callback('v2_runner_on_failed', tr)
        return []

    # finally, send the callback and return the list of blocks loaded
    self._tqm.send_callback('v2_playbook_on_include', included_file)
    display.debug("done processing included file")
    return block_list

def run_handlers(self, iterator, play_context):
    '''
    Runs handlers on those hosts which have been notified.

    :returns: the last handler run result; iteration over a block stops at the
        first falsy result.
    '''

    result = self._tqm.RUN_OK

    for handler_block in iterator._play.handlers:
        # FIXME: handlers need to support the rescue/always portions of blocks too,
        #        but this may take some work in the iterator and gets tricky when
        #        we consider the ability of meta tasks to flush handlers
        for handler in handler_block.block:
            if handler._uuid in self._notified_handlers and len(self._notified_handlers[handler._uuid]):
                result = self._do_handler_run(handler, handler.get_name(), iterator=iterator, play_context=play_context)
                if not result:
                    break
    return result

def _do_handler_run(self, handler, handler_name, iterator, play_context, notified_hosts=None):
    '''
    Run a single handler on its notified hosts, recursing into any task files
    the handler includes.

    :param handler: the handler task to run.
    :param handler_name: display name used for the task-start callback (the
        handler's name is temporarily swapped to it).
    :param notified_hosts: optional explicit host list; defaults to the hosts
        recorded in self._notified_handlers for this handler.
    :returns: False if processing include results failed or a nested handler
        run returned falsy, True otherwise.  The notification list for this
        handler is wiped before returning.
    '''

    # FIXME: need to use iterator.get_failed_hosts() instead?
    # if not len(self.get_hosts_remaining(iterator._play)):
    #     self._tqm.send_callback('v2_playbook_on_no_hosts_remaining')
    #     result = False
    #     break
    saved_name = handler.name
    handler.name = handler_name
    self._tqm.send_callback('v2_playbook_on_handler_task_start', handler)
    handler.name = saved_name

    if notified_hosts is None:
        notified_hosts = self._notified_handlers[handler._uuid]

    run_once = False
    try:
        action = action_loader.get(handler.action, class_only=True)
        if handler.run_once or getattr(action, 'BYPASS_HOST_LOOP', False):
            run_once = True
    except KeyError:
        # we don't care here, because the action may simply not have a
        # corresponding action plugin
        pass

    host_results = []
    for host in notified_hosts:
        # skip hosts that already ran this handler, and failed hosts unless
        # force_handlers is set
        if not handler.has_triggered(host) and (not iterator.is_failed(host) or play_context.force_handlers):
            task_vars = self._variable_manager.get_vars(play=iterator._play, host=host, task=handler)
            self.add_tqm_variables(task_vars, play=iterator._play)
            self._queue_task(host, handler, task_vars, play_context)
            if run_once:
                break

    # collect the results from the handler run
    host_results = self._wait_on_pending_results(iterator)

    try:
        included_files = IncludedFile.process_include_results(
            host_results,
            self._tqm,
            iterator=iterator,
            inventory=self._inventory,
            loader=self._loader,
            variable_manager=self._variable_manager
        )
    except AnsibleError as e:
        return False

    result = True
    if len(included_files) > 0:
        for included_file in included_files:
            try:
                new_blocks = self._load_included_file(included_file, iterator=iterator, is_handler=True)
                # for every task in each block brought in by the include, add the list
                # of hosts which included the file to the notified_handlers dict
                for block in new_blocks:
                    iterator._play.handlers.append(block)
                    iterator.cache_block_tasks(block)
                    for task in block.block:
                        result = self._do_handler_run(
                            handler=task,
                            handler_name=None,
                            iterator=iterator,
                            play_context=play_context,
                            notified_hosts=included_file._hosts[:],
                        )
                        if not result:
                            break
            except AnsibleError as e:
                for host in included_file._hosts:
                    iterator.mark_host_failed(host)
                    self._tqm._failed_hosts[host.name] = True
                display.warning(str(e))
                continue

    # wipe the notification list
    self._notified_handlers[handler._uuid] = []
    display.debug("done running handlers, result is: %s" % result)
    return result

def _take_step(self, task, host=None):
    '''
    Prompt the user in step mode whether to run the given task.

    :returns: True if the task should run; answering 'c'/'continue' also
        disables step mode for the rest of the run.
    '''

    ret = False
    msg = u'Perform task: %s ' % task
    if host:
        msg += u'on %s ' % host
    msg += u'(N)o/(y)es/(c)ontinue: '
    resp = display.prompt(msg)

    if resp.lower() in ['y', 'yes']:
        display.debug("User ran task")
        ret = True
    elif resp.lower() in ['c', 'continue']:
        display.debug("User ran task and canceled step mode")
        self._step = False
        ret = True
    else:
        display.debug("User skipped task")

    display.banner(msg)
    return ret

def _execute_meta(self, task, play_context, iterator, target_host):
    '''
    Execute a 'meta' task on the given target host and return a single-item
    list with its TaskResult.

    :raises AnsibleError: for an unknown meta action.
    '''

    # meta tasks store their args in the _raw_params field of args,
    # since they do not use k=v pairs, so get that
    meta_action = task.args.get('_raw_params')

    # FIXME(s):
    # * raise an error or show a warning when a conditional is used
    #   on a meta task that doesn't support them

    def _evaluate_conditional(h):
        # evaluate the meta task's `when` against host h's variables
        all_vars = self._variable_manager.get_vars(play=iterator._play, host=h, task=task)
        templar = Templar(loader=self._loader, variables=all_vars)
        return task.evaluate_conditional(templar, all_vars)

    skipped = False
    msg = ''
    if meta_action == 'noop':
        # FIXME: issue a callback for the noop here?
        msg = "noop"
    elif meta_action == 'flush_handlers':
        self.run_handlers(iterator, play_context)
        msg = "ran handlers"
    elif meta_action == 'refresh_inventory':
        self._inventory.refresh_inventory()
        msg = "inventory successfully refreshed"
    elif meta_action == 'clear_facts':
        if _evaluate_conditional(target_host):
            for host in self._inventory.get_hosts(iterator._play.hosts):
                self._variable_manager.clear_facts(host)
            msg = "facts cleared"
        else:
            skipped = True
    elif meta_action == 'clear_host_errors':
        if _evaluate_conditional(target_host):
            for host in self._inventory.get_hosts(iterator._play.hosts):
                # pop with a default so missing entries don't raise
                self._tqm._failed_hosts.pop(host.name, False)
                self._tqm._unreachable_hosts.pop(host.name, False)
                iterator._host_states[host.name].fail_state = iterator.FAILED_NONE
            msg = "cleared host errors"
        else:
            skipped = True
    elif meta_action == 'end_play':
        if _evaluate_conditional(target_host):
            for host in self._inventory.get_hosts(iterator._play.hosts):
                # unreachable hosts keep their state; everything else is done
                if host.name not in self._tqm._unreachable_hosts:
                    iterator._host_states[host.name].run_state = iterator.ITERATING_COMPLETE
            msg = "ending play"
    elif meta_action == 'reset_connection':
        connection = connection_loader.get(play_context.connection, play_context, os.devnull)
        if connection:
            connection.reset()
            msg = 'reset connection'
        else:
            msg = 'no connection, nothing to reset'
    else:
        raise AnsibleError("invalid meta action requested: %s" % meta_action, obj=task._ds)

    result = {'msg': msg}
    if skipped:
        result['skipped'] = True
    else:
        result['changed'] = False

    display.vv("META: %s" % msg)

    return [TaskResult(target_host, task, result)]

def get_hosts_left(self, iterator):
    ''' returns list of available hosts for this iterator by filtering out unreachables '''

    hosts_left = []
    # preserve the play's configured host ordering while filtering
    for host in self._inventory.get_hosts(iterator._play.hosts, order=iterator._play.order):
        if host.name not in self._tqm._unreachable_hosts:
            hosts_left.append(host)

    return hosts_left
gpl-3.0
sauloal/cufflinksviewer
venvwin/Lib/site-packages/werkzeug-0.8.3-py2.7.egg/werkzeug/contrib/kickstart.py
95
11336
# -*- coding: utf-8 -*-
"""
    werkzeug.contrib.kickstart
    ~~~~~~~~~~~~~~~~~~~~~~~~~~

    This module provides some simple shortcuts to make using Werkzeug simpler
    for small scripts.

    These improvements include predefined `Request` and `Response` objects as
    well as a predefined `Application` object which can be customized in child
    classes, of course.  The `Request` and `Response` objects handle URL
    generation as well as sessions via `werkzeug.contrib.sessions` and are
    purely optional.

    There is also some integration of template engines.  The template loaders
    are, of course, not necessary to use the template engines in Werkzeug, but
    they provide a common interface.  Currently supported template engines
    include Werkzeug's minitmpl and Genshi_.  Support for other engines can be
    added in a trivial way.  These loaders provide a template interface
    similar to the one used by Django_.

    .. _Genshi: http://genshi.edgewall.org/
    .. _Django: http://www.djangoproject.com/

    :copyright: (c) 2011 by the Werkzeug Team, see AUTHORS for more details.
    :license: BSD, see LICENSE for more details.
"""
from os import path
from werkzeug.wrappers import Request as RequestBase, Response as ResponseBase
from werkzeug.templates import Template
from werkzeug.exceptions import HTTPException
from werkzeug.routing import RequestRedirect

__all__ = ['Request', 'Response', 'TemplateNotFound', 'TemplateLoader',
           'GenshiTemplateLoader', 'Application']

from warnings import warn
warn(DeprecationWarning('werkzeug.contrib.kickstart is deprecated and '
                        'will be removed in Werkzeug 1.0'))


class Request(RequestBase):
    """A handy subclass of the base request that adds a URL builder.
    When supplied a session store, it is also able to handle sessions.
    """

    def __init__(self, environ, url_map, session_store=None, cookie_name=None):
        # call the parent for initialization
        RequestBase.__init__(self, environ)
        # create an adapter
        self.url_adapter = url_map.bind_to_environ(environ)
        # create all stuff for sessions
        self.session_store = session_store
        self.cookie_name = cookie_name

        # sessions are only active when both a store and a cookie name are given
        if session_store is not None and cookie_name is not None:
            if cookie_name in self.cookies:
                # get the session out of the storage
                self.session = session_store.get(self.cookies[cookie_name])
            else:
                # create a new session
                self.session = session_store.new()

    def url_for(self, callback, **values):
        """Build a URL for the endpoint `callback` using `values`."""
        return self.url_adapter.build(callback, values)


class Response(ResponseBase):
    """
    A subclass of base response which sets the default mimetype to text/html.
    If the `Request` that came in is using Werkzeug sessions, this class
    takes care of saving that session.
    """
    default_mimetype = 'text/html'

    def __call__(self, environ, start_response):
        # get the request object
        # NOTE(review): relies on 'werkzeug.request' being populated in the
        # WSGI environ by the request machinery — confirm the app enables it.
        request = environ['werkzeug.request']

        if request.session_store is not None:
            # save the session if necessary
            request.session_store.save_if_modified(request.session)

            # set the cookie for the browser if it is not there:
            if request.cookie_name not in request.cookies:
                self.set_cookie(request.cookie_name, request.session.sid)

        # go on with normal response business
        return ResponseBase.__call__(self, environ, start_response)


class Processor(object):
    """A request and response processor - it is what Django calls a
    middleware, but Werkzeug also includes straight-forward support for real
    WSGI middlewares, so another name was chosen.

    The code of this processor is derived from the example in the Werkzeug
    trac, called `Request and Response Processor
    <http://dev.pocoo.org/projects/werkzeug/wiki/RequestResponseProcessor>`_
    """

    def process_request(self, request):
        """Hook called before dispatch; may return a replacement request."""
        return request

    def process_response(self, request, response):
        """Hook called after dispatch; may return a replacement response."""
        return response

    def process_view(self, request, view_func, view_args, view_kwargs):
        """process_view() is called just before the Application calls the
        function specified by view_func.

        If this returns None, the Application processes the next Processor,
        and if it returns something else (like a Response instance), that
        will be returned without any further processing.
        """
        return None

    def process_exception(self, request, exception):
        """Hook called when a view raises; return an action to handle it or
        None to pass the exception on."""
        return None


class Application(object):
    """A generic WSGI application which can be used to start with Werkzeug in
    an easy, straightforward way.
    """

    def __init__(self, name, url_map, session=False, processors=None):
        # save the name and the URL-map, as it'll be needed later on
        self.name = name
        self.url_map = url_map
        # save the list of processors if supplied
        self.processors = processors or []
        # create an instance of the storage
        if session:
            self.store = session
        else:
            self.store = None

    def __call__(self, environ, start_response):
        """WSGI entry point: dispatch via the URL map, running all processor
        hooks around the matched view."""
        # create a request - with or without session support
        if self.store is not None:
            request = Request(environ, self.url_map,
                              session_store=self.store,
                              cookie_name='%s_sid' % self.name)
        else:
            request = Request(environ, self.url_map)

        # apply the request processors
        for processor in self.processors:
            request = processor.process_request(request)

        try:
            # find the callback to which the URL is mapped
            callback, args = request.url_adapter.match(request.path)
        except (HTTPException, RequestRedirect), e:
            # both are valid WSGI responses on their own
            response = e
        else:
            # check all view processors
            for processor in self.processors:
                action = processor.process_view(request, callback, (), args)
                if action is not None:
                    # it is overriding the default behaviour, this is
                    # short-circuiting the processing, so it returns here
                    return action(environ, start_response)

            try:
                response = callback(request, **args)
            except Exception, exception:
                # the callback raised some exception, need to process that
                for processor in reversed(self.processors):
                    # filter it through the exception processor
                    action = processor.process_exception(request, exception)
                    if action is not None:
                        # the exception processor returned some action
                        return action(environ, start_response)
                # still not handled by a exception processor, so re-raise
                raise

        # apply the response processors
        for processor in reversed(self.processors):
            response = processor.process_response(request, response)

        # return the completely processed response
        return response(environ, start_response)

    def config_session(self, store, expiration='session'):
        """
        Configures the setting for cookies. You can also disable cookies by
        setting store to None.
        """
        self.store = store
        # expiration=session is the default anyway
        # TODO: add settings to define the expiration date, the domain, the
        # path and maybe the secure parameter.


class TemplateNotFound(IOError, LookupError):
    """
    A template was not found by the template loader.
    """

    def __init__(self, name):
        IOError.__init__(self, name)
        self.name = name


class TemplateLoader(object):
    """
    A simple loader interface for the werkzeug minitmpl
    template language.
    """

    def __init__(self, search_path, encoding='utf-8'):
        self.search_path = path.abspath(search_path)
        self.encoding = encoding

    def get_template(self, name):
        """Get a template from a given name."""
        # strip path segments that are empty or start with '.' so templates
        # cannot be loaded from outside the search path
        filename = path.join(self.search_path, *[p for p in name.split('/')
                                                 if p and p[0] != '.'])
        if not path.exists(filename):
            raise TemplateNotFound(name)
        return Template.from_file(filename, self.encoding)

    def render_to_response(self, *args, **kwargs):
        """Load and render a template into a response object."""
        return Response(self.render_to_string(*args, **kwargs))

    def render_to_string(self, *args, **kwargs):
        """Load and render a template into a unicode string."""
        try:
            template_name, args = args[0], args[1:]
        except IndexError:
            raise TypeError('name of template required')
        return self.get_template(template_name).render(*args, **kwargs)


class GenshiTemplateLoader(TemplateLoader):
    """A unified interface for loading Genshi templates. Actually a quite thin
    wrapper for Genshi's TemplateLoader.

    It sets some defaults that differ from the Genshi loader, most notably
    auto_reload is active. All important options can be passed through to
    Genshi.

    The default output type is 'html', but can be adjusted easily by changing
    the `output_type` attribute.
    """

    def __init__(self, search_path, encoding='utf-8', **kwargs):
        TemplateLoader.__init__(self, search_path, encoding)
        # import Genshi here, because we don't want a general Genshi
        # dependency, only a local one
        from genshi.template import TemplateLoader as GenshiLoader
        from genshi.template.loader import TemplateNotFound

        self.not_found_exception = TemplateNotFound
        # set auto_reload to True per default
        reload_template = kwargs.pop('auto_reload', True)
        # get rid of default_encoding as this template loaders overwrites it
        # with the value of encoding
        kwargs.pop('default_encoding', None)

        # now, all arguments are clean, pass them on
        self.loader = GenshiLoader(search_path, default_encoding=encoding,
                                   auto_reload=reload_template, **kwargs)

        # the default output is HTML but can be overridden easily
        self.output_type = 'html'
        self.encoding = encoding

    def get_template(self, template_name):
        """Get the template which is at the given name"""
        try:
            return self.loader.load(template_name, encoding=self.encoding)
        except self.not_found_exception, e:
            # catch the exception raised by Genshi, convert it into a werkzeug
            # exception (for the sake of consistency)
            raise TemplateNotFound(template_name)

    def render_to_string(self, template_name, context=None):
        """Load and render a template into an unicode string"""
        # create an empty context if no context was specified
        context = context or {}
        tmpl = self.get_template(template_name)
        # render the template into a unicode string (None means unicode)
        return tmpl. \
            generate(**context). \
            render(self.output_type, encoding=None)
mit
clayz/crazy-quiz-web
lib/werkzeug/urls.py
298
31741
# -*- coding: utf-8 -*- """ werkzeug.urls ~~~~~~~~~~~~~ This module implements various URL related functions. :copyright: (c) 2013 by the Werkzeug Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ import re from werkzeug._compat import text_type, PY2, to_unicode, \ to_native, implements_to_string, try_coerce_native, \ normalize_string_tuple, make_literal_wrapper, \ fix_tuple_repr from werkzeug._internal import _encode_idna, _decode_idna from werkzeug.datastructures import MultiDict, iter_multi_items from collections import namedtuple # A regular expression for what a valid schema looks like _scheme_re = re.compile(r'^[a-zA-Z0-9+-.]+$') # Characters that are safe in any part of an URL. _always_safe = (b'abcdefghijklmnopqrstuvwxyz' b'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_.-+') _hexdigits = '0123456789ABCDEFabcdef' _hextobyte = dict( ((a + b).encode(), int(a + b, 16)) for a in _hexdigits for b in _hexdigits ) _URLTuple = fix_tuple_repr(namedtuple('_URLTuple', ['scheme', 'netloc', 'path', 'query', 'fragment'])) class _URLMixin(object): __slots__ = () def replace(self, **kwargs): """Return an URL with the same values, except for those parameters given new values by whichever keyword arguments are specified.""" return self._replace(**kwargs) @property def host(self): """The host part of the URL if available, otherwise `None`. The host is either the hostname or the IP address mentioned in the URL. It will not contain the port. """ return self._split_host()[0] @property def ascii_host(self): """Works exactly like :attr:`host` but will return a result that is restricted to ASCII. If it finds a netloc that is not ASCII it will attempt to idna decode it. This is useful for socket operations when the URL might include internationalized characters. 
""" rv = self.host if rv is not None and isinstance(rv, text_type): rv = _encode_idna(rv) return to_native(rv, 'ascii', 'ignore') @property def port(self): """The port in the URL as an integer if it was present, `None` otherwise. This does not fill in default ports. """ try: rv = int(to_native(self._split_host()[1])) if 0 <= rv <= 65535: return rv except (ValueError, TypeError): pass @property def auth(self): """The authentication part in the URL if available, `None` otherwise. """ return self._split_netloc()[0] @property def username(self): """The username if it was part of the URL, `None` otherwise. This undergoes URL decoding and will always be a unicode string. """ rv = self._split_auth()[0] if rv is not None: return _url_unquote_legacy(rv) @property def raw_username(self): """The username if it was part of the URL, `None` otherwise. Unlike :attr:`username` this one is not being decoded. """ return self._split_auth()[0] @property def password(self): """The password if it was part of the URL, `None` otherwise. This undergoes URL decoding and will always be a unicode string. """ rv = self._split_auth()[1] if rv is not None: return _url_unquote_legacy(rv) @property def raw_password(self): """The password if it was part of the URL, `None` otherwise. Unlike :attr:`password` this one is not being decoded. """ return self._split_auth()[1] def decode_query(self, *args, **kwargs): """Decodes the query part of the URL. Ths is a shortcut for calling :func:`url_decode` on the query argument. The arguments and keyword arguments are forwarded to :func:`url_decode` unchanged. """ return url_decode(self.query, *args, **kwargs) def join(self, *args, **kwargs): """Joins this URL with another one. This is just a convenience function for calling into :meth:`url_join` and then parsing the return value again. """ return url_parse(url_join(self, *args, **kwargs)) def to_url(self): """Returns a URL string or bytes depending on the type of the information stored. 
This is just a convenience function for calling :meth:`url_unparse` for this URL. """ return url_unparse(self) def decode_netloc(self): """Decodes the netloc part into a string.""" rv = _decode_idna(self.host or '') if ':' in rv: rv = '[%s]' % rv port = self.port if port is not None: rv = '%s:%d' % (rv, port) auth = ':'.join(filter(None, [ _url_unquote_legacy(self.raw_username or '', '/:%@'), _url_unquote_legacy(self.raw_password or '', '/:%@'), ])) if auth: rv = '%s@%s' % (auth, rv) return rv def to_uri_tuple(self): """Returns a :class:`BytesURL` tuple that holds a URI. This will encode all the information in the URL properly to ASCII using the rules a web browser would follow. It's usually more interesting to directly call :meth:`iri_to_uri` which will return a string. """ return url_parse(iri_to_uri(self).encode('ascii')) def to_iri_tuple(self): """Returns a :class:`URL` tuple that holds a IRI. This will try to decode as much information as possible in the URL without losing information similar to how a web browser does it for the URL bar. It's usually more interesting to directly call :meth:`uri_to_iri` which will return a string. """ return url_parse(uri_to_iri(self)) def _split_netloc(self): if self._at in self.netloc: return self.netloc.split(self._at, 1) return None, self.netloc def _split_auth(self): auth = self._split_netloc()[0] if not auth: return None, None if self._colon not in auth: return auth, None return auth.split(self._colon, 1) def _split_host(self): rv = self._split_netloc()[1] if not rv: return None, None if not rv.startswith(self._lbracket): if self._colon in rv: return rv.split(self._colon, 1) return rv, None idx = rv.find(self._rbracket) if idx < 0: return rv, None host = rv[1:idx] rest = rv[idx + 1:] if rest.startswith(self._colon): return host, rest[1:] return host, None @implements_to_string class URL(_URLTuple, _URLMixin): """Represents a parsed URL. 
This behaves like a regular tuple but also has some extra attributes that give further insight into the URL. """ __slots__ = () _at = '@' _colon = ':' _lbracket = '[' _rbracket = ']' def __str__(self): return self.to_url() def encode_netloc(self): """Encodes the netloc part to an ASCII safe URL as bytes.""" rv = self.ascii_host or '' if ':' in rv: rv = '[%s]' % rv port = self.port if port is not None: rv = '%s:%d' % (rv, port) auth = ':'.join(filter(None, [ url_quote(self.raw_username or '', 'utf-8', 'strict', '/:%'), url_quote(self.raw_password or '', 'utf-8', 'strict', '/:%'), ])) if auth: rv = '%s@%s' % (auth, rv) return rv.encode('ascii') def encode(self, charset='utf-8', errors='replace'): """Encodes the URL to a tuple made out of bytes. The charset is only being used for the path, query and fragment. """ return BytesURL( self.scheme.encode('ascii'), self.encode_netloc(), self.path.encode(charset, errors), self.query.encode(charset, errors), self.fragment.encode(charset, errors) ) class BytesURL(_URLTuple, _URLMixin): """Represents a parsed URL in bytes.""" __slots__ = () _at = b'@' _colon = b':' _lbracket = b'[' _rbracket = b']' def __str__(self): return self.to_url().decode('utf-8', 'replace') def encode_netloc(self): """Returns the netloc unchanged as bytes.""" return self.netloc def decode(self, charset='utf-8', errors='replace'): """Decodes the URL to a tuple made out of strings. The charset is only being used for the path, query and fragment. 
""" return URL( self.scheme.decode('ascii'), self.decode_netloc(), self.path.decode(charset, errors), self.query.decode(charset, errors), self.fragment.decode(charset, errors) ) def _unquote_to_bytes(string, unsafe=''): if isinstance(string, text_type): string = string.encode('utf-8') if isinstance(unsafe, text_type): unsafe = unsafe.encode('utf-8') unsafe = frozenset(bytearray(unsafe)) bits = iter(string.split(b'%')) result = bytearray(next(bits, b'')) for item in bits: try: char = _hextobyte[item[:2]] if char in unsafe: raise KeyError() result.append(char) result.extend(item[2:]) except KeyError: result.extend(b'%') result.extend(item) return bytes(result) def _url_encode_impl(obj, charset, encode_keys, sort, key): iterable = iter_multi_items(obj) if sort: iterable = sorted(iterable, key=key) for key, value in iterable: if value is None: continue if not isinstance(key, bytes): key = text_type(key).encode(charset) if not isinstance(value, bytes): value = text_type(value).encode(charset) yield url_quote(key) + '=' + url_quote_plus(value) def _url_unquote_legacy(value, unsafe=''): try: return url_unquote(value, charset='utf-8', errors='strict', unsafe=unsafe) except UnicodeError: return url_unquote(value, charset='latin1', unsafe=unsafe) def url_parse(url, scheme=None, allow_fragments=True): """Parses a URL from a string into a :class:`URL` tuple. If the URL is lacking a scheme it can be provided as second argument. Otherwise, it is ignored. Optionally fragments can be stripped from the URL by setting `allow_fragments` to `False`. The inverse of this function is :func:`url_unparse`. :param url: the URL to parse. :param scheme: the default schema to use if the URL is schemaless. :param allow_fragments: if set to `False` a fragment will be removed from the URL. 
""" s = make_literal_wrapper(url) is_text_based = isinstance(url, text_type) if scheme is None: scheme = s('') netloc = query = fragment = s('') i = url.find(s(':')) if i > 0 and _scheme_re.match(to_native(url[:i], errors='replace')): # make sure "iri" is not actually a port number (in which case # "scheme" is really part of the path) rest = url[i + 1:] if not rest or any(c not in s('0123456789') for c in rest): # not a port number scheme, url = url[:i].lower(), rest if url[:2] == s('//'): delim = len(url) for c in s('/?#'): wdelim = url.find(c, 2) if wdelim >= 0: delim = min(delim, wdelim) netloc, url = url[2:delim], url[delim:] if ((s('[') in netloc and s(']') not in netloc) or (s(']') in netloc and s('[') not in netloc)): raise ValueError('Invalid IPv6 URL') if allow_fragments and s('#') in url: url, fragment = url.split(s('#'), 1) if s('?') in url: url, query = url.split(s('?'), 1) result_type = is_text_based and URL or BytesURL return result_type(scheme, netloc, url, query, fragment) def url_quote(string, charset='utf-8', errors='strict', safe='/:', unsafe=''): """URL encode a single string with a given encoding. :param s: the string to quote. :param charset: the charset to be used. :param safe: an optional sequence of safe characters. :param unsafe: an optional sequence of unsafe characters. .. versionadded:: 0.9.2 The `unsafe` parameter was added. 
""" if not isinstance(string, (text_type, bytes, bytearray)): string = text_type(string) if isinstance(string, text_type): string = string.encode(charset, errors) if isinstance(safe, text_type): safe = safe.encode(charset, errors) if isinstance(unsafe, text_type): unsafe = unsafe.encode(charset, errors) safe = frozenset(bytearray(safe) + _always_safe) - frozenset(bytearray(unsafe)) rv = bytearray() for char in bytearray(string): if char in safe: rv.append(char) else: rv.extend(('%%%02X' % char).encode('ascii')) return to_native(bytes(rv)) def url_quote_plus(string, charset='utf-8', errors='strict', safe=''): """URL encode a single string with the given encoding and convert whitespace to "+". :param s: The string to quote. :param charset: The charset to be used. :param safe: An optional sequence of safe characters. """ return url_quote(string, charset, errors, safe + ' ', '+').replace(' ', '+') def url_unparse(components): """The reverse operation to :meth:`url_parse`. This accepts arbitrary as well as :class:`URL` tuples and returns a URL as a string. :param components: the parsed URL as tuple which should be converted into a URL string. """ scheme, netloc, path, query, fragment = \ normalize_string_tuple(components) s = make_literal_wrapper(scheme) url = s('') # We generally treat file:///x and file:/x the same which is also # what browsers seem to do. This also allows us to ignore a schema # register for netloc utilization or having to differenciate between # empty and missing netloc. if netloc or (scheme and path.startswith(s('/'))): if path and path[:1] != s('/'): path = s('/') + path url = s('//') + (netloc or s('')) + path elif path: url += path if scheme: url = scheme + s(':') + url if query: url = url + s('?') + query if fragment: url = url + s('#') + fragment return url def url_unquote(string, charset='utf-8', errors='replace', unsafe=''): """URL decode a single string with a given encoding. 
If the charset is set to `None` no unicode decoding is performed and raw bytes are returned. :param s: the string to unquote. :param charset: the charset of the query string. If set to `None` no unicode decoding will take place. :param errors: the error handling for the charset decoding. """ rv = _unquote_to_bytes(string, unsafe) if charset is not None: rv = rv.decode(charset, errors) return rv def url_unquote_plus(s, charset='utf-8', errors='replace'): """URL decode a single string with the given `charset` and decode "+" to whitespace. Per default encoding errors are ignored. If you want a different behavior you can set `errors` to ``'replace'`` or ``'strict'``. In strict mode a :exc:`HTTPUnicodeError` is raised. :param s: The string to unquote. :param charset: the charset of the query string. If set to `None` no unicode decoding will take place. :param errors: The error handling for the `charset` decoding. """ if isinstance(s, text_type): s = s.replace(u'+', u' ') else: s = s.replace(b'+', b' ') return url_unquote(s, charset, errors) def url_fix(s, charset='utf-8'): r"""Sometimes you get an URL by a user that just isn't a real URL because it contains unsafe characters like ' ' and so on. This function can fix some of the problems in a similar way browsers handle data entered by the user: >>> url_fix(u'http://de.wikipedia.org/wiki/Elf (Begriffskl\xe4rung)') 'http://de.wikipedia.org/wiki/Elf%20(Begriffskl%C3%A4rung)' :param s: the string with the URL to fix. :param charset: The target charset for the URL if the url was given as unicode string. """ scheme, netloc, path, qs, anchor = url_parse(to_unicode(s, charset, 'replace')) path = url_quote(path, charset, safe='/%+$!*\'(),') qs = url_quote_plus(qs, charset, safe=':&%=+$!*\'(),') return to_native(url_unparse((scheme, netloc, path, qs, anchor))) def uri_to_iri(uri, charset='utf-8', errors='replace'): r""" Converts a URI in a given charset to a IRI. 
Examples for URI versus IRI: >>> uri_to_iri(b'http://xn--n3h.net/') u'http://\u2603.net/' >>> uri_to_iri(b'http://%C3%BCser:p%C3%A4ssword@xn--n3h.net/p%C3%A5th') u'http://\xfcser:p\xe4ssword@\u2603.net/p\xe5th' Query strings are left unchanged: >>> uri_to_iri('/?foo=24&x=%26%2f') u'/?foo=24&x=%26%2f' .. versionadded:: 0.6 :param uri: The URI to convert. :param charset: The charset of the URI. :param errors: The error handling on decode. """ if isinstance(uri, tuple): uri = url_unparse(uri) uri = url_parse(to_unicode(uri, charset)) path = url_unquote(uri.path, charset, errors, '%/;?') query = url_unquote(uri.query, charset, errors, '%;/?:@&=+,$') fragment = url_unquote(uri.fragment, charset, errors, '%;/?:@&=+,$') return url_unparse((uri.scheme, uri.decode_netloc(), path, query, fragment)) def iri_to_uri(iri, charset='utf-8', errors='strict'): r""" Converts any unicode based IRI to an acceptable ASCII URI. Werkzeug always uses utf-8 URLs internally because this is what browsers and HTTP do as well. In some places where it accepts an URL it also accepts a unicode IRI and converts it into a URI. Examples for IRI versus URI: >>> iri_to_uri(u'http://☃.net/') 'http://xn--n3h.net/' >>> iri_to_uri(u'http://üser:pässword@☃.net/påth') 'http://%C3%BCser:p%C3%A4ssword@xn--n3h.net/p%C3%A5th' .. versionadded:: 0.6 :param iri: The IRI to convert. :param charset: The charset for the URI. """ if isinstance(iri, tuple): iri = url_unparse(iri) iri = url_parse(to_unicode(iri, charset, errors)) netloc = iri.encode_netloc().decode('ascii') path = url_quote(iri.path, charset, errors, '/:~+%') query = url_quote(iri.query, charset, errors, '%&[]:;$*()+,!?*/=') fragment = url_quote(iri.fragment, charset, errors, '=%&[]:;$()+,!?*/') return to_native(url_unparse((iri.scheme, netloc, path, query, fragment))) def url_decode(s, charset='utf-8', decode_keys=False, include_empty=True, errors='replace', separator='&', cls=None): """ Parse a querystring and return it as :class:`MultiDict`. 
There is a difference in key decoding on different Python versions. On Python 3 keys will always be fully decoded whereas on Python 2, keys will remain bytestrings if they fit into ASCII. On 2.x keys can be forced to be unicode by setting `decode_keys` to `True`. If the charset is set to `None` no unicode decoding will happen and raw bytes will be returned. Per default a missing value for a key will default to an empty key. If you don't want that behavior you can set `include_empty` to `False`. Per default encoding errors are ignored. If you want a different behavior you can set `errors` to ``'replace'`` or ``'strict'``. In strict mode a `HTTPUnicodeError` is raised. .. versionchanged:: 0.5 In previous versions ";" and "&" could be used for url decoding. This changed in 0.5 where only "&" is supported. If you want to use ";" instead a different `separator` can be provided. The `cls` parameter was added. :param s: a string with the query string to decode. :param charset: the charset of the query string. If set to `None` no unicode decoding will take place. :param decode_keys: Used on Python 2.x to control whether keys should be forced to be unicode objects. If set to `True` then keys will be unicode in all cases. Otherwise, they remain `str` if they fit into ASCII. :param include_empty: Set to `False` if you don't want empty values to appear in the dict. :param errors: the decoding error behavior. :param separator: the pair separator to be used, defaults to ``&`` :param cls: an optional dict class to use. If this is not specified or `None` the default :class:`MultiDict` is used. 
""" if cls is None: cls = MultiDict if isinstance(s, text_type) and not isinstance(separator, text_type): separator = separator.decode(charset or 'ascii') elif isinstance(s, bytes) and not isinstance(separator, bytes): separator = separator.encode(charset or 'ascii') return cls(_url_decode_impl(s.split(separator), charset, decode_keys, include_empty, errors)) def url_decode_stream(stream, charset='utf-8', decode_keys=False, include_empty=True, errors='replace', separator='&', cls=None, limit=None, return_iterator=False): """Works like :func:`url_decode` but decodes a stream. The behavior of stream and limit follows functions like :func:`~werkzeug.wsgi.make_line_iter`. The generator of pairs is directly fed to the `cls` so you can consume the data while it's parsed. .. versionadded:: 0.8 :param stream: a stream with the encoded querystring :param charset: the charset of the query string. If set to `None` no unicode decoding will take place. :param decode_keys: Used on Python 2.x to control whether keys should be forced to be unicode objects. If set to `True`, keys will be unicode in all cases. Otherwise, they remain `str` if they fit into ASCII. :param include_empty: Set to `False` if you don't want empty values to appear in the dict. :param errors: the decoding error behavior. :param separator: the pair separator to be used, defaults to ``&`` :param cls: an optional dict class to use. If this is not specified or `None` the default :class:`MultiDict` is used. :param limit: the content length of the URL data. Not necessary if a limited stream is provided. 
:param return_iterator: if set to `True` the `cls` argument is ignored and an iterator over all decoded pairs is returned """ from werkzeug.wsgi import make_chunk_iter if return_iterator: cls = lambda x: x elif cls is None: cls = MultiDict pair_iter = make_chunk_iter(stream, separator, limit) return cls(_url_decode_impl(pair_iter, charset, decode_keys, include_empty, errors)) def _url_decode_impl(pair_iter, charset, decode_keys, include_empty, errors): for pair in pair_iter: if not pair: continue s = make_literal_wrapper(pair) equal = s('=') if equal in pair: key, value = pair.split(equal, 1) else: if not include_empty: continue key = pair value = s('') key = url_unquote_plus(key, charset, errors) if charset is not None and PY2 and not decode_keys: key = try_coerce_native(key) yield key, url_unquote_plus(value, charset, errors) def url_encode(obj, charset='utf-8', encode_keys=False, sort=False, key=None, separator=b'&'): """URL encode a dict/`MultiDict`. If a value is `None` it will not appear in the result string. Per default only values are encoded into the target charset strings. If `encode_keys` is set to ``True`` unicode keys are supported too. If `sort` is set to `True` the items are sorted by `key` or the default sorting algorithm. .. versionadded:: 0.5 `sort`, `key`, and `separator` were added. :param obj: the object to encode into a query string. :param charset: the charset of the query string. :param encode_keys: set to `True` if you have unicode keys. (Ignored on Python 3.x) :param sort: set to `True` if you want parameters to be sorted by `key`. :param separator: the separator to be used for the pairs. :param key: an optional function to be used for sorting. For more details check out the :func:`sorted` documentation. 
""" separator = to_native(separator, 'ascii') return separator.join(_url_encode_impl(obj, charset, encode_keys, sort, key)) def url_encode_stream(obj, stream=None, charset='utf-8', encode_keys=False, sort=False, key=None, separator=b'&'): """Like :meth:`url_encode` but writes the results to a stream object. If the stream is `None` a generator over all encoded pairs is returned. .. versionadded:: 0.8 :param obj: the object to encode into a query string. :param stream: a stream to write the encoded object into or `None` if an iterator over the encoded pairs should be returned. In that case the separator argument is ignored. :param charset: the charset of the query string. :param encode_keys: set to `True` if you have unicode keys. (Ignored on Python 3.x) :param sort: set to `True` if you want parameters to be sorted by `key`. :param separator: the separator to be used for the pairs. :param key: an optional function to be used for sorting. For more details check out the :func:`sorted` documentation. """ separator = to_native(separator, 'ascii') gen = _url_encode_impl(obj, charset, encode_keys, sort, key) if stream is None: return gen for idx, chunk in enumerate(gen): if idx: stream.write(separator) stream.write(chunk) def url_join(base, url, allow_fragments=True): """Join a base URL and a possibly relative URL to form an absolute interpretation of the latter. :param base: the base URL for the join operation. :param url: the URL to join. :param allow_fragments: indicates whether fragments should be allowed. 
""" if isinstance(base, tuple): base = url_unparse(base) if isinstance(url, tuple): url = url_unparse(url) base, url = normalize_string_tuple((base, url)) s = make_literal_wrapper(base) if not base: return url if not url: return base bscheme, bnetloc, bpath, bquery, bfragment = \ url_parse(base, allow_fragments=allow_fragments) scheme, netloc, path, query, fragment = \ url_parse(url, bscheme, allow_fragments) if scheme != bscheme: return url if netloc: return url_unparse((scheme, netloc, path, query, fragment)) netloc = bnetloc if path[:1] == s('/'): segments = path.split(s('/')) elif not path: segments = bpath.split(s('/')) if not query: query = bquery else: segments = bpath.split(s('/'))[:-1] + path.split(s('/')) # If the rightmost part is "./" we want to keep the slash but # remove the dot. if segments[-1] == s('.'): segments[-1] = s('') # Resolve ".." and "." segments = [segment for segment in segments if segment != s('.')] while 1: i = 1 n = len(segments) - 1 while i < n: if segments[i] == s('..') and \ segments[i - 1] not in (s(''), s('..')): del segments[i - 1:i + 1] break i += 1 else: break # Remove trailing ".." if the URL is absolute unwanted_marker = [s(''), s('..')] while segments[:2] == unwanted_marker: del segments[1] path = s('/').join(segments) return url_unparse((scheme, netloc, path, query, fragment)) class Href(object): """Implements a callable that constructs URLs with the given base. The function can be called with any number of positional and keyword arguments which than are used to assemble the URL. Works with URLs and posix paths. Positional arguments are appended as individual segments to the path of the URL: >>> href = Href('/foo') >>> href('bar', 23) '/foo/bar/23' >>> href('foo', bar=23) '/foo/foo?bar=23' If any of the arguments (positional or keyword) evaluates to `None` it will be skipped. 
If no keyword arguments are given the last argument can be a :class:`dict` or :class:`MultiDict` (or any other dict subclass), otherwise the keyword arguments are used for the query parameters, cutting off the first trailing underscore of the parameter name: >>> href(is_=42) '/foo?is=42' >>> href({'foo': 'bar'}) '/foo?foo=bar' Combining of both methods is not allowed: >>> href({'foo': 'bar'}, bar=42) Traceback (most recent call last): ... TypeError: keyword arguments and query-dicts can't be combined Accessing attributes on the href object creates a new href object with the attribute name as prefix: >>> bar_href = href.bar >>> bar_href("blub") '/foo/bar/blub' If `sort` is set to `True` the items are sorted by `key` or the default sorting algorithm: >>> href = Href("/", sort=True) >>> href(a=1, b=2, c=3) '/?a=1&b=2&c=3' .. versionadded:: 0.5 `sort` and `key` were added. """ def __init__(self, base='./', charset='utf-8', sort=False, key=None): if not base: base = './' self.base = base self.charset = charset self.sort = sort self.key = key def __getattr__(self, name): if name[:2] == '__': raise AttributeError(name) base = self.base if base[-1:] != '/': base += '/' return Href(url_join(base, name), self.charset, self.sort, self.key) def __call__(self, *path, **query): if path and isinstance(path[-1], dict): if query: raise TypeError('keyword arguments and query-dicts ' 'can\'t be combined') query, path = path[-1], path[:-1] elif query: query = dict([(k.endswith('_') and k[:-1] or k, v) for k, v in query.items()]) path = '/'.join([to_unicode(url_quote(x, self.charset), 'ascii') for x in path if x is not None]).lstrip('/') rv = self.base if path: if not rv.endswith('/'): rv += '/' rv = url_join(rv, './' + path) if query: rv += '?' + to_unicode(url_encode(query, self.charset, sort=self.sort, key=self.key), 'ascii') return to_native(rv)
apache-2.0
vivekananda/fbeats
django/contrib/auth/tokens.py
96
2583
from datetime import date

from django.conf import settings
from django.utils.http import int_to_base36, base36_to_int
from django.utils.crypto import constant_time_compare, salted_hmac


class PasswordResetTokenGenerator(object):
    """
    Strategy object used to generate and check tokens for the password
    reset mechanism.

    A token encodes a day-granularity timestamp plus an HMAC over the
    user's internal state, so it becomes invalid as soon as the password
    (or last_login) changes, and expires after
    ``settings.PASSWORD_RESET_TIMEOUT_DAYS``.
    """
    def make_token(self, user):
        """
        Returns a token that can be used once to do a password reset
        for the given user.
        """
        return self._make_token_with_timestamp(user, self._num_days(self._today()))

    def check_token(self, user, token):
        """
        Check that a password reset token is correct for a given user.

        Returns True only if the token parses, its HMAC matches a freshly
        computed one for the same user/timestamp, and the timestamp is
        within the configured timeout window.
        """
        # Parse the token: "<base36 timestamp>-<truncated hmac>".
        try:
            ts_b36, hash = token.split("-")
        except ValueError:
            return False

        try:
            ts = base36_to_int(ts_b36)
        except ValueError:
            return False

        # Check that the timestamp/uid has not been tampered with.
        # constant_time_compare avoids leaking match position via timing.
        if not constant_time_compare(self._make_token_with_timestamp(user, ts), token):
            return False

        # Check the timestamp is within limit (days since 2001-1-1).
        if (self._num_days(self._today()) - ts) > settings.PASSWORD_RESET_TIMEOUT_DAYS:
            return False

        return True

    def _make_token_with_timestamp(self, user, timestamp):
        # timestamp is number of days since 2001-1-1.  Converted to
        # base 36, this gives us a 3 digit string until about 2121.
        ts_b36 = int_to_base36(timestamp)

        # By hashing on the internal state of the user and using state
        # that is sure to change (the password salt will change as soon as
        # the password is set, at least for current Django auth, and
        # last_login will also change), we produce a hash that will be
        # invalid as soon as it is used.
        # We limit the hash to 20 chars to keep URL short.
        key_salt = "django.contrib.auth.tokens.PasswordResetTokenGenerator"

        # Ensure results are consistent across DB backends: drop
        # microseconds and tzinfo, which some backends do not round-trip.
        login_timestamp = user.last_login.replace(microsecond=0, tzinfo=None)

        # NOTE: Python 2 code — unicode() is the py2 text constructor.
        value = (unicode(user.id) + user.password + unicode(login_timestamp) + unicode(timestamp))
        # Every second hex char keeps the token short while retaining
        # enough HMAC entropy ('hash' shadows the builtin; kept as-is).
        hash = salted_hmac(key_salt, value).hexdigest()[::2]
        return "%s-%s" % (ts_b36, hash)

    def _num_days(self, dt):
        # Day count since the 2001-1-1 epoch used by make_token/check_token.
        return (dt - date(2001, 1, 1)).days

    def _today(self):
        # Used for mocking in tests
        return date.today()

default_token_generator = PasswordResetTokenGenerator()
bsd-3-clause
lmprice/ansible
lib/ansible/utils/module_docs_fragments/ironware.py
22
3850
#
# (c) 2017, Paul Baker <@paulquack>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.


class ModuleDocFragment(object):
    """Shared documentation fragment for IronWare network modules.

    The DOCUMENTATION string below is YAML consumed by Ansible's doc
    tooling (extends_documentation_fragment); it is data, not code,
    and its content/indentation must stay valid YAML.
    """

    # Standard files documentation fragment
    DOCUMENTATION = """
options:
  authorize:
    description:
      - Instructs the module to enter privileged mode on the remote device
        before sending any commands.  If not specified, the device will
        attempt to execute all commands in non-privileged mode. If the value
        is not specified in the task, the value of environment variable
        C(ANSIBLE_NET_AUTHORIZE) will be used instead.
    type: bool
    default: 'no'
  provider:
    description:
      - A dict object containing connection details.
    suboptions:
      host:
        description:
          - Specifies the DNS host name or address for connecting to the remote
            device over the specified transport.  The value of host is used as
            the destination address for the transport.
      port:
        description:
          - Specifies the port to use when building the connection to the remote
            device.
        default: 22
      username:
        description:
          - Configures the username to use to authenticate the connection to
            the remote device.  This value is used to authenticate
            the SSH session. If the value is not specified in the task, the
            value of environment variable C(ANSIBLE_NET_USERNAME) will be used instead.
      password:
        description:
          - Specifies the password to use to authenticate the connection to
            the remote device.   This value is used to authenticate
            the SSH session. If the value is not specified in the task, the
            value of environment variable C(ANSIBLE_NET_PASSWORD) will be used instead.
      ssh_keyfile:
        description:
          - Specifies the SSH key to use to authenticate the connection to
            the remote device.   This value is the path to the
            key used to authenticate the SSH session. If the value is not specified
            in the task, the value of environment variable C(ANSIBLE_NET_SSH_KEYFILE)
            will be used instead.
      authorize:
        description:
          - Instructs the module to enter privileged mode on the remote device
            before sending any commands.  If not specified, the device will
            attempt to execute all commands in non-privileged mode. If the value
            is not specified in the task, the value of environment variable
            C(ANSIBLE_NET_AUTHORIZE) will be used instead.
        type: bool
        default: 'no'
      auth_pass:
        description:
          - Specifies the password to use if required to enter privileged mode
            on the remote device.  If I(authorize) is false, then this argument
            does nothing. If the value is not specified in the task, the value of
            environment variable C(ANSIBLE_NET_AUTH_PASS) will be used instead.
      timeout:
        description:
          - Specifies idle timeout in seconds for the connection, in seconds. Useful
            if the console freezes before continuing. For example when saving
            configurations.
        default: 10
"""
gpl-3.0
voer-platform/vp.repo
vpr/vpr_admin/templatetags/dashboard.py
1
1502
from django import template

register = template.Library()

# Human-readable material type names, indexed by integer type code.
MTYPE_NAMES = ['', 'Module', 'Collection']
# One-letter abbreviations for the same type codes.
MTYPE_SHORT_NAMES = ['', 'm', 'c']


@register.inclusion_tag('dashboard_nav.html', takes_context=True)
def render_dashboard_nav(context):
    """Render the dashboard navigation menu."""
    return {'navItems': getNavigationItems(None)}


def getNavigationItems(request):
    """Return the static dashboard navigation as (label, url) pairs.

    Entries with an empty url act as section headers; '-' marks a
    placeholder with no page yet.  *request* is currently unused.
    """
    return (
        ('Overview', adminURL()),
        ('API SERVICE', ''),
        ('Client Management', adminURL('clients/')),
        ('Active Tokens', adminURL('tokens/')),
        ('Records', adminURL('api-records/')),
        ('SYSTEM', ''),
        ('Processes', adminURL('processes/')),
        ('Resource Usages', adminURL('resources/')),
        ('Database', adminURL('database')),
        ('CONTENT', ''),
        ('Materials', adminURL('materials/')),
        ('OASIS', adminURL('oasis/')),
        ('Persons', adminURL('persons/')),
        ('Other Content', '-'),
        ('Statistics', '-'),
        ('VP COMPONENTS', ''),
        ('VP Web', '-'),
        ('VP Repository', '-'),
        ('VP Transformer', '-'),
    )


def adminURL(path=''):
    """Prefix *path* with the dashboard base URL."""
    BASE_URL = '/dashboard/'
    return BASE_URL + path


@register.filter
def nameType(mtype, short=False):
    """Return the material type as a string, full or abbreviated name."""
    table = MTYPE_SHORT_NAMES if short else MTYPE_NAMES
    return table[mtype]
agpl-3.0
cgar/servo
tests/wpt/web-platform-tests/tools/wptserve/tests/functional/test_request.py
109
2919
import unittest

import wptserve
from .base import TestUsingServer


class TestInputFile(TestUsingServer):
    """Exercise the file-like interface of request.raw_input."""

    def test_seek(self):
        # Handler reads the body through seek/tell/read/readline and
        # echoes each intermediate result, space-separated.
        @wptserve.handlers.handler
        def handler(request, response):
            rv = []
            f = request.raw_input
            f.seek(5)
            rv.append(f.read(2))
            rv.append(f.tell())
            f.seek(0)
            rv.append(f.readline())
            rv.append(f.tell())
            rv.append(f.read(-1))
            rv.append(f.tell())
            f.seek(0)
            rv.append(f.read())
            f.seek(0)
            rv.extend(f.readlines())

            return " ".join(str(item) for item in rv)

        route = ("POST", "/test/test_seek", handler)
        self.server.router.register(*route)
        resp = self.request(route[1], method="POST", body="12345ab\ncdef")
        self.assertEqual(200, resp.getcode())
        # Values in order: read(2) after seek(5), tell, first line, tell,
        # rest of stream, tell, whole body, then readlines().
        self.assertEqual(["ab", "7", "12345ab\n", "8", "cdef", "12",
                          "12345ab\ncdef", "12345ab\n", "cdef"],
                         resp.read().split(" "))

    def test_iter(self):
        # raw_input must be iterable line-by-line, preserving EOL bytes.
        @wptserve.handlers.handler
        def handler(request, response):
            f = request.raw_input
            return " ".join(line for line in f)

        route = ("POST", "/test/test_iter", handler)
        self.server.router.register(*route)
        resp = self.request(route[1], method="POST", body="12345\nabcdef\r\nzyxwv")
        self.assertEqual(200, resp.getcode())
        self.assertEqual(["12345\n", "abcdef\r\n", "zyxwv"],
                         resp.read().split(" "))


class TestRequest(TestUsingServer):
    """Checks on the parsed Request object itself."""

    def test_body(self):
        # request.body must return the full payload regardless of the
        # current raw_input seek position.
        @wptserve.handlers.handler
        def handler(request, response):
            request.raw_input.seek(5)
            return request.body

        route = ("POST", "/test/test_body", handler)
        self.server.router.register(*route)
        resp = self.request(route[1], method="POST", body="12345ab\ncdef")
        self.assertEqual("12345ab\ncdef", resp.read())

    def test_route_match(self):
        # Named groups and the "*" wildcard are exposed via route_match.
        @wptserve.handlers.handler
        def handler(request, response):
            return request.route_match["match"] + " " + request.route_match["*"]

        route = ("GET", "/test/{match}_*", handler)
        self.server.router.register(*route)
        resp = self.request("/test/some_route")
        self.assertEqual("some route", resp.read())


class TestAuth(TestUsingServer):
    """HTTP Basic auth credentials surfaced on request.auth."""

    def test_auth(self):
        @wptserve.handlers.handler
        def handler(request, response):
            return " ".join((request.auth.username, request.auth.password))

        route = ("GET", "/test/test_auth", handler)
        self.server.router.register(*route)
        resp = self.request(route[1], auth=("test", "PASS"))
        self.assertEqual(200, resp.getcode())
        self.assertEqual(["test", "PASS"], resp.read().split(" "))

if __name__ == '__main__':
    unittest.main()
mpl-2.0
petewarden/tensorflow_makefile
tensorflow/python/kernel_tests/batch_matrix_band_part_op_test.py
9
3196
# Copyright 2016 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np import tensorflow as tf class BatchMatrixBandPartTest(tf.test.TestCase): pass # Filled in below def _GetBatchMatrixBandPartTest(dtype_, batch_shape_, shape_, use_gpu_): def Test(self): mat = np.ones(shape_).astype(dtype_) batch_mat = np.tile(mat, batch_shape + (1, 1)) with self.test_session(use_gpu=use_gpu_): for lower in -1, 0, 1, shape_[-2] - 1: for upper in -1, 0, 1, shape_[-1] - 1: band_np = mat if lower >= 0: band_np = np.triu(band_np, -lower) if upper >= 0: band_np = np.tril(band_np, upper) if batch_shape is not (): band_np = np.tile(band_np, batch_shape + (1, 1)) band = tf.batch_matrix_band_part(batch_mat, lower, upper) self.assertAllEqual(band_np, band.eval()) return Test class BatchMatrixBandPartGradTest(tf.test.TestCase): pass # Filled in below def _GetBatchMatrixBandPartGradTest(dtype_, batch_shape_, shape_, use_gpu_): def Test(self): shape = batch_shape_ + shape_ x = tf.constant(np.random.rand(*shape), dtype=dtype_) with self.test_session(use_gpu=use_gpu_): for lower in -1, 0, 1, shape_[-2] - 1: for upper in -1, 0, 1, shape_[-1] - 1: y = tf.batch_matrix_band_part(x, lower, upper) error = tf.test.compute_gradient_error(x, x.get_shape().as_list(), y, 
y.get_shape().as_list()) self.assertLess(error, 1e-4) return Test if __name__ == '__main__': for use_gpu in False, True: for dtype in np.int32, np.int64, np.float32, np.float64: for batch_shape in ((), (2,), (1, 3, 2)): for rows in 1, 2, 7: for cols in 1, 2, 7: shape = (rows, cols) name = '%s_%s_%s' % (dtype.__name__, '_'.join(map(str, shape)), use_gpu) setattr( BatchMatrixBandPartTest, 'testBatchMatrixBandPart_' + name, _GetBatchMatrixBandPartTest(dtype, batch_shape, shape, use_gpu)) if dtype == np.float32 or dtype == np.float64: setattr(BatchMatrixBandPartGradTest, 'testBatchMatrixBandPartGrad_' + name, _GetBatchMatrixBandPartGradTest(dtype, batch_shape, shape, use_gpu)) tf.test.main()
apache-2.0
rbovard/qgis-plugin-settings-manager
SettingsManager/ui_settingsmanager.py
2
2117
# -*- coding: utf-8 -*- # Form implementation generated from reading ui file 'ui_settingsmanager.ui' # # Created: Thu Apr 3 15:51:39 2014 # by: PyQt4 UI code generator 4.10.3 # # WARNING! All changes made in this file will be lost! from PyQt4 import QtCore, QtGui try: _fromUtf8 = QtCore.QString.fromUtf8 except AttributeError: def _fromUtf8(s): return s try: _encoding = QtGui.QApplication.UnicodeUTF8 def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig, _encoding) except AttributeError: def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig) class Ui_SettingsManager(object): def setupUi(self, SettingsManager): SettingsManager.setObjectName(_fromUtf8("SettingsManager")) SettingsManager.resize(298, 68) self.gridLayout = QtGui.QGridLayout(SettingsManager) self.gridLayout.setObjectName(_fromUtf8("gridLayout")) self.label = QtGui.QLabel(SettingsManager) self.label.setObjectName(_fromUtf8("label")) self.gridLayout.addWidget(self.label, 0, 0, 1, 1) self.buttonBox = QtGui.QDialogButtonBox(SettingsManager) self.buttonBox.setOrientation(QtCore.Qt.Horizontal) self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.No|QtGui.QDialogButtonBox.Yes) self.buttonBox.setObjectName(_fromUtf8("buttonBox")) self.gridLayout.addWidget(self.buttonBox, 1, 0, 1, 1) self.retranslateUi(SettingsManager) QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("accepted()")), SettingsManager.accept) QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("rejected()")), SettingsManager.reject) QtCore.QMetaObject.connectSlotsByName(SettingsManager) def retranslateUi(self, SettingsManager): SettingsManager.setWindowTitle(_translate("SettingsManager", "Settings Manager", None)) self.label.setText(_translate("SettingsManager", "Voulez-vous installer les paramètres SITNyon ?", None)) import resources_rc
gpl-2.0
yongshengwang/hue
desktop/core/ext-py/django-extensions-1.5.0/django_extensions/validators.py
65
2090
import unicodedata

from django.core.exceptions import ValidationError
from django.utils.deconstruct import deconstructible
from django.utils.encoding import force_text
from django.utils.translation import ugettext_lazy as _


@deconstructible
class NoControlCharactersValidator(object):
    """Reject values containing Unicode control characters (category "C"),
    except those explicitly allowed via *whitelist*."""

    message = _("Control Characters like new lines or tabs are not allowed.")
    code = "no_control_characters"
    whitelist = None

    def __init__(self, message=None, code=None, whitelist=None):
        # Each argument overrides the class-level default only when given.
        if message:
            self.message = message
        if code:
            self.code = code
        if whitelist:
            self.whitelist = whitelist

    def __call__(self, value):
        """Raise ValidationError if *value* contains a non-whitelisted
        control character."""
        value = force_text(value)
        whitelist = self.whitelist
        category = unicodedata.category  # hoisted for the per-char loop
        for character in value:
            if whitelist and character in whitelist:
                continue
            # Unicode general category "C*" covers control/format/etc.
            if category(character)[0] == "C":
                params = {'value': value, 'whitelist': whitelist}
                raise ValidationError(self.message, code=self.code, params=params)

    def __eq__(self, other):
        # Equality supports Django migration deconstruction/comparison.
        return (
            isinstance(other, NoControlCharactersValidator) and
            (self.whitelist == other.whitelist) and
            (self.message == other.message) and
            (self.code == other.code)
        )


@deconstructible
class NoWhitespaceValidator(object):
    """Reject values with leading or trailing whitespace."""

    message = _("Leading and Trailing whitespace is not allowed.")
    code = "no_whitespace"

    def __init__(self, message=None, code=None, whitelist=None):
        # NOTE(review): *whitelist* is accepted but unused here — it appears
        # to exist only for signature parity with
        # NoControlCharactersValidator; confirm before removing.
        if message:
            self.message = message
        if code:
            self.code = code

    def __call__(self, value):
        """Raise ValidationError if *value* differs from its stripped form."""
        value = force_text(value)
        if value != value.strip():
            params = {'value': value}
            raise ValidationError(self.message, code=self.code, params=params)

    def __eq__(self, other):
        # Equality supports Django migration deconstruction/comparison.
        return (
            isinstance(other, NoWhitespaceValidator) and
            (self.message == other.message) and
            (self.code == other.code)
        )
apache-2.0
Krolov18/Languages
soxCutting/DecoupeurRecursif.py
2
1831
from DecoupeurChaine import DecoupeurChaine
import yaml


class DecoupeurRecursif(DecoupeurDescriptionPlaceholder if False else DecoupeurChaine):
    """Recursively split ``self.chaine`` by a list of separator specs.

    Each element of *separateurs* is a tuple:
      - ``(sep,)``              split on *sep* everywhere,
      - ``(sep, side)``         split from the left (``"l"``) or right (``"r"``),
      - ``(sep, side, number)`` same, limited to *number* splits.
    Leaf tuples are accumulated into ``self.liste``.
    """

    def decouper(self, separateurs):
        """Split self.chaine on separateurs[0], then recurse on the pieces.

        Fixes over the original:
          * the 3-tuple branch now raises TypeError on an invalid side
            instead of falling through with ``temp`` unbound (NameError);
          * the empty-spec guard was ``assert "..."`` — an always-true
            string assert that could never fire — and is now a real
            ValueError.
        """
        spec = separateurs[0]
        if len(spec) == 3:
            (separator, side, number) = spec
            if side == "l":
                temp = self.chaine.split(separator, number)
            elif side == "r":
                temp = self.chaine.rsplit(separator, number)
            else:
                raise TypeError("Cette position n'accepte que deux solutions 'l' ou 'r' et rien d'autre.")
        elif len(spec) == 2:
            if not isinstance(spec[0], str):
                raise TypeError("il ne peut pas y avoir autre chose qu'un string dans cette position.")
            (separator, side) = spec
            if side == "l":
                temp = self.chaine.split(separator)
            elif side == "r":
                temp = self.chaine.rsplit(separator)
            else:
                raise TypeError("Cette position n'accepte que deux solutions 'l' ou 'r' et rien d'autre.")
        elif len(spec) == 1:
            if not isinstance(spec[0], str):
                raise TypeError("il ne peut pas y avoir autre chose qu'un string dans cette position.")
            separator = spec[0]
            temp = self.chaine.split(separator)
        else:
            raise ValueError("il faut au moins 1 séparateur.")

        # Stop when no remaining separator occurs in any piece, or when
        # this was the last separator level; otherwise recurse per piece.
        if not any(x in y for x in [g[0] for g in separateurs] for y in temp) or len(separateurs) == 1:
            self.liste.append(tuple(temp))
        else:
            for element in temp:
                self.chaine = element
                self.decouper(separateurs[1:])

# if __name__ == '__main__':
#     essai = DecoupeurRecursif("bonjour,maman")
#     essai.decouper(',')
#     print(essai.liste)
apache-2.0
peteers/android_kernel_acer_a100
tools/perf/scripts/python/check-perf-trace.py
11214
2503
# perf script event handlers, generated by perf script -g python
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# This script tests basic functionality such as flag and symbol
# strings, common_xxx() calls back into perf, begin, end, unhandled
# events, etc.  Basically, if this script runs successfully and
# displays expected results, Python scripting support should be ok.
#
# NOTE: Python 2 script (print statements); loaded by perf's embedded
# interpreter, which also supplies the Perf-Trace-Util modules below.

import os
import sys

sys.path.append(os.environ['PERF_EXEC_PATH'] + \
    '/scripts/python/Perf-Trace-Util/lib/Perf/Trace')

from Core import *
from perf_trace_context import *

# event-name -> count of events with no dedicated handler
unhandled = autodict()

def trace_begin():
    # Called once by perf before any event is delivered.
    print "trace_begin"
    pass

def trace_end():
    # Called once after the last event; dump the unhandled-event summary.
    print_unhandled()

def irq__softirq_entry(event_name, context, common_cpu,
    common_secs, common_nsecs, common_pid, common_comm,
    vec):
        # Exercises symbol_str(): vec is rendered as its symbolic name.
        print_header(event_name, common_cpu, common_secs, common_nsecs,
            common_pid, common_comm)

        print_uncommon(context)

        print "vec=%s\n" % \
        (symbol_str("irq__softirq_entry", "vec", vec)),

def kmem__kmalloc(event_name, context, common_cpu,
    common_secs, common_nsecs, common_pid, common_comm,
    call_site, ptr, bytes_req, bytes_alloc,
    gfp_flags):
        # Exercises flag_str(): gfp_flags is rendered as flag names.
        print_header(event_name, common_cpu, common_secs, common_nsecs,
            common_pid, common_comm)

        print_uncommon(context)

        print "call_site=%u, ptr=%u, bytes_req=%u, " \
        "bytes_alloc=%u, gfp_flags=%s\n" % \
        (call_site, ptr, bytes_req, bytes_alloc,
        flag_str("kmem__kmalloc", "gfp_flags", gfp_flags)),

def trace_unhandled(event_name, context, event_fields_dict):
    # autodict auto-creates nested entries; the first increment hits a
    # fresh (non-int) node, hence the TypeError fallback to 1.
    try:
        unhandled[event_name] += 1
    except TypeError:
        unhandled[event_name] = 1

def print_header(event_name, cpu, secs, nsecs, pid, comm):
    # Trailing comma suppresses the newline (Python 2 print).
    print "%-20s %5u %05u.%09u %8u %-20s " % \
    (event_name, cpu, secs, nsecs, pid, comm),

# print trace fields not included in handler args
def print_uncommon(context):
    print "common_preempt_count=%d, common_flags=%s, common_lock_depth=%d, " \
        % (common_pc(context), trace_flag_str(common_flags(context)), \
        common_lock_depth(context))

def print_unhandled():
    # Tabular summary of every event type that had no handler.
    keys = unhandled.keys()
    if not keys:
        return

    print "\nunhandled events:\n\n",

    print "%-40s  %10s\n" % ("event", "count"),
    print "%-40s  %10s\n" % ("----------------------------------------", \
                                 "-----------"),

    for event_name in keys:
        print "%-40s  %10d\n" % (event_name, unhandled[event_name])
gpl-2.0
caesar2164/edx-platform
common/lib/xmodule/xmodule/tests/test_import.py
14
26979
# -*- coding: utf-8 -*- import datetime import ddt import unittest from fs.memoryfs import MemoryFS from lxml import etree from mock import Mock, patch from django.utils.timezone import UTC from xmodule.xml_module import is_pointer_tag from opaque_keys.edx.locations import Location from xmodule.modulestore import only_xmodules from xmodule.modulestore.xml import ImportSystem, XMLModuleStore, LibraryXMLModuleStore from xmodule.modulestore.inheritance import compute_inherited_metadata from xmodule.x_module import XModuleMixin from xmodule.fields import Date from xmodule.tests import DATA_DIR from xmodule.modulestore.inheritance import InheritanceMixin from opaque_keys.edx.locations import SlashSeparatedCourseKey from xblock.core import XBlock from xblock.fields import Scope, String, Integer from xblock.runtime import KvsFieldData, DictKeyValueStore ORG = 'test_org' COURSE = 'test_course' class DummySystem(ImportSystem): @patch('xmodule.modulestore.xml.OSFS', lambda dir: MemoryFS()) def __init__(self, load_error_modules, library=False): if library: xmlstore = LibraryXMLModuleStore("data_dir", source_dirs=[], load_error_modules=load_error_modules) else: xmlstore = XMLModuleStore("data_dir", source_dirs=[], load_error_modules=load_error_modules) course_id = SlashSeparatedCourseKey(ORG, COURSE, 'test_run') course_dir = "test_dir" error_tracker = Mock() super(DummySystem, self).__init__( xmlstore=xmlstore, course_id=course_id, course_dir=course_dir, error_tracker=error_tracker, load_error_modules=load_error_modules, mixins=(InheritanceMixin, XModuleMixin), field_data=KvsFieldData(DictKeyValueStore()), ) def render_template(self, _template, _context): raise Exception("Shouldn't be called") class BaseCourseTestCase(unittest.TestCase): '''Make sure module imports work properly, including for malformed inputs''' @staticmethod def get_system(load_error_modules=True, library=False): '''Get a dummy system''' return DummySystem(load_error_modules, library=library) def 
get_course(self, name): """Get a test course by directory name. If there's more than one, error.""" print "Importing {0}".format(name) modulestore = XMLModuleStore( DATA_DIR, source_dirs=[name], xblock_mixins=(InheritanceMixin,), xblock_select=only_xmodules, ) courses = modulestore.get_courses() self.assertEquals(len(courses), 1) return courses[0] class GenericXBlock(XBlock): """XBlock for testing pure xblock xml import""" has_children = True field1 = String(default="something", scope=Scope.user_state) field2 = Integer(scope=Scope.user_state) @ddt.ddt class PureXBlockImportTest(BaseCourseTestCase): """ Tests of import pure XBlocks (not XModules) from xml """ def assert_xblocks_are_good(self, block): """Assert a number of conditions that must be true for `block` to be good.""" scope_ids = block.scope_ids self.assertIsNotNone(scope_ids.usage_id) self.assertIsNotNone(scope_ids.def_id) for child_id in block.children: child = block.runtime.get_block(child_id) self.assert_xblocks_are_good(child) @XBlock.register_temp_plugin(GenericXBlock) @ddt.data( "<genericxblock/>", "<genericxblock field1='abc' field2='23' />", "<genericxblock field1='abc' field2='23'><genericxblock/></genericxblock>", ) @patch('xmodule.x_module.XModuleMixin.location') def test_parsing_pure_xblock(self, xml, mock_location): system = self.get_system(load_error_modules=False) descriptor = system.process_xml(xml) self.assertIsInstance(descriptor, GenericXBlock) self.assert_xblocks_are_good(descriptor) self.assertFalse(mock_location.called) class ImportTestCase(BaseCourseTestCase): date = Date() def test_fallback(self): '''Check that malformed xml loads as an ErrorDescriptor.''' # Use an exotic character to also flush out Unicode issues. 
bad_xml = u'''<sequential display_name="oops\N{SNOWMAN}"><video url="hi"></sequential>''' system = self.get_system() descriptor = system.process_xml(bad_xml) self.assertEqual(descriptor.__class__.__name__, 'ErrorDescriptorWithMixins') def test_unique_url_names(self): '''Check that each error gets its very own url_name''' bad_xml = '''<sequential display_name="oops"><video url="hi"></sequential>''' bad_xml2 = '''<sequential url_name="oops"><video url="hi"></sequential>''' system = self.get_system() descriptor1 = system.process_xml(bad_xml) descriptor2 = system.process_xml(bad_xml2) self.assertNotEqual(descriptor1.location, descriptor2.location) # Check that each vertical gets its very own url_name bad_xml = '''<vertical display_name="abc"><problem url_name="exam1:2013_Spring:abc"/></vertical>''' bad_xml2 = '''<vertical display_name="abc"><problem url_name="exam2:2013_Spring:abc"/></vertical>''' descriptor1 = system.process_xml(bad_xml) descriptor2 = system.process_xml(bad_xml2) self.assertNotEqual(descriptor1.location, descriptor2.location) def test_reimport(self): '''Make sure an already-exported error xml tag loads properly''' self.maxDiff = None bad_xml = '''<sequential display_name="oops"><video url="hi"></sequential>''' system = self.get_system() descriptor = system.process_xml(bad_xml) node = etree.Element('unknown') descriptor.add_xml_to_node(node) re_import_descriptor = system.process_xml(etree.tostring(node)) self.assertEqual(re_import_descriptor.__class__.__name__, 'ErrorDescriptorWithMixins') self.assertEqual(descriptor.contents, re_import_descriptor.contents) self.assertEqual(descriptor.error_msg, re_import_descriptor.error_msg) def test_fixed_xml_tag(self): """Make sure a tag that's been fixed exports as the original tag type""" # create a error tag with valid xml contents root = etree.Element('error') good_xml = '''<sequential display_name="fixed"><video url="hi"/></sequential>''' root.text = good_xml xml_str_in = etree.tostring(root) # load it system 
= self.get_system() descriptor = system.process_xml(xml_str_in) # export it node = etree.Element('unknown') descriptor.add_xml_to_node(node) # Now make sure the exported xml is a sequential self.assertEqual(node.tag, 'sequential') def course_descriptor_inheritance_check(self, descriptor, from_date_string, unicorn_color, url_name): """ Checks to make sure that metadata inheritance on a course descriptor is respected. """ # pylint: disable=protected-access print(descriptor, descriptor._field_data) self.assertEqual(descriptor.due, ImportTestCase.date.from_json(from_date_string)) # Check that the child inherits due correctly child = descriptor.get_children()[0] self.assertEqual(child.due, ImportTestCase.date.from_json(from_date_string)) # need to convert v to canonical json b4 comparing self.assertEqual( ImportTestCase.date.to_json(ImportTestCase.date.from_json(from_date_string)), child.xblock_kvs.inherited_settings['due'] ) # Now export and check things descriptor.runtime.export_fs = MemoryFS() node = etree.Element('unknown') descriptor.add_xml_to_node(node) # Check that the exported xml is just a pointer print("Exported xml:", etree.tostring(node)) self.assertTrue(is_pointer_tag(node)) # but it's a special case course pointer self.assertEqual(node.attrib['course'], COURSE) self.assertEqual(node.attrib['org'], ORG) # Does the course still have unicorns? with descriptor.runtime.export_fs.open('course/{url_name}.xml'.format(url_name=url_name)) as f: course_xml = etree.fromstring(f.read()) self.assertEqual(course_xml.attrib['unicorn'], unicorn_color) # the course and org tags should be _only_ in the pointer self.assertNotIn('course', course_xml.attrib) self.assertNotIn('org', course_xml.attrib) # did we successfully strip the url_name from the definition contents? self.assertNotIn('url_name', course_xml.attrib) # Does the chapter tag now have a due attribute? 
# hardcoded path to child with descriptor.runtime.export_fs.open('chapter/ch.xml') as f: chapter_xml = etree.fromstring(f.read()) self.assertEqual(chapter_xml.tag, 'chapter') self.assertNotIn('due', chapter_xml.attrib) def test_metadata_import_export(self): """Two checks: - unknown metadata is preserved across import-export - inherited metadata doesn't leak to children. """ system = self.get_system() from_date_string = 'March 20 17:00' url_name = 'test1' unicorn_color = 'purple' start_xml = ''' <course org="{org}" course="{course}" due="{due}" url_name="{url_name}" unicorn="{unicorn_color}"> <chapter url="hi" url_name="ch" display_name="CH"> <html url_name="h" display_name="H">Two houses, ...</html> </chapter> </course>'''.format( due=from_date_string, org=ORG, course=COURSE, url_name=url_name, unicorn_color=unicorn_color ) descriptor = system.process_xml(start_xml) compute_inherited_metadata(descriptor) self.course_descriptor_inheritance_check(descriptor, from_date_string, unicorn_color, url_name) def test_library_metadata_import_export(self): """Two checks: - unknown metadata is preserved across import-export - inherited metadata doesn't leak to children. 
""" system = self.get_system(library=True) from_date_string = 'March 26 17:00' url_name = 'test2' unicorn_color = 'rainbow' start_xml = ''' <library org="TestOrg" library="TestLib" display_name="stuff"> <course org="{org}" course="{course}" due="{due}" url_name="{url_name}" unicorn="{unicorn_color}"> <chapter url="hi" url_name="ch" display_name="CH"> <html url_name="h" display_name="H">Two houses, ...</html> </chapter> </course> </library>'''.format( due=from_date_string, org=ORG, course=COURSE, url_name=url_name, unicorn_color=unicorn_color ) descriptor = system.process_xml(start_xml) # pylint: disable=protected-access original_unwrapped = descriptor._unwrapped_field_data LibraryXMLModuleStore.patch_descriptor_kvs(descriptor) # '_unwrapped_field_data' is reset in `patch_descriptor_kvs` # pylint: disable=protected-access self.assertIsNot(original_unwrapped, descriptor._unwrapped_field_data) compute_inherited_metadata(descriptor) # Check the course module, since it has inheritance descriptor = descriptor.get_children()[0] self.course_descriptor_inheritance_check(descriptor, from_date_string, unicorn_color, url_name) def test_metadata_no_inheritance(self): """ Checks that default value of None (for due) does not get marked as inherited when a course is the root block. """ system = self.get_system() url_name = 'test1' start_xml = ''' <course org="{org}" course="{course}" url_name="{url_name}" unicorn="purple"> <chapter url="hi" url_name="ch" display_name="CH"> <html url_name="h" display_name="H">Two houses, ...</html> </chapter> </course>'''.format(org=ORG, course=COURSE, url_name=url_name) descriptor = system.process_xml(start_xml) compute_inherited_metadata(descriptor) self.course_descriptor_no_inheritance_check(descriptor) def test_library_metadata_no_inheritance(self): """ Checks that the default value of None (for due) does not get marked as inherited when a library is the root block. 
""" system = self.get_system() url_name = 'test1' start_xml = ''' <library org="TestOrg" library="TestLib" display_name="stuff"> <course org="{org}" course="{course}" url_name="{url_name}" unicorn="purple"> <chapter url="hi" url_name="ch" display_name="CH"> <html url_name="h" display_name="H">Two houses, ...</html> </chapter> </course> </library>'''.format(org=ORG, course=COURSE, url_name=url_name) descriptor = system.process_xml(start_xml) LibraryXMLModuleStore.patch_descriptor_kvs(descriptor) compute_inherited_metadata(descriptor) # Run the checks on the course node instead. descriptor = descriptor.get_children()[0] self.course_descriptor_no_inheritance_check(descriptor) def course_descriptor_no_inheritance_check(self, descriptor): """ Verifies that a default value of None (for due) does not get marked as inherited. """ self.assertEqual(descriptor.due, None) # Check that the child does not inherit a value for due child = descriptor.get_children()[0] self.assertEqual(child.due, None) # Check that the child hasn't started yet self.assertLessEqual( datetime.datetime.now(UTC()), child.start ) def override_metadata_check(self, descriptor, child, course_due, child_due): """ Verifies that due date can be overriden at child level. """ self.assertEqual(descriptor.due, ImportTestCase.date.from_json(course_due)) self.assertEqual(child.due, ImportTestCase.date.from_json(child_due)) # Test inherited metadata. Due does not appear here (because explicitly set on child). self.assertEqual( ImportTestCase.date.to_json(ImportTestCase.date.from_json(course_due)), child.xblock_kvs.inherited_settings['due'] ) def test_metadata_override_default(self): """ Checks that due date can be overriden at child level when a course is the root. 
""" system = self.get_system() course_due = 'March 20 17:00' child_due = 'April 10 00:00' url_name = 'test1' start_xml = ''' <course org="{org}" course="{course}" due="{due}" url_name="{url_name}" unicorn="purple"> <chapter url="hi" url_name="ch" display_name="CH"> <html url_name="h" display_name="H">Two houses, ...</html> </chapter> </course>'''.format(due=course_due, org=ORG, course=COURSE, url_name=url_name) descriptor = system.process_xml(start_xml) child = descriptor.get_children()[0] # pylint: disable=protected-access child._field_data.set(child, 'due', child_due) compute_inherited_metadata(descriptor) self.override_metadata_check(descriptor, child, course_due, child_due) def test_library_metadata_override_default(self): """ Checks that due date can be overriden at child level when a library is the root. """ system = self.get_system() course_due = 'March 20 17:00' child_due = 'April 10 00:00' url_name = 'test1' start_xml = ''' <library org="TestOrg" library="TestLib" display_name="stuff"> <course org="{org}" course="{course}" due="{due}" url_name="{url_name}" unicorn="purple"> <chapter url="hi" url_name="ch" display_name="CH"> <html url_name="h" display_name="H">Two houses, ...</html> </chapter> </course> </library>'''.format(due=course_due, org=ORG, course=COURSE, url_name=url_name) descriptor = system.process_xml(start_xml) LibraryXMLModuleStore.patch_descriptor_kvs(descriptor) # Chapter is two levels down here. child = descriptor.get_children()[0].get_children()[0] # pylint: disable=protected-access child._field_data.set(child, 'due', child_due) compute_inherited_metadata(descriptor) descriptor = descriptor.get_children()[0] self.override_metadata_check(descriptor, child, course_due, child_due) def test_is_pointer_tag(self): """ Check that is_pointer_tag works properly. 
""" yes = ["""<html url_name="blah"/>""", """<html url_name="blah"></html>""", """<html url_name="blah"> </html>""", """<problem url_name="blah"/>""", """<course org="HogwartsX" course="Mathemagics" url_name="3.14159"/>"""] no = ["""<html url_name="blah" also="this"/>""", """<html url_name="blah">some text</html>""", """<problem url_name="blah"><sub>tree</sub></problem>""", """<course org="HogwartsX" course="Mathemagics" url_name="3.14159"> <chapter>3</chapter> </course> """] for xml_str in yes: print "should be True for {0}".format(xml_str) self.assertTrue(is_pointer_tag(etree.fromstring(xml_str))) for xml_str in no: print "should be False for {0}".format(xml_str) self.assertFalse(is_pointer_tag(etree.fromstring(xml_str))) def test_metadata_inherit(self): """Make sure that metadata is inherited properly""" print "Starting import" course = self.get_course('toy') def check_for_key(key, node, value): "recursive check for presence of key" print "Checking {0}".format(node.location.to_deprecated_string()) self.assertEqual(getattr(node, key), value) for c in node.get_children(): check_for_key(key, c, value) check_for_key('graceperiod', course, course.graceperiod) def test_policy_loading(self): """Make sure that when two courses share content with the same org and course names, policy applies to the right one.""" toy = self.get_course('toy') two_toys = self.get_course('two_toys') self.assertEqual(toy.url_name, "2012_Fall") self.assertEqual(two_toys.url_name, "TT_2012_Fall") toy_ch = toy.get_children()[0] two_toys_ch = two_toys.get_children()[0] self.assertEqual(toy_ch.display_name, "Overview") self.assertEqual(two_toys_ch.display_name, "Two Toy Overview") # Also check that the grading policy loaded self.assertEqual(two_toys.grade_cutoffs['C'], 0.5999) # Also check that keys from policy are run through the # appropriate attribute maps -- 'graded' should be True, not 'true' self.assertEqual(toy.graded, True) def test_static_tabs_import(self): """Make sure that the static 
tabs are imported correctly""" modulestore = XMLModuleStore(DATA_DIR, source_dirs=['toy']) location_tab_syllabus = Location("edX", "toy", "2012_Fall", "static_tab", "syllabus", None) toy_tab_syllabus = modulestore.get_item(location_tab_syllabus) self.assertEqual(toy_tab_syllabus.display_name, 'Syllabus') self.assertEqual(toy_tab_syllabus.course_staff_only, False) location_tab_resources = Location("edX", "toy", "2012_Fall", "static_tab", "resources", None) toy_tab_resources = modulestore.get_item(location_tab_resources) self.assertEqual(toy_tab_resources.display_name, 'Resources') self.assertEqual(toy_tab_resources.course_staff_only, True) def test_definition_loading(self): """When two courses share the same org and course name and both have a module with the same url_name, the definitions shouldn't clash. TODO (vshnayder): once we have a CMS, this shouldn't happen--locations should uniquely name definitions. But in our imperfect XML world, it can (and likely will) happen.""" modulestore = XMLModuleStore(DATA_DIR, source_dirs=['toy', 'two_toys']) location = Location("edX", "toy", "2012_Fall", "video", "Welcome", None) toy_video = modulestore.get_item(location) location_two = Location("edX", "toy", "TT_2012_Fall", "video", "Welcome", None) two_toy_video = modulestore.get_item(location_two) self.assertEqual(toy_video.youtube_id_1_0, "p2Q6BrNhdh8") self.assertEqual(two_toy_video.youtube_id_1_0, "p2Q6BrNhdh9") def test_colon_in_url_name(self): """Ensure that colons in url_names convert to file paths properly""" print "Starting import" # Not using get_courses because we need the modulestore object too afterward modulestore = XMLModuleStore(DATA_DIR, source_dirs=['toy']) courses = modulestore.get_courses() self.assertEquals(len(courses), 1) course = courses[0] print "course errors:" for (msg, err) in modulestore.get_course_errors(course.id): print msg print err chapters = course.get_children() self.assertEquals(len(chapters), 5) ch2 = chapters[1] 
self.assertEquals(ch2.url_name, "secret:magic") print "Ch2 location: ", ch2.location also_ch2 = modulestore.get_item(ch2.location) self.assertEquals(ch2, also_ch2) print "making sure html loaded" loc = course.id.make_usage_key('html', 'secret:toylab') html = modulestore.get_item(loc) self.assertEquals(html.display_name, "Toy lab") def test_unicode(self): """Check that courses with unicode characters in filenames and in org/course/name import properly. Currently, this means: (a) Having files with unicode names does not prevent import; (b) if files are not loaded because of unicode filenames, there are appropriate exceptions/errors to that effect.""" print "Starting import" modulestore = XMLModuleStore(DATA_DIR, source_dirs=['test_unicode']) courses = modulestore.get_courses() self.assertEquals(len(courses), 1) course = courses[0] print "course errors:" # Expect to find an error/exception about characters in "®esources" expect = "InvalidKeyError" errors = [ (msg.encode("utf-8"), err.encode("utf-8")) for msg, err in modulestore.get_course_errors(course.id) ] self.assertTrue(any( expect in msg or expect in err for msg, err in errors )) chapters = course.get_children() self.assertEqual(len(chapters), 4) def test_url_name_mangling(self): """ Make sure that url_names are only mangled once. 
""" modulestore = XMLModuleStore(DATA_DIR, source_dirs=['toy']) toy_id = SlashSeparatedCourseKey('edX', 'toy', '2012_Fall') course = modulestore.get_course(toy_id) chapters = course.get_children() ch1 = chapters[0] sections = ch1.get_children() self.assertEqual(len(sections), 4) for i in (2, 3): video = sections[i] # Name should be 'video_{hash}' print "video {0} url_name: {1}".format(i, video.url_name) self.assertEqual(len(video.url_name), len('video_') + 12) def test_poll_and_conditional_import(self): modulestore = XMLModuleStore(DATA_DIR, source_dirs=['conditional_and_poll']) course = modulestore.get_courses()[0] chapters = course.get_children() ch1 = chapters[0] sections = ch1.get_children() self.assertEqual(len(sections), 1) conditional_location = course.id.make_usage_key('conditional', 'condone') module = modulestore.get_item(conditional_location) self.assertEqual(len(module.children), 1) poll_location = course.id.make_usage_key('poll_question', 'first_poll') module = modulestore.get_item(poll_location) self.assertEqual(len(module.get_children()), 0) self.assertEqual(module.voted, False) self.assertEqual(module.poll_answer, '') self.assertEqual(module.poll_answers, {}) self.assertEqual( module.answers, [ {'text': u'Yes', 'id': 'Yes'}, {'text': u'No', 'id': 'No'}, {'text': u"Don't know", 'id': 'Dont_know'} ] ) def test_error_on_import(self): '''Check that when load_error_module is false, an exception is raised, rather than returning an ErrorModule''' bad_xml = '''<sequential display_name="oops"><video url="hi"></sequential>''' system = self.get_system(False) self.assertRaises(etree.XMLSyntaxError, system.process_xml, bad_xml) def test_word_cloud_import(self): modulestore = XMLModuleStore(DATA_DIR, source_dirs=['word_cloud']) course = modulestore.get_courses()[0] chapters = course.get_children() ch1 = chapters[0] sections = ch1.get_children() self.assertEqual(len(sections), 1) location = course.id.make_usage_key('word_cloud', 'cloud1') module = 
modulestore.get_item(location) self.assertEqual(len(module.get_children()), 0) self.assertEqual(module.num_inputs, 5) self.assertEqual(module.num_top_words, 250) def test_cohort_config(self): """ Check that cohort config parsing works right. Note: The cohort config on the CourseModule is no longer used. See openedx.core.djangoapps.course_groups.models.CourseCohortSettings. """ modulestore = XMLModuleStore(DATA_DIR, source_dirs=['toy']) toy_id = SlashSeparatedCourseKey('edX', 'toy', '2012_Fall') course = modulestore.get_course(toy_id) # No config -> False self.assertFalse(course.is_cohorted) # empty config -> False course.cohort_config = {} self.assertFalse(course.is_cohorted) # false config -> False course.cohort_config = {'cohorted': False} self.assertFalse(course.is_cohorted) # and finally... course.cohort_config = {'cohorted': True} self.assertTrue(course.is_cohorted)
agpl-3.0
aattaran/Machine-Learning-with-Python
naive_bayes/GaussianNB Deployment on Terrain Data/main.py
2
1533
#!/usr/bin/python

"""Classify the terrain data with the sklearn Naive Bayes classifier
(implemented in ClassifyNB.py) and plot the resulting decision
boundary, recreating the one shown in the lesson video.
"""

from prep_terrain_data import makeTerrainData
from class_vis import prettyPicture, output_image
from ClassifyNB import classify

import numpy as np
import pylab as pl


features_train, labels_train, features_test, labels_test = makeTerrainData()

# The training points mix "fast" (label 0) and "slow" (label 1) examples;
# split them by label so each class can be drawn in its own color on the
# scatterplot and identified visually.
grade_fast = [point[0] for point, label in zip(features_train, labels_train) if label == 0]
bumpy_fast = [point[1] for point, label in zip(features_train, labels_train) if label == 0]
grade_slow = [point[0] for point, label in zip(features_train, labels_train) if label == 1]
bumpy_slow = [point[1] for point, label in zip(features_train, labels_train) if label == 1]

# Fit the classifier; the implementation lives in the ClassifyNB script,
# which must be completed for this quiz.
clf = classify(features_train, labels_train)

# Draw the decision boundary with the test points overlaid, then save it.
prettyPicture(clf, features_test, labels_test)
output_image("test.png", "png", open("test.png", "rb").read())
bsd-3-clause
TeamTwisted/leanKernel-shamu
arch/ia64/scripts/unwcheck.py
13143
1714
#!/usr/bin/python # # Usage: unwcheck.py FILE # # This script checks the unwind info of each function in file FILE # and verifies that the sum of the region-lengths matches the total # length of the function. # # Based on a shell/awk script originally written by Harish Patil, # which was converted to Perl by Matthew Chapman, which was converted # to Python by David Mosberger. # import os import re import sys if len(sys.argv) != 2: print "Usage: %s FILE" % sys.argv[0] sys.exit(2) readelf = os.getenv("READELF", "readelf") start_pattern = re.compile("<([^>]*)>: \[0x([0-9a-f]+)-0x([0-9a-f]+)\]") rlen_pattern = re.compile(".*rlen=([0-9]+)") def check_func (func, slots, rlen_sum): if slots != rlen_sum: global num_errors num_errors += 1 if not func: func = "[%#x-%#x]" % (start, end) print "ERROR: %s: %lu slots, total region length = %lu" % (func, slots, rlen_sum) return num_funcs = 0 num_errors = 0 func = False slots = 0 rlen_sum = 0 for line in os.popen("%s -u %s" % (readelf, sys.argv[1])): m = start_pattern.match(line) if m: check_func(func, slots, rlen_sum) func = m.group(1) start = long(m.group(2), 16) end = long(m.group(3), 16) slots = 3 * (end - start) / 16 rlen_sum = 0L num_funcs += 1 else: m = rlen_pattern.match(line) if m: rlen_sum += long(m.group(1)) check_func(func, slots, rlen_sum) if num_errors == 0: print "No errors detected in %u functions." % num_funcs else: if num_errors > 1: err="errors" else: err="error" print "%u %s detected in %u functions." % (num_errors, err, num_funcs) sys.exit(1)
gpl-2.0
AlexSnet/FabricDeployer
fabfile.py
17
1182
from fabric.api import *


def git_pull():
    """Stash any local changes on the remote, then pull the latest code."""
    run('cd %s; git stash; git pull' % env.code_root)


def restart_web_server():
    """Bounce the Apache instance serving the project."""
    run('%s/apache2/bin/restart' % env.code_root_parent)


def update():
    """Requires the code_root env variable.

    Pulls the latest code and restarts the web server.
    """
    require('code_root')
    git_pull()
    restart_web_server()


def migrate():
    """Apply outstanding database migrations via manage.py."""
    run('cd %s; python manage.py migrate --settings=%s' % (env.code_root, env.settings_file))


def collect_static():
    """Gather static assets with manage.py collectstatic (no prompt)."""
    run('cd %s; python manage.py collectstatic --settings=%s --noinput' % (env.code_root, env.settings_file))


def pip_install():
    """Install the pinned requirements (requirements/frozen.txt)."""
    run('cd %s; pip install -r requirements/frozen.txt' % env.code_root)


def publish_changes():
    """Full deploy: pull, install deps, migrate, collect static, restart.

    Runs git_pull, pip_install, migrate, collect_static and
    restart_web_server, in that order.
    """
    git_pull()
    pip_install()
    migrate()
    collect_static()
    restart_web_server()
mit
annarev/tensorflow
tensorflow/python/ops/ragged/ragged_util_test.py
14
8266
# Copyright 2018 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for ragged_util.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from absl.testing import parameterized import numpy as np from tensorflow.python.framework import constant_op from tensorflow.python.framework import test_util from tensorflow.python.ops import array_ops from tensorflow.python.ops.ragged import ragged_util from tensorflow.python.platform import googletest # Example 3d tensor for test cases. Has shape [4, 2, 3]. TENSOR_3D = [[[('%d%d%d' % (i, j, k)).encode('utf-8') for k in range(3)] for j in range(2)] for i in range(4)] # Example 4d tensor for test cases. Has shape [4, 2, 3, 5]. 
TENSOR_4D = [[[[('%d%d%d%d' % (i, j, k, l)).encode('utf-8') for l in range(5)] for k in range(3)] for j in range(2)] for i in range(4)] @test_util.run_all_in_graph_and_eager_modes class RaggedUtilTest(test_util.TensorFlowTestCase, parameterized.TestCase): @parameterized.parameters([ # Docstring examples dict( data=['a', 'b', 'c'], repeats=[3, 0, 2], axis=0, expected=[b'a', b'a', b'a', b'c', b'c']), dict( data=[[1, 2], [3, 4]], repeats=[2, 3], axis=0, expected=[[1, 2], [1, 2], [3, 4], [3, 4], [3, 4]]), dict( data=[[1, 2], [3, 4]], repeats=[2, 3], axis=1, expected=[[1, 1, 2, 2, 2], [3, 3, 4, 4, 4]]), # Scalar repeats value dict( data=['a', 'b', 'c'], repeats=2, axis=0, expected=[b'a', b'a', b'b', b'b', b'c', b'c']), dict( data=[[1, 2], [3, 4]], repeats=2, axis=0, expected=[[1, 2], [1, 2], [3, 4], [3, 4]]), dict( data=[[1, 2], [3, 4]], repeats=2, axis=1, expected=[[1, 1, 2, 2], [3, 3, 4, 4]]), # data & repeats are broadcast to have at least one dimension, # so these are all equivalent: dict(data=3, repeats=4, axis=0, expected=[3, 3, 3, 3]), dict(data=[3], repeats=4, axis=0, expected=[3, 3, 3, 3]), dict(data=3, repeats=[4], axis=0, expected=[3, 3, 3, 3]), dict(data=[3], repeats=[4], axis=0, expected=[3, 3, 3, 3]), # Empty tensor dict(data=[], repeats=[], axis=0, expected=[]), ]) def testRepeat(self, data, repeats, expected, axis=None): result = ragged_util.repeat(data, repeats, axis) self.assertAllEqual(result, expected) @parameterized.parameters([ dict(mode=mode, **args) for mode in ['constant', 'dynamic', 'unknown_shape'] for args in [ # data & repeats are broadcast to have at least one dimension, # so these are all equivalent: dict(data=3, repeats=4, axis=0), dict(data=[3], repeats=4, axis=0), dict(data=3, repeats=[4], axis=0), dict(data=[3], repeats=[4], axis=0), # 1-dimensional data tensor. 
dict(data=[], repeats=5, axis=0), dict(data=[1, 2, 3], repeats=5, axis=0), dict(data=[1, 2, 3], repeats=[3, 0, 2], axis=0), dict(data=[1, 2, 3], repeats=[3, 0, 2], axis=-1), dict(data=[b'a', b'b', b'c'], repeats=[3, 0, 2], axis=0), # 2-dimensional data tensor. dict(data=[[1, 2, 3], [4, 5, 6]], repeats=3, axis=0), dict(data=[[1, 2, 3], [4, 5, 6]], repeats=3, axis=1), dict(data=[[1, 2, 3], [4, 5, 6]], repeats=[3, 5], axis=0), dict(data=[[1, 2, 3], [4, 5, 6]], repeats=[3, 5, 7], axis=1), # 3-dimensional data tensor: shape=[4, 2, 3]. dict(data=TENSOR_3D, repeats=2, axis=0), dict(data=TENSOR_3D, repeats=2, axis=1), dict(data=TENSOR_3D, repeats=2, axis=2), dict(data=TENSOR_3D, repeats=[2, 0, 4, 1], axis=0), dict(data=TENSOR_3D, repeats=[3, 2], axis=1), dict(data=TENSOR_3D, repeats=[1, 3, 1], axis=2), # 4-dimensional data tensor: shape=[4, 2, 3, 5]. dict(data=TENSOR_4D, repeats=2, axis=0), dict(data=TENSOR_4D, repeats=2, axis=1), dict(data=TENSOR_4D, repeats=2, axis=2), dict(data=TENSOR_4D, repeats=2, axis=3), dict(data=TENSOR_4D, repeats=[2, 0, 4, 1], axis=0), dict(data=TENSOR_4D, repeats=[3, 2], axis=1), dict(data=TENSOR_4D, repeats=[1, 3, 1], axis=2), dict(data=TENSOR_4D, repeats=[1, 3, 0, 0, 2], axis=3), ] ]) def testValuesMatchesNumpy(self, mode, data, repeats, axis): # Exception: we can't handle negative axis if data.ndims is unknown. 
if axis < 0 and mode == 'unknown_shape': return expected = np.repeat(data, repeats, axis) if mode == 'constant': data = constant_op.constant(data) repeats = constant_op.constant(repeats) elif mode == 'dynamic': data = constant_op.constant(data) repeats = constant_op.constant(repeats) data = array_ops.placeholder_with_default(data, data.shape) repeats = array_ops.placeholder_with_default(repeats, repeats.shape) elif mode == 'unknown_shape': data = array_ops.placeholder_with_default(data, None) repeats = array_ops.placeholder_with_default(repeats, None) result = ragged_util.repeat(data, repeats, axis) self.assertAllEqual(result, expected) @parameterized.parameters([ dict( descr='axis >= rank(data)', mode='dynamic', data=[1, 2, 3], repeats=[3, 0, 2], axis=1, error='axis=1 out of bounds: expected -1<=axis<1'), dict( descr='axis < -rank(data)', mode='dynamic', data=[1, 2, 3], repeats=[3, 0, 2], axis=-2, error='axis=-2 out of bounds: expected -1<=axis<1'), dict( descr='len(repeats) != data.shape[axis]', mode='dynamic', data=[[1, 2, 3], [4, 5, 6]], repeats=[2, 3], axis=1, error='Dimensions 3 and 2 are not compatible'), dict( descr='rank(repeats) > 1', mode='dynamic', data=[[1, 2, 3], [4, 5, 6]], repeats=[[3], [5]], axis=1, error=r'Shape \(2, 1\) must have rank at most 1'), dict( descr='non-integer axis', mode='constant', data=[1, 2, 3], repeats=2, axis='foo', exception=TypeError, error='axis must be an int'), ]) def testError(self, descr, mode, data, repeats, axis, exception=ValueError, error=None): # Make sure that this is also an error case for numpy. 
with self.assertRaises(exception): np.repeat(data, repeats, axis) if mode == 'constant': data = constant_op.constant(data) repeats = constant_op.constant(repeats) elif mode == 'dynamic': data = constant_op.constant(data) repeats = constant_op.constant(repeats) data = array_ops.placeholder_with_default(data, data.shape) repeats = array_ops.placeholder_with_default(repeats, repeats.shape) elif mode == 'unknown_shape': data = array_ops.placeholder_with_default(data, None) repeats = array_ops.placeholder_with_default(repeats, None) with self.assertRaisesRegex(exception, error): ragged_util.repeat(data, repeats, axis) if __name__ == '__main__': googletest.main()
apache-2.0
citrix-openstack-build/nova
nova/tests/virt/test_driver.py
9
1889
# vim: tabstop=4 shiftwidth=4 softtabstop=4

#    Copyright (c) 2013 Citrix Systems, Inc.
#    Copyright 2013 OpenStack Foundation
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

from nova import test
from nova.virt import driver


class FakeDriver(object):
    """Dummy driver that records the constructor arguments it was given."""

    def __init__(self, *args, **kwargs):
        self.args = args
        self.kwargs = kwargs


class FakeDriver2(FakeDriver):
    """Second dummy driver class, to verify per-key class selection."""
    pass


class ToDriverRegistryTestCase(test.NoDBTestCase):
    """Tests for driver.driver_dict_from_config()."""

    def assertDriverInstance(self, inst, class_, *args, **kwargs):
        """Assert that `inst` is a `class_` constructed with these args."""
        # assertEqual instead of the deprecated assertEquals alias.
        self.assertEqual(class_, inst.__class__)
        self.assertEqual(args, inst.args)
        self.assertEqual(kwargs, inst.kwargs)

    def test_driver_dict_from_config(self):
        # Each "key=import.path" entry should map to an instance of the
        # named class, built with the shared positional/keyword arguments.
        drvs = driver.driver_dict_from_config(
            [
                'key1=nova.tests.virt.test_driver.FakeDriver',
                'key2=nova.tests.virt.test_driver.FakeDriver2',
            ], 'arg1', 'arg2', param1='value1', param2='value2'
        )

        self.assertEqual(
            sorted(['key1', 'key2']),
            sorted(drvs.keys())
        )

        self.assertDriverInstance(
            drvs['key1'],
            FakeDriver,
            'arg1',
            'arg2',
            param1='value1',
            param2='value2')

        self.assertDriverInstance(
            drvs['key2'],
            FakeDriver2,
            'arg1',
            'arg2',
            param1='value1',
            param2='value2')
apache-2.0
olituks/sentinella
frontend/library/web2py/scripts/sessions2trash.py
2
7888
#!/usr/bin/env python # -*- coding: utf-8 -*- """ sessions2trash.py Run this script in a web2py environment shell e.g. python web2py.py -S app If models are loaded (-M option) auth.settings.expiration is assumed for sessions without an expiration. If models are not loaded, sessions older than 60 minutes are removed. Use the --expiration option to override these values. Typical usage: # Delete expired sessions every 5 minutes nohup python web2py.py -S app -M -R scripts/sessions2trash.py & # Delete sessions older than 60 minutes regardless of expiration, # with verbose output, then exit. python web2py.py -S app -M -R scripts/sessions2trash.py -A -o -x 3600 -f -v # Delete all sessions regardless of expiry and exit. python web2py.py -S app -M -R scripts/sessions2trash.py -A -o -x 0 # Delete session in a module (move to the modules folder) from sessions2trash import single_loop def delete_sessions(): single_loop() """ from __future__ import with_statement import sys import os print os.path.join(*__file__.split(os.sep)[:-2] or ['.']) sys.path.append(os.path.join(*__file__.split(os.sep)[:-2] or ['.'])) from gluon import current from gluon.storage import Storage from optparse import OptionParser import cPickle import datetime import os import stat import time import glob EXPIRATION_MINUTES = 60 SLEEP_MINUTES = 5 VERSION = 0.3 class SessionSet(object): """Class representing a set of sessions""" def __init__(self, expiration, force, verbose): self.expiration = expiration self.force = force self.verbose = verbose def get(self): """Get session files/records.""" raise NotImplementedError def trash(self): """Trash expired sessions.""" now = datetime.datetime.now() for item in self.get(): status = 'OK' last_visit = item.last_visit_default() try: session = item.get() if session.auth: if session.auth.expiration and not self.force: self.expiration = session.auth.expiration if session.auth.last_visit: last_visit = session.auth.last_visit except: pass age = 0 if last_visit: age = 
total_seconds(now - last_visit) if age > self.expiration or not self.expiration: item.delete() status = 'trashed' if self.verbose > 1: print 'key: %s' % str(item) print 'expiration: %s seconds' % self.expiration print 'last visit: %s' % str(last_visit) print 'age: %s seconds' % age print 'status: %s' % status print '' elif self.verbose > 0: print('%s %s' % (str(item), status)) class SessionSetDb(SessionSet): """Class representing a set of sessions stored in database""" def __init__(self, expiration, force, verbose): SessionSet.__init__(self, expiration, force, verbose) def get(self): """Return list of SessionDb instances for existing sessions.""" sessions = [] table = current.response.session_db_table if table: for row in table._db(table.id > 0).select(): sessions.append(SessionDb(row)) return sessions class SessionSetFiles(SessionSet): """Class representing a set of sessions stored in flat files""" def __init__(self, expiration, force, verbose): SessionSet.__init__(self, expiration, force, verbose) def get(self): """Return list of SessionFile instances for existing sessions.""" root_path = os.path.join(current.request.folder, 'sessions') return [SessionFile(os.path.join(path, x)) for path,dirs,files in os.walk(root_path) for x in files] class SessionDb(object): """Class representing a single session stored in database""" def __init__(self, row): self.row = row def delete(self): table = current.response.session_db_table self.row.delete_record() table._db.commit() def get(self): session = Storage() session.update(cPickle.loads(self.row.session_data)) return session def last_visit_default(self): if isinstance(self.row.modified_datetime, datetime.datetime): return self.row.modified_datetime else: try: return datetime.datetime.strptime(self.row.modified_datetime, '%Y-%m-%d %H:%M:%S.%f') except: print 'failed to retrieve last modified time (value: %s)' % self.row.modified_datetime def __str__(self): return self.row.unique_key class SessionFile(object): """Class 
representing a single session stored as a flat file""" def __init__(self, filename): self.filename = filename def delete(self): try: os.unlink(self.filename) path = os.path.dirname(filename) if not path.endswith('sessions') and len(os.listdir(path))==0: os.rmdir(path) except: pass def get(self): session = Storage() with open(self.filename, 'rb+') as f: session.update(cPickle.load(f)) return session def last_visit_default(self): return datetime.datetime.fromtimestamp( os.stat(self.filename)[stat.ST_MTIME]) def __str__(self): return self.filename def total_seconds(delta): """ Adapted from Python 2.7's timedelta.total_seconds() method. Args: delta: datetime.timedelta instance. """ return (delta.microseconds + (delta.seconds + (delta.days * 24 * 3600)) * 10 ** 6) / 10 ** 6 def single_loop(expiration=None, force=False, verbose=False): if expiration is None: try: expiration = auth.settings.expiration except: expiration = EXPIRATION_MINUTES * 60 set_files = SessionSetFiles(expiration, force, verbose) set_files.trash() set_db = SessionSetDb(expiration, force, verbose) set_db.trash() def main(): """Main processing.""" usage = '%prog [options]' + '\nVersion: %s' % VERSION parser = OptionParser(usage=usage) parser.add_option('-f', '--force', action='store_true', dest='force', default=False, help=('Ignore session expiration. ' 'Force expiry based on -x option or auth.settings.expiration.') ) parser.add_option('-o', '--once', action='store_true', dest='once', default=False, help='Delete sessions, then exit.', ) parser.add_option('-s', '--sleep', dest='sleep', default=SLEEP_MINUTES * 60, type="int", help='Number of seconds to sleep between executions. 
Default 300.', ) parser.add_option('-v', '--verbose', default=0, action='count', help="print verbose output, a second -v increases verbosity") parser.add_option('-x', '--expiration', dest='expiration', default=None, type="int", help='Expiration value for sessions without expiration (in seconds)', ) (options, unused_args) = parser.parse_args() expiration = options.expiration while True: single_loop(expiration, options.force, options.verbose) if options.once: break else: if options.verbose: print 'Sleeping %s seconds' % (options.sleep) time.sleep(options.sleep) if __name__ == '__main__': main()
lgpl-2.1
RedhawkSDR/integration-gnuhawk
components/vector_source_s/tests/test_vector_source_s.py
1
4537
#!/usr/bin/env python # # This file is protected by Copyright. Please refer to the COPYRIGHT file # distributed with this source distribution. # # This file is part of GNUHAWK. # # GNUHAWK is free software: you can redistribute it and/or modify is under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # GNUHAWK is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # You should have received a copy of the GNU General Public License along with # this program. If not, see http://www.gnu.org/licenses/. # import unittest import ossie.utils.testing import os from omniORB import any class ComponentTests(ossie.utils.testing.ScaComponentTestCase): """Test for all component implementations in vector_source_s""" def testScaBasicBehavior(self): ####################################################################### # Launch the component with the default execparams execparams = self.getPropertySet(kinds=("execparam",), modes=("readwrite", "writeonly"), includeNil=False) execparams = dict([(x.id, any.from_any(x.value)) for x in execparams]) self.launch(execparams) ####################################################################### # Verify the basic state of the component self.assertNotEqual(self.comp, None) self.assertEqual(self.comp.ref._non_existent(), False) self.assertEqual(self.comp.ref._is_a("IDL:CF/Resource:1.0"), True) self.assertEqual(self.spd.get_id(), self.comp.ref._get_identifier()) ####################################################################### # Simulate regular component startup # Verify that initialize nor configure throw errors self.comp.initialize() configureProps = self.getPropertySet(kinds=("configure",), modes=("readwrite", "writeonly"), includeNil=False) 
self.comp.configure(configureProps) ####################################################################### # Validate that query returns all expected parameters # Query of '[]' should return the following set of properties expectedProps = [] expectedProps.extend(self.getPropertySet(kinds=("configure", "execparam"), modes=("readwrite", "readonly"), includeNil=True)) expectedProps.extend(self.getPropertySet(kinds=("allocate",), action="external", includeNil=True)) props = self.comp.query([]) props = dict((x.id, any.from_any(x.value)) for x in props) # Query may return more than expected, but not less for expectedProp in expectedProps: self.assertEquals(props.has_key(expectedProp.id), True) ####################################################################### # Verify that all expected ports are available for port in self.scd.get_componentfeatures().get_ports().get_uses(): port_obj = self.comp.getPort(str(port.get_usesname())) self.assertNotEqual(port_obj, None) self.assertEqual(port_obj._non_existent(), False) self.assertEqual(port_obj._is_a("IDL:CF/Port:1.0"), True) for port in self.scd.get_componentfeatures().get_ports().get_provides(): port_obj = self.comp.getPort(str(port.get_providesname())) self.assertNotEqual(port_obj, None) self.assertEqual(port_obj._non_existent(), False) self.assertEqual(port_obj._is_a(port.get_repid()), True) ####################################################################### # Make sure start and stop can be called without throwing exceptions self.comp.start() self.comp.stop() ####################################################################### # Simulate regular component shutdown self.comp.releaseObject() # TODO Add additional tests here # # See: # ossie.utils.bulkio.bulkio_helpers, # ossie.utils.bluefile.bluefile_helpers # for modules that will assist with testing components with BULKIO ports if __name__ == "__main__": ossie.utils.testing.main("../vector_source_s.spd.xml") # By default tests all implementations
gpl-3.0
UnrememberMe/pants
src/python/pants/bin/engine_initializer.py
1
8479
# coding=utf-8 # Copyright 2016 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). from __future__ import (absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement) import logging from collections import namedtuple from pants.base.build_environment import get_buildroot, get_scm from pants.base.file_system_project_tree import FileSystemProjectTree from pants.base.specs import Specs from pants.engine.build_files import create_graph_rules from pants.engine.fs import create_fs_rules from pants.engine.isolated_process import create_process_rules from pants.engine.legacy.address_mapper import LegacyAddressMapper from pants.engine.legacy.graph import (LegacyBuildGraph, TransitiveHydratedTargets, create_legacy_graph_tasks) from pants.engine.legacy.parser import LegacyPythonCallbacksParser from pants.engine.legacy.structs import (GoTargetAdaptor, JavaLibraryAdaptor, JunitTestsAdaptor, JvmAppAdaptor, PythonLibraryAdaptor, PythonTargetAdaptor, PythonTestsAdaptor, RemoteSourcesAdaptor, ScalaLibraryAdaptor, TargetAdaptor) from pants.engine.mapper import AddressMapper from pants.engine.native import Native from pants.engine.parser import SymbolTable from pants.engine.scheduler import LocalScheduler from pants.init.options_initializer import OptionsInitializer from pants.option.options_bootstrapper import OptionsBootstrapper from pants.scm.change_calculator import EngineChangeCalculator logger = logging.getLogger(__name__) class LegacySymbolTable(SymbolTable): """A v1 SymbolTable facade for use with the v2 engine.""" def __init__(self, build_file_aliases): """ :param build_file_aliases: BuildFileAliases to register. 
:type build_file_aliases: :class:`pants.build_graph.build_file_aliases.BuildFileAliases` """ self._build_file_aliases = build_file_aliases self._table = {alias: TargetAdaptor for alias in build_file_aliases.target_types} # TODO: The alias replacement here is to avoid elevating "TargetAdaptors" into the public # API until after https://github.com/pantsbuild/pants/issues/3560 has been completed. # These should likely move onto Target subclasses as the engine gets deeper into beta # territory. for alias in ['java_library', 'java_agent', 'javac_plugin']: self._table[alias] = JavaLibraryAdaptor for alias in ['scala_library', 'scalac_plugin']: self._table[alias] = ScalaLibraryAdaptor for alias in ['python_library', 'pants_plugin']: self._table[alias] = PythonLibraryAdaptor for alias in ['go_library', 'go_binary']: self._table[alias] = GoTargetAdaptor self._table['junit_tests'] = JunitTestsAdaptor self._table['jvm_app'] = JvmAppAdaptor self._table['python_tests'] = PythonTestsAdaptor self._table['python_binary'] = PythonTargetAdaptor self._table['remote_sources'] = RemoteSourcesAdaptor def aliases(self): return self._build_file_aliases def table(self): return self._table class LegacyGraphHelper(namedtuple('LegacyGraphHelper', ['scheduler', 'symbol_table', 'change_calculator'])): """A container for the components necessary to construct a legacy BuildGraph facade.""" def warm_product_graph(self, target_roots): """Warm the scheduler's `ProductGraph` with `TransitiveHydratedTargets` products. :param TargetRoots target_roots: The targets root of the request. """ logger.debug('warming target_roots for: %r', target_roots) subjects = [Specs(tuple(target_roots.specs))] request = self.scheduler.execution_request([TransitiveHydratedTargets], subjects) result = self.scheduler.execute(request) if result.error: raise result.error def create_build_graph(self, target_roots, build_root=None): """Construct and return a `BuildGraph` given a set of input specs. 
:param TargetRoots target_roots: The targets root of the request. :param string build_root: The build root. :returns: A tuple of (BuildGraph, AddressMapper). """ logger.debug('target_roots are: %r', target_roots) graph = LegacyBuildGraph.create(self.scheduler, self.symbol_table) logger.debug('build_graph is: %s', graph) # Ensure the entire generator is unrolled. for _ in graph.inject_roots_closure(target_roots): pass address_mapper = LegacyAddressMapper(self.scheduler, build_root or get_buildroot()) logger.debug('address_mapper is: %s', address_mapper) return graph, address_mapper class EngineInitializer(object): """Constructs the components necessary to run the v2 engine with v1 BuildGraph compatibility.""" @staticmethod def setup_legacy_graph(pants_ignore_patterns, workdir, build_file_imports_behavior, build_root=None, native=None, build_file_aliases=None, build_ignore_patterns=None, exclude_target_regexps=None, subproject_roots=None, include_trace_on_error=True): """Construct and return the components necessary for LegacyBuildGraph construction. :param list pants_ignore_patterns: A list of path ignore patterns for FileSystemProjectTree, usually taken from the '--pants-ignore' global option. :param str workdir: The pants workdir. :param build_file_imports_behavior: How to behave if a BUILD file being parsed tries to use import statements. Valid values: "allow", "warn", "error". :type build_file_imports_behavior: string :param str build_root: A path to be used as the build root. If None, then default is used. :param Native native: An instance of the native-engine subsystem. :param build_file_aliases: BuildFileAliases to register. :type build_file_aliases: :class:`pants.build_graph.build_file_aliases.BuildFileAliases` :param list build_ignore_patterns: A list of paths ignore patterns used when searching for BUILD files, usually taken from the '--build-ignore' global option. :param list exclude_target_regexps: A list of regular expressions for excluding targets. 
:param list subproject_roots: Paths that correspond with embedded build roots under the current build root. :param bool include_trace_on_error: If True, when an error occurs, the error message will include the graph trace. :returns: A tuple of (scheduler, engine, symbol_table, build_graph_cls). """ build_root = build_root or get_buildroot() scm = get_scm() if not build_file_aliases: _, build_config = OptionsInitializer(OptionsBootstrapper()).setup(init_logging=False) build_file_aliases = build_config.registered_aliases() symbol_table = LegacySymbolTable(build_file_aliases) project_tree = FileSystemProjectTree(build_root, pants_ignore_patterns) # Register "literal" subjects required for these tasks. parser = LegacyPythonCallbacksParser( symbol_table, build_file_aliases, build_file_imports_behavior ) address_mapper = AddressMapper(parser=parser, build_ignore_patterns=build_ignore_patterns, exclude_target_regexps=exclude_target_regexps, subproject_roots=subproject_roots) # Load the native backend. native = native or Native.create() # Create a Scheduler containing graph and filesystem tasks, with no installed goals. The # LegacyBuildGraph will explicitly request the products it needs. tasks = ( create_legacy_graph_tasks(symbol_table) + create_fs_rules() + create_graph_rules(address_mapper, symbol_table) + create_process_rules() ) scheduler = LocalScheduler(workdir, dict(), tasks, project_tree, native, include_trace_on_error=include_trace_on_error) change_calculator = EngineChangeCalculator(scheduler, symbol_table, scm) if scm else None return LegacyGraphHelper(scheduler, symbol_table, change_calculator)
apache-2.0
3s3s/SmailCoin
qa/pull-tester/rpc-tests.py
5
7957
#!/usr/bin/env python2 # Copyright (c) 2014-2015 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """ Run Regression Test Suite This module calls down into individual test cases via subprocess. It will forward all unrecognized arguments onto the individual test scripts, other than: - `-extended`: run the "extended" test suite in addition to the basic one. - `-win`: signal that this is running in a Windows environment, and we should run the tests. - `--coverage`: this generates a basic coverage report for the RPC interface. For a description of arguments recognized by test scripts, see `qa/pull-tester/test_framework/test_framework.py:BitcoinTestFramework.main`. """ import os import time import shutil import sys import subprocess import tempfile import re from tests_config import * #If imported values are not defined then set to zero (or disabled) if not vars().has_key('ENABLE_WALLET'): ENABLE_WALLET=0 if not vars().has_key('ENABLE_BITCOIND'): ENABLE_BITCOIND=0 if not vars().has_key('ENABLE_UTILS'): ENABLE_UTILS=0 if not vars().has_key('ENABLE_ZMQ'): ENABLE_ZMQ=0 ENABLE_COVERAGE=0 #Create a set to store arguments and create the passOn string opts = set() passOn = "" p = re.compile("^--") bold = ("","") if (os.name == 'posix'): bold = ('\033[0m', '\033[1m') for arg in sys.argv[1:]: if arg == '--coverage': ENABLE_COVERAGE = 1 elif (p.match(arg) or arg == "-h"): passOn += " " + arg else: opts.add(arg) #Set env vars buildDir = BUILDDIR if "BITCOIND" not in os.environ: os.environ["BITCOIND"] = buildDir + '/src/bitcoind' + EXEEXT if "BITCOINCLI" not in os.environ: os.environ["BITCOINCLI"] = buildDir + '/src/bitcoin-cli' + EXEEXT #Disable Windows tests by default if EXEEXT == ".exe" and "-win" not in opts: print "Win tests currently disabled. 
Use -win option to enable" sys.exit(0) #Tests testScripts = [ 'bip68-112-113-p2p.py', 'wallet.py', 'listtransactions.py', 'receivedby.py', 'mempool_resurrect_test.py', 'txn_doublespend.py --mineblock', 'txn_clone.py', 'getchaintips.py', 'rawtransactions.py', 'rest.py', 'mempool_spendcoinbase.py', 'mempool_reorg.py', 'mempool_limit.py', 'httpbasics.py', 'multi_rpc.py', 'zapwallettxes.py', 'proxy_test.py', 'merkle_blocks.py', 'fundrawtransaction.py', 'signrawtransactions.py', 'walletbackup.py', 'nodehandling.py', 'reindex.py', 'decodescript.py', 'p2p-fullblocktest.py', 'blockchain.py', 'disablewallet.py', 'sendheaders.py', 'keypool.py', 'prioritise_transaction.py', 'invalidblockrequest.py', 'invalidtxrequest.py', 'abandonconflict.py', 'p2p-versionbits-warning.py', ] testScriptsExt = [ 'bip9-softforks.py', 'bip65-cltv.py', 'bip65-cltv-p2p.py', 'bip68-sequence.py', 'bipdersig-p2p.py', 'bipdersig.py', 'getblocktemplate_longpoll.py', 'getblocktemplate_proposals.py', 'txn_doublespend.py', 'txn_clone.py --mineblock', 'pruning.py', 'forknotify.py', 'invalidateblock.py', # 'rpcbind_test.py', #temporary, bug in libevent, see #6655 'smartfees.py', 'maxblocksinflight.py', 'p2p-acceptblock.py', 'mempool_packages.py', 'maxuploadtarget.py', 'replace-by-fee.py', ] #Enable ZMQ tests if ENABLE_ZMQ == 1: testScripts.append('zmq_test.py') def runtests(): coverage = None if ENABLE_COVERAGE: coverage = RPCCoverage() print("Initializing coverage directory at %s\n" % coverage.dir) if(ENABLE_WALLET == 1 and ENABLE_UTILS == 1 and ENABLE_BITCOIND == 1): rpcTestDir = buildDir + '/qa/rpc-tests/' run_extended = '-extended' in opts cov_flag = coverage.flag if coverage else '' flags = " --srcdir %s/src %s %s" % (buildDir, cov_flag, passOn) #Run Tests for i in range(len(testScripts)): if (len(opts) == 0 or (len(opts) == 1 and "-win" in opts ) or run_extended or testScripts[i] in opts or re.sub(".py$", "", testScripts[i]) in opts ): print("Running testscript %s%s%s ..." 
% (bold[1], testScripts[i], bold[0])) time0 = time.time() subprocess.check_call( rpcTestDir + testScripts[i] + flags, shell=True) print("Duration: %s s\n" % (int(time.time() - time0))) # exit if help is called so we print just one set of # instructions p = re.compile(" -h| --help") if p.match(passOn): sys.exit(0) # Run Extended Tests for i in range(len(testScriptsExt)): if (run_extended or testScriptsExt[i] in opts or re.sub(".py$", "", testScriptsExt[i]) in opts): print( "Running 2nd level testscript " + "%s%s%s ..." % (bold[1], testScriptsExt[i], bold[0])) time0 = time.time() subprocess.check_call( rpcTestDir + testScriptsExt[i] + flags, shell=True) print("Duration: %s s\n" % (int(time.time() - time0))) if coverage: coverage.report_rpc_coverage() print("Cleaning up coverage data") coverage.cleanup() else: print "No rpc tests to run. Wallet, utils, and bitcoind must all be enabled" class RPCCoverage(object): """ Coverage reporting utilities for pull-tester. Coverage calculation works by having each test script subprocess write coverage files into a particular directory. These files contain the RPC commands invoked during testing, as well as a complete listing of RPC commands per `bitcoin-cli help` (`rpc_interface.txt`). After all tests complete, the commands run are combined and diff'd against the complete list to calculate uncovered RPC commands. See also: qa/rpc-tests/test_framework/coverage.py """ def __init__(self): self.dir = tempfile.mkdtemp(prefix="coverage") self.flag = '--coveragedir %s' % self.dir def report_rpc_coverage(self): """ Print out RPC commands that were unexercised by tests. """ uncovered = self._get_uncovered_rpc_commands() if uncovered: print("Uncovered RPC commands:") print("".join((" - %s\n" % i) for i in sorted(uncovered))) else: print("All RPC commands covered.") def cleanup(self): return shutil.rmtree(self.dir) def _get_uncovered_rpc_commands(self): """ Return a set of currently untested RPC commands. 
""" # This is shared from `qa/rpc-tests/test-framework/coverage.py` REFERENCE_FILENAME = 'rpc_interface.txt' COVERAGE_FILE_PREFIX = 'coverage.' coverage_ref_filename = os.path.join(self.dir, REFERENCE_FILENAME) coverage_filenames = set() all_cmds = set() covered_cmds = set() if not os.path.isfile(coverage_ref_filename): raise RuntimeError("No coverage reference found") with open(coverage_ref_filename, 'r') as f: all_cmds.update([i.strip() for i in f.readlines()]) for root, dirs, files in os.walk(self.dir): for filename in files: if filename.startswith(COVERAGE_FILE_PREFIX): coverage_filenames.add(os.path.join(root, filename)) for filename in coverage_filenames: with open(filename, 'r') as f: covered_cmds.update([i.strip() for i in f.readlines()]) return all_cmds - covered_cmds if __name__ == '__main__': runtests()
mit
TangHao1987/intellij-community
python/helpers/pydev/pydevd_frame.py
23
26160
import linecache import os.path import re import traceback # @Reimport import pydev_log from pydevd_breakpoints import get_exception_breakpoint, get_exception_name from pydevd_comm import CMD_STEP_CAUGHT_EXCEPTION, CMD_STEP_RETURN, CMD_STEP_OVER, CMD_SET_BREAK, \ CMD_STEP_INTO, CMD_SMART_STEP_INTO, CMD_RUN_TO_LINE, CMD_SET_NEXT_STATEMENT, CMD_STEP_INTO_MY_CODE from pydevd_constants import * # @UnusedWildImport from pydevd_file_utils import GetFilenameAndBase from pydevd_frame_utils import add_exception_to_frame, just_raised try: from pydevd_signature import sendSignatureCallTrace except ImportError: def sendSignatureCallTrace(*args, **kwargs): pass import pydevd_vars import pydevd_dont_trace basename = os.path.basename IGNORE_EXCEPTION_TAG = re.compile('[^#]*#.*@IgnoreException') #======================================================================================================================= # PyDBFrame #======================================================================================================================= class PyDBFrame: '''This makes the tracing for a given frame, so, the trace_dispatch is used initially when we enter into a new context ('call') and then is reused for the entire context. ''' #Note: class (and not instance) attributes. #Same thing in the main debugger but only considering the file contents, while the one in the main debugger #considers the user input (so, the actual result must be a join of both). 
filename_to_lines_where_exceptions_are_ignored = {} filename_to_stat_info = {} def __init__(self, args): #args = mainDebugger, filename, base, info, t, frame #yeap, much faster than putting in self and then getting it from self later on self._args = args[:-1] def setSuspend(self, *args, **kwargs): self._args[0].setSuspend(*args, **kwargs) def doWaitSuspend(self, *args, **kwargs): self._args[0].doWaitSuspend(*args, **kwargs) def trace_exception(self, frame, event, arg): if event == 'exception': flag, frame = self.should_stop_on_exception(frame, event, arg) if flag: self.handle_exception(frame, event, arg) return self.trace_dispatch return self.trace_exception def should_stop_on_exception(self, frame, event, arg): mainDebugger, _filename, info, thread = self._args flag = False if info.pydev_state != STATE_SUSPEND: #and breakpoint is not None: exception, value, trace = arg if trace is not None: #on jython trace is None on the first event exception_breakpoint = get_exception_breakpoint( exception, mainDebugger.break_on_caught_exceptions) if exception_breakpoint is not None: if exception_breakpoint.ignore_libraries: if exception_breakpoint.notify_on_first_raise_only: if mainDebugger.first_appearance_in_scope(trace): add_exception_to_frame(frame, (exception, value, trace)) thread.additionalInfo.message = exception_breakpoint.qname flag = True else: pydev_log.debug("Ignore exception %s in library %s" % (exception, frame.f_code.co_filename)) flag = False else: if not exception_breakpoint.notify_on_first_raise_only or just_raised(trace): add_exception_to_frame(frame, (exception, value, trace)) thread.additionalInfo.message = exception_breakpoint.qname flag = True else: flag = False else: try: if mainDebugger.plugin is not None: result = mainDebugger.plugin.exception_break(mainDebugger, self, frame, self._args, arg) if result: (flag, frame) = result except: flag = False return flag, frame def handle_exception(self, frame, event, arg): try: # print 'handle_exception', 
frame.f_lineno, frame.f_code.co_name # We have 3 things in arg: exception type, description, traceback object trace_obj = arg[2] mainDebugger = self._args[0] if not hasattr(trace_obj, 'tb_next'): return #Not always there on Jython... initial_trace_obj = trace_obj if trace_obj.tb_next is None and trace_obj.tb_frame is frame: #I.e.: tb_next should be only None in the context it was thrown (trace_obj.tb_frame is frame is just a double check). if mainDebugger.break_on_exceptions_thrown_in_same_context: #Option: Don't break if an exception is caught in the same function from which it is thrown return else: #Get the trace_obj from where the exception was raised... while trace_obj.tb_next is not None: trace_obj = trace_obj.tb_next if mainDebugger.ignore_exceptions_thrown_in_lines_with_ignore_exception: for check_trace_obj in (initial_trace_obj, trace_obj): filename = GetFilenameAndBase(check_trace_obj.tb_frame)[0] filename_to_lines_where_exceptions_are_ignored = self.filename_to_lines_where_exceptions_are_ignored lines_ignored = filename_to_lines_where_exceptions_are_ignored.get(filename) if lines_ignored is None: lines_ignored = filename_to_lines_where_exceptions_are_ignored[filename] = {} try: curr_stat = os.stat(filename) curr_stat = (curr_stat.st_size, curr_stat.st_mtime) except: curr_stat = None last_stat = self.filename_to_stat_info.get(filename) if last_stat != curr_stat: self.filename_to_stat_info[filename] = curr_stat lines_ignored.clear() try: linecache.checkcache(filename) except: #Jython 2.1 linecache.checkcache() from_user_input = mainDebugger.filename_to_lines_where_exceptions_are_ignored.get(filename) if from_user_input: merged = {} merged.update(lines_ignored) #Override what we have with the related entries that the user entered merged.update(from_user_input) else: merged = lines_ignored exc_lineno = check_trace_obj.tb_lineno # print ('lines ignored', lines_ignored) # print ('user input', from_user_input) # print ('merged', merged, 'curr', exc_lineno) if 
not DictContains(merged, exc_lineno): #Note: check on merged but update lines_ignored. try: line = linecache.getline(filename, exc_lineno, check_trace_obj.tb_frame.f_globals) except: #Jython 2.1 line = linecache.getline(filename, exc_lineno) if IGNORE_EXCEPTION_TAG.match(line) is not None: lines_ignored[exc_lineno] = 1 return else: #Put in the cache saying not to ignore lines_ignored[exc_lineno] = 0 else: #Ok, dict has it already cached, so, let's check it... if merged.get(exc_lineno, 0): return thread = self._args[3] try: frame_id_to_frame = {} frame_id_to_frame[id(frame)] = frame f = trace_obj.tb_frame while f is not None: frame_id_to_frame[id(f)] = f f = f.f_back f = None thread_id = GetThreadId(thread) pydevd_vars.addAdditionalFrameById(thread_id, frame_id_to_frame) try: mainDebugger.sendCaughtExceptionStack(thread, arg, id(frame)) self.setSuspend(thread, CMD_STEP_CAUGHT_EXCEPTION) self.doWaitSuspend(thread, frame, event, arg) mainDebugger.sendCaughtExceptionStackProceeded(thread) finally: pydevd_vars.removeAdditionalFrameById(thread_id) except: traceback.print_exc() mainDebugger.SetTraceForFrameAndParents(frame) finally: #Clear some local variables... 
trace_obj = None initial_trace_obj = None check_trace_obj = None f = None frame_id_to_frame = None mainDebugger = None thread = None def trace_dispatch(self, frame, event, arg): main_debugger, filename, info, thread = self._args try: info.is_tracing = True if main_debugger._finishDebuggingSession: return None if getattr(thread, 'pydev_do_not_trace', None): return None if event == 'call' and main_debugger.signature_factory: sendSignatureCallTrace(main_debugger, frame, filename) plugin_manager = main_debugger.plugin is_exception_event = event == 'exception' has_exception_breakpoints = main_debugger.break_on_caught_exceptions or main_debugger.has_plugin_exception_breaks if is_exception_event: if has_exception_breakpoints: flag, frame = self.should_stop_on_exception(frame, event, arg) if flag: self.handle_exception(frame, event, arg) return self.trace_dispatch elif event not in ('line', 'call', 'return'): #I believe this can only happen in jython on some frontiers on jython and java code, which we don't want to trace. return None stop_frame = info.pydev_step_stop step_cmd = info.pydev_step_cmd if is_exception_event: breakpoints_for_file = None else: # If we are in single step mode and something causes us to exit the current frame, we need to make sure we break # eventually. Force the step mode to step into and the step stop frame to None. # I.e.: F6 in the end of a function should stop in the next possible position (instead of forcing the user # to make a step in or step over at that location). # Note: this is especially troublesome when we're skipping code with the # @DontTrace comment. 
if stop_frame is frame and event in ('return', 'exception') and step_cmd in (CMD_STEP_RETURN, CMD_STEP_OVER): info.pydev_step_cmd = CMD_STEP_INTO info.pydev_step_stop = None breakpoints_for_file = main_debugger.breakpoints.get(filename) can_skip = False if info.pydev_state == STATE_RUN: #we can skip if: #- we have no stop marked #- we should make a step return/step over and we're not in the current frame can_skip = (step_cmd is None and stop_frame is None)\ or (step_cmd in (CMD_STEP_RETURN, CMD_STEP_OVER) and stop_frame is not frame) if can_skip and plugin_manager is not None and main_debugger.has_plugin_line_breaks: can_skip = not plugin_manager.can_not_skip(main_debugger, self, frame) # Let's check to see if we are in a function that has a breakpoint. If we don't have a breakpoint, # we will return nothing for the next trace #also, after we hit a breakpoint and go to some other debugging state, we have to force the set trace anyway, #so, that's why the additional checks are there. if not breakpoints_for_file: if can_skip: if has_exception_breakpoints: return self.trace_exception else: return None else: #checks the breakpoint to see if there is a context match in some function curr_func_name = frame.f_code.co_name #global context is set with an empty name if curr_func_name in ('?', '<module>'): curr_func_name = '' for breakpoint in DictIterValues(breakpoints_for_file): #jython does not support itervalues() #will match either global or some function if breakpoint.func_name in ('None', curr_func_name): break else: # if we had some break, it won't get here (so, that's a context that we want to skip) if can_skip: if has_exception_breakpoints: return self.trace_exception else: return None #We may have hit a breakpoint or we are already in step mode. 
Either way, let's check what we should do in this frame #print 'NOT skipped', frame.f_lineno, frame.f_code.co_name, event try: line = frame.f_lineno flag = False #return is not taken into account for breakpoint hit because we'd have a double-hit in this case #(one for the line and the other for the return). stop_info = {} breakpoint = None exist_result = False stop = False bp_type = None if not flag and event != 'return' and info.pydev_state != STATE_SUSPEND and breakpoints_for_file is not None \ and DictContains(breakpoints_for_file, line): breakpoint = breakpoints_for_file[line] new_frame = frame stop = True if step_cmd == CMD_STEP_OVER and stop_frame is frame and event in ('line', 'return'): stop = False #we don't stop on breakpoint if we have to stop by step-over (it will be processed later) elif plugin_manager is not None and main_debugger.has_plugin_line_breaks: result = plugin_manager.get_breakpoint(main_debugger, self, frame, event, self._args) if result: exist_result = True (flag, breakpoint, new_frame, bp_type) = result if breakpoint: #ok, hit breakpoint, now, we have to discover if it is a conditional breakpoint # lets do the conditional stuff here if stop or exist_result: condition = breakpoint.condition if condition is not None: try: val = eval(condition, new_frame.f_globals, new_frame.f_locals) if not val: return self.trace_dispatch except: if type(condition) != type(''): if hasattr(condition, 'encode'): condition = condition.encode('utf-8') msg = 'Error while evaluating expression: %s\n' % (condition,) sys.stderr.write(msg) traceback.print_exc() if not main_debugger.suspend_on_breakpoint_exception: return self.trace_dispatch else: stop = True try: additional_info = None try: additional_info = thread.additionalInfo except AttributeError: pass #that's ok, no info currently set if additional_info is not None: # add exception_type and stacktrace into thread additional info etype, value, tb = sys.exc_info() try: error = 
''.join(traceback.format_exception_only(etype, value)) stack = traceback.extract_stack(f=tb.tb_frame.f_back) # On self.setSuspend(thread, CMD_SET_BREAK) this info will be # sent to the client. additional_info.conditional_breakpoint_exception = \ ('Condition:\n' + condition + '\n\nError:\n' + error, stack) finally: etype, value, tb = None, None, None except: traceback.print_exc() if breakpoint.expression is not None: try: try: val = eval(breakpoint.expression, new_frame.f_globals, new_frame.f_locals) except: val = sys.exc_info()[1] finally: if val is not None: thread.additionalInfo.message = val if stop: self.setSuspend(thread, CMD_SET_BREAK) elif flag and plugin_manager is not None: result = plugin_manager.suspend(main_debugger, thread, frame, bp_type) if result: frame = result # if thread has a suspend flag, we suspend with a busy wait if info.pydev_state == STATE_SUSPEND: self.doWaitSuspend(thread, frame, event, arg) return self.trace_dispatch except: traceback.print_exc() raise #step handling. We stop when we hit the right frame try: should_skip = False if pydevd_dont_trace.should_trace_hook is not None: if not hasattr(self, 'should_skip'): # I.e.: cache the result on self.should_skip (no need to evaluate the same frame multiple times). # Note that on a code reload, we won't re-evaluate this because in practice, the frame.f_code # Which will be handled by this frame is read-only, so, we can cache it safely. 
should_skip = self.should_skip = not pydevd_dont_trace.should_trace_hook(frame, filename) else: should_skip = self.should_skip plugin_stop = False if should_skip: stop = False elif step_cmd == CMD_STEP_INTO: stop = event in ('line', 'return') if plugin_manager is not None: result = plugin_manager.cmd_step_into(main_debugger, frame, event, self._args, stop_info, stop) if result: stop, plugin_stop = result elif step_cmd == CMD_STEP_INTO_MY_CODE: if not main_debugger.not_in_scope(frame.f_code.co_filename): stop = event == 'line' elif step_cmd == CMD_STEP_OVER: stop = stop_frame is frame and event in ('line', 'return') if plugin_manager is not None: result = plugin_manager.cmd_step_over(main_debugger, frame, event, self._args, stop_info, stop) if result: stop, plugin_stop = result elif step_cmd == CMD_SMART_STEP_INTO: stop = False if info.pydev_smart_step_stop is frame: info.pydev_func_name = None info.pydev_smart_step_stop = None if event == 'line' or event == 'exception': curr_func_name = frame.f_code.co_name #global context is set with an empty name if curr_func_name in ('?', '<module>') or curr_func_name is None: curr_func_name = '' if curr_func_name == info.pydev_func_name: stop = True elif step_cmd == CMD_STEP_RETURN: stop = event == 'return' and stop_frame is frame elif step_cmd == CMD_RUN_TO_LINE or step_cmd == CMD_SET_NEXT_STATEMENT: stop = False if event == 'line' or event == 'exception': #Yes, we can only act on line events (weird hum?) 
#Note: This code is duplicated at pydevd.py #Acting on exception events after debugger breaks with exception curr_func_name = frame.f_code.co_name #global context is set with an empty name if curr_func_name in ('?', '<module>'): curr_func_name = '' if curr_func_name == info.pydev_func_name: line = info.pydev_next_line if frame.f_lineno == line: stop = True else: if frame.f_trace is None: frame.f_trace = self.trace_dispatch frame.f_lineno = line frame.f_trace = None stop = True else: stop = False if plugin_stop: stopped_on_plugin = plugin_manager.stop(main_debugger, frame, event, self._args, stop_info, arg, step_cmd) elif stop: if event == 'line': self.setSuspend(thread, step_cmd) self.doWaitSuspend(thread, frame, event, arg) else: #return event back = frame.f_back if back is not None: #When we get to the pydevd run function, the debugging has actually finished for the main thread #(note that it can still go on for other threads, but for this one, we just make it finish) #So, just setting it to None should be OK base = basename(back.f_code.co_filename) if base == 'pydevd.py' and back.f_code.co_name == 'run': back = None elif base == 'pydevd_traceproperty.py': # We dont want to trace the return event of pydevd_traceproperty (custom property for debugging) #if we're in a return, we want it to appear to the user in the previous frame! return None if back is not None: #if we're in a return, we want it to appear to the user in the previous frame! 
self.setSuspend(thread, step_cmd) self.doWaitSuspend(thread, back, event, arg) else: #in jython we may not have a back frame info.pydev_step_stop = None info.pydev_step_cmd = None info.pydev_state = STATE_RUN except: try: traceback.print_exc() info.pydev_step_cmd = None except: return None #if we are quitting, let's stop the tracing retVal = None if not main_debugger.quitting: retVal = self.trace_dispatch return retVal finally: info.is_tracing = False #end trace_dispatch if USE_PSYCO_OPTIMIZATION: try: import psyco trace_dispatch = psyco.proxy(trace_dispatch) except ImportError: if hasattr(sys, 'exc_clear'): #jython does not have it sys.exc_clear() #don't keep the traceback pass #ok, psyco not available
apache-2.0
def-/commandergenius
project/jni/python/src/Lib/plat-riscos/riscospath.py
68
9996
# Module 'riscospath' -- common operations on RISC OS pathnames. # contributed by Andrew Clover ( andrew@oaktree.co.uk ) # The "os.path" name is an alias for this module on RISC OS systems; # on other systems (e.g. Mac, Windows), os.path provides the same # operations in a manner specific to that platform, and is an alias # to another module (e.g. macpath, ntpath). """ Instead of importing this module directly, import os and refer to this module as os.path. """ # strings representing various path-related bits and pieces curdir = '@' pardir = '^' extsep = '/' sep = '.' pathsep = ',' defpath = '<Run$Dir>' altsep = None # Imports - make an error-generating swi object if the swi module is not # available (ie. we are not running on RISC OS Python) import os, stat, string try: import swi except ImportError: class _swi: def swi(*a): raise AttributeError, 'This function only available under RISC OS' block= swi swi= _swi() [_false, _true]= range(2) _roots= ['$', '&', '%', '@', '\\'] # _allowMOSFSNames # After importing riscospath, set _allowMOSFSNames true if you want the module # to understand the "-SomeFS-" notation left over from the old BBC Master MOS, # as well as the standard "SomeFS:" notation. Set this to be fully backwards # compatible but remember that "-SomeFS-" can also be a perfectly valid file # name so care must be taken when splitting and joining paths. _allowMOSFSNames= _false ## Path manipulation, RISC OS stylee. def _split(p): """ split filing system name (including special field) and drive specifier from rest of path. This is needed by many riscospath functions. 
""" dash= _allowMOSFSNames and p[:1]=='-' if dash: q= string.find(p, '-', 1)+1 else: if p[:1]==':': q= 0 else: q= string.find(p, ':')+1 # q= index of start of non-FS portion of path s= string.find(p, '#') if s==-1 or s>q: s= q # find end of main FS name, not including special field else: for c in p[dash:s]: if c not in string.ascii_letters: q= 0 break # disallow invalid non-special-field characters in FS name r= q if p[q:q+1]==':': r= string.find(p, '.', q+1)+1 if r==0: r= len(p) # find end of drive name (if any) following FS name (if any) return (p[:q], p[q:r], p[r:]) def normcase(p): """ Normalize the case of a pathname. This converts to lowercase as the native RISC OS filesystems are case-insensitive. However, not all filesystems have to be, and there's no simple way to find out what type an FS is argh. """ return string.lower(p) def isabs(p): """ Return whether a path is absolute. Under RISC OS, a file system specifier does not make a path absolute, but a drive name or number does, and so does using the symbol for root, URD, library, CSD or PSD. This means it is perfectly possible to have an "absolute" URL dependent on the current working directory, and equally you can have a "relative" URL that's on a completely different device to the current one argh. """ (fs, drive, path)= _split(p) return drive!='' or path[:1] in _roots def join(a, *p): """ Join path elements with the directory separator, replacing the entire path when an absolute or FS-changing path part is found. """ j= a for b in p: (fs, drive, path)= _split(b) if j=='' or fs!='' or drive!='' or path[:1] in _roots: j= b elif j[-1]==':': j= j+b else: j= j+'.'+b return j def split(p): """ Split a path in head (everything up to the last '.') and tail (the rest). FS name must still be dealt with separately since special field may contain '.'. 
""" (fs, drive, path)= _split(p) q= string.rfind(path, '.') if q!=-1: return (fs+drive+path[:q], path[q+1:]) return ('', p) def splitext(p): """ Split a path in root and extension. This assumes the 'using slash for dot and dot for slash with foreign files' convention common in RISC OS is in force. """ (tail, head)= split(p) if '/' in head: q= len(head)-string.rfind(head, '/') return (p[:-q], p[-q:]) return (p, '') def splitdrive(p): """ Split a pathname into a drive specification (including FS name) and the rest of the path. The terminating dot of the drive name is included in the drive specification. """ (fs, drive, path)= _split(p) return (fs+drive, p) def basename(p): """ Return the tail (basename) part of a path. """ return split(p)[1] def dirname(p): """ Return the head (dirname) part of a path. """ return split(p)[0] def commonprefix(m): "Given a list of pathnames, returns the longest common leading component" if not m: return '' s1 = min(m) s2 = max(m) n = min(len(s1), len(s2)) for i in xrange(n): if s1[i] != s2[i]: return s1[:i] return s1[:n] ## File access functions. Why are we in os.path? def getsize(p): """ Return the size of a file, reported by os.stat(). """ st= os.stat(p) return st[stat.ST_SIZE] def getmtime(p): """ Return the last modification time of a file, reported by os.stat(). """ st = os.stat(p) return st[stat.ST_MTIME] getatime= getmtime # RISC OS-specific file access functions def exists(p): """ Test whether a path exists. """ try: return swi.swi('OS_File', '5s;i', p)!=0 except swi.error: return 0 lexists = exists def isdir(p): """ Is a path a directory? Includes image files. """ try: return swi.swi('OS_File', '5s;i', p) in [2, 3] except swi.error: return 0 def isfile(p): """ Test whether a path is a file, including image files. """ try: return swi.swi('OS_File', '5s;i', p) in [1, 3] except swi.error: return 0 def islink(p): """ RISC OS has no links or mounts. """ return _false ismount= islink # Same-file testing. 
# samefile works on filename comparison since there is no ST_DEV and ST_INO is # not reliably unique (esp. directories). First it has to normalise the # pathnames, which it can do 'properly' using OS_FSControl since samefile can # assume it's running on RISC OS (unlike normpath). def samefile(fa, fb): """ Test whether two pathnames reference the same actual file. """ l= 512 b= swi.block(l) swi.swi('OS_FSControl', 'isb..i', 37, fa, b, l) fa= b.ctrlstring() swi.swi('OS_FSControl', 'isb..i', 37, fb, b, l) fb= b.ctrlstring() return fa==fb def sameopenfile(a, b): """ Test whether two open file objects reference the same file. """ return os.fstat(a)[stat.ST_INO]==os.fstat(b)[stat.ST_INO] ## Path canonicalisation # 'user directory' is taken as meaning the User Root Directory, which is in # practice never used, for anything. def expanduser(p): (fs, drive, path)= _split(p) l= 512 b= swi.block(l) if path[:1]!='@': return p if fs=='': fsno= swi.swi('OS_Args', '00;i') swi.swi('OS_FSControl', 'iibi', 33, fsno, b, l) fsname= b.ctrlstring() else: if fs[:1]=='-': fsname= fs[1:-1] else: fsname= fs[:-1] fsname= string.split(fsname, '#', 1)[0] # remove special field from fs x= swi.swi('OS_FSControl', 'ib2s.i;.....i', 54, b, fsname, l) if x<l: urd= b.tostring(0, l-x-1) else: # no URD! try CSD x= swi.swi('OS_FSControl', 'ib0s.i;.....i', 54, b, fsname, l) if x<l: urd= b.tostring(0, l-x-1) else: # no CSD! use root urd= '$' return fsname+':'+urd+path[1:] # Environment variables are in angle brackets. def expandvars(p): """ Expand environment variables using OS_GSTrans. """ l= 512 b= swi.block(l) return b.tostring(0, swi.swi('OS_GSTrans', 'sbi;..i', p, b, l)) # Return an absolute path. RISC OS' osfscontrol_canonicalise_path does this among others abspath = os.expand # realpath is a no-op on systems without islink support realpath = abspath # Normalize a path. Only special path element under RISC OS is "^" for "..". def normpath(p): """ Normalize path, eliminating up-directory ^s. 
""" (fs, drive, path)= _split(p) rhs= '' ups= 0 while path!='': (path, el)= split(path) if el=='^': ups= ups+1 else: if ups>0: ups= ups-1 else: if rhs=='': rhs= el else: rhs= el+'.'+rhs while ups>0: ups= ups-1 rhs= '^.'+rhs return fs+drive+rhs # Directory tree walk. # Independent of host system. Why am I in os.path? def walk(top, func, arg): """Directory tree walk with callback function. For each directory in the directory tree rooted at top (including top itself, but excluding '.' and '..'), call func(arg, dirname, fnames). dirname is the name of the directory, and fnames a list of the names of the files and subdirectories in dirname (excluding '.' and '..'). func may modify the fnames list in-place (e.g. via del or slice assignment), and walk will only recurse into the subdirectories whose names remain in fnames; this can be used to implement a filter, or to impose a specific order of visiting. No semantics are defined for, or required of, arg, beyond that arg is always passed to func. It can be used, e.g., to pass a filename pattern, or a mutable object designed to accumulate statistics. Passing None for arg is common.""" try: names= os.listdir(top) except os.error: return func(arg, top, names) for name in names: name= join(top, name) if isdir(name) and not islink(name): walk(name, func, arg)
lgpl-2.1
sisuani/pjproject
pjsip-apps/src/pygui/accountsetting.py
28
19205
# $Id: accountsetting.py 4704 2014-01-16 05:30:46Z ming $ # # pjsua Python GUI Demo # # Copyright (C)2013 Teluu Inc. (http://www.teluu.com) # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # import sys if sys.version_info[0] >= 3: # Python 3 import tkinter as tk from tkinter import ttk from tkinter import messagebox as msgbox else: import Tkinter as tk import tkMessageBox as msgbox import ttk import pjsua2 as pj import endpoint import application class Dialog(tk.Toplevel): """ This implements account settings dialog to manipulate account settings. 
""" def __init__(self, parent, cfg): tk.Toplevel.__init__(self, parent) self.transient(parent) self.parent = parent self.geometry("+100+100") self.title('Account settings') self.frm = ttk.Frame(self) self.frm.pack(expand='yes', fill='both') self.isOk = False self.cfg = cfg self.createWidgets() def doModal(self): if self.parent: self.parent.wait_window(self) else: self.wait_window(self) return self.isOk def createWidgets(self): # The notebook self.frm.rowconfigure(0, weight=1) self.frm.rowconfigure(1, weight=0) self.frm.columnconfigure(0, weight=1) self.frm.columnconfigure(1, weight=1) self.wTab = ttk.Notebook(self.frm) self.wTab.grid(column=0, row=0, columnspan=2, padx=10, pady=10, ipadx=20, ipady=20, sticky=tk.N+tk.S+tk.W+tk.E) # Main buttons btnOk = ttk.Button(self.frm, text='Ok', command=self.onOk) btnOk.grid(column=0, row=1, sticky=tk.E, padx=20, pady=10) btnCancel = ttk.Button(self.frm, text='Cancel', command=self.onCancel) btnCancel.grid(column=1, row=1, sticky=tk.W, padx=20, pady=10) # Tabs self.createBasicTab() self.createSipTab() self.createMediaTab() self.createMediaNatTab() def createBasicTab(self): # Prepare the variables to set/receive values from GUI self.cfgPriority = tk.IntVar(value=self.cfg.priority) self.cfgAccId = tk.StringVar(value=self.cfg.idUri) self.cfgRegistrar = tk.StringVar(value=self.cfg.regConfig.registrarUri) self.cfgRegisterOnAdd = tk.IntVar(value=self.cfg.regConfig.registerOnAdd) self.cfgUsername = tk.StringVar() self.cfgPassword = tk.StringVar() if len(self.cfg.sipConfig.authCreds): self.cfgUsername.set( self.cfg.sipConfig.authCreds[0].username ) self.cfgPassword.set( self.cfg.sipConfig.authCreds[0].data ) self.cfgProxy = tk.StringVar() if len(self.cfg.sipConfig.proxies): self.cfgProxy.set( self.cfg.sipConfig.proxies[0] ) # Build the tab page frm = ttk.Frame(self.frm) frm.columnconfigure(0, weight=1) frm.columnconfigure(1, weight=2) row = 0 ttk.Label(frm, text='Priority:').grid(row=row, column=0, sticky=tk.E, pady=2) tk.Spinbox(frm, 
from_=0, to=9, textvariable=self.cfgPriority, width=2).grid(row=row, column=1, sticky=tk.W, padx=6) row += 1 ttk.Label(frm, text='ID (URI):').grid(row=row, column=0, sticky=tk.E, pady=2) ttk.Entry(frm, textvariable=self.cfgAccId, width=32).grid(row=row, column=1, sticky=tk.W, padx=6) row += 1 ttk.Label(frm, text='Registrar URI:').grid(row=row, column=0, sticky=tk.E, pady=2) ttk.Entry(frm, textvariable=self.cfgRegistrar, width=32).grid(row=row, column=1, sticky=tk.W, padx=6) row += 1 ttk.Checkbutton(frm, text='Register on add', variable=self.cfgRegisterOnAdd).grid(row=row, column=1, sticky=tk.W, padx=6, pady=2) row += 1 ttk.Label(frm, text='Optional proxy URI:').grid(row=row, column=0, sticky=tk.E, pady=2) ttk.Entry(frm, textvariable=self.cfgProxy, width=32).grid(row=row, column=1, sticky=tk.W, padx=6) row += 1 ttk.Label(frm, text='Auth username:').grid(row=row, column=0, sticky=tk.E, pady=2) ttk.Entry(frm, textvariable=self.cfgUsername, width=16).grid(row=row, column=1, sticky=tk.W, padx=6) row += 1 ttk.Label(frm, text='Password:').grid(row=row, column=0, sticky=tk.E, pady=2) ttk.Entry(frm, textvariable=self.cfgPassword, show='*', width=16).grid(row=row, column=1, sticky=tk.W, padx=6, pady=2) self.wTab.add(frm, text='Basic Settings') def createSipTab(self): # Prepare the variables to set/receive values from GUI self.cfgPrackUse = tk.IntVar(value=self.cfg.callConfig.prackUse) self.cfgTimerUse = tk.IntVar(value=self.cfg.callConfig.timerUse) self.cfgTimerExpires = tk.IntVar(value=self.cfg.callConfig.timerSessExpiresSec) self.cfgPublish = tk.BooleanVar(value=self.cfg.presConfig.publishEnabled) self.cfgMwiEnabled = tk.BooleanVar(value=self.cfg.mwiConfig.enabled) self.cfgEnableContactRewrite = tk.BooleanVar(value=self.cfg.natConfig.contactRewriteUse != 0) self.cfgEnableViaRewrite = tk.BooleanVar(value=self.cfg.natConfig.viaRewriteUse != 0) self.cfgEnableSdpRewrite = tk.BooleanVar(value=self.cfg.natConfig.sdpNatRewriteUse != 0) self.cfgEnableSipOutbound = 
tk.BooleanVar(value=self.cfg.natConfig.sipOutboundUse != 0) self.cfgKaInterval = tk.IntVar(value=self.cfg.natConfig.udpKaIntervalSec) # Build the tab page frm = ttk.Frame(self.frm) frm.columnconfigure(0, weight=1) frm.columnconfigure(1, weight=2) row = 0 ttk.Label(frm, text='100rel/PRACK:').grid(row=row, column=0, sticky=tk.E, pady=2) ttk.Radiobutton(frm, text='Only offer PRACK', value=pj.PJSUA_100REL_NOT_USED, variable=self.cfgPrackUse).grid(row=row, column=1, sticky=tk.W, padx=6) row += 1 ttk.Radiobutton(frm, text='Offer and use if remote supports', value=pj.PJSUA_100REL_OPTIONAL, variable=self.cfgPrackUse).grid(row=row, column=1, sticky=tk.W, padx=6) row += 1 ttk.Radiobutton(frm, text='Required', value=pj.PJSUA_100REL_MANDATORY, variable=self.cfgPrackUse).grid(row=row, column=1, sticky=tk.W, padx=6) row += 1 ttk.Label(frm, text='Session Timer:').grid(row=row, column=0, sticky=tk.E, pady=2) ttk.Radiobutton(frm, text='Not offered', value=pj.PJSUA_SIP_TIMER_INACTIVE, variable=self.cfgTimerUse).grid(row=row, column=1, sticky=tk.W, padx=6) row += 1 ttk.Radiobutton(frm, text='Optional', value=pj.PJSUA_SIP_TIMER_OPTIONAL, variable=self.cfgTimerUse).grid(row=row, column=1, sticky=tk.W, padx=6) row += 1 ttk.Radiobutton(frm, text='Required', value=pj.PJSUA_SIP_TIMER_REQUIRED, variable=self.cfgTimerUse).grid(row=row, column=1, sticky=tk.W, padx=6) row += 1 ttk.Radiobutton(frm, text="Always use", value=pj.PJSUA_SIP_TIMER_ALWAYS, variable=self.cfgTimerUse).grid(row=row, column=1, sticky=tk.W, padx=6) row += 1 ttk.Label(frm, text='Session Timer Expiration:').grid(row=row, column=0, sticky=tk.E, pady=2) tk.Spinbox(frm, from_=90, to=7200, textvariable=self.cfgTimerExpires, width=5).grid(row=row, column=1, sticky=tk.W, padx=6) ttk.Label(frm, text='(seconds)').grid(row=row, column=1, sticky=tk.E) row += 1 ttk.Label(frm, text='Presence:').grid(row=row, column=0, sticky=tk.E, pady=2) ttk.Checkbutton(frm, text='Enable PUBLISH', variable=self.cfgPublish).grid(row=row, column=1, 
sticky=tk.W, padx=6, pady=2) row += 1 ttk.Label(frm, text='Message Waiting Indication:').grid(row=row, column=0, sticky=tk.E, pady=2) ttk.Checkbutton(frm, text='Enable MWI', variable=self.cfgMwiEnabled).grid(row=row, column=1, sticky=tk.W, padx=6, pady=2) row += 1 ttk.Label(frm, text='NAT Traversal:').grid(row=row, column=0, sticky=tk.E, pady=2) ttk.Checkbutton(frm, text='Enable Contact Rewrite', variable=self.cfgEnableContactRewrite).grid(row=row, column=1, sticky=tk.W, padx=6, pady=2) row += 1 ttk.Checkbutton(frm, text='Enable Via Rewrite', variable=self.cfgEnableViaRewrite).grid(row=row, column=1, sticky=tk.W, padx=6, pady=2) row += 1 ttk.Checkbutton(frm, text='Enable SDP IP Address Rewrite', variable=self.cfgEnableSdpRewrite).grid(row=row, column=1, sticky=tk.W, padx=6, pady=2) row += 1 ttk.Checkbutton(frm, text='Enable SIP Outbound Extension', variable=self.cfgEnableSipOutbound).grid(row=row, column=1, sticky=tk.W, padx=6, pady=2) row += 1 ttk.Label(frm, text='UDP Keep-Alive Interval:').grid(row=row, column=0, sticky=tk.E, pady=2) tk.Spinbox(frm, from_=0, to=3600, textvariable=self.cfgKaInterval, width=5).grid(row=row, column=1, sticky=tk.W, padx=6) ttk.Label(frm, text='(seconds) Zero to disable.').grid(row=row, column=1, sticky=tk.E) self.wTab.add(frm, text='SIP Features') def createMediaTab(self): # Prepare the variables to set/receive values from GUI self.cfgMedPort = tk.IntVar(value=self.cfg.mediaConfig.transportConfig.port) self.cfgMedPortRange = tk.IntVar(value=self.cfg.mediaConfig.transportConfig.portRange) self.cfgMedLockCodec = tk.BooleanVar(value=self.cfg.mediaConfig.lockCodecEnabled) self.cfgMedSrtp = tk.IntVar(value=self.cfg.mediaConfig.srtpUse) self.cfgMedSrtpSecure = tk.IntVar(value=self.cfg.mediaConfig.srtpSecureSignaling) self.cfgMedIpv6 = tk.BooleanVar(value=self.cfg.mediaConfig.ipv6Use==pj.PJSUA_IPV6_ENABLED) # Build the tab page frm = ttk.Frame(self.frm) frm.columnconfigure(0, weight=1) frm.columnconfigure(1, weight=21) row = 0 
ttk.Label(frm, text='Secure RTP (SRTP):').grid(row=row, column=0, sticky=tk.E, pady=2) ttk.Radiobutton(frm, text='Disable', value=pj.PJMEDIA_SRTP_DISABLED, variable=self.cfgMedSrtp).grid(row=row, column=1, sticky=tk.W, padx=6) row += 1 ttk.Radiobutton(frm, text='Mandatory', value=pj.PJMEDIA_SRTP_MANDATORY, variable=self.cfgMedSrtp).grid(row=row, column=1, sticky=tk.W, padx=6) row += 1 ttk.Radiobutton(frm, text='Optional (non-standard)', value=pj.PJMEDIA_SRTP_OPTIONAL, variable=self.cfgMedSrtp).grid(row=row, column=1, sticky=tk.W, padx=6) row += 1 ttk.Label(frm, text='SRTP signaling:').grid(row=row, column=0, sticky=tk.E, pady=2) ttk.Radiobutton(frm, text='Does not require secure signaling', value=0, variable=self.cfgMedSrtpSecure).grid(row=row, column=1, sticky=tk.W, padx=6) row += 1 ttk.Radiobutton(frm, text='Require secure next hop (TLS)', value=1, variable=self.cfgMedSrtpSecure).grid(row=row, column=1, sticky=tk.W, padx=6) row += 1 ttk.Radiobutton(frm, text='Require secure end-to-end (SIPS)', value=2, variable=self.cfgMedSrtpSecure).grid(row=row, column=1, sticky=tk.W, padx=6) row += 1 ttk.Label(frm, text='RTP transport start port:').grid(row=row, column=0, sticky=tk.E, pady=2) tk.Spinbox(frm, from_=0, to=65535, textvariable=self.cfgMedPort, width=5).grid(row=row, column=1, sticky=tk.W, padx=6) ttk.Label(frm, text='(0: any)').grid(row=row, column=1, sticky=tk.E, pady=2) row += 1 ttk.Label(frm, text='Port range:').grid(row=row, column=0, sticky=tk.E, pady=2) tk.Spinbox(frm, from_=0, to=65535, textvariable=self.cfgMedPortRange, width=5).grid(row=row, column=1, sticky=tk.W, padx=6) ttk.Label(frm, text='(0: not limited)').grid(row=row, column=1, sticky=tk.E, pady=2) row += 1 ttk.Label(frm, text='Lock codec:').grid(row=row, column=0, sticky=tk.E, pady=2) ttk.Checkbutton(frm, text='Enable', variable=self.cfgMedLockCodec).grid(row=row, column=1, sticky=tk.W, padx=6, pady=2) row += 1 ttk.Label(frm, text='Use IPv6:').grid(row=row, column=0, sticky=tk.E, pady=2) 
        # --- tail of createMediaTab() (its "def" line is above this chunk) ---
        # IPv6 toggle, then register the finished page with the tab widget.
        ttk.Checkbutton(frm, text='Yes', variable=self.cfgMedIpv6).grid(row=row, column=1, sticky=tk.W, padx=6, pady=2)

        self.wTab.add(frm, text='Media settings')

    def createMediaNatTab(self):
        """Build the 'NAT settings' tab page (STUN, ICE and TURN options).

        Each Tk variable mirrors one field of self.cfg.natConfig (a
        pj.AccountNatConfig); onOk() copies the values back.
        """
        # Prepare the variables to set/receive values from GUI
        self.cfgSipUseStun = tk.IntVar(value = self.cfg.natConfig.sipStunUse)
        self.cfgMediaUseStun = tk.IntVar(value = self.cfg.natConfig.mediaStunUse)
        self.cfgIceEnabled = tk.BooleanVar(value = self.cfg.natConfig.iceEnabled)
        self.cfgIceAggressive = tk.BooleanVar(value = self.cfg.natConfig.iceAggressiveNomination)
        self.cfgAlwaysUpdate = tk.BooleanVar(value = True if self.cfg.natConfig.iceAlwaysUpdate else False)
        # iceMaxHostCands == 0 means "host candidates disabled" (see onOk()).
        self.cfgIceNoHostCands = tk.BooleanVar(value = True if self.cfg.natConfig.iceMaxHostCands == 0 else False)
        self.cfgTurnEnabled = tk.BooleanVar(value = self.cfg.natConfig.turnEnabled)
        self.cfgTurnServer = tk.StringVar(value = self.cfg.natConfig.turnServer)
        self.cfgTurnConnType = tk.IntVar(value = self.cfg.natConfig.turnConnType)
        self.cfgTurnUser = tk.StringVar(value = self.cfg.natConfig.turnUserName)
        self.cfgTurnPasswd = tk.StringVar(value = self.cfg.natConfig.turnPassword)

        # Build the tab page: two-column grid, labels right-aligned in col 0,
        # controls left-aligned in col 1.
        frm = ttk.Frame(self.frm)
        frm.columnconfigure(0, weight=1)
        frm.columnconfigure(1, weight=2)
        row = 0
        ttk.Label(frm, text='SIP STUN Usage:').grid(row=row, column=0, sticky=tk.E, pady=2)
        ttk.Radiobutton(frm, text='Default', value=pj.PJSUA_STUN_USE_DEFAULT, variable=self.cfgSipUseStun).grid(row=row, column=1, sticky=tk.W, padx=6)
        row += 1
        ttk.Radiobutton(frm, text='Disable', value=pj.PJSUA_STUN_USE_DISABLED, variable=self.cfgSipUseStun).grid(row=row, column=1, sticky=tk.W, padx=6)
        row += 1
        ttk.Label(frm, text='Media STUN Usage:').grid(row=row, column=0, sticky=tk.E, pady=2)
        ttk.Radiobutton(frm, text='Default', value=pj.PJSUA_STUN_USE_DEFAULT, variable=self.cfgMediaUseStun).grid(row=row, column=1, sticky=tk.W, padx=6)
        row += 1
        ttk.Radiobutton(frm, text='Disable', value=pj.PJSUA_STUN_USE_DISABLED, variable=self.cfgMediaUseStun).grid(row=row, column=1, sticky=tk.W, padx=6)
        row += 1
        ttk.Label(frm, text='ICE:').grid(row=row, column=0, sticky=tk.E, pady=2)
        ttk.Checkbutton(frm, text='Enable', variable=self.cfgIceEnabled).grid(row=row, column=1, sticky=tk.W, padx=6, pady=2)
        row += 1
        ttk.Checkbutton(frm, text='Use aggresive nomination', variable=self.cfgIceAggressive).grid(row=row, column=1, sticky=tk.W, padx=6, pady=2)
        row += 1
        ttk.Checkbutton(frm, text='Always re-INVITE after negotiation', variable=self.cfgAlwaysUpdate).grid(row=row, column=1, sticky=tk.W, padx=6, pady=2)
        row += 1
        ttk.Checkbutton(frm, text='Disable host candidates', variable=self.cfgIceNoHostCands).grid(row=row, column=1, sticky=tk.W, padx=6, pady=2)
        row += 1
        ttk.Label(frm, text='TURN:').grid(row=row, column=0, sticky=tk.E, pady=2)
        ttk.Checkbutton(frm, text='Enable', variable=self.cfgTurnEnabled).grid(row=row, column=1, sticky=tk.W, padx=6, pady=2)
        row += 1
        ttk.Label(frm, text='TURN server:').grid(row=row, column=0, sticky=tk.E, pady=2)
        ttk.Entry(frm, textvariable=self.cfgTurnServer, width=20).grid(row=row, column=1, sticky=tk.W, padx=6)
        # Format hint shown at the right edge of the same grid cell.
        ttk.Label(frm, text='host[:port]').grid(row=row, column=1, sticky=tk.E, pady=6)
        row += 1
        ttk.Label(frm, text='TURN connection:').grid(row=row, column=0, sticky=tk.E, pady=2)
        ttk.Radiobutton(frm, text='UDP', value=pj.PJ_TURN_TP_UDP, variable=self.cfgTurnConnType).grid(row=row, column=1, sticky=tk.W, padx=6)
        row += 1
        ttk.Radiobutton(frm, text='TCP', value=pj.PJ_TURN_TP_TCP, variable=self.cfgTurnConnType).grid(row=row, column=1, sticky=tk.W, padx=6)
        row += 1
        ttk.Label(frm, text='TURN username:').grid(row=row, column=0, sticky=tk.E, pady=2)
        ttk.Entry(frm, textvariable=self.cfgTurnUser, width=16).grid(row=row, column=1, sticky=tk.W, padx=6)
        row += 1
        ttk.Label(frm, text='TURN password:').grid(row=row, column=0, sticky=tk.E, pady=2)
        ttk.Entry(frm, textvariable=self.cfgTurnPasswd, show='*', width=16).grid(row=row, column=1, sticky=tk.W, padx=6)

        self.wTab.add(frm, text='NAT settings')

    def onOk(self):
        """Validate the dialog, copy all GUI values into self.cfg and close.

        On validation failure a message box is shown and the dialog stays
        open.  On success self.isOk is set so the caller knows the config
        was accepted.
        """
        # Check basic settings
        errors = "";
        if not self.cfgAccId.get():
            errors += "Account ID is required\n"
        if self.cfgAccId.get():
            if not endpoint.validateSipUri(self.cfgAccId.get()):
                errors += "Invalid SIP ID URI: '%s'\n" % (self.cfgAccId.get())
        if self.cfgRegistrar.get():
            if not endpoint.validateSipUri(self.cfgRegistrar.get()):
                errors += "Invalid SIP registrar URI: '%s'\n" % (self.cfgRegistrar.get())
        if self.cfgProxy.get():
            if not endpoint.validateSipUri(self.cfgProxy.get()):
                errors += "Invalid SIP proxy URI: '%s'\n" % (self.cfgProxy.get())
        if self.cfgTurnEnabled.get():
            if not self.cfgTurnServer.get():
                errors += "TURN server is required\n"
        if errors:
            msgbox.showerror("Error detected:", errors)
            return

        # Basic settings
        self.cfg.priority = self.cfgPriority.get()
        self.cfg.idUri = self.cfgAccId.get()
        self.cfg.regConfig.registrarUri = self.cfgRegistrar.get()
        self.cfg.regConfig.registerOnAdd = self.cfgRegisterOnAdd.get()
        # Rebuild credential list from scratch (drain, then re-append).
        while len(self.cfg.sipConfig.authCreds):
            self.cfg.sipConfig.authCreds.pop()
        if self.cfgUsername.get():
            cred = pj.AuthCredInfo()
            cred.scheme = "digest"
            cred.realm = "*"
            cred.username = self.cfgUsername.get()
            cred.data = self.cfgPassword.get()
            self.cfg.sipConfig.authCreds.append(cred)
        # Same drain-and-refill pattern for the proxy list.
        while len(self.cfg.sipConfig.proxies):
            self.cfg.sipConfig.proxies.pop()
        if self.cfgProxy.get():
            self.cfg.sipConfig.proxies.append(self.cfgProxy.get())

        # SIP features
        self.cfg.callConfig.prackUse = self.cfgPrackUse.get()
        self.cfg.callConfig.timerUse = self.cfgTimerUse.get()
        self.cfg.callConfig.timerSessExpiresSec = self.cfgTimerExpires.get()
        self.cfg.presConfig.publishEnabled = self.cfgPublish.get()
        self.cfg.mwiConfig.enabled = self.cfgMwiEnabled.get()
        # The *Use fields are ints, the Tk vars are booleans -> map to 1/0.
        self.cfg.natConfig.contactRewriteUse = 1 if self.cfgEnableContactRewrite.get() else 0
        self.cfg.natConfig.viaRewriteUse = 1 if self.cfgEnableViaRewrite.get() else 0
        self.cfg.natConfig.sdpNatRewriteUse = 1 if self.cfgEnableSdpRewrite.get() else 0
        self.cfg.natConfig.sipOutboundUse = 1 if self.cfgEnableSipOutbound.get() else 0
        self.cfg.natConfig.udpKaIntervalSec = self.cfgKaInterval.get()

        # Media
        self.cfg.mediaConfig.transportConfig.port = self.cfgMedPort.get()
        self.cfg.mediaConfig.transportConfig.portRange = self.cfgMedPortRange.get()
        self.cfg.mediaConfig.lockCodecEnabled = self.cfgMedLockCodec.get()
        self.cfg.mediaConfig.srtpUse = self.cfgMedSrtp.get()
        self.cfg.mediaConfig.srtpSecureSignaling = self.cfgMedSrtpSecure.get()
        self.cfg.mediaConfig.ipv6Use = pj.PJSUA_IPV6_ENABLED if self.cfgMedIpv6.get() else pj.PJSUA_IPV6_DISABLED

        # NAT
        self.cfg.natConfig.sipStunUse = self.cfgSipUseStun.get()
        self.cfg.natConfig.mediaStunUse = self.cfgMediaUseStun.get()
        self.cfg.natConfig.iceEnabled = self.cfgIceEnabled.get()
        self.cfg.natConfig.iceAggressiveNomination = self.cfgIceAggressive.get()
        self.cfg.natConfig.iceAlwaysUpdate = self.cfgAlwaysUpdate.get()
        # 0 disables host candidates, -1 means "no limit" (library default).
        self.cfg.natConfig.iceMaxHostCands = 0 if self.cfgIceNoHostCands.get() else -1
        self.cfg.natConfig.turnEnabled = self.cfgTurnEnabled.get()
        self.cfg.natConfig.turnServer = self.cfgTurnServer.get()
        self.cfg.natConfig.turnConnType = self.cfgTurnConnType.get()
        self.cfg.natConfig.turnUserName = self.cfgTurnUser.get()
        # 0 = plain-text password (PJ_STUN_PASSWD_PLAIN).  TODO confirm enum.
        self.cfg.natConfig.turnPasswordType = 0
        self.cfg.natConfig.turnPassword = self.cfgTurnPasswd.get()

        self.isOk = True
        self.destroy()

    def onCancel(self):
        """Close the dialog without touching self.cfg (self.isOk stays False)."""
        self.destroy()


if __name__ == '__main__':
    application.main()
gpl-2.0
fenginx/django
django/core/mail/backends/console.py
132
1402
""" Email backend that writes messages to console instead of sending them. """ import sys import threading from django.core.mail.backends.base import BaseEmailBackend class EmailBackend(BaseEmailBackend): def __init__(self, *args, **kwargs): self.stream = kwargs.pop('stream', sys.stdout) self._lock = threading.RLock() super().__init__(*args, **kwargs) def write_message(self, message): msg = message.message() msg_data = msg.as_bytes() charset = msg.get_charset().get_output_charset() if msg.get_charset() else 'utf-8' msg_data = msg_data.decode(charset) self.stream.write('%s\n' % msg_data) self.stream.write('-' * 79) self.stream.write('\n') def send_messages(self, email_messages): """Write all messages to the stream in a thread-safe way.""" if not email_messages: return msg_count = 0 with self._lock: try: stream_created = self.open() for message in email_messages: self.write_message(message) self.stream.flush() # flush after each message msg_count += 1 if stream_created: self.close() except Exception: if not self.fail_silently: raise return msg_count
bsd-3-clause
mycodeday/crm-platform
website_sale/models/payment.py
104
1117
# -*- coding: utf-8 -*- from openerp import SUPERUSER_ID from openerp.osv import orm, fields class PaymentTransaction(orm.Model): _inherit = 'payment.transaction' _columns = { # link with the sale order 'sale_order_id': fields.many2one('sale.order', 'Sale Order'), } def form_feedback(self, cr, uid, data, acquirer_name, context=None): """ Override to confirm the sale order, if defined, and if the transaction is done. """ tx = None res = super(PaymentTransaction, self).form_feedback(cr, uid, data, acquirer_name, context=context) # fetch the tx, check its state, confirm the potential SO tx_find_method_name = '_%s_form_get_tx_from_data' % acquirer_name if hasattr(self, tx_find_method_name): tx = getattr(self, tx_find_method_name)(cr, uid, data, context=context) if tx and tx.state == 'done' and tx.sale_order_id and tx.sale_order_id.state in ['draft', 'sent']: self.pool['sale.order'].action_button_confirm(cr, SUPERUSER_ID, [tx.sale_order_id.id], context=context) return res
gpl-3.0
jose36/plugin.video.ProyectoLuzDigital1
servers/turbobit.py
44
1401
# -*- coding: utf-8 -*-
#------------------------------------------------------------
# pelisalacarta - XBMC Plugin
# Conector para turbobit
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
#------------------------------------------------------------

import urlparse,urllib2,urllib,re
import os

from core import scrapertools
from core import logger
from core import config

def get_video_url( page_url , premium = False , user="" , password="", video_password="" ):
    """Resolve a turbobit page URL into playable video URLs.

    Currently unimplemented: always returns an empty list, so the
    caller treats turbobit links as non-playable.
    """
    logger.info("[turbobit.py] get_video_url(page_url='%s')" % page_url)
    video_urls = []
    return video_urls

# Encuentra vídeos del servidor en el texto pasado
def find_videos(data):
    """Scan *data* for turbobit links and return [title, url, server] triples.

    Duplicate URLs are reported once; matched examples:
      http://turbobit.net/scz8lxrrgllr.html
      http://www.turbobit.net/uzo3gcyfmt4b.html
      http://turbobit.net/eaz9ha3gop65/deadliest.catch.s08e09-killers.mp4.html
    """
    encontrados = set()
    devuelve = []

    # FIX: the dot must be escaped; the old pattern '(turbobit.net/...)'
    # also matched strings like 'turbobitXnet/...' and built broken URLs.
    patronvideos = r'(turbobit\.net/[0-9a-z]+)'
    logger.info("[turbobit.py] find_videos #"+patronvideos+"#")
    matches = re.compile(patronvideos,re.DOTALL).findall(data)

    for match in matches:
        titulo = "[turbobit]"
        # Normalize every hit to the canonical http://turbobit.net/<id>.html form.
        url = "http://"+match+".html"
        if url not in encontrados:
            logger.info("  url="+url)
            devuelve.append( [ titulo , url , 'turbobit' ] )
            encontrados.add(url)
        else:
            logger.info("  url duplicada="+url)

    return devuelve
gpl-2.0
mancoast/CPythonPyc_test
cpython/230_test_copy_reg.py
19
3456
# Tests for the copy_reg module (Python 2): pickle registration and the
# extension-code registry used by pickle protocol 2.
import copy_reg
import unittest

from test import test_support
from test.pickletester import ExtensionSaver


class C:
    # Minimal old-style class used as a pickle-registration target.
    pass


class CopyRegTestCase(unittest.TestCase):

    def test_class(self):
        # copy_reg.pickle() must reject a non-callable reduction function.
        self.assertRaises(TypeError, copy_reg.pickle,
                          C, None, None)

    def test_noncallable_reduce(self):
        self.assertRaises(TypeError, copy_reg.pickle,
                          type(1), "not a callable")

    def test_noncallable_constructor(self):
        self.assertRaises(TypeError, copy_reg.pickle,
                          type(1), int, "not a callable")

    def test_bool(self):
        # copy.copy must preserve bool identity (True stays True, not 1).
        import copy
        self.assertEquals(True, copy.copy(True))

    def test_extension_registry(self):
        mod, func, code = 'junk1 ', ' junk2', 0xabcd
        # ExtensionSaver snapshots any real registration under `code` so the
        # test can restore it afterwards.
        e = ExtensionSaver(code)
        try:
            # Shouldn't be in registry now.
            self.assertRaises(ValueError, copy_reg.remove_extension,
                              mod, func, code)
            copy_reg.add_extension(mod, func, code)
            # Should be in the registry.
            self.assert_(copy_reg._extension_registry[mod, func] == code)
            self.assert_(copy_reg._inverted_registry[code] == (mod, func))
            # Shouldn't be in the cache.
            self.assert_(code not in copy_reg._extension_cache)
            # Redundant registration should be OK.
            copy_reg.add_extension(mod, func, code)  # shouldn't blow up
            # Conflicting code.
            self.assertRaises(ValueError, copy_reg.add_extension,
                              mod, func, code + 1)
            self.assertRaises(ValueError, copy_reg.remove_extension,
                              mod, func, code + 1)
            # Conflicting module name.
            self.assertRaises(ValueError, copy_reg.add_extension,
                              mod[1:], func, code )
            self.assertRaises(ValueError, copy_reg.remove_extension,
                              mod[1:], func, code )
            # Conflicting function name.
            self.assertRaises(ValueError, copy_reg.add_extension,
                              mod, func[1:], code)
            self.assertRaises(ValueError, copy_reg.remove_extension,
                              mod, func[1:], code)
            # Can't remove one that isn't registered at all.
            if code + 1 not in copy_reg._inverted_registry:
                self.assertRaises(ValueError, copy_reg.remove_extension,
                                  mod[1:], func[1:], code + 1)
        finally:
            e.restore()

        # Shouldn't be there anymore.
        self.assert_((mod, func) not in copy_reg._extension_registry)
        # The code *may* be in copy_reg._extension_registry, though, if
        # we happened to pick on a registered code.  So don't check for
        # that.

        # Check valid codes at the limits.
        for code in 1, 0x7fffffff:
            e = ExtensionSaver(code)
            try:
                copy_reg.add_extension(mod, func, code)
                copy_reg.remove_extension(mod, func, code)
            finally:
                e.restore()

        # Ensure invalid codes blow up.
        # (0x80000000L is one past the max 31-bit code; the L suffix keeps
        # it a Python 2 long.)
        for code in -1, 0, 0x80000000L:
            self.assertRaises(ValueError, copy_reg.add_extension,
                              mod, func, code)


def test_main():
    test_support.run_unittest(CopyRegTestCase)


if __name__ == "__main__":
    test_main()
gpl-3.0
sserrot/champion_relationships
venv/Lib/site-packages/pythonwin/pywin/framework/intpyapp.py
7
15281
# intpyapp.py - Interactive Python application class
#
import win32con
import win32api
import win32ui
import __main__
import sys
import string
from . import app
import traceback
from pywin.mfc import window, afxres, dialog
import commctrl
from . import dbgcommands

lastLocateFileName = ".py"  # used in the "File/Locate" dialog...

# todo - _SetupSharedMenu should be moved to a framework class.
def _SetupSharedMenu_(self):
    """Attach the Tools and Help entries to a doc-template's shared menu."""
    sharedMenu = self.GetSharedMenu()
    from pywin.framework import toolmenu
    toolmenu.SetToolsMenu(sharedMenu)
    from pywin.framework import help
    help.SetHelpMenuOtherHelp(sharedMenu)

from pywin.mfc import docview
docview.DocTemplate._SetupSharedMenu_=_SetupSharedMenu_

class MainFrame(app.MainFrame):
    """Pythonwin's MDI main frame: toolbars, docking, close handling."""

    def OnCreate(self, createStruct):
        self.closing = 0
        if app.MainFrame.OnCreate(self, createStruct)==-1:
            return -1
        style = win32con.WS_CHILD | afxres.CBRS_SIZE_DYNAMIC | afxres.CBRS_TOP | afxres.CBRS_TOOLTIPS | afxres.CBRS_FLYBY

        self.EnableDocking(afxres.CBRS_ALIGN_ANY)

        tb = win32ui.CreateToolBar (self, style | win32con.WS_VISIBLE)
        tb.ModifyStyle(0, commctrl.TBSTYLE_FLAT)
        tb.LoadToolBar(win32ui.IDR_MAINFRAME)
        tb.EnableDocking(afxres.CBRS_ALIGN_ANY)
        tb.SetWindowText("Standard")
        self.DockControlBar(tb)
        # Any other packages which use toolbars
        from pywin.debugger.debugger import PrepareControlBars
        PrepareControlBars(self)
        # Note "interact" also uses dockable windows, but they already happen

        # And a "Tools" menu on the main frame.
        menu = self.GetMenu()
        from . import toolmenu
        toolmenu.SetToolsMenu(menu, 2)
        # And fix the "Help" menu on the main frame
        from pywin.framework import help
        help.SetHelpMenuOtherHelp(menu)

    def OnClose(self):
        # If the debugger is still pumping messages, close it first and
        # abort this close - a second close will then succeed.
        try:
            import pywin.debugger
            if pywin.debugger.currentDebugger is not None and pywin.debugger.currentDebugger.pumping:
                try:
                    pywin.debugger.currentDebugger.close(1)
                except:
                    traceback.print_exc()
                return
        except win32ui.error:
            pass
        self.closing = 1
        self.SaveBarState("ToolbarDefault")
        self.SetActiveView(None)  # Otherwise MFC's OnClose may _not_ prompt for save.

        from pywin.framework import help
        help.FinalizeHelp()

        self.DestroyControlBar(afxres.AFX_IDW_TOOLBAR)
        self.DestroyControlBar(win32ui.ID_VIEW_TOOLBAR_DBG)
        return self._obj_.OnClose()

    def DestroyControlBar(self, id):
        """Destroy the control bar with the given ID; ignore if absent."""
        try:
            bar = self.GetControlBar(id)
        except win32ui.error:
            return
        bar.DestroyWindow()

    def OnCommand(self, wparam, lparam):
        # By default, the current MDI child frame will process WM_COMMAND
        # messages before any docked control bars - even if the control bar
        # has focus.  This is a problem for the interactive window when docked.
        # Therefore, we detect the situation of a view having the main frame
        # as its parent, and assume it must be a docked view (which it will in an MDI app)
        try:
            v = self.GetActiveView() # Raise an exception if none - good - then we want default handling
            # Main frame _does_ have a current view (ie, a docking view) - see if it wants it.
            if v.OnCommand(wparam, lparam):
                return 1
        except (win32ui.error, AttributeError):
            pass
        return self._obj_.OnCommand(wparam, lparam)

class InteractivePythonApp(app.CApp):
    """The Pythonwin application object: DDE startup, command handlers,
    command-line processing and module loading."""

    # This works if necessary - just we dont need to override the Run method.
    # def Run(self):
    #     return self._obj_.Run()

    def HookCommands(self):
        app.CApp.HookCommands(self)
        dbgcommands.DebuggerCommandHandler().HookCommands()
        self.HookCommand(self.OnViewBrowse,win32ui.ID_VIEW_BROWSE)
        self.HookCommand(self.OnFileImport,win32ui.ID_FILE_IMPORT)
        self.HookCommand(self.OnFileCheck,win32ui.ID_FILE_CHECK)
        self.HookCommandUpdate(self.OnUpdateFileCheck, win32ui.ID_FILE_CHECK)
        self.HookCommand(self.OnFileRun,win32ui.ID_FILE_RUN)
        self.HookCommand(self.OnFileLocate,win32ui.ID_FILE_LOCATE)
        self.HookCommand(self.OnInteractiveWindow, win32ui.ID_VIEW_INTERACTIVE)
        self.HookCommandUpdate(self.OnUpdateInteractiveWindow, win32ui.ID_VIEW_INTERACTIVE)
        self.HookCommand(self.OnViewOptions, win32ui.ID_VIEW_OPTIONS)
        self.HookCommand(self.OnHelpIndex, afxres.ID_HELP_INDEX)
        self.HookCommand(self.OnFileSaveAll, win32ui.ID_FILE_SAVE_ALL)
        self.HookCommand(self.OnViewToolbarDbg, win32ui.ID_VIEW_TOOLBAR_DBG)
        self.HookCommandUpdate(self.OnUpdateViewToolbarDbg, win32ui.ID_VIEW_TOOLBAR_DBG)

    def CreateMainFrame(self):
        return MainFrame()

    def MakeExistingDDEConnection(self):
        # Use DDE to connect to an existing instance
        # Return None if no existing instance
        try:
            from . import intpydde
        except ImportError:
            # No dde support!
            return None
        conv = intpydde.CreateConversation(self.ddeServer)
        try:
            conv.ConnectTo("Pythonwin", "System")
            return conv
        except intpydde.error:
            return None

    def InitDDE(self):
        # Do all the magic DDE handling.
        # Returns TRUE if we have pumped the arguments to our
        # remote DDE app, and we should terminate.
        try:
            from . import intpydde
        except ImportError:
            self.ddeServer = None
            intpydde = None
        if intpydde is not None:
            self.ddeServer = intpydde.DDEServer(self)
            self.ddeServer.Create("Pythonwin", intpydde.CBF_FAIL_SELFCONNECTIONS )
            try:
                # If there is an existing instance, pump the arguments to it.
                connection = self.MakeExistingDDEConnection()
                if connection is not None:
                    if self.ProcessArgs(sys.argv, connection) is None:
                        return 1
            except:
                # It is too early to 'print' an exception - we
                # don't have stdout setup yet!
                win32ui.DisplayTraceback(sys.exc_info(), " - error in DDE conversation with Pythonwin")

    def InitInstance(self):
        # Allow "/nodde" and "/new" to optimize this!
        if "/nodde" not in sys.argv and "/new" not in sys.argv:
            if self.InitDDE():
                return 1  # A remote DDE client is doing it for us!
        else:
            self.ddeServer = None

        win32ui.SetRegistryKey("Python %s" % (sys.winver,))  # MFC automatically puts the main frame caption on!
        app.CApp.InitInstance(self)

        # Create the taskbar icon
        win32ui.CreateDebuggerThread()

        # Allow Pythonwin to host OCX controls.
        win32ui.EnableControlContainer()

        # Display the interactive window if the user wants it.
        from . import interact
        interact.CreateInteractiveWindowUserPreference()

        # Load the modules we use internally.
        self.LoadSystemModules()

        # Load additional module the user may want.
        self.LoadUserModules()

        # Load the ToolBar state near the end of the init process, as
        # there may be Toolbar IDs created by the user or other modules.
        # By now all these modules should be loaded, so all the toolbar IDs loaded.
        try:
            self.frame.LoadBarState("ToolbarDefault")
        except win32ui.error:
            # MFC sucks.  It does essentially "GetDlgItem(x)->Something", so if the
            # toolbar with ID x does not exist, MFC crashes!  Pythonwin has a trap for this
            # but I need to investigate more how to prevent it (AFAIK, ensuring all the
            # toolbars are created by now _should_ stop it!)
            pass

        # Finally process the command line arguments.
        self.ProcessArgs(sys.argv)

    def ExitInstance(self):
        win32ui.DestroyDebuggerThread()
        try:
            from . import interact
            interact.DestroyInteractiveWindow()
        except:
            pass
        if self.ddeServer is not None:
            self.ddeServer.Shutdown()
            self.ddeServer = None
        return app.CApp.ExitInstance(self)

    def Activate(self):
        # Bring to the foreground.  Mainly used when another app starts up, it asks
        # this one to activate itself, then it terminates.
        frame = win32ui.GetMainFrame()
        frame.SetForegroundWindow()
        if frame.GetWindowPlacement()[1]==win32con.SW_SHOWMINIMIZED:
            frame.ShowWindow(win32con.SW_RESTORE)

    def ProcessArgs(self, args, dde = None):
        """Handle the command line: /edit, /rundlg, /run, /app, /dde.

        If `dde` is given, forward the action to the remote instance
        instead of performing it locally.
        """
        # If we are going to talk to a remote app via DDE, then
        # activate it!
        if dde is not None:
            dde.Exec("self.Activate()")
        if len(args) and args[0] in ['/nodde','/new']:
            del args[0]  # already handled.
        if len(args)<1 or not args[0]:
            # argv[0]=='' when started without args, just like Python.exe!
            return

        try:
            if args[0] and args[0][0]!='/':
                argStart = 0
                argType = win32ui.GetProfileVal("Python","Default Arg Type","/edit").lower()
            else:
                argStart = 1
                argType = args[0]
            if argStart >= len(args):
                raise TypeError("The command line requires an additional arg.")
            if argType=="/edit":
                # Load up the default application.
                if dde:
                    fname = win32api.GetFullPathName(args[argStart])
                    dde.Exec("win32ui.GetApp().OpenDocumentFile(%s)" % (repr(fname)))
                else:
                    win32ui.GetApp().OpenDocumentFile(args[argStart])
            elif argType=="/rundlg":
                if dde:
                    dde.Exec("from pywin.framework import scriptutils;scriptutils.RunScript('%s', '%s', 1)" % (args[argStart], ' '.join(args[argStart+1:])))
                else:
                    from . import scriptutils
                    scriptutils.RunScript(args[argStart], ' '.join(args[argStart+1:]))
            elif argType=="/run":
                if dde:
                    dde.Exec("from pywin.framework import scriptutils;scriptutils.RunScript('%s', '%s', 0)" % (args[argStart], ' '.join(args[argStart+1:])))
                else:
                    from . import scriptutils
                    scriptutils.RunScript(args[argStart], ' '.join(args[argStart+1:]), 0)
            elif argType=="/app":
                raise RuntimeError("/app only supported for new instances of Pythonwin.exe")
            elif argType=='/dde':
                # Send arbitary command
                if dde is not None:
                    dde.Exec(args[argStart])
                else:
                    win32ui.MessageBox("The /dde command can only be used\r\nwhen Pythonwin is already running")
            else:
                raise TypeError("Command line arguments not recognised")
        except:
            # too early for print anything.
            win32ui.DisplayTraceback(sys.exc_info(), " - error processing command line args")

    def LoadSystemModules(self):
        self.DoLoadModules("pywin.framework.editor,pywin.framework.stdin")

    def LoadUserModules(self, moduleNames = None):
        # Load the users modules.
        if moduleNames is None:
            default = "pywin.framework.sgrepmdi,pywin.framework.mdi_pychecker"
            moduleNames=win32ui.GetProfileVal('Python','Startup Modules',default)
        self.DoLoadModules(moduleNames)

    def DoLoadModules(self, moduleNames):  # ',' sep string of module names.
        if not moduleNames:
            return
        modules = moduleNames.split(",")
        for module in modules:
            try:
                __import__(module)
            except:
                # Catch em all, else the app itself dies!
                # (FIX: removed a stray "'ImportError:" token that corrupted
                # this handler in the previous revision.)
                traceback.print_exc()
                msg = 'Startup import of user module "%s" failed' % module
                print(msg)
                win32ui.MessageBox(msg)

    #
    # DDE Callback
    #
    def OnDDECommand(self, command):
        try:
            exec(command + "\n")
        except:
            print("ERROR executing DDE command: ", command)
            traceback.print_exc()
            raise

    #
    # General handlers
    #
    def OnViewBrowse( self, id, code ):
        " Called when ViewBrowse message is received "
        from pywin.mfc import dialog
        from pywin.tools import browser
        obName = dialog.GetSimpleInput('Object', '__builtins__', 'Browse Python Object')
        if obName is None:
            return
        try:
            browser.Browse(eval(obName, __main__.__dict__, __main__.__dict__))
        except NameError:
            win32ui.MessageBox('This is no object with this name')
        except AttributeError:
            win32ui.MessageBox('The object has no attribute of that name')
        except:
            traceback.print_exc()
            win32ui.MessageBox('This object can not be browsed')

    def OnFileImport( self, id, code ):
        " Called when a FileImport message is received. Import the current or specified file"
        from . import scriptutils
        scriptutils.ImportFile()

    def OnFileCheck( self, id, code ):
        " Called when a FileCheck message is received. Check the current file."
        from . import scriptutils
        scriptutils.CheckFile()

    def OnUpdateFileCheck(self, cmdui):
        from . import scriptutils
        cmdui.Enable( scriptutils.GetActiveFileName(0) is not None )

    def OnFileRun( self, id, code ):
        " Called when a FileRun message is received. "
        from . import scriptutils
        showDlg = win32api.GetKeyState(win32con.VK_SHIFT) >= 0
        scriptutils.RunScript(None, None, showDlg)

    def OnFileLocate( self, id, code ):
        from pywin.mfc import dialog
        from . import scriptutils
        import os
        global lastLocateFileName  # save the new version away for next time...

        name = dialog.GetSimpleInput('File name', lastLocateFileName, 'Locate Python File')
        if name is None:  # Cancelled.
            return
        lastLocateFileName = name
        # if ".py" supplied, rip it off!
        # should also check for .pys and .pyw
        if lastLocateFileName[-3:].lower()=='.py':
            lastLocateFileName = lastLocateFileName[:-3]
        lastLocateFileName = lastLocateFileName.replace(".","\\")
        newName = scriptutils.LocatePythonFile(lastLocateFileName)
        if newName is None:
            win32ui.MessageBox("The file '%s' can not be located" % lastLocateFileName)
        else:
            win32ui.GetApp().OpenDocumentFile(newName)

    # Display all the "options" proprety pages we can find
    def OnViewOptions(self, id, code):
        win32ui.InitRichEdit()
        sheet = dialog.PropertySheet("Pythonwin Options")
        # Add property pages we know about that need manual work.
        from pywin.dialogs import ideoptions
        sheet.AddPage( ideoptions.OptionsPropPage() )

        from . import toolmenu
        sheet.AddPage( toolmenu.ToolMenuPropPage() )

        # Get other dynamic pages from templates.
        pages = []
        for template in self.GetDocTemplateList():
            try:
                # Dont actually call the function with the exception handler.
                getter = template.GetPythonPropertyPages
            except AttributeError:
                # Template does not provide property pages!
                continue
            pages = pages + getter()

        # Debugger template goes at the end
        try:
            from pywin.debugger import configui
        except ImportError:
            configui = None
        if configui is not None:
            pages.append(configui.DebuggerOptionsPropPage())
        # Now simply add the pages, and display the dialog.
        for page in pages:
            sheet.AddPage(page)

        if sheet.DoModal()==win32con.IDOK:
            win32ui.SetStatusText("Applying configuration changes...", 1)
            win32ui.DoWaitCursor(1)
            # Tell every Window in our app that win.ini has changed!
            win32ui.GetMainFrame().SendMessageToDescendants(win32con.WM_WININICHANGE, 0, 0)
            win32ui.DoWaitCursor(0)

    def OnInteractiveWindow(self, id, code):
        # toggle the existing state.
        from . import interact
        interact.ToggleInteractiveWindow()

    def OnUpdateInteractiveWindow(self, cmdui):
        try:
            interact=sys.modules['pywin.framework.interact']
            state = interact.IsInteractiveWindowVisible()
        except KeyError:
            # Interactive module hasnt ever been imported.
            state = 0
        cmdui.Enable()
        cmdui.SetCheck(state)

    def OnFileSaveAll(self, id, code):
        # Only attempt to save editor documents.
        from pywin.framework.editor import editorTemplate
        num = 0
        for doc in editorTemplate.GetDocumentList():
            if doc.IsModified() and doc.GetPathName():
                # FIX: was "num = num = 1", which pinned the counter at 1
                # no matter how many documents were saved.
                num = num + 1
                doc.OnSaveDocument(doc.GetPathName())
        win32ui.SetStatusText("%d documents saved" % num, 1)

    def OnViewToolbarDbg(self, id, code):
        if code==0:
            return not win32ui.GetMainFrame().OnBarCheck(id)

    def OnUpdateViewToolbarDbg(self, cmdui):
        win32ui.GetMainFrame().OnUpdateControlBarMenu(cmdui)
        cmdui.Enable(1)

    def OnHelpIndex( self, id, code ):
        from . import help
        help.SelectAndRunHelpFile()

# As per the comments in app.py, this use is depreciated.
# app.AppBuilder = InteractivePythonApp

# Now all we do is create the application
thisApp = InteractivePythonApp()
mit
eddiep1101/django-oscar
src/oscar/core/decorators.py
15
1104
try:
    from types import ClassType
except ImportError:
    # Python 3
    CHECK_TYPES = (type,)
else:
    # Python 2: new and old-style classes
    CHECK_TYPES = (type, ClassType)

import functools
import warnings


def deprecated(obj):
    """Decorator marking a function or class as deprecated.

    Using the decorated object emits a DeprecationWarning; behaviour is
    otherwise unchanged.  Works on both classes and callables.
    """
    if isinstance(obj, CHECK_TYPES):
        return _deprecated_cls(cls=obj)
    else:
        return _deprecated_func(f=obj)


def _deprecated_func(f):
    """Wrap function *f* so each call warns before delegating to it."""
    # FIX: use functools.wraps so the wrapper keeps f's __name__/__doc__;
    # previously every deprecated function was renamed '_deprecated'.
    @functools.wraps(f)
    def _deprecated(*args, **kwargs):
        message = "Method '%s' is deprecated and will be " \
            "removed in the next major version of django-oscar" \
            % f.__name__
        warnings.warn(message, DeprecationWarning, stacklevel=2)
        return f(*args, **kwargs)
    return _deprecated


def _deprecated_cls(cls):
    """Subclass *cls* so instantiation warns before calling __init__."""
    class Deprecated(cls):
        def __init__(self, *args, **kwargs):
            message = "Class '%s' is deprecated and will be " \
                "removed in the next major version of django-oscar" \
                % cls.__name__
            warnings.warn(message, DeprecationWarning, stacklevel=2)
            super(Deprecated, self).__init__(*args, **kwargs)
    # FIX: keep the original class name visible in reprs/tracebacks.
    Deprecated.__name__ = cls.__name__
    return Deprecated
bsd-3-clause
MalloyPower/parsing-python
front-end/testsuite-python-lib/Python-2.4.3/Lib/test/test_unicode_file.py
32
9201
# Test some Unicode file name semantics # We dont test many operations on files other than # that their names can be used with Unicode characters. import os, glob, time, shutil import unicodedata import unittest from test.test_support import run_suite, TestSkipped, TESTFN_UNICODE from test.test_support import TESTFN_ENCODING, TESTFN_UNICODE_UNENCODEABLE try: TESTFN_ENCODED = TESTFN_UNICODE.encode(TESTFN_ENCODING) except (UnicodeError, TypeError): # Either the file system encoding is None, or the file name # cannot be encoded in the file system encoding. raise TestSkipped("No Unicode filesystem semantics on this platform.") if TESTFN_ENCODED.decode(TESTFN_ENCODING) != TESTFN_UNICODE: # The file system encoding does not support Latin-1 # (which test_support assumes), so try the file system # encoding instead. import sys try: TESTFN_UNICODE = unicode("@test-\xe0\xf2", sys.getfilesystemencoding()) TESTFN_ENCODED = TESTFN_UNICODE.encode(TESTFN_ENCODING) if '?' in TESTFN_ENCODED: # MBCS will not report the error properly raise UnicodeError, "mbcs encoding problem" except (UnicodeError, TypeError): raise TestSkipped("Cannot find a suiteable filename.") if TESTFN_ENCODED.decode(TESTFN_ENCODING) != TESTFN_UNICODE: raise TestSkipped("Cannot find a suitable filename.") def remove_if_exists(filename): if os.path.exists(filename): os.unlink(filename) class TestUnicodeFiles(unittest.TestCase): # The 'do_' functions are the actual tests. They generally assume the # file already exists etc. # Do all the tests we can given only a single filename. The file should # exist. 
def _do_single(self, filename): self.failUnless(os.path.exists(filename)) self.failUnless(os.path.isfile(filename)) self.failUnless(os.access(filename, os.R_OK)) self.failUnless(os.path.exists(os.path.abspath(filename))) self.failUnless(os.path.isfile(os.path.abspath(filename))) self.failUnless(os.access(os.path.abspath(filename), os.R_OK)) os.chmod(filename, 0777) os.utime(filename, None) os.utime(filename, (time.time(), time.time())) # Copy/rename etc tests using the same filename self._do_copyish(filename, filename) # Filename should appear in glob output self.failUnless( os.path.abspath(filename)==os.path.abspath(glob.glob(filename)[0])) # basename should appear in listdir. path, base = os.path.split(os.path.abspath(filename)) if isinstance(base, str): base = base.decode(TESTFN_ENCODING) file_list = os.listdir(path) # listdir() with a unicode arg may or may not return Unicode # objects, depending on the platform. if file_list and isinstance(file_list[0], str): file_list = [f.decode(TESTFN_ENCODING) for f in file_list] # Normalize the unicode strings, as round-tripping the name via the OS # may return a different (but equivalent) value. base = unicodedata.normalize("NFD", base) file_list = [unicodedata.normalize("NFD", f) for f in file_list] self.failUnless(base in file_list) # Do as many "equivalancy' tests as we can - ie, check that although we # have different types for the filename, they refer to the same file. def _do_equivilent(self, filename1, filename2): # Note we only check "filename1 against filename2" - we don't bother # checking "filename2 against 1", as we assume we are called again with # the args reversed. self.failUnless(type(filename1)!=type(filename2), "No point checking equivalent filenames of the same type") # stat and lstat should return the same results. 
self.failUnlessEqual(os.stat(filename1), os.stat(filename2)) self.failUnlessEqual(os.lstat(filename1), os.lstat(filename2)) # Copy/rename etc tests using equivalent filename self._do_copyish(filename1, filename2) # Tests that copy, move, etc one file to another. def _do_copyish(self, filename1, filename2): # Should be able to rename the file using either name. self.failUnless(os.path.isfile(filename1)) # must exist. os.rename(filename1, filename2 + ".new") self.failUnless(os.path.isfile(filename1+".new")) os.rename(filename1 + ".new", filename2) self.failUnless(os.path.isfile(filename2)) # Try using shutil on the filenames. try: filename1==filename2 except UnicodeDecodeError: # these filenames can't be compared - shutil.copy tries to do # just that. This is really a bug in 'shutil' - if one of shutil's # 2 params are Unicode and the other isn't, it should coerce the # string to Unicode with the filesystem encoding before comparison. pass else: # filenames can be compared. shutil.copy(filename1, filename2 + ".new") os.unlink(filename1 + ".new") # remove using equiv name. # And a couple of moves, one using each name. shutil.move(filename1, filename2 + ".new") self.failUnless(not os.path.exists(filename2)) shutil.move(filename1 + ".new", filename2) self.failUnless(os.path.exists(filename1)) # Note - due to the implementation of shutil.move, # it tries a rename first. This only fails on Windows when on # different file systems - and this test can't ensure that. # So we test the shutil.copy2 function, which is the thing most # likely to fail. 
shutil.copy2(filename1, filename2 + ".new") os.unlink(filename1 + ".new") def _do_directory(self, make_name, chdir_name, encoded): cwd = os.getcwd() if os.path.isdir(make_name): os.rmdir(make_name) os.mkdir(make_name) try: os.chdir(chdir_name) try: if not encoded: cwd_result = os.getcwdu() name_result = make_name else: cwd_result = os.getcwd().decode(TESTFN_ENCODING) name_result = make_name.decode(TESTFN_ENCODING) cwd_result = unicodedata.normalize("NFD", cwd_result) name_result = unicodedata.normalize("NFD", name_result) self.failUnlessEqual(os.path.basename(cwd_result),name_result) finally: os.chdir(cwd) finally: os.rmdir(make_name) # The '_test' functions 'entry points with params' - ie, what the # top-level 'test' functions would be if they could take params def _test_single(self, filename): remove_if_exists(filename) f = file(filename, "w") f.close() try: self._do_single(filename) finally: os.unlink(filename) self.failUnless(not os.path.exists(filename)) # and again with os.open. f = os.open(filename, os.O_CREAT) os.close(f) try: self._do_single(filename) finally: os.unlink(filename) def _test_equivalent(self, filename1, filename2): remove_if_exists(filename1) self.failUnless(not os.path.exists(filename2)) f = file(filename1, "w") f.close() try: self._do_equivilent(filename1, filename2) finally: os.unlink(filename1) # The 'test' functions are unittest entry points, and simply call our # _test functions with each of the filename combinations we wish to test def test_single_files(self): self._test_single(TESTFN_ENCODED) self._test_single(TESTFN_UNICODE) if TESTFN_UNICODE_UNENCODEABLE is not None: self._test_single(TESTFN_UNICODE_UNENCODEABLE) def test_equivalent_files(self): self._test_equivalent(TESTFN_ENCODED, TESTFN_UNICODE) self._test_equivalent(TESTFN_UNICODE, TESTFN_ENCODED) def test_directories(self): # For all 'equivilent' combinations: # Make dir with encoded, chdir with unicode, checkdir with encoded # (or unicode/encoded/unicode, etc ext = ".dir" 
self._do_directory(TESTFN_ENCODED+ext, TESTFN_ENCODED+ext, True) self._do_directory(TESTFN_ENCODED+ext, TESTFN_UNICODE+ext, True) self._do_directory(TESTFN_UNICODE+ext, TESTFN_ENCODED+ext, False) self._do_directory(TESTFN_UNICODE+ext, TESTFN_UNICODE+ext, False) # Our directory name that can't use a non-unicode name. if TESTFN_UNICODE_UNENCODEABLE is not None: self._do_directory(TESTFN_UNICODE_UNENCODEABLE+ext, TESTFN_UNICODE_UNENCODEABLE+ext, False) def test_main(): suite = unittest.TestSuite() suite.addTest(unittest.makeSuite(TestUnicodeFiles)) run_suite(suite) if __name__ == "__main__": test_main()
mit
captainpete/rethinkdb
test/rql_test/connections/http_support/werkzeug/testsuite/datastructures.py
145
28212
# -*- coding: utf-8 -*- """ werkzeug.testsuite.datastructures ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Tests the functionality of the provided Werkzeug datastructures. TODO: - FileMultiDict - Immutable types undertested - Split up dict tests :copyright: (c) 2014 by Armin Ronacher. :license: BSD, see LICENSE for more details. """ from __future__ import with_statement import unittest import pickle from contextlib import contextmanager from copy import copy, deepcopy from werkzeug import datastructures from werkzeug._compat import iterkeys, itervalues, iteritems, iterlists, \ iterlistvalues, text_type, PY2 from werkzeug.testsuite import WerkzeugTestCase from werkzeug.exceptions import BadRequestKeyError class NativeItermethodsTestCase(WerkzeugTestCase): def test_basic(self): @datastructures.native_itermethods(['keys', 'values', 'items']) class StupidDict(object): def keys(self, multi=1): return iter(['a', 'b', 'c'] * multi) def values(self, multi=1): return iter([1, 2, 3] * multi) def items(self, multi=1): return iter(zip(iterkeys(self, multi=multi), itervalues(self, multi=multi))) d = StupidDict() expected_keys = ['a', 'b', 'c'] expected_values = [1, 2, 3] expected_items = list(zip(expected_keys, expected_values)) self.assert_equal(list(iterkeys(d)), expected_keys) self.assert_equal(list(itervalues(d)), expected_values) self.assert_equal(list(iteritems(d)), expected_items) self.assert_equal(list(iterkeys(d, 2)), expected_keys * 2) self.assert_equal(list(itervalues(d, 2)), expected_values * 2) self.assert_equal(list(iteritems(d, 2)), expected_items * 2) class MutableMultiDictBaseTestCase(WerkzeugTestCase): storage_class = None def test_pickle(self): cls = self.storage_class for protocol in range(pickle.HIGHEST_PROTOCOL + 1): d = cls() d.setlist(b'foo', [1, 2, 3, 4]) d.setlist(b'bar', b'foo bar baz'.split()) s = pickle.dumps(d, protocol) ud = pickle.loads(s) self.assert_equal(type(ud), type(d)) self.assert_equal(ud, d) self.assert_equal(pickle.loads( 
s.replace(b'werkzeug.datastructures', b'werkzeug')), d) ud[b'newkey'] = b'bla' self.assert_not_equal(ud, d) def test_basic_interface(self): md = self.storage_class() assert isinstance(md, dict) mapping = [('a', 1), ('b', 2), ('a', 2), ('d', 3), ('a', 1), ('a', 3), ('d', 4), ('c', 3)] md = self.storage_class(mapping) # simple getitem gives the first value self.assert_equal(md['a'], 1) self.assert_equal(md['c'], 3) with self.assert_raises(KeyError): md['e'] self.assert_equal(md.get('a'), 1) # list getitem self.assert_equal(md.getlist('a'), [1, 2, 1, 3]) self.assert_equal(md.getlist('d'), [3, 4]) # do not raise if key not found self.assert_equal(md.getlist('x'), []) # simple setitem overwrites all values md['a'] = 42 self.assert_equal(md.getlist('a'), [42]) # list setitem md.setlist('a', [1, 2, 3]) self.assert_equal(md['a'], 1) self.assert_equal(md.getlist('a'), [1, 2, 3]) # verify that it does not change original lists l1 = [1, 2, 3] md.setlist('a', l1) del l1[:] self.assert_equal(md['a'], 1) # setdefault, setlistdefault self.assert_equal(md.setdefault('u', 23), 23) self.assert_equal(md.getlist('u'), [23]) del md['u'] md.setlist('u', [-1, -2]) # delitem del md['u'] with self.assert_raises(KeyError): md['u'] del md['d'] self.assert_equal(md.getlist('d'), []) # keys, values, items, lists self.assert_equal(list(sorted(md.keys())), ['a', 'b', 'c']) self.assert_equal(list(sorted(iterkeys(md))), ['a', 'b', 'c']) self.assert_equal(list(sorted(itervalues(md))), [1, 2, 3]) self.assert_equal(list(sorted(itervalues(md))), [1, 2, 3]) self.assert_equal(list(sorted(md.items())), [('a', 1), ('b', 2), ('c', 3)]) self.assert_equal(list(sorted(md.items(multi=True))), [('a', 1), ('a', 2), ('a', 3), ('b', 2), ('c', 3)]) self.assert_equal(list(sorted(iteritems(md))), [('a', 1), ('b', 2), ('c', 3)]) self.assert_equal(list(sorted(iteritems(md, multi=True))), [('a', 1), ('a', 2), ('a', 3), ('b', 2), ('c', 3)]) self.assert_equal(list(sorted(md.lists())), [('a', [1, 2, 3]), ('b', [2]), ('c', 
[3])]) self.assert_equal(list(sorted(iterlists(md))), [('a', [1, 2, 3]), ('b', [2]), ('c', [3])]) # copy method c = md.copy() self.assert_equal(c['a'], 1) self.assert_equal(c.getlist('a'), [1, 2, 3]) # copy method 2 c = copy(md) self.assert_equal(c['a'], 1) self.assert_equal(c.getlist('a'), [1, 2, 3]) # deepcopy method c = md.deepcopy() self.assert_equal(c['a'], 1) self.assert_equal(c.getlist('a'), [1, 2, 3]) # deepcopy method 2 c = deepcopy(md) self.assert_equal(c['a'], 1) self.assert_equal(c.getlist('a'), [1, 2, 3]) # update with a multidict od = self.storage_class([('a', 4), ('a', 5), ('y', 0)]) md.update(od) self.assert_equal(md.getlist('a'), [1, 2, 3, 4, 5]) self.assert_equal(md.getlist('y'), [0]) # update with a regular dict md = c od = {'a': 4, 'y': 0} md.update(od) self.assert_equal(md.getlist('a'), [1, 2, 3, 4]) self.assert_equal(md.getlist('y'), [0]) # pop, poplist, popitem, popitemlist self.assert_equal(md.pop('y'), 0) assert 'y' not in md self.assert_equal(md.poplist('a'), [1, 2, 3, 4]) assert 'a' not in md self.assert_equal(md.poplist('missing'), []) # remaining: b=2, c=3 popped = md.popitem() assert popped in [('b', 2), ('c', 3)] popped = md.popitemlist() assert popped in [('b', [2]), ('c', [3])] # type conversion md = self.storage_class({'a': '4', 'b': ['2', '3']}) self.assert_equal(md.get('a', type=int), 4) self.assert_equal(md.getlist('b', type=int), [2, 3]) # repr md = self.storage_class([('a', 1), ('a', 2), ('b', 3)]) assert "('a', 1)" in repr(md) assert "('a', 2)" in repr(md) assert "('b', 3)" in repr(md) # add and getlist md.add('c', '42') md.add('c', '23') self.assert_equal(md.getlist('c'), ['42', '23']) md.add('c', 'blah') self.assert_equal(md.getlist('c', type=int), [42, 23]) # setdefault md = self.storage_class() md.setdefault('x', []).append(42) md.setdefault('x', []).append(23) self.assert_equal(md['x'], [42, 23]) # to dict md = self.storage_class() md['foo'] = 42 md.add('bar', 1) md.add('bar', 2) self.assert_equal(md.to_dict(), {'foo': 
42, 'bar': 1}) self.assert_equal(md.to_dict(flat=False), {'foo': [42], 'bar': [1, 2]}) # popitem from empty dict with self.assert_raises(KeyError): self.storage_class().popitem() with self.assert_raises(KeyError): self.storage_class().popitemlist() # key errors are of a special type with self.assert_raises(BadRequestKeyError): self.storage_class()[42] # setlist works md = self.storage_class() md['foo'] = 42 md.setlist('foo', [1, 2]) self.assert_equal(md.getlist('foo'), [1, 2]) class ImmutableDictBaseTestCase(WerkzeugTestCase): storage_class = None def test_follows_dict_interface(self): cls = self.storage_class data = {'foo': 1, 'bar': 2, 'baz': 3} d = cls(data) self.assert_equal(d['foo'], 1) self.assert_equal(d['bar'], 2) self.assert_equal(d['baz'], 3) self.assert_equal(sorted(d.keys()), ['bar', 'baz', 'foo']) self.assert_true('foo' in d) self.assert_true('foox' not in d) self.assert_equal(len(d), 3) def test_copies_are_mutable(self): cls = self.storage_class immutable = cls({'a': 1}) with self.assert_raises(TypeError): immutable.pop('a') mutable = immutable.copy() mutable.pop('a') self.assert_true('a' in immutable) self.assert_true(mutable is not immutable) self.assert_true(copy(immutable) is immutable) def test_dict_is_hashable(self): cls = self.storage_class immutable = cls({'a': 1, 'b': 2}) immutable2 = cls({'a': 2, 'b': 2}) x = set([immutable]) self.assert_true(immutable in x) self.assert_true(immutable2 not in x) x.discard(immutable) self.assert_true(immutable not in x) self.assert_true(immutable2 not in x) x.add(immutable2) self.assert_true(immutable not in x) self.assert_true(immutable2 in x) x.add(immutable) self.assert_true(immutable in x) self.assert_true(immutable2 in x) class ImmutableTypeConversionDictTestCase(ImmutableDictBaseTestCase): storage_class = datastructures.ImmutableTypeConversionDict class ImmutableMultiDictTestCase(ImmutableDictBaseTestCase): storage_class = datastructures.ImmutableMultiDict def test_multidict_is_hashable(self): cls = 
self.storage_class immutable = cls({'a': [1, 2], 'b': 2}) immutable2 = cls({'a': [1], 'b': 2}) x = set([immutable]) self.assert_true(immutable in x) self.assert_true(immutable2 not in x) x.discard(immutable) self.assert_true(immutable not in x) self.assert_true(immutable2 not in x) x.add(immutable2) self.assert_true(immutable not in x) self.assert_true(immutable2 in x) x.add(immutable) self.assert_true(immutable in x) self.assert_true(immutable2 in x) class ImmutableDictTestCase(ImmutableDictBaseTestCase): storage_class = datastructures.ImmutableDict class ImmutableOrderedMultiDictTestCase(ImmutableDictBaseTestCase): storage_class = datastructures.ImmutableOrderedMultiDict def test_ordered_multidict_is_hashable(self): a = self.storage_class([('a', 1), ('b', 1), ('a', 2)]) b = self.storage_class([('a', 1), ('a', 2), ('b', 1)]) self.assert_not_equal(hash(a), hash(b)) class MultiDictTestCase(MutableMultiDictBaseTestCase): storage_class = datastructures.MultiDict def test_multidict_pop(self): make_d = lambda: self.storage_class({'foo': [1, 2, 3, 4]}) d = make_d() self.assert_equal(d.pop('foo'), 1) assert not d d = make_d() self.assert_equal(d.pop('foo', 32), 1) assert not d d = make_d() self.assert_equal(d.pop('foos', 32), 32) assert d with self.assert_raises(KeyError): d.pop('foos') def test_setlistdefault(self): md = self.storage_class() self.assert_equal(md.setlistdefault('u', [-1, -2]), [-1, -2]) self.assert_equal(md.getlist('u'), [-1, -2]) self.assert_equal(md['u'], -1) def test_iter_interfaces(self): mapping = [('a', 1), ('b', 2), ('a', 2), ('d', 3), ('a', 1), ('a', 3), ('d', 4), ('c', 3)] md = self.storage_class(mapping) self.assert_equal(list(zip(md.keys(), md.listvalues())), list(md.lists())) self.assert_equal(list(zip(md, iterlistvalues(md))), list(iterlists(md))) self.assert_equal(list(zip(iterkeys(md), iterlistvalues(md))), list(iterlists(md))) class OrderedMultiDictTestCase(MutableMultiDictBaseTestCase): storage_class = datastructures.OrderedMultiDict def 
test_ordered_interface(self): cls = self.storage_class d = cls() assert not d d.add('foo', 'bar') self.assert_equal(len(d), 1) d.add('foo', 'baz') self.assert_equal(len(d), 1) self.assert_equal(list(iteritems(d)), [('foo', 'bar')]) self.assert_equal(list(d), ['foo']) self.assert_equal(list(iteritems(d, multi=True)), [('foo', 'bar'), ('foo', 'baz')]) del d['foo'] assert not d self.assert_equal(len(d), 0) self.assert_equal(list(d), []) d.update([('foo', 1), ('foo', 2), ('bar', 42)]) d.add('foo', 3) self.assert_equal(d.getlist('foo'), [1, 2, 3]) self.assert_equal(d.getlist('bar'), [42]) self.assert_equal(list(iteritems(d)), [('foo', 1), ('bar', 42)]) expected = ['foo', 'bar'] self.assert_sequence_equal(list(d.keys()), expected) self.assert_sequence_equal(list(d), expected) self.assert_sequence_equal(list(iterkeys(d)), expected) self.assert_equal(list(iteritems(d, multi=True)), [('foo', 1), ('foo', 2), ('bar', 42), ('foo', 3)]) self.assert_equal(len(d), 2) self.assert_equal(d.pop('foo'), 1) assert d.pop('blafasel', None) is None self.assert_equal(d.pop('blafasel', 42), 42) self.assert_equal(len(d), 1) self.assert_equal(d.poplist('bar'), [42]) assert not d d.get('missingkey') is None d.add('foo', 42) d.add('foo', 23) d.add('bar', 2) d.add('foo', 42) self.assert_equal(d, datastructures.MultiDict(d)) id = self.storage_class(d) self.assert_equal(d, id) d.add('foo', 2) assert d != id d.update({'blah': [1, 2, 3]}) self.assert_equal(d['blah'], 1) self.assert_equal(d.getlist('blah'), [1, 2, 3]) # setlist works d = self.storage_class() d['foo'] = 42 d.setlist('foo', [1, 2]) self.assert_equal(d.getlist('foo'), [1, 2]) with self.assert_raises(BadRequestKeyError): d.pop('missing') with self.assert_raises(BadRequestKeyError): d['missing'] # popping d = self.storage_class() d.add('foo', 23) d.add('foo', 42) d.add('foo', 1) self.assert_equal(d.popitem(), ('foo', 23)) with self.assert_raises(BadRequestKeyError): d.popitem() assert not d d.add('foo', 23) d.add('foo', 42) d.add('foo', 
1) self.assert_equal(d.popitemlist(), ('foo', [23, 42, 1])) with self.assert_raises(BadRequestKeyError): d.popitemlist() def test_iterables(self): a = datastructures.MultiDict((("key_a", "value_a"),)) b = datastructures.MultiDict((("key_b", "value_b"),)) ab = datastructures.CombinedMultiDict((a,b)) self.assert_equal(sorted(ab.lists()), [('key_a', ['value_a']), ('key_b', ['value_b'])]) self.assert_equal(sorted(ab.listvalues()), [['value_a'], ['value_b']]) self.assert_equal(sorted(ab.keys()), ["key_a", "key_b"]) self.assert_equal(sorted(iterlists(ab)), [('key_a', ['value_a']), ('key_b', ['value_b'])]) self.assert_equal(sorted(iterlistvalues(ab)), [['value_a'], ['value_b']]) self.assert_equal(sorted(iterkeys(ab)), ["key_a", "key_b"]) class CombinedMultiDictTestCase(WerkzeugTestCase): storage_class = datastructures.CombinedMultiDict def test_basic_interface(self): d1 = datastructures.MultiDict([('foo', '1')]) d2 = datastructures.MultiDict([('bar', '2'), ('bar', '3')]) d = self.storage_class([d1, d2]) # lookup self.assert_equal(d['foo'], '1') self.assert_equal(d['bar'], '2') self.assert_equal(d.getlist('bar'), ['2', '3']) self.assert_equal(sorted(d.items()), [('bar', '2'), ('foo', '1')]) self.assert_equal(sorted(d.items(multi=True)), [('bar', '2'), ('bar', '3'), ('foo', '1')]) assert 'missingkey' not in d assert 'foo' in d # type lookup self.assert_equal(d.get('foo', type=int), 1) self.assert_equal(d.getlist('bar', type=int), [2, 3]) # get key errors for missing stuff with self.assert_raises(KeyError): d['missing'] # make sure that they are immutable with self.assert_raises(TypeError): d['foo'] = 'blub' # copies are immutable d = d.copy() with self.assert_raises(TypeError): d['foo'] = 'blub' # make sure lists merges md1 = datastructures.MultiDict((("foo", "bar"),)) md2 = datastructures.MultiDict((("foo", "blafasel"),)) x = self.storage_class((md1, md2)) self.assert_equal(list(iterlists(x)), [('foo', ['bar', 'blafasel'])]) class HeadersTestCase(WerkzeugTestCase): 
storage_class = datastructures.Headers def test_basic_interface(self): headers = self.storage_class() headers.add('Content-Type', 'text/plain') headers.add('X-Foo', 'bar') assert 'x-Foo' in headers assert 'Content-type' in headers headers['Content-Type'] = 'foo/bar' self.assert_equal(headers['Content-Type'], 'foo/bar') self.assert_equal(len(headers.getlist('Content-Type')), 1) # list conversion self.assert_equal(headers.to_wsgi_list(), [ ('Content-Type', 'foo/bar'), ('X-Foo', 'bar') ]) self.assert_equal(str(headers), ( "Content-Type: foo/bar\r\n" "X-Foo: bar\r\n" "\r\n")) self.assert_equal(str(self.storage_class()), "\r\n") # extended add headers.add('Content-Disposition', 'attachment', filename='foo') self.assert_equal(headers['Content-Disposition'], 'attachment; filename=foo') headers.add('x', 'y', z='"') self.assert_equal(headers['x'], r'y; z="\""') def test_defaults_and_conversion(self): # defaults headers = self.storage_class([ ('Content-Type', 'text/plain'), ('X-Foo', 'bar'), ('X-Bar', '1'), ('X-Bar', '2') ]) self.assert_equal(headers.getlist('x-bar'), ['1', '2']) self.assert_equal(headers.get('x-Bar'), '1') self.assert_equal(headers.get('Content-Type'), 'text/plain') self.assert_equal(headers.setdefault('X-Foo', 'nope'), 'bar') self.assert_equal(headers.setdefault('X-Bar', 'nope'), '1') self.assert_equal(headers.setdefault('X-Baz', 'quux'), 'quux') self.assert_equal(headers.setdefault('X-Baz', 'nope'), 'quux') headers.pop('X-Baz') # type conversion self.assert_equal(headers.get('x-bar', type=int), 1) self.assert_equal(headers.getlist('x-bar', type=int), [1, 2]) # list like operations self.assert_equal(headers[0], ('Content-Type', 'text/plain')) self.assert_equal(headers[:1], self.storage_class([('Content-Type', 'text/plain')])) del headers[:2] del headers[-1] self.assert_equal(headers, self.storage_class([('X-Bar', '1')])) def test_copying(self): a = self.storage_class([('foo', 'bar')]) b = a.copy() a.add('foo', 'baz') self.assert_equal(a.getlist('foo'), 
['bar', 'baz']) self.assert_equal(b.getlist('foo'), ['bar']) def test_popping(self): headers = self.storage_class([('a', 1)]) self.assert_equal(headers.pop('a'), 1) self.assert_equal(headers.pop('b', 2), 2) with self.assert_raises(KeyError): headers.pop('c') def test_set_arguments(self): a = self.storage_class() a.set('Content-Disposition', 'useless') a.set('Content-Disposition', 'attachment', filename='foo') self.assert_equal(a['Content-Disposition'], 'attachment; filename=foo') def test_reject_newlines(self): h = self.storage_class() for variation in 'foo\nbar', 'foo\r\nbar', 'foo\rbar': with self.assert_raises(ValueError): h['foo'] = variation with self.assert_raises(ValueError): h.add('foo', variation) with self.assert_raises(ValueError): h.add('foo', 'test', option=variation) with self.assert_raises(ValueError): h.set('foo', variation) with self.assert_raises(ValueError): h.set('foo', 'test', option=variation) def test_slicing(self): # there's nothing wrong with these being native strings # Headers doesn't care about the data types h = self.storage_class() h.set('X-Foo-Poo', 'bleh') h.set('Content-Type', 'application/whocares') h.set('X-Forwarded-For', '192.168.0.123') h[:] = [(k, v) for k, v in h if k.startswith(u'X-')] self.assert_equal(list(h), [ ('X-Foo-Poo', 'bleh'), ('X-Forwarded-For', '192.168.0.123') ]) def test_bytes_operations(self): h = self.storage_class() h.set('X-Foo-Poo', 'bleh') h.set('X-Whoops', b'\xff') self.assert_equal(h.get('x-foo-poo', as_bytes=True), b'bleh') self.assert_equal(h.get('x-whoops', as_bytes=True), b'\xff') def test_to_wsgi_list(self): h = self.storage_class() h.set(u'Key', u'Value') for key, value in h.to_wsgi_list(): if PY2: self.assert_strict_equal(key, b'Key') self.assert_strict_equal(value, b'Value') else: self.assert_strict_equal(key, u'Key') self.assert_strict_equal(value, u'Value') class EnvironHeadersTestCase(WerkzeugTestCase): storage_class = datastructures.EnvironHeaders def test_basic_interface(self): # this 
happens in multiple WSGI servers because they # use a vary naive way to convert the headers; broken_env = { 'HTTP_CONTENT_TYPE': 'text/html', 'CONTENT_TYPE': 'text/html', 'HTTP_CONTENT_LENGTH': '0', 'CONTENT_LENGTH': '0', 'HTTP_ACCEPT': '*', 'wsgi.version': (1, 0) } headers = self.storage_class(broken_env) assert headers self.assert_equal(len(headers), 3) self.assert_equal(sorted(headers), [ ('Accept', '*'), ('Content-Length', '0'), ('Content-Type', 'text/html') ]) assert not self.storage_class({'wsgi.version': (1, 0)}) self.assert_equal(len(self.storage_class({'wsgi.version': (1, 0)})), 0) def test_return_type_is_unicode(self): # environ contains native strings; we return unicode headers = self.storage_class({ 'HTTP_FOO': '\xe2\x9c\x93', 'CONTENT_TYPE': 'text/plain', }) self.assert_equal(headers['Foo'], u"\xe2\x9c\x93") assert isinstance(headers['Foo'], text_type) assert isinstance(headers['Content-Type'], text_type) iter_output = dict(iter(headers)) self.assert_equal(iter_output['Foo'], u"\xe2\x9c\x93") assert isinstance(iter_output['Foo'], text_type) assert isinstance(iter_output['Content-Type'], text_type) def test_bytes_operations(self): foo_val = '\xff' h = self.storage_class({ 'HTTP_X_FOO': foo_val }) self.assert_equal(h.get('x-foo', as_bytes=True), b'\xff') self.assert_equal(h.get('x-foo'), u'\xff') class HeaderSetTestCase(WerkzeugTestCase): storage_class = datastructures.HeaderSet def test_basic_interface(self): hs = self.storage_class() hs.add('foo') hs.add('bar') assert 'Bar' in hs self.assert_equal(hs.find('foo'), 0) self.assert_equal(hs.find('BAR'), 1) assert hs.find('baz') < 0 hs.discard('missing') hs.discard('foo') assert hs.find('foo') < 0 self.assert_equal(hs.find('bar'), 0) with self.assert_raises(IndexError): hs.index('missing') self.assert_equal(hs.index('bar'), 0) assert hs hs.clear() assert not hs class ImmutableListTestCase(WerkzeugTestCase): storage_class = datastructures.ImmutableList def test_list_hashable(self): t = (1, 2, 3, 4) l = 
self.storage_class(t) self.assert_equal(hash(t), hash(l)) self.assert_not_equal(t, l) def make_call_asserter(assert_equal_func, func=None): """Utility to assert a certain number of function calls. >>> assert_calls, func = make_call_asserter(self.assert_equal) >>> with assert_calls(2): func() func() """ calls = [0] @contextmanager def asserter(count, msg=None): calls[0] = 0 yield assert_equal_func(calls[0], count, msg) def wrapped(*args, **kwargs): calls[0] += 1 if func is not None: return func(*args, **kwargs) return asserter, wrapped class CallbackDictTestCase(WerkzeugTestCase): storage_class = datastructures.CallbackDict def test_callback_dict_reads(self): assert_calls, func = make_call_asserter(self.assert_equal) initial = {'a': 'foo', 'b': 'bar'} dct = self.storage_class(initial=initial, on_update=func) with assert_calls(0, 'callback triggered by read-only method'): # read-only methods dct['a'] dct.get('a') self.assert_raises(KeyError, lambda: dct['x']) 'a' in dct list(iter(dct)) dct.copy() with assert_calls(0, 'callback triggered without modification'): # methods that may write but don't dct.pop('z', None) dct.setdefault('a') def test_callback_dict_writes(self): assert_calls, func = make_call_asserter(self.assert_equal) initial = {'a': 'foo', 'b': 'bar'} dct = self.storage_class(initial=initial, on_update=func) with assert_calls(8, 'callback not triggered by write method'): # always-write methods dct['z'] = 123 dct['z'] = 123 # must trigger again del dct['z'] dct.pop('b', None) dct.setdefault('x') dct.popitem() dct.update([]) dct.clear() with assert_calls(0, 'callback triggered by failed del'): self.assert_raises(KeyError, lambda: dct.__delitem__('x')) with assert_calls(0, 'callback triggered by failed pop'): self.assert_raises(KeyError, lambda: dct.pop('x')) def suite(): suite = unittest.TestSuite() suite.addTest(unittest.makeSuite(MultiDictTestCase)) suite.addTest(unittest.makeSuite(OrderedMultiDictTestCase)) 
suite.addTest(unittest.makeSuite(CombinedMultiDictTestCase)) suite.addTest(unittest.makeSuite(ImmutableTypeConversionDictTestCase)) suite.addTest(unittest.makeSuite(ImmutableMultiDictTestCase)) suite.addTest(unittest.makeSuite(ImmutableDictTestCase)) suite.addTest(unittest.makeSuite(ImmutableOrderedMultiDictTestCase)) suite.addTest(unittest.makeSuite(HeadersTestCase)) suite.addTest(unittest.makeSuite(EnvironHeadersTestCase)) suite.addTest(unittest.makeSuite(HeaderSetTestCase)) suite.addTest(unittest.makeSuite(NativeItermethodsTestCase)) suite.addTest(unittest.makeSuite(CallbackDictTestCase)) return suite
agpl-3.0
Boyang--Li/ardupilot
Tools/ardupilotwaf/px4.py
12
10782
#!/usr/bin/env python
# encoding: utf-8

"""
Waf tool for PX4 build
"""

# FIX: 'Errors' was missing from this import, so the `except Errors.WafError`
# in px4_import_objects_from_use() raised NameError instead of catching the
# intended waf exception whenever a task generator lookup failed.
from waflib import Errors, Logs, Task, Utils
from waflib.TaskGen import after_method, before_method, feature

import os
import sys

# Cache of flag/include/define lists generated by the PX4 cmake configure
# step; populated lazily by _load_dynamic_env_data().
_dynamic_env_data = {}

def _load_dynamic_env_data(bld):
    """Read the configure-generated ';'-separated flag files into the cache."""
    bldnode = bld.bldnode.make_node('modules/PX4Firmware')
    for name in ('cxx_flags', 'include_dirs', 'definitions'):
        _dynamic_env_data[name] = bldnode.find_node(name).read().split(';')

@feature('px4_ap_stlib', 'px4_ap_program')
@before_method('process_source')
def px4_dynamic_env(self):
    """Inject the PX4-generated includes/flags into this task generator's env."""
    # The generated files from configuration possibly don't exist if it's just
    # a list command (TODO: figure out a better way to address that).
    if self.bld.cmd == 'list':
        return
    if not _dynamic_env_data:
        _load_dynamic_env_data(self.bld)
    self.env.append_value('INCLUDES', _dynamic_env_data['include_dirs'])
    self.env.prepend_value('CXXFLAGS', _dynamic_env_data['cxx_flags'])
    self.env.prepend_value('CXXFLAGS', _dynamic_env_data['definitions'])

# Single static library
# NOTE: This only works only for local static libraries dependencies - fake
# libraries aren't supported yet
@feature('px4_ap_program')
@after_method('apply_link')
@before_method('process_use')
def px4_import_objects_from_use(self):
    """Feed the object files of every (transitive) 'use' dependency to the link task."""
    queue = Utils.to_list(getattr(self, 'use', []))
    names = set()  # guards against cycles / duplicate visits

    while queue:
        name = queue.pop(0)
        if name in names:
            continue
        names.add(name)

        try:
            tg = self.bld.get_tgen_by_name(name)
        except Errors.WafError:
            # Not a local task generator (e.g. a system library) - skip it.
            continue

        tg.post()
        for t in getattr(tg, 'compiled_tasks', []):
            self.link_task.set_inputs(t.outputs)

        queue.extend(Utils.to_list(getattr(tg, 'use', [])))

class px4_copy(Task.Task):
    """Copy one file (SRC -> TGT) with a PX4-flavored progress message."""
    run_str = '${CP} ${SRC} ${TGT}'
    color = 'CYAN'

    def keyword(self):
        return "PX4: Copying %s to" % self.inputs[0].name

    def __str__(self):
        return self.outputs[0].path_from(self.generator.bld.bldnode)

class px4_add_git_hashes(Task.Task):
    """Run the add_git_hashes.py helper to stamp the firmware with repo hashes."""
    run_str = '${PYTHON} ${PX4_ADD_GIT_HASHES} --ardupilot ${PX4_APM_ROOT} --px4 ${PX4_ROOT} --nuttx ${PX4_NUTTX_ROOT} --uavcan ${PX4_UAVCAN_ROOT} ${SRC} ${TGT}'
    color = 'CYAN'

    def keyword(self):
        return "PX4: Copying firmware and adding git hashes"

    def __str__(self):
        return self.outputs[0].path_from(self.outputs[0].ctx.launch_node())

def _update_firmware_sig(fw_task, firmware, elf):
    """After the cmake firmware task runs, refresh waf signatures of its outputs.

    The cmake build produces the files out-of-band, so waf's cached signatures
    would otherwise be stale and downstream copy tasks would not re-run.
    """
    original_post_run = fw_task.post_run
    def post_run():
        original_post_run()
        firmware.sig = firmware.cache_sig = Utils.h_file(firmware.abspath())
        elf.sig = elf.cache_sig = Utils.h_file(elf.abspath())
    fw_task.post_run = post_run

# Module-level state shared across px4_firmware() invocations: the PX4IO copy
# task is created once, and firmware builds are serialized via these lists.
_cp_px4io = None
_firmware_semaphorish_tasks = []
_upload_task = []

@feature('px4_ap_program')
@after_method('process_source')
def px4_firmware(self):
    """Create the cmake firmware build, hash-stamping, copy and upload tasks."""
    global _cp_px4io, _firmware_semaphorish_tasks, _upload_task
    version = self.env.get_flat('PX4_VERSION')

    px4 = self.bld.cmake('px4')
    px4.vars['APM_PROGRAM_LIB'] = self.link_task.outputs[0].abspath()

    if self.env.PX4_USE_PX4IO and not _cp_px4io:
        px4io_task = self.create_cmake_build_task('px4', 'fw_io')
        px4io = px4io_task.cmake.bldnode.make_node(
            'src/modules/px4iofirmware/px4io-v%s.bin' % version,
        )
        px4io_elf = px4.bldnode.make_node(
            'src/modules/px4iofirmware/px4io-v%s' % version
        )
        px4io_task.set_outputs([px4io, px4io_elf])

        romfs = self.bld.bldnode.make_node(self.env.PX4_ROMFS_BLD)
        romfs_px4io = romfs.make_node('px4io/px4io.bin')
        romfs_px4io.parent.mkdir()
        _cp_px4io = self.create_task('px4_copy', px4io, romfs_px4io)
        _cp_px4io.keyword = lambda: 'PX4: Copying PX4IO to ROMFS'

        px4io_elf_dest = self.bld.bldnode.make_node(self.env.PX4IO_ELF_DEST)
        cp_px4io_elf = self.create_task('px4_copy', px4io_elf, px4io_elf_dest)

    fw_task = self.create_cmake_build_task(
        'px4',
        'build_firmware_px4fmu-v%s' % version,
    )

    # we need to synchronize in order to avoid the output expected by the
    # previous ap_program being overwritten before used
    for t in _firmware_semaphorish_tasks:
        fw_task.set_run_after(t)
    _firmware_semaphorish_tasks = []

    if self.env.PX4_USE_PX4IO and _cp_px4io.generator is self:
        fw_task.set_run_after(_cp_px4io)

    firmware = px4.bldnode.make_node(
        'src/firmware/nuttx/nuttx-px4fmu-v%s-apm.px4' % version,
    )
    fw_elf = px4.bldnode.make_node(
        'src/firmware/nuttx/firmware_nuttx',
    )
    _update_firmware_sig(fw_task, firmware, fw_elf)

    fw_dest = self.bld.bldnode.make_node(
        os.path.join(self.program_dir, '%s.px4' % self.program_name)
    )
    git_hashes = self.create_task('px4_add_git_hashes', firmware, fw_dest)
    git_hashes.set_run_after(fw_task)
    _firmware_semaphorish_tasks.append(git_hashes)

    fw_elf_dest = self.bld.bldnode.make_node(
        os.path.join(self.program_dir, self.program_name)
    )
    cp_elf = self.create_task('px4_copy', fw_elf, fw_elf_dest)
    cp_elf.set_run_after(fw_task)
    _firmware_semaphorish_tasks.append(cp_elf)

    self.build_summary = dict(
        target=self.name,
        binary=fw_elf_dest.path_from(self.bld.bldnode),
    )

    if self.bld.options.upload:
        if _upload_task:
            # Only one upload per build run makes sense; later programs lose.
            Logs.warn('PX4: upload for %s ignored' % self.name)
            return
        _upload_task = self.create_cmake_build_task('px4', 'upload')
        _upload_task.set_run_after(fw_task)
        _firmware_semaphorish_tasks.append(_upload_task)

def _px4_taskgen(bld, **kw):
    """Wrap bld(**kw), normalizing cls_keyword/cls_str into callables with a PX4 prefix."""
    if 'cls_keyword' in kw and not callable(kw['cls_keyword']):
        cls_keyword = str(kw['cls_keyword'])
        kw['cls_keyword'] = lambda tsk: 'PX4: ' + cls_keyword

    if 'cls_str' in kw and not callable(kw['cls_str']):
        cls_str = str(kw['cls_str'])
        kw['cls_str'] = lambda tsk: cls_str

    kw['color'] = 'CYAN'

    return bld(**kw)

@feature('_px4_romfs')
def _process_romfs(self):
    """Create copy tasks staging the static ROMFS files into the build tree."""
    bld = self.bld
    # Plain strings are ROMFS-relative; tuples are (source-tree path, ROMFS path).
    file_list = (
        'firmware/oreoled.bin',
        'init.d/rc.APM',
        'init.d/rc.error',
        'init.d/rcS',
        (bld.env.PX4_BOOTLOADER, 'bootloader/fmu_bl.bin'),
    )

    romfs_src = bld.srcnode.find_dir(bld.env.PX4_ROMFS_SRC)
    romfs_bld = bld.bldnode.make_node(bld.env.PX4_ROMFS_BLD)

    for item in file_list:
        if isinstance(item, str):
            src = romfs_src.make_node(item)
            dst = romfs_bld.make_node(item)
        else:
            src = bld.srcnode.make_node(item[0])
            dst = romfs_bld.make_node(item[1])

        dst.parent.mkdir()
        self.create_task('px4_copy', src, dst)

def configure(cfg):
    """waf configure hook: set up cmake, paths and PX4 cmake variables."""
    cfg.env.CMAKE_MIN_VERSION = '3.2'
    cfg.load('cmake')

    cfg.find_program('cp')

    bldnode = cfg.bldnode.make_node(cfg.variant)
    env = cfg.env

    env.AP_PROGRAM_FEATURES += ['px4_ap_program']
    env.AP_STLIB_FEATURES += ['px4_ap_stlib']

    def srcpath(path):
        return cfg.srcnode.make_node(path).abspath()

    def bldpath(path):
        return bldnode.make_node(path).abspath()

    if env.PX4_VERSION == '1':
        bootloader_name = 'px4fmu_bl.bin'
    else:
        bootloader_name = 'px4fmuv%s_bl.bin' % env.get_flat('PX4_VERSION')

    # TODO: we should move stuff from mk/PX4 to Tools/ardupilotwaf/px4 after
    # stop using the make-based build system
    env.PX4_ROMFS_SRC = 'mk/PX4/ROMFS'
    env.PX4_ROMFS_BLD = 'px4-extra-files/ROMFS'
    env.PX4_BOOTLOADER = 'mk/PX4/bootloader/%s' % bootloader_name

    env.PX4_ADD_GIT_HASHES = srcpath('Tools/scripts/add_git_hashes.py')
    env.PX4_APM_ROOT = srcpath('')
    env.PX4_ROOT = srcpath('modules/PX4Firmware')
    env.PX4_NUTTX_ROOT = srcpath('modules/PX4NuttX')
    env.PX4_UAVCAN_ROOT = srcpath('modules/uavcan')

    if env.PX4_USE_PX4IO:
        env.PX4IO_ELF_DEST = 'px4-extra-files/px4io'

    env.PX4_CMAKE_VARS = dict(
        CONFIG='nuttx_px4fmu-v%s_apm' % env.get_flat('PX4_VERSION'),
        CMAKE_MODULE_PATH=srcpath('Tools/ardupilotwaf/px4/cmake'),
        UAVCAN_LIBUAVCAN_PATH=env.PX4_UAVCAN_ROOT,
        NUTTX_SRC=env.PX4_NUTTX_ROOT,
        PX4_NUTTX_ROMFS=bldpath(env.PX4_ROMFS_BLD),
        ARDUPILOT_BUILD='YES',
        EXTRA_CXX_FLAGS=' '.join((
            # NOTE: these "-Wno-error=*" flags should be removed as we update
            # the submodule
            '-Wno-error=double-promotion',
            '-Wno-error=reorder',
            # NOTE: *Temporarily* using this definition so that both
            # PX4Firmware build systems (cmake and legacy make-based) can live
            # together
            '-DCMAKE_BUILD',
            '-DARDUPILOT_BUILD',
            '-I%s' % bldpath('libraries/GCS_MAVLink'),
            '-I%s' % bldpath('libraries/GCS_MAVLink/include/mavlink'),
            '-Wl,--gc-sections',
        )),
        EXTRA_C_FLAGS=' '.join((
            # NOTE: *Temporarily* using this definition so that both
            # PX4Firmware build systems (cmake and legacy make-based) can live
            # together
            '-DCMAKE_BUILD',
        )),
    )

def build(bld):
    """waf build hook: declare the cmake configure/build targets and ROMFS staging."""
    version = bld.env.get_flat('PX4_VERSION')

    px4 = bld.cmake(
        name='px4',
        cmake_src=bld.srcnode.find_dir('modules/PX4Firmware'),
        cmake_vars=bld.env.PX4_CMAKE_VARS,
    )

    px4.build(
        'msg_gen',
        group='dynamic_sources',
        cmake_output_patterns='src/modules/uORB/topics/*.h',
    )
    px4.build(
        'prebuild_targets',
        group='dynamic_sources',
        cmake_output_patterns='px4fmu-v%s/NuttX/nuttx-export/**/*.h' % version,
    )

    bld(
        name='px4_romfs_static_files',
        group='dynamic_sources',
        features='_px4_romfs',
    )

    bld.extra_build_summary = _extra_build_summary

def _extra_build_summary(bld, build_summary):
    """Append PX4-specific notes (and PX4IO sizes) to the build summary output."""
    build_summary.text('')
    build_summary.text('PX4')
    build_summary.text('', '''
The ELF files are pointed by the path in the "%s" column. The .px4 files are in
the same directory of their corresponding ELF files.
''' % build_summary.header_text['target'])

    if not bld.options.upload:
        build_summary.text('')
        build_summary.text('', '''
You can use the option --upload to upload the firmware to the PX4 board if you
have one connected.''')

    if bld.env.PX4_USE_PX4IO:
        build_summary.text('')
        build_summary.text('PX4IO')
        summary_data_list = bld.size_summary([bld.env.PX4IO_ELF_DEST])
        header = bld.env.BUILD_SUMMARY_HEADER[:]
        try:
            header.remove('target')
        except ValueError:
            pass
        header.insert(0, 'binary_path')
        build_summary.print_table(summary_data_list, header)
gpl-3.0
hyperized/ansible
test/units/modules/network/fortios/test_fortios_log_syslogd2_filter.py
21
9206
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <https://www.gnu.org/licenses/>.

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import os
import json
import pytest
from mock import ANY
from ansible.module_utils.network.fortios.fortios import FortiOSHandler

try:
    from ansible.modules.network.fortios import fortios_log_syslogd2_filter
except ImportError:
    pytest.skip("Could not load required modules for testing", allow_module_level=True)


@pytest.fixture(autouse=True)
def connection_mock(mocker):
    # Autouse fixture: every test runs with the module's Connection class patched out.
    connection_class_mock = mocker.patch('ansible.modules.network.fortios.fortios_log_syslogd2_filter.Connection')
    return connection_class_mock


# NOTE(review): this passes the *fixture function itself* as the connection, not a
# Connection instance; it works only because the handler's set/schema methods are
# patched in every test, so the connection is never actually used. TODO confirm.
fos_instance = FortiOSHandler(connection_mock)


def test_log_syslogd2_filter_creation(mocker):
    """A successful POST must report changed=True and no error."""
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')

    set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
    set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)

    input_data = {
        'username': 'admin',
        'state': 'present',
        'log_syslogd2_filter': {
            'anomaly': 'enable',
            'dns': 'enable',
            'filter': 'test_value_5',
            'filter_type': 'include',
            'forward_traffic': 'enable',
            'gtp': 'enable',
            'local_traffic': 'enable',
            'multicast_traffic': 'enable',
            'netscan_discovery': 'test_value_11,',
            'netscan_vulnerability': 'test_value_12,',
            'severity': 'emergency',
            'sniffer_traffic': 'enable',
            'ssh': 'enable',
            'voip': 'enable'
        },
        'vdom': 'root'}

    is_error, changed, response = fortios_log_syslogd2_filter.fortios_log_syslogd2(input_data, fos_instance)

    # Underscored ansible option names must be mapped to the FortiOS
    # hyphenated API attribute names.
    expected_data = {
        'anomaly': 'enable',
        'dns': 'enable',
        'filter': 'test_value_5',
        'filter-type': 'include',
        'forward-traffic': 'enable',
        'gtp': 'enable',
        'local-traffic': 'enable',
        'multicast-traffic': 'enable',
        'netscan-discovery': 'test_value_11,',
        'netscan-vulnerability': 'test_value_12,',
        'severity': 'emergency',
        'sniffer-traffic': 'enable',
        'ssh': 'enable',
        'voip': 'enable'
    }

    set_method_mock.assert_called_with('log.syslogd2', 'filter', data=expected_data, vdom='root')
    schema_method_mock.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200


def test_log_syslogd2_filter_creation_fails(mocker):
    """A failed POST (HTTP 500) must report an error and changed=False."""
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')

    set_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
    set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)

    input_data = {
        'username': 'admin',
        'state': 'present',
        'log_syslogd2_filter': {
            'anomaly': 'enable',
            'dns': 'enable',
            'filter': 'test_value_5',
            'filter_type': 'include',
            'forward_traffic': 'enable',
            'gtp': 'enable',
            'local_traffic': 'enable',
            'multicast_traffic': 'enable',
            'netscan_discovery': 'test_value_11,',
            'netscan_vulnerability': 'test_value_12,',
            'severity': 'emergency',
            'sniffer_traffic': 'enable',
            'ssh': 'enable',
            'voip': 'enable'
        },
        'vdom': 'root'}

    is_error, changed, response = fortios_log_syslogd2_filter.fortios_log_syslogd2(input_data, fos_instance)

    expected_data = {
        'anomaly': 'enable',
        'dns': 'enable',
        'filter': 'test_value_5',
        'filter-type': 'include',
        'forward-traffic': 'enable',
        'gtp': 'enable',
        'local-traffic': 'enable',
        'multicast-traffic': 'enable',
        'netscan-discovery': 'test_value_11,',
        'netscan-vulnerability': 'test_value_12,',
        'severity': 'emergency',
        'sniffer-traffic': 'enable',
        'ssh': 'enable',
        'voip': 'enable'
    }

    set_method_mock.assert_called_with('log.syslogd2', 'filter', data=expected_data, vdom='root')
    schema_method_mock.assert_not_called()
    assert is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 500


def test_log_syslogd2_filter_idempotent(mocker):
    """A 404 on DELETE-style result means nothing changed and no hard error."""
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')

    set_method_result = {'status': 'error', 'http_method': 'DELETE', 'http_status': 404}
    set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)

    input_data = {
        'username': 'admin',
        'state': 'present',
        'log_syslogd2_filter': {
            'anomaly': 'enable',
            'dns': 'enable',
            'filter': 'test_value_5',
            'filter_type': 'include',
            'forward_traffic': 'enable',
            'gtp': 'enable',
            'local_traffic': 'enable',
            'multicast_traffic': 'enable',
            'netscan_discovery': 'test_value_11,',
            'netscan_vulnerability': 'test_value_12,',
            'severity': 'emergency',
            'sniffer_traffic': 'enable',
            'ssh': 'enable',
            'voip': 'enable'
        },
        'vdom': 'root'}

    is_error, changed, response = fortios_log_syslogd2_filter.fortios_log_syslogd2(input_data, fos_instance)

    expected_data = {
        'anomaly': 'enable',
        'dns': 'enable',
        'filter': 'test_value_5',
        'filter-type': 'include',
        'forward-traffic': 'enable',
        'gtp': 'enable',
        'local-traffic': 'enable',
        'multicast-traffic': 'enable',
        'netscan-discovery': 'test_value_11,',
        'netscan-vulnerability': 'test_value_12,',
        'severity': 'emergency',
        'sniffer-traffic': 'enable',
        'ssh': 'enable',
        'voip': 'enable'
    }

    set_method_mock.assert_called_with('log.syslogd2', 'filter', data=expected_data, vdom='root')
    schema_method_mock.assert_not_called()
    assert not is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 404


def test_log_syslogd2_filter_filter_foreign_attributes(mocker):
    """Attributes not in the module schema must be stripped before the API call."""
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')

    set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
    set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)

    input_data = {
        'username': 'admin',
        'state': 'present',
        'log_syslogd2_filter': {
            'random_attribute_not_valid': 'tag',
            'anomaly': 'enable',
            'dns': 'enable',
            'filter': 'test_value_5',
            'filter_type': 'include',
            'forward_traffic': 'enable',
            'gtp': 'enable',
            'local_traffic': 'enable',
            'multicast_traffic': 'enable',
            'netscan_discovery': 'test_value_11,',
            'netscan_vulnerability': 'test_value_12,',
            'severity': 'emergency',
            'sniffer_traffic': 'enable',
            'ssh': 'enable',
            'voip': 'enable'
        },
        'vdom': 'root'}

    is_error, changed, response = fortios_log_syslogd2_filter.fortios_log_syslogd2(input_data, fos_instance)

    # 'random_attribute_not_valid' must not appear here.
    expected_data = {
        'anomaly': 'enable',
        'dns': 'enable',
        'filter': 'test_value_5',
        'filter-type': 'include',
        'forward-traffic': 'enable',
        'gtp': 'enable',
        'local-traffic': 'enable',
        'multicast-traffic': 'enable',
        'netscan-discovery': 'test_value_11,',
        'netscan-vulnerability': 'test_value_12,',
        'severity': 'emergency',
        'sniffer-traffic': 'enable',
        'ssh': 'enable',
        'voip': 'enable'
    }

    set_method_mock.assert_called_with('log.syslogd2', 'filter', data=expected_data, vdom='root')
    schema_method_mock.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200
gpl-3.0
nrmay/hpctardis
tardis/tardis_portal/templatetags/basiccomparisonfilters.py
10
1639
#!/usr/bin/python # -*- coding: utf-8 -*- from django.template import Library from django import template def gt(value, arg): """Returns a boolean of whether the value is greater than the argument.""" return value > int(arg) def lt(value, arg): """Returns a boolean of whether the value is less than the argument.""" return value < int(arg) def gte(value, arg): """Returns a boolean of whether the value is greater than or equal to the argument. """ return value >= int(arg) def lte(value, arg): """Returns a boolean of whether the value is less than or equal to the argument. """ return value <= int(arg) def length_gt(value, arg): """Returns a boolean of whether the value's length is greater than the argument. """ return len(value) > int(arg) def length_lt(value, arg): """Returns a boolean of whether the value's length is less than the argument. """ return len(value) < int(arg) def length_gte(value, arg): """Returns a boolean of whether the value's length is greater than or equal to the argument. """ return len(value) >= int(arg) def length_lte(value, arg): """Returns a boolean of whether the value's length is less than or equal to the argument. """ return len(value) <= int(arg) register = template.Library() register.filter('gt', gt) register.filter('lt', lt) register.filter('gte', gte) register.filter('lte', lte) register.filter('length_gt', length_gt) register.filter('length_lt', length_lt) register.filter('length_gte', length_gte) register.filter('length_lte', length_lte)
bsd-3-clause
craighiller/serendipity
venv/lib/python2.7/site-packages/setuptools/tests/test_easy_install.py
135
15441
"""Easy install Tests """ import sys import os import shutil import tempfile import unittest import site import contextlib import textwrap import tarfile import logging import distutils.core from setuptools.compat import StringIO, BytesIO, next, urlparse from setuptools.sandbox import run_setup, SandboxViolation from setuptools.command.easy_install import ( easy_install, fix_jython_executable, get_script_args, nt_quote_arg) from setuptools.command.easy_install import PthDistributions from setuptools.command import easy_install as easy_install_pkg from setuptools.dist import Distribution from pkg_resources import working_set, VersionConflict from pkg_resources import Distribution as PRDistribution import setuptools.tests.server import pkg_resources class FakeDist(object): def get_entry_map(self, group): if group != 'console_scripts': return {} return {'name': 'ep'} def as_requirement(self): return 'spec' WANTED = """\ #!%s # EASY-INSTALL-ENTRY-SCRIPT: 'spec','console_scripts','name' __requires__ = 'spec' import sys from pkg_resources import load_entry_point if __name__ == '__main__': sys.exit( load_entry_point('spec', 'console_scripts', 'name')() ) """ % nt_quote_arg(fix_jython_executable(sys.executable, "")) SETUP_PY = """\ from setuptools import setup setup(name='foo') """ class TestEasyInstallTest(unittest.TestCase): def test_install_site_py(self): dist = Distribution() cmd = easy_install(dist) cmd.sitepy_installed = False cmd.install_dir = tempfile.mkdtemp() try: cmd.install_site_py() sitepy = os.path.join(cmd.install_dir, 'site.py') self.assertTrue(os.path.exists(sitepy)) finally: shutil.rmtree(cmd.install_dir) def test_get_script_args(self): dist = FakeDist() old_platform = sys.platform try: name, script = [i for i in next(get_script_args(dist))][0:2] finally: sys.platform = old_platform self.assertEqual(script, WANTED) def test_no_find_links(self): # new option '--no-find-links', that blocks find-links added at # the project level dist = Distribution() cmd = 
easy_install(dist) cmd.check_pth_processing = lambda: True cmd.no_find_links = True cmd.find_links = ['link1', 'link2'] cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok') cmd.args = ['ok'] cmd.ensure_finalized() self.assertEqual(cmd.package_index.scanned_urls, {}) # let's try without it (default behavior) cmd = easy_install(dist) cmd.check_pth_processing = lambda: True cmd.find_links = ['link1', 'link2'] cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok') cmd.args = ['ok'] cmd.ensure_finalized() keys = sorted(cmd.package_index.scanned_urls.keys()) self.assertEqual(keys, ['link1', 'link2']) class TestPTHFileWriter(unittest.TestCase): def test_add_from_cwd_site_sets_dirty(self): '''a pth file manager should set dirty if a distribution is in site but also the cwd ''' pth = PthDistributions('does-not_exist', [os.getcwd()]) self.assertTrue(not pth.dirty) pth.add(PRDistribution(os.getcwd())) self.assertTrue(pth.dirty) def test_add_from_site_is_ignored(self): if os.name != 'nt': location = '/test/location/does-not-have-to-exist' else: location = 'c:\\does_not_exist' pth = PthDistributions('does-not_exist', [location, ]) self.assertTrue(not pth.dirty) pth.add(PRDistribution(location)) self.assertTrue(not pth.dirty) class TestUserInstallTest(unittest.TestCase): def setUp(self): self.dir = tempfile.mkdtemp() setup = os.path.join(self.dir, 'setup.py') f = open(setup, 'w') f.write(SETUP_PY) f.close() self.old_cwd = os.getcwd() os.chdir(self.dir) self.old_enable_site = site.ENABLE_USER_SITE self.old_file = easy_install_pkg.__file__ self.old_base = site.USER_BASE site.USER_BASE = tempfile.mkdtemp() self.old_site = site.USER_SITE site.USER_SITE = tempfile.mkdtemp() easy_install_pkg.__file__ = site.USER_SITE def tearDown(self): os.chdir(self.old_cwd) shutil.rmtree(self.dir) shutil.rmtree(site.USER_BASE) shutil.rmtree(site.USER_SITE) site.USER_BASE = self.old_base site.USER_SITE = self.old_site site.ENABLE_USER_SITE = self.old_enable_site easy_install_pkg.__file__ = 
self.old_file def test_user_install_implied(self): site.ENABLE_USER_SITE = True # disabled sometimes #XXX: replace with something meaningfull dist = Distribution() dist.script_name = 'setup.py' cmd = easy_install(dist) cmd.args = ['py'] cmd.ensure_finalized() self.assertTrue(cmd.user, 'user should be implied') def test_multiproc_atexit(self): try: __import__('multiprocessing') except ImportError: # skip the test if multiprocessing is not available return log = logging.getLogger('test_easy_install') logging.basicConfig(level=logging.INFO, stream=sys.stderr) log.info('this should not break') def test_user_install_not_implied_without_usersite_enabled(self): site.ENABLE_USER_SITE = False # usually enabled #XXX: replace with something meaningfull dist = Distribution() dist.script_name = 'setup.py' cmd = easy_install(dist) cmd.args = ['py'] cmd.initialize_options() self.assertFalse(cmd.user, 'NOT user should be implied') def test_local_index(self): # make sure the local index is used # when easy_install looks for installed # packages new_location = tempfile.mkdtemp() target = tempfile.mkdtemp() egg_file = os.path.join(new_location, 'foo-1.0.egg-info') f = open(egg_file, 'w') try: f.write('Name: foo\n') finally: f.close() sys.path.append(target) old_ppath = os.environ.get('PYTHONPATH') os.environ['PYTHONPATH'] = os.path.pathsep.join(sys.path) try: dist = Distribution() dist.script_name = 'setup.py' cmd = easy_install(dist) cmd.install_dir = target cmd.args = ['foo'] cmd.ensure_finalized() cmd.local_index.scan([new_location]) res = cmd.easy_install('foo') self.assertEqual(os.path.realpath(res.location), os.path.realpath(new_location)) finally: sys.path.remove(target) for basedir in [new_location, target, ]: if not os.path.exists(basedir) or not os.path.isdir(basedir): continue try: shutil.rmtree(basedir) except: pass if old_ppath is not None: os.environ['PYTHONPATH'] = old_ppath else: del os.environ['PYTHONPATH'] def test_setup_requires(self): """Regression test for 
Distribute issue #318 Ensure that a package with setup_requires can be installed when setuptools is installed in the user site-packages without causing a SandboxViolation. """ test_pkg = create_setup_requires_package(self.dir) test_setup_py = os.path.join(test_pkg, 'setup.py') try: with quiet_context(): with reset_setup_stop_context(): run_setup(test_setup_py, ['install']) except SandboxViolation: self.fail('Installation caused SandboxViolation') class TestSetupRequires(unittest.TestCase): def test_setup_requires_honors_fetch_params(self): """ When easy_install installs a source distribution which specifies setup_requires, it should honor the fetch parameters (such as allow-hosts, index-url, and find-links). """ # set up a server which will simulate an alternate package index. p_index = setuptools.tests.server.MockServer() p_index.start() netloc = 1 p_index_loc = urlparse(p_index.url)[netloc] if p_index_loc.endswith(':0'): # Some platforms (Jython) don't find a port to which to bind, # so skip this test for them. return with quiet_context(): # create an sdist that has a build-time dependency. with TestSetupRequires.create_sdist() as dist_file: with tempdir_context() as temp_install_dir: with environment_context(PYTHONPATH=temp_install_dir): ei_params = ['--index-url', p_index.url, '--allow-hosts', p_index_loc, '--exclude-scripts', '--install-dir', temp_install_dir, dist_file] with reset_setup_stop_context(): with argv_context(['easy_install']): # attempt to install the dist. It should fail because # it doesn't exist. 
self.assertRaises(SystemExit, easy_install_pkg.main, ei_params) # there should have been two or three requests to the server # (three happens on Python 3.3a) self.assertTrue(2 <= len(p_index.requests) <= 3) self.assertEqual(p_index.requests[0].path, '/does-not-exist/') @staticmethod @contextlib.contextmanager def create_sdist(): """ Return an sdist with a setup_requires dependency (of something that doesn't exist) """ with tempdir_context() as dir: dist_path = os.path.join(dir, 'setuptools-test-fetcher-1.0.tar.gz') make_trivial_sdist( dist_path, textwrap.dedent(""" import setuptools setuptools.setup( name="setuptools-test-fetcher", version="1.0", setup_requires = ['does-not-exist'], ) """).lstrip()) yield dist_path def test_setup_requires_overrides_version_conflict(self): """ Regression test for issue #323. Ensures that a distribution's setup_requires requirements can still be installed and used locally even if a conflicting version of that requirement is already on the path. """ pr_state = pkg_resources.__getstate__() fake_dist = PRDistribution('does-not-matter', project_name='foobar', version='0.0') working_set.add(fake_dist) try: with tempdir_context() as temp_dir: test_pkg = create_setup_requires_package(temp_dir) test_setup_py = os.path.join(test_pkg, 'setup.py') with quiet_context() as (stdout, stderr): with reset_setup_stop_context(): try: # Don't even need to install the package, just # running the setup.py at all is sufficient run_setup(test_setup_py, ['--name']) except VersionConflict: self.fail('Installing setup.py requirements ' 'caused a VersionConflict') lines = stdout.readlines() self.assertTrue(len(lines) > 0) self.assertTrue(lines[-1].strip(), 'test_pkg') finally: pkg_resources.__setstate__(pr_state) def create_setup_requires_package(path): """Creates a source tree under path for a trivial test package that has a single requirement in setup_requires--a tarball for that requirement is also created and added to the dependency_links argument. 
""" test_setup_attrs = { 'name': 'test_pkg', 'version': '0.0', 'setup_requires': ['foobar==0.1'], 'dependency_links': [os.path.abspath(path)] } test_pkg = os.path.join(path, 'test_pkg') test_setup_py = os.path.join(test_pkg, 'setup.py') os.mkdir(test_pkg) f = open(test_setup_py, 'w') f.write(textwrap.dedent("""\ import setuptools setuptools.setup(**%r) """ % test_setup_attrs)) f.close() foobar_path = os.path.join(path, 'foobar-0.1.tar.gz') make_trivial_sdist( foobar_path, textwrap.dedent("""\ import setuptools setuptools.setup( name='foobar', version='0.1' ) """)) return test_pkg def make_trivial_sdist(dist_path, setup_py): """Create a simple sdist tarball at dist_path, containing just a setup.py, the contents of which are provided by the setup_py string. """ setup_py_file = tarfile.TarInfo(name='setup.py') try: # Python 3 (StringIO gets converted to io module) MemFile = BytesIO except AttributeError: MemFile = StringIO setup_py_bytes = MemFile(setup_py.encode('utf-8')) setup_py_file.size = len(setup_py_bytes.getvalue()) dist = tarfile.open(dist_path, 'w:gz') try: dist.addfile(setup_py_file, fileobj=setup_py_bytes) finally: dist.close() @contextlib.contextmanager def tempdir_context(cd=lambda dir:None): temp_dir = tempfile.mkdtemp() orig_dir = os.getcwd() try: cd(temp_dir) yield temp_dir finally: cd(orig_dir) shutil.rmtree(temp_dir) @contextlib.contextmanager def environment_context(**updates): old_env = os.environ.copy() os.environ.update(updates) try: yield finally: for key in updates: del os.environ[key] os.environ.update(old_env) @contextlib.contextmanager def argv_context(repl): old_argv = sys.argv[:] sys.argv[:] = repl yield sys.argv[:] = old_argv @contextlib.contextmanager def reset_setup_stop_context(): """ When the setuptools tests are run using setup.py test, and then one wants to invoke another setup() command (such as easy_install) within those tests, it's necessary to reset the global variable in distutils.core so that the setup() command will run 
naturally. """ setup_stop_after = distutils.core._setup_stop_after distutils.core._setup_stop_after = None yield distutils.core._setup_stop_after = setup_stop_after @contextlib.contextmanager def quiet_context(): """ Redirect stdout/stderr to StringIO objects to prevent console output from distutils commands. """ old_stdout = sys.stdout old_stderr = sys.stderr new_stdout = sys.stdout = StringIO() new_stderr = sys.stderr = StringIO() try: yield new_stdout, new_stderr finally: new_stdout.seek(0) new_stderr.seek(0) sys.stdout = old_stdout sys.stderr = old_stderr
mit
pkoutsias/SickRage
sickbeard/metadata/kodi.py
12
4654
# coding=utf-8
# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.

import os

from sickbeard.metadata import generic
from sickbeard.metadata import kodi_12plus
from sickrage.helper.common import replace_extension
from sickrage.helper.encoding import ek


class KODIMetadata(kodi_12plus.KODI_12PlusMetadata):
    """
    Metadata generation class for KODI (legacy).

    The following file structure is used:

    show_root/tvshow.nfo              (show metadata)
    show_root/fanart.jpg              (fanart)
    show_root/folder.jpg              (poster)
    show_root/folder.jpg              (banner)
    show_root/Season ##/filename.ext  (*)
    show_root/Season ##/filename.nfo  (episode metadata)
    show_root/Season ##/filename.tbn  (episode thumb)
    show_root/season##.tbn            (season posters)
    show_root/season-all.tbn          (season all poster)
    """

    def __init__(self,
                 show_metadata=False,
                 episode_metadata=False,
                 fanart=False,
                 poster=False,
                 banner=False,
                 episode_thumbnails=False,
                 season_posters=False,
                 season_banners=False,
                 season_all_poster=False,
                 season_all_banner=False):
        # Delegate all feature toggles to the generic base; this subclass only
        # customizes naming conventions and unsupported features.
        generic.GenericMetadata.__init__(self,
                                         show_metadata,
                                         episode_metadata,
                                         fanart,
                                         poster,
                                         banner,
                                         episode_thumbnails,
                                         season_posters,
                                         season_banners,
                                         season_all_poster,
                                         season_all_banner)

        self.name = 'KODI'

        # KODI legacy layout: poster and banner share the same file name.
        self.poster_name = self.banner_name = "folder.jpg"
        self.season_all_poster_name = "season-all.tbn"

        # web-ui metadata template
        self.eg_show_metadata = "tvshow.nfo"
        self.eg_episode_metadata = "Season##\\<i>filename</i>.nfo"
        self.eg_fanart = "fanart.jpg"
        self.eg_poster = "folder.jpg"
        self.eg_banner = "folder.jpg"
        self.eg_episode_thumbnails = "Season##\\<i>filename</i>.tbn"
        self.eg_season_posters = "season##.tbn"
        self.eg_season_banners = "<i>not supported</i>"
        self.eg_season_all_poster = "season-all.tbn"
        self.eg_season_all_banner = "<i>not supported</i>"

    # Override with empty methods for unsupported features
    def create_season_banners(self, ep_obj):
        pass

    def create_season_all_banner(self, show_obj):
        pass

    @staticmethod
    def get_episode_thumb_path(ep_obj):
        """
        Returns the path where the episode thumbnail should be stored. Defaults to
        the same path as the episode file but with a .tbn extension.

        ep_obj: a TVEpisode instance for which to create the thumbnail

        Returns None when the episode file does not exist on disk.
        """
        if ek(os.path.isfile, ep_obj.location):
            tbn_filename = replace_extension(ep_obj.location, 'tbn')
        else:
            return None

        return tbn_filename

    @staticmethod
    def get_season_poster_path(show_obj, season):
        """
        Returns the full path to the file for a given season poster.

        show_obj: a TVShow instance for which to generate the path
        season: a season number to be used for the path. Note that season 0
                means specials.
        """
        # Our specials thumbnail is, well, special
        if season == 0:
            season_poster_filename = 'season-specials'
        else:
            season_poster_filename = 'season' + str(season).zfill(2)

        return ek(os.path.join, show_obj.location, season_poster_filename + '.tbn')


# present a standard "interface" from the module
metadata_class = KODIMetadata
gpl-3.0
mancoast/CPythonPyc_test
cpython/211_test_pwd.py
15
1639
from test_support import verbose import pwd print 'pwd.getpwall()' entries = pwd.getpwall() for e in entries: name = e[0] uid = e[2] if verbose: print name, uid print 'pwd.getpwuid()' dbuid = pwd.getpwuid(uid) if dbuid[0] != name: print 'Mismatch in pwd.getpwuid()' print 'pwd.getpwnam()' dbname = pwd.getpwnam(name) if dbname[2] != uid: print 'Mismatch in pwd.getpwnam()' else: print 'name matches uid' break # try to get some errors bynames = {} byuids = {} for n, p, u, g, gecos, d, s in entries: bynames[n] = u byuids[u] = n allnames = bynames.keys() namei = 0 fakename = allnames[namei] while bynames.has_key(fakename): chars = map(None, fakename) for i in range(len(chars)): if chars[i] == 'z': chars[i] = 'A' break elif chars[i] == 'Z': continue else: chars[i] = chr(ord(chars[i]) + 1) break else: namei = namei + 1 try: fakename = allnames[namei] except IndexError: # should never happen... if so, just forget it break fakename = ''.join(map(None, chars)) try: pwd.getpwnam(fakename) except KeyError: print 'caught expected exception' else: print 'fakename', fakename, 'did not except pwd.getpwnam()' # Choose a non-existent uid. fakeuid = 4127 while byuids.has_key(fakeuid): fakeuid = (fakeuid * 3) % 0x10000 try: pwd.getpwuid(fakeuid) except KeyError: print 'caught expected exception' else: print 'fakeuid', fakeuid, 'did not except pwd.getpwuid()'
gpl-3.0
nrwahl2/ansible
lib/ansible/modules/network/aci/aci_tenant_span_dst_group.py
4
3931
#!/usr/bin/python # -*- coding: utf-8 -*- # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = r''' --- module: aci_tenant_span_dst_group short_description: Manage SPAN destination groups on Cisco ACI fabrics (span:DestGrp) description: - Manage SPAN destination groups on Cisco ACI fabrics. - More information from the internal APIC class I(span:DestGrp) at U(https://developer.cisco.com/media/mim-ref/MO-spanDestGrp.html). author: - Swetha Chunduri (@schunduri) - Dag Wieers (@dagwieers) - Jacob McGill (@jmcgill298) version_added: '2.4' requirements: - ACI Fabric 1.0(3f)+ notes: - The C(tenant) used must exist before using this module in your playbook. The M(aci_tenant) module can be used for this. options: dst_group: description: - The name of the SPAN destination group. required: yes aliases: [ name ] description: description: - The description of the SPAN destination group. aliases: [ descr ] tenant: description: - The name of the tenant. required: yes aliases: [ tenant_name ] state: description: - Use C(present) or C(absent) for adding or removing. - Use C(query) for listing an object or multiple objects. 
choices: [ absent, present, query ] default: present extends_documentation_fragment: aci ''' # FIXME: Add more, better examples EXAMPLES = r''' - aci_tenant_span_dst_group: hostname: '{{ inventory_hostname }}' username: '{{ username }}' password: '{{ password }}' dst_group: '{{ dst_group }}' description: '{{ descr }}' tenant: '{{ tenant }}' ''' RETURN = r''' # ''' from ansible.module_utils.aci import ACIModule, aci_argument_spec from ansible.module_utils.basic import AnsibleModule def main(): argument_spec = aci_argument_spec argument_spec.update( dst_group=dict(type='str', required=False, aliases=['name']), # Not required for querying all objects tenant=dict(type='str', required=True, aliases=['tenant_name']), # Not required for querying all objects description=dict(type='str', aliases=['descr']), state=dict(type='str', default='present', choices=['absent', 'present', 'query']), method=dict(type='str', choices=['delete', 'get', 'post'], aliases=['action'], removed_in_version='2.6'), # Deprecated starting from v2.6 ) module = AnsibleModule( argument_spec=argument_spec, supports_check_mode=True, ) dst_group = module.params['dst_group'] # tenant = module.params['tenant'] description = module.params['description'] state = module.params['state'] aci = ACIModule(module) # TODO: This logic could be cleaner. 
if dst_group is not None: path = 'api/mo/uni/tn-%(tenant)s/destgrp-%(dst_group)s.json' % module.params elif state == 'query': # Query all contracts path = 'api/node/class/spanDestGrp.json' else: module.fail_json(msg="Parameter 'dst_group' is required for state 'absent' or 'present'") aci.result['url'] = '%(protocol)s://%(hostname)s/' % aci.params + path aci.get_existing() if state == 'present': # Filter out module parameters with null values aci.payload(aci_class='spanDestGrp', class_config=dict(name=dst_group, descr=description)) # Generate config diff which will be used as POST request body aci.get_diff(aci_class='spanDestGrp') # Submit changes if module not in check_mode and the proposed is different than existing aci.post_config() elif state == 'absent': aci.delete_config() module.exit_json(**aci.result) if __name__ == "__main__": main()
gpl-3.0
sternoru/goscalecms
goscale/plugins/pictures/models.py
1
8401
import re import os import simplejson import datetime import urllib2 from gdata.photos import service from goscale import models as goscale_models from goscale import utils from goscale import conf from goscale import decorators from django.db import models from django.db.models import signals from django.utils.translation import ugettext as _ class Picasa(goscale_models.GoscaleCMSPlugin): """ Picasa photos """ url = models.CharField(max_length=250, verbose_name=_('Picasa user or album link'), help_text=_('user: https://plus.google.com/photos/114610201247248895941/<br/> \ album: https://plus.google.com/photos/114610201247248895941/albums/5319044261264143137<br/> \ picasaweb album (RSS link): https://picasaweb.google.com/data/feed/base/user/sterno.bloggerCMS/albumid/5352591675044168945?alt=rss&kind=photo&hl=en_US')) width = models.SmallIntegerField(null=True, blank=True, verbose_name=_('Container width'), help_text=_('Width of a slideshow container or a lightbox.')) height = models.SmallIntegerField(null=True, blank=True, verbose_name=_('Container height'), help_text=_('Height of a slideshow container or a lightbox.')) thumbnail_width = models.SmallIntegerField(null=True, blank=True, verbose_name=_('Thumbnail width'), help_text=_('Width of a thumbnail.')) thumbnail_height = models.SmallIntegerField(null=True, blank=True, verbose_name=_('Thumbnail height'), help_text=_('Height of a thumbnail.')) autoplay = models.BooleanField(default=False, verbose_name=_('Autoplay'), help_text=_('If set slideshow will start automatically.')) user = None album = None type = 'photos' def copy_relations(self, oldinstance): # FIXME: remove after this issue is resolved: https://github.com/divio/django-cms/issues/1723 super(Picasa, self).copy_relations(oldinstance) def _regex_id(self, url=None): if not url: url = self.url id = None try: # check for album (picasaweb style) match = re.search('(\/user\/)([\d\w.]+)(\/albumid\/)(\d+)', url) id = match.group(4) self.album = id self.user = 
match.group(2) return id except AttributeError: pass try: # check for album (google+ style) match = re.search('(\/photos\/)([\d\w.]+)(\/albums\/)(\d+)', url) id = match.group(4) self.album = id self.user = match.group(2) return id except AttributeError: pass try: # check for user id = re.search('(\/photos\/)([\d\w.]+)(\/)', url).group(2) self.user = id if not self.album: self.type = 'albums' return id except AttributeError: raise goscale_models.WrongAttribute(attribute='url') return id def _get_entry_link(self, entry): """ Returns a unique link for an entry """ entry_link = None for link in entry.link: if '/data/' not in link.href and '/lh/' not in link.href: entry_link = link.href break return entry_link or entry.link[0].href def _get_data(self, album_id=None): data = [] id = self._regex_id() if self.type == 'photos': # type photo with album_id album_id = album_id or self.album data = self._get_picasa_photos(album_id) # elif type == 'photos': # # type photo without album_id # for album_id in self.params.get('albums', None): # data.extend(self._get_picasa_photos(album_id)) elif self.type == 'albums': # type album or all: (in either case, we need to get the albums.) 
albums = self._get_picasa_albums() # data.extend(albums) print 'user: %s' % self.user for album in albums: print 'album: %s' % album.gphoto_id.text data.extend(self._get_picasa_photos(album.gphoto_id.text)) return data def _store_post(self, stored_entry, entry=None): """ This method formats entry returned by _get_data() and puts to DB create textDesc, title, and MIME """ stored_entry.content_type = self.type if 1==1 or self.type == 'photos': # ignore type for now # is photo width = int(entry.width.text) height = int(entry.height.text) orientation = 'landscape' if width > height else 'portrait' max = width if width > height else height thumbnails = {} sizes = ['72', '144', '288', '480', '512', '800', '1200', '1600'] for thumb in entry.media.thumbnail: size = thumb.width if width > height else thumb.height thumbnails['s%s' % size] = { 'url': thumb.url, 'height': thumb.height, 'width': thumb.width } for size in sizes[3:]: big = size small = str(int(int(size)/(float(72)/float(thumbnails['s72']['height' if width > height else 'width'])))) thumbnails['s%s' % size] = { 'url': thumbnails['s72']['url'].replace('s72', 's%s' % size), 'height': big if height > width else small, 'width': small if height > width else big } url = thumbnails['s144']['url'].replace('s144', 's%s' % max) stored_entry.link = self._get_entry_link(entry) summarytext = '' if entry.summary.text != None: summarytext = unicode(entry.summary.text, "utf-8") stored_entry.summary = summarytext stored_entry.title = unicode(entry.title.text, "utf-8") stored_entry.categories = 'photos' stored_entry.published = datetime.datetime.strptime(entry.published.text, '%Y-%m-%dT%H:%M:%S.%fZ') stored_entry.updated = datetime.datetime.strptime(entry.updated.text, '%Y-%m-%dT%H:%M:%S.%fZ') stored_entry.author = entry.albumid.text stored_entry.attributes = simplejson.dumps({ 'id': entry.gphoto_id.text, 'thumbnails': thumbnails, 'url': url, 'width': str(width), 'height': str(height), 'orientation': orientation }) # elif self.type 
== 'albums': # # is albums # summarytext = '' # if entry.summary.text != None: # summarytext = unicode(entry.summary.text, "utf-8") # itemLink = entry.link[1].href # stored_entry.name = itemLink[itemLink.rfind('/')+1:] # stored_entry.categories = 'albums' # stored_entry.title = unicode(entry.title.text, "utf-8") # stored_entry.published = datetime.datetime.strptime(entry.published.text, '%Y-%m-%dT%H:%M:%S.%fZ') # stored_entry.updated = datetime.datetime.strptime(entry.updated.text, '%Y-%m-%dT%H:%M:%S.%fZ') # stored_entry.author = entry.user.text # stored_entry.summary = summarytext # stored_entry.attributes = simplejson.dumps({ # 'id': entry.gphoto_id.text, # 'thumbnail': entry.media.thumbnail[0].url, # }) return super(Picasa, self)._store_post(stored_entry) @decorators.cache_func def _get_picasa_albums(self): # get albums by user try: result = [] gd_client = service.PhotosService() albums = gd_client.GetUserFeed(user=self.user) for album in albums.entry: result.append(album) return result except: raise return [] @decorators.cache_func def _get_picasa_photos(self, album_id): # get photos by album_id try: gd_client = service.PhotosService() photos = gd_client.GetFeed('/data/feed/api/user/%s/albumid/%s?kind=photo' % (self.user, album_id)) return photos.entry except: raise return [] #signals.post_save.connect(goscale_models.update_posts, sender=Picasa)
bsd-3-clause
martinbuc/missionplanner
packages/IronPython.StdLib.2.7.4/content/Lib/profile.py
50
24085
#! /usr/bin/env python # # Class for profiling python code. rev 1.0 6/2/94 # # Based on prior profile module by Sjoerd Mullender... # which was hacked somewhat by: Guido van Rossum """Class for profiling Python code.""" # Copyright 1994, by InfoSeek Corporation, all rights reserved. # Written by James Roskind # # Permission to use, copy, modify, and distribute this Python software # and its associated documentation for any purpose (subject to the # restriction in the following sentence) without fee is hereby granted, # provided that the above copyright notice appears in all copies, and # that both that copyright notice and this permission notice appear in # supporting documentation, and that the name of InfoSeek not be used in # advertising or publicity pertaining to distribution of the software # without specific, written prior permission. This permission is # explicitly restricted to the copying and modification of the software # to remain in Python, compiled Python, or other languages (such as C) # wherein the modified or derived code is exclusively imported into a # Python module. # # INFOSEEK CORPORATION DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS # SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND # FITNESS. IN NO EVENT SHALL INFOSEEK CORPORATION BE LIABLE FOR ANY # SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER # RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF # CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN # CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
import sys import os import time import marshal from optparse import OptionParser __all__ = ["run", "runctx", "help", "Profile"] # Sample timer for use with #i_count = 0 #def integer_timer(): # global i_count # i_count = i_count + 1 # return i_count #itimes = integer_timer # replace with C coded timer returning integers #************************************************************************** # The following are the static member functions for the profiler class # Note that an instance of Profile() is *not* needed to call them. #************************************************************************** def run(statement, filename=None, sort=-1): """Run statement under profiler optionally saving results in filename This function takes a single argument that can be passed to the "exec" statement, and an optional file name. In all cases this routine attempts to "exec" its first argument and gather profiling statistics from the execution. If no file name is present, then this function automatically prints a simple profiling report, sorted by the standard name string (file/line/function-name) that is presented in each line. """ prof = Profile() try: prof = prof.run(statement) except SystemExit: pass if filename is not None: prof.dump_stats(filename) else: return prof.print_stats(sort) def runctx(statement, globals, locals, filename=None, sort=-1): """Run statement under profiler, supplying your own globals and locals, optionally saving results in filename. statement and filename have the same semantics as profile.run """ prof = Profile() try: prof = prof.runctx(statement, globals, locals) except SystemExit: pass if filename is not None: prof.dump_stats(filename) else: return prof.print_stats(sort) # Backwards compatibility. def help(): print "Documentation for the profile module can be found " print "in the Python Library Reference, section 'The Python Profiler'." 
if hasattr(os, "times"): def _get_time_times(timer=os.times): t = timer() return t[0] + t[1] # Using getrusage(3) is better than clock(3) if available: # on some systems (e.g. FreeBSD), getrusage has a higher resolution # Furthermore, on a POSIX system, returns microseconds, which # wrap around after 36min. _has_res = 0 try: import resource resgetrusage = lambda: resource.getrusage(resource.RUSAGE_SELF) def _get_time_resource(timer=resgetrusage): t = timer() return t[0] + t[1] _has_res = 1 except ImportError: pass class Profile: """Profiler class. self.cur is always a tuple. Each such tuple corresponds to a stack frame that is currently active (self.cur[-2]). The following are the definitions of its members. We use this external "parallel stack" to avoid contaminating the program that we are profiling. (old profiler used to write into the frames local dictionary!!) Derived classes can change the definition of some entries, as long as they leave [-2:] intact (frame and previous tuple). In case an internal error is detected, the -3 element is used as the function name. [ 0] = Time that needs to be charged to the parent frame's function. It is used so that a function call will not have to access the timing data for the parent frame. [ 1] = Total time spent in this frame's function, excluding time in subfunctions (this latter is tallied in cur[2]). [ 2] = Total time spent in subfunctions, excluding time executing the frame's function (this latter is tallied in cur[1]). [-3] = Name of the function that corresponds to this frame. [-2] = Actual frame that we correspond to (used to sync exception handling). [-1] = Our parent 6-tuple (corresponds to frame.f_back). Timing data for each function is stored as a 5-tuple in the dictionary self.timings[]. The index is always the name stored in self.cur[-3]. 
The following are the definitions of the members: [0] = The number of times this function was called, not counting direct or indirect recursion, [1] = Number of times this function appears on the stack, minus one [2] = Total time spent internal to this function [3] = Cumulative time that this function was present on the stack. In non-recursive functions, this is the total execution time from start to finish of each invocation of a function, including time spent in all subfunctions. [4] = A dictionary indicating for each function name, the number of times it was called by us. """ bias = 0 # calibration constant def __init__(self, timer=None, bias=None): self.timings = {} self.cur = None self.cmd = "" self.c_func_name = "" if bias is None: bias = self.bias self.bias = bias # Materialize in local dict for lookup speed. if not timer: if _has_res: self.timer = resgetrusage self.dispatcher = self.trace_dispatch self.get_time = _get_time_resource elif hasattr(time, 'clock'): self.timer = self.get_time = time.clock self.dispatcher = self.trace_dispatch_i elif hasattr(os, 'times'): self.timer = os.times self.dispatcher = self.trace_dispatch self.get_time = _get_time_times else: self.timer = self.get_time = time.time self.dispatcher = self.trace_dispatch_i else: self.timer = timer t = self.timer() # test out timer function try: length = len(t) except TypeError: self.get_time = timer self.dispatcher = self.trace_dispatch_i else: if length == 2: self.dispatcher = self.trace_dispatch else: self.dispatcher = self.trace_dispatch_l # This get_time() implementation needs to be defined # here to capture the passed-in timer in the parameter # list (for performance). Note that we can't assume # the timer() result contains two values in all # cases. 
def get_time_timer(timer=timer, sum=sum): return sum(timer()) self.get_time = get_time_timer self.t = self.get_time() self.simulate_call('profiler') # Heavily optimized dispatch routine for os.times() timer def trace_dispatch(self, frame, event, arg): timer = self.timer t = timer() t = t[0] + t[1] - self.t - self.bias if event == "c_call": self.c_func_name = arg.__name__ if self.dispatch[event](self, frame,t): t = timer() self.t = t[0] + t[1] else: r = timer() self.t = r[0] + r[1] - t # put back unrecorded delta # Dispatch routine for best timer program (return = scalar, fastest if # an integer but float works too -- and time.clock() relies on that). def trace_dispatch_i(self, frame, event, arg): timer = self.timer t = timer() - self.t - self.bias if event == "c_call": self.c_func_name = arg.__name__ if self.dispatch[event](self, frame, t): self.t = timer() else: self.t = timer() - t # put back unrecorded delta # Dispatch routine for macintosh (timer returns time in ticks of # 1/60th second) def trace_dispatch_mac(self, frame, event, arg): timer = self.timer t = timer()/60.0 - self.t - self.bias if event == "c_call": self.c_func_name = arg.__name__ if self.dispatch[event](self, frame, t): self.t = timer()/60.0 else: self.t = timer()/60.0 - t # put back unrecorded delta # SLOW generic dispatch routine for timer returning lists of numbers def trace_dispatch_l(self, frame, event, arg): get_time = self.get_time t = get_time() - self.t - self.bias if event == "c_call": self.c_func_name = arg.__name__ if self.dispatch[event](self, frame, t): self.t = get_time() else: self.t = get_time() - t # put back unrecorded delta # In the event handlers, the first 3 elements of self.cur are unpacked # into vrbls w/ 3-letter names. 
The last two characters are meant to be # mnemonic: # _pt self.cur[0] "parent time" time to be charged to parent frame # _it self.cur[1] "internal time" time spent directly in the function # _et self.cur[2] "external time" time spent in subfunctions def trace_dispatch_exception(self, frame, t): rpt, rit, ret, rfn, rframe, rcur = self.cur if (rframe is not frame) and rcur: return self.trace_dispatch_return(rframe, t) self.cur = rpt, rit+t, ret, rfn, rframe, rcur return 1 def trace_dispatch_call(self, frame, t): if self.cur and frame.f_back is not self.cur[-2]: rpt, rit, ret, rfn, rframe, rcur = self.cur if not isinstance(rframe, Profile.fake_frame): assert rframe.f_back is frame.f_back, ("Bad call", rfn, rframe, rframe.f_back, frame, frame.f_back) self.trace_dispatch_return(rframe, 0) assert (self.cur is None or \ frame.f_back is self.cur[-2]), ("Bad call", self.cur[-3]) fcode = frame.f_code fn = (fcode.co_filename, fcode.co_firstlineno, fcode.co_name) self.cur = (t, 0, 0, fn, frame, self.cur) timings = self.timings if fn in timings: cc, ns, tt, ct, callers = timings[fn] timings[fn] = cc, ns + 1, tt, ct, callers else: timings[fn] = 0, 0, 0, 0, {} return 1 def trace_dispatch_c_call (self, frame, t): fn = ("", 0, self.c_func_name) self.cur = (t, 0, 0, fn, frame, self.cur) timings = self.timings if fn in timings: cc, ns, tt, ct, callers = timings[fn] timings[fn] = cc, ns+1, tt, ct, callers else: timings[fn] = 0, 0, 0, 0, {} return 1 def trace_dispatch_return(self, frame, t): if frame is not self.cur[-2]: assert frame is self.cur[-2].f_back, ("Bad return", self.cur[-3]) self.trace_dispatch_return(self.cur[-2], 0) # Prefix "r" means part of the Returning or exiting frame. # Prefix "p" means part of the Previous or Parent or older frame. 
rpt, rit, ret, rfn, frame, rcur = self.cur rit = rit + t frame_total = rit + ret ppt, pit, pet, pfn, pframe, pcur = rcur self.cur = ppt, pit + rpt, pet + frame_total, pfn, pframe, pcur timings = self.timings cc, ns, tt, ct, callers = timings[rfn] if not ns: # This is the only occurrence of the function on the stack. # Else this is a (directly or indirectly) recursive call, and # its cumulative time will get updated when the topmost call to # it returns. ct = ct + frame_total cc = cc + 1 if pfn in callers: callers[pfn] = callers[pfn] + 1 # hack: gather more # stats such as the amount of time added to ct courtesy # of this specific call, and the contribution to cc # courtesy of this call. else: callers[pfn] = 1 timings[rfn] = cc, ns - 1, tt + rit, ct, callers return 1 dispatch = { "call": trace_dispatch_call, "exception": trace_dispatch_exception, "return": trace_dispatch_return, "c_call": trace_dispatch_c_call, "c_exception": trace_dispatch_return, # the C function returned "c_return": trace_dispatch_return, } # The next few functions play with self.cmd. By carefully preloading # our parallel stack, we can force the profiled result to include # an arbitrary string as the name of the calling function. # We use self.cmd as that string, and the resulting stats look # very nice :-). 
def set_cmd(self, cmd): if self.cur[-1]: return # already set self.cmd = cmd self.simulate_call(cmd) class fake_code: def __init__(self, filename, line, name): self.co_filename = filename self.co_line = line self.co_name = name self.co_firstlineno = 0 def __repr__(self): return repr((self.co_filename, self.co_line, self.co_name)) class fake_frame: def __init__(self, code, prior): self.f_code = code self.f_back = prior def simulate_call(self, name): code = self.fake_code('profile', 0, name) if self.cur: pframe = self.cur[-2] else: pframe = None frame = self.fake_frame(code, pframe) self.dispatch['call'](self, frame, 0) # collect stats from pending stack, including getting final # timings for self.cmd frame. def simulate_cmd_complete(self): get_time = self.get_time t = get_time() - self.t while self.cur[-1]: # We *can* cause assertion errors here if # dispatch_trace_return checks for a frame match! self.dispatch['return'](self, self.cur[-2], t) t = 0 self.t = get_time() - t def print_stats(self, sort=-1): import pstats pstats.Stats(self).strip_dirs().sort_stats(sort). \ print_stats() def dump_stats(self, file): f = open(file, 'wb') self.create_stats() marshal.dump(self.stats, f) f.close() def create_stats(self): self.simulate_cmd_complete() self.snapshot_stats() def snapshot_stats(self): self.stats = {} for func, (cc, ns, tt, ct, callers) in self.timings.iteritems(): callers = callers.copy() nc = 0 for callcnt in callers.itervalues(): nc += callcnt self.stats[func] = cc, nc, tt, ct, callers # The following two methods can be called by clients to use # a profiler to profile a statement, given as a string. def run(self, cmd): import __main__ dict = __main__.__dict__ return self.runctx(cmd, dict, dict) def runctx(self, cmd, globals, locals): self.set_cmd(cmd) sys.setprofile(self.dispatcher) try: exec cmd in globals, locals finally: sys.setprofile(None) return self # This method is more useful to profile a single function call. 
def runcall(self, func, *args, **kw): self.set_cmd(repr(func)) sys.setprofile(self.dispatcher) try: return func(*args, **kw) finally: sys.setprofile(None) #****************************************************************** # The following calculates the overhead for using a profiler. The # problem is that it takes a fair amount of time for the profiler # to stop the stopwatch (from the time it receives an event). # Similarly, there is a delay from the time that the profiler # re-starts the stopwatch before the user's code really gets to # continue. The following code tries to measure the difference on # a per-event basis. # # Note that this difference is only significant if there are a lot of # events, and relatively little user code per event. For example, # code with small functions will typically benefit from having the # profiler calibrated for the current platform. This *could* be # done on the fly during init() time, but it is not worth the # effort. Also note that if too large a value specified, then # execution time on some functions will actually appear as a # negative number. It is *normal* for some functions (with very # low call counts) to have such negative stats, even if the # calibration figure is "correct." # # One alternative to profile-time calibration adjustments (i.e., # adding in the magic little delta during each event) is to track # more carefully the number of events (and cumulatively, the number # of events during sub functions) that are seen. If this were # done, then the arithmetic could be done after the fact (i.e., at # display time). Currently, we track only call/return events. # These values can be deduced by examining the callees and callers # vectors for each functions. Hence we *can* almost correct the # internal time figure at print time (note that we currently don't # track exception event processing counts). Unfortunately, there # is currently no similar information for cumulative sub-function # time. 
It would not be hard to "get all this info" at profiler # time. Specifically, we would have to extend the tuples to keep # counts of this in each frame, and then extend the defs of timing # tuples to include the significant two figures. I'm a bit fearful # that this additional feature will slow the heavily optimized # event/time ratio (i.e., the profiler would run slower, fur a very # low "value added" feature.) #************************************************************** def calibrate(self, m, verbose=0): if self.__class__ is not Profile: raise TypeError("Subclasses must override .calibrate().") saved_bias = self.bias self.bias = 0 try: return self._calibrate_inner(m, verbose) finally: self.bias = saved_bias def _calibrate_inner(self, m, verbose): get_time = self.get_time # Set up a test case to be run with and without profiling. Include # lots of calls, because we're trying to quantify stopwatch overhead. # Do not raise any exceptions, though, because we want to know # exactly how many profile events are generated (one call event, + # one return event, per Python-level call). def f1(n): for i in range(n): x = 1 def f(m, f1=f1): for i in range(m): f1(100) f(m) # warm up the cache # elapsed_noprofile <- time f(m) takes without profiling. t0 = get_time() f(m) t1 = get_time() elapsed_noprofile = t1 - t0 if verbose: print "elapsed time without profiling =", elapsed_noprofile # elapsed_profile <- time f(m) takes with profiling. The difference # is profiling overhead, only some of which the profiler subtracts # out on its own. p = Profile() t0 = get_time() p.runctx('f(m)', globals(), locals()) t1 = get_time() elapsed_profile = t1 - t0 if verbose: print "elapsed time with profiling =", elapsed_profile # reported_time <- "CPU seconds" the profiler charged to f and f1. 
total_calls = 0.0 reported_time = 0.0 for (filename, line, funcname), (cc, ns, tt, ct, callers) in \ p.timings.items(): if funcname in ("f", "f1"): total_calls += cc reported_time += tt if verbose: print "'CPU seconds' profiler reported =", reported_time print "total # calls =", total_calls if total_calls != m + 1: raise ValueError("internal error: total calls = %d" % total_calls) # reported_time - elapsed_noprofile = overhead the profiler wasn't # able to measure. Divide by twice the number of calls (since there # are two profiler events per call in this test) to get the hidden # overhead per event. mean = (reported_time - elapsed_noprofile) / 2.0 / total_calls if verbose: print "mean stopwatch overhead per profile event =", mean return mean #**************************************************************************** def Stats(*args): print 'Report generating functions are in the "pstats" module\a' def main(): usage = "profile.py [-o output_file_path] [-s sort] scriptfile [arg] ..." parser = OptionParser(usage=usage) parser.allow_interspersed_args = False parser.add_option('-o', '--outfile', dest="outfile", help="Save stats to <outfile>", default=None) parser.add_option('-s', '--sort', dest="sort", help="Sort order when printing to stdout, based on pstats.Stats class", default=-1) if not sys.argv[1:]: parser.print_usage() sys.exit(2) (options, args) = parser.parse_args() sys.argv[:] = args if len(args) > 0: progname = args[0] sys.path.insert(0, os.path.dirname(progname)) with open(progname, 'rb') as fp: code = compile(fp.read(), progname, 'exec') globs = { '__file__': progname, '__name__': '__main__', '__package__': None, } runctx(code, globs, None, options.outfile, options.sort) else: parser.print_usage() return parser # When invoked as main program, invoke the profiler on a script if __name__ == '__main__': main()
gpl-3.0
mhaberler/Canfestival-3
objdictgen/networkeditortemplate.py
11
5400
import wx from nodeeditortemplate import NodeEditorTemplate from subindextable import * from commondialogs import * [ID_NETWORKEDITNETWORKNODES, ] = [wx.NewId() for _init_ctrls in range(1)] class NetworkEditorTemplate(NodeEditorTemplate): def _init_ctrls(self, prnt): self.NetworkNodes = wx.Notebook(id=ID_NETWORKEDITNETWORKNODES, name='NetworkNodes', parent=prnt, pos=wx.Point(0, 0), size=wx.Size(0, 0), style=wx.NB_LEFT) self.NetworkNodes.Bind(wx.EVT_NOTEBOOK_PAGE_CHANGED, self.OnNodeSelectedChanged, id=ID_NETWORKEDITNETWORKNODES) def __init__(self, manager, frame, mode_solo): self.NodeList = manager NodeEditorTemplate.__init__(self, self.NodeList.GetManager(), frame, mode_solo) def GetCurrentNodeId(self): selected = self.NetworkNodes.GetSelection() # At init selected = -1 if selected > 0: window = self.NetworkNodes.GetPage(selected) return window.GetIndex() else: return 0 def RefreshCurrentIndexList(self): selected = self.NetworkNodes.GetSelection() if selected == 0: window = self.NetworkNodes.GetPage(selected) window.RefreshIndexList() else: pass def RefreshNetworkNodes(self): if self.NetworkNodes.GetPageCount() > 0: self.NetworkNodes.DeleteAllPages() if self.NodeList: new_editingpanel = EditingPanel(self.NetworkNodes, self, self.Manager) new_editingpanel.SetIndex(self.Manager.GetCurrentNodeID()) self.NetworkNodes.AddPage(new_editingpanel, "") for idx in self.NodeList.GetSlaveIDs(): new_editingpanel = EditingPanel(self.NetworkNodes, self, self.NodeList, False) new_editingpanel.SetIndex(idx) self.NetworkNodes.AddPage(new_editingpanel, "") def OnNodeSelectedChanged(self, event): if not self.Closing: selected = event.GetSelection() # At init selected = -1 if selected >= 0: window = self.NetworkNodes.GetPage(selected) self.NodeList.SetCurrentSelected(window.GetIndex()) wx.CallAfter(self.RefreshMainMenu) wx.CallAfter(self.RefreshStatusBar) event.Skip() #------------------------------------------------------------------------------- # Buffer Functions 
#------------------------------------------------------------------------------- def RefreshBufferState(self): if self.NodeList is not None: nodeID = self.Manager.GetCurrentNodeID() if nodeID != None: nodename = "0x%2.2X %s"%(nodeID, self.Manager.GetCurrentNodeName()) else: nodename = self.Manager.GetCurrentNodeName() self.NetworkNodes.SetPageText(0, nodename) for idx, name in enumerate(self.NodeList.GetSlaveNames()): self.NetworkNodes.SetPageText(idx + 1, name) #------------------------------------------------------------------------------- # Slave Nodes Management #------------------------------------------------------------------------------- def OnAddSlaveMenu(self, event): dialog = AddSlaveDialog(self.Frame) dialog.SetNodeList(self.NodeList) if dialog.ShowModal() == wx.ID_OK: values = dialog.GetValues() result = self.NodeList.AddSlaveNode(values["slaveName"], values["slaveNodeID"], values["edsFile"]) if not result: new_editingpanel = EditingPanel(self.NetworkNodes, self, self.NodeList, False) new_editingpanel.SetIndex(values["slaveNodeID"]) idx = self.NodeList.GetOrderNumber(values["slaveNodeID"]) self.NetworkNodes.InsertPage(idx, new_editingpanel, "") self.NodeList.SetCurrentSelected(idx) self.NetworkNodes.SetSelection(idx) self.RefreshBufferState() else: self.ShowErrorMessage(result) dialog.Destroy() def OnRemoveSlaveMenu(self, event): slavenames = self.NodeList.GetSlaveNames() slaveids = self.NodeList.GetSlaveIDs() dialog = wx.SingleChoiceDialog(self.Frame, _("Choose a slave to remove"), _("Remove slave"), slavenames) if dialog.ShowModal() == wx.ID_OK: choice = dialog.GetSelection() result = self.NodeList.RemoveSlaveNode(slaveids[choice]) if not result: slaveids.pop(choice) current = self.NetworkNodes.GetSelection() self.NetworkNodes.DeletePage(choice + 1) if self.NetworkNodes.GetPageCount() > 0: new_selection = min(current, self.NetworkNodes.GetPageCount() - 1) self.NetworkNodes.SetSelection(new_selection) if new_selection > 0: 
self.NodeList.SetCurrentSelected(slaveids[new_selection - 1]) self.RefreshBufferState() else: self.ShowErrorMessage(result) dialog.Destroy() def OpenMasterDCFDialog(self, node_id): self.NetworkNodes.SetSelection(0) self.NetworkNodes.GetPage(0).OpenDCFDialog(node_id)
lgpl-2.1
meredith-digops/ansible
lib/ansible/modules/cloud/azure/azure_rm_virtualnetwork.py
56
13795
#!/usr/bin/python # # Copyright (c) 2016 Matt Davis, <mdavis@ansible.com> # Chris Houseknecht, <house@redhat.com> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # ANSIBLE_METADATA = {'metadata_version': '1.0', 'status': ['preview'], 'supported_by': 'curated'} DOCUMENTATION = ''' --- module: azure_rm_virtualnetwork version_added: "2.1" short_description: Manage Azure virtual networks. description: - Create, update or delete a virtual networks. Allows setting and updating the available IPv4 address ranges and setting custom DNS servers. Use the azure_rm_subnet module to associate subnets with a virtual network. options: resource_group: description: - name of resource group. required: true address_prefixes_cidr: description: - List of IPv4 address ranges where each is formatted using CIDR notation. Required when creating a new virtual network or using purge_address_prefixes. aliases: - address_prefixes default: null required: false dns_servers: description: - Custom list of DNS servers. Maximum length of two. The first server in the list will be treated as the Primary server. This is an explicit list. Existing DNS servers will be replaced with the specified list. Use the purge_dns_servers option to remove all custom DNS servers and revert to default Azure servers. default: null required: false location: description: - Valid azure location. 
Defaults to location of the resource group. default: resource_group location required: false name: description: - name of the virtual network. required: true purge_address_prefixes: description: - Use with state present to remove any existing address_prefixes. default: false purge_dns_servers: description: - Use with state present to remove existing DNS servers, reverting to default Azure servers. Mutually exclusive with dns_servers. default: false required: false state: description: - Assert the state of the virtual network. Use 'present' to create or update and 'absent' to delete. default: present choices: - absent - present required: false extends_documentation_fragment: - azure - azure_tags author: - "Chris Houseknecht (@chouseknecht)" - "Matt Davis (@nitzmahone)" ''' EXAMPLES = ''' - name: Create a virtual network azure_rm_virtualnetwork: name: foobar resource_group: Testing address_prefixes_cidr: - "10.1.0.0/16" - "172.100.0.0/16" dns_servers: - "127.0.0.1" - "127.0.0.2" tags: testing: testing delete: on-exit - name: Delete a virtual network azure_rm_virtualnetwork: name: foobar resource_group: Testing state: absent ''' RETURN = ''' state: description: Current state of the virtual network. 
returned: always type: dict sample: { "address_prefixes": [ "10.1.0.0/16", "172.100.0.0/16" ], "dns_servers": [ "127.0.0.1", "127.0.0.3" ], "etag": 'W/"0712e87c-f02f-4bb3-8b9e-2da0390a3886"', "id": "/subscriptions/XXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXX/resourceGroups/Testing/providers/Microsoft.Network/virtualNetworks/my_test_network", "location": "eastus", "name": "my_test_network", "provisioning_state": "Succeeded", "tags": null, "type": "Microsoft.Network/virtualNetworks" } ''' from ansible.module_utils.basic import * from ansible.module_utils.azure_rm_common import * try: from msrestazure.azure_exceptions import CloudError from azure.mgmt.network.models import VirtualNetwork, AddressSpace, DhcpOptions except ImportError: # This is handled in azure_rm_common pass def virtual_network_to_dict(vnet): ''' Convert a virtual network object to a dict. :param vnet: VirtualNet object :return: dict ''' results = dict( id=vnet.id, name=vnet.name, location=vnet.location, type=vnet.type, tags=vnet.tags, provisioning_state=vnet.provisioning_state, etag=vnet.etag ) if vnet.dhcp_options and len(vnet.dhcp_options.dns_servers) > 0: results['dns_servers'] = [] for server in vnet.dhcp_options.dns_servers: results['dns_servers'].append(server) if vnet.address_space and len(vnet.address_space.address_prefixes) > 0: results['address_prefixes'] = [] for space in vnet.address_space.address_prefixes: results['address_prefixes'].append(space) return results class AzureRMVirtualNetwork(AzureRMModuleBase): def __init__(self): self.module_arg_spec = dict( resource_group=dict(type='str', required=True), name=dict(type='str', required=True), state=dict(type='str', default='present', choices=['present', 'absent']), location=dict(type='str'), address_prefixes_cidr=dict(type='list', aliases=['address_prefixes']), dns_servers=dict(type='list',), purge_address_prefixes=dict(type='bool', default=False, aliases=['purge']), purge_dns_servers=dict(type='bool', default=False), ) mutually_exclusive = [ 
('dns_servers', 'purge_dns_servers') ] required_if = [ ('purge_address_prefixes', True, ['address_prefixes_cidr']) ] self.resource_group = None self.name = None self.state = None self.location = None self.address_prefixes_cidr = None self.purge_address_prefixes = None self.dns_servers = None self.purge_dns_servers = None self.results=dict( changed=False, state=dict() ) super(AzureRMVirtualNetwork, self).__init__(self.module_arg_spec, mutually_exclusive=mutually_exclusive, required_if=required_if, supports_check_mode=True) def exec_module(self, **kwargs): for key in self.module_arg_spec.keys() + ['tags']: setattr(self, key, kwargs[key]) self.results['check_mode'] = self.check_mode resource_group = self.get_resource_group(self.resource_group) if not self.location: # Set default location self.location = resource_group.location if self.state == 'present' and self.purge_address_prefixes: for prefix in self.address_prefixes_cidr: if not CIDR_PATTERN.match(prefix): self.fail("Parameter error: invalid address prefix value {0}".format(prefix)) if self.dns_servers and len(self.dns_servers) > 2: self.fail("Parameter error: You can provide a maximum of 2 DNS servers.") changed = False results = dict() try: self.log('Fetching vnet {0}'.format(self.name)) vnet = self.network_client.virtual_networks.get(self.resource_group, self.name) results = virtual_network_to_dict(vnet) self.log('Vnet exists {0}'.format(self.name)) self.log(results, pretty_print=True) self.check_provisioning_state(vnet, self.state) if self.state == 'present': if self.address_prefixes_cidr: existing_address_prefix_set = set(vnet.address_space.address_prefixes) requested_address_prefix_set = set(self.address_prefixes_cidr) missing_prefixes = requested_address_prefix_set - existing_address_prefix_set extra_prefixes = existing_address_prefix_set - requested_address_prefix_set if len(missing_prefixes) > 0: self.log('CHANGED: there are missing address_prefixes') changed = True if not self.purge_address_prefixes: # 
add the missing prefixes for prefix in missing_prefixes: results['address_prefixes'].append(prefix) if len(extra_prefixes) > 0 and self.purge_address_prefixes: self.log('CHANGED: there are address_prefixes to purge') changed = True # replace existing address prefixes with requested set results['address_prefixes'] = self.address_prefixes_cidr update_tags, results['tags'] = self.update_tags(results['tags']) if update_tags: changed = True if self.dns_servers: existing_dns_set = set(vnet.dhcp_options.dns_servers) requested_dns_set = set(self.dns_servers) if existing_dns_set != requested_dns_set: self.log('CHANGED: replacing DNS servers') changed = True results['dns_servers'] = self.dns_servers if self.purge_dns_servers and vnet.dhcp_options and len(vnet.dhcp_options.dns_servers) > 0: self.log('CHANGED: purging existing DNS servers') changed = True results['dns_servers'] = [] elif self.state == 'absent': self.log("CHANGED: vnet exists but requested state is 'absent'") changed = True except CloudError: self.log('Vnet {0} does not exist'.format(self.name)) if self.state == 'present': self.log("CHANGED: vnet {0} does not exist but requested state is 'present'".format(self.name)) changed = True self.results['changed'] = changed self.results['state'] = results if self.check_mode: return self.results if changed: if self.state == 'present': if not results: # create a new virtual network self.log("Create virtual network {0}".format(self.name)) if not self.address_prefixes_cidr: self.fail('Parameter error: address_prefixes_cidr required when creating a virtual network') vnet = VirtualNetwork( location=self.location, address_space=AddressSpace( address_prefixes=self.address_prefixes_cidr ) ) if self.dns_servers: vnet.dhcp_options = DhcpOptions( dns_servers=self.dns_servers ) if self.tags: vnet.tags = self.tags self.results['state'] = self.create_or_update_vnet(vnet) else: # update existing virtual network self.log("Update virtual network {0}".format(self.name)) vnet = 
VirtualNetwork( location=results['location'], address_space=AddressSpace( address_prefixes=results['address_prefixes'] ), tags=results['tags'] ) if results.get('dns_servers'): vnet.dhcp_options = DhcpOptions( dns_servers=results['dns_servers'] ) self.results['state'] = self.create_or_update_vnet(vnet) elif self.state == 'absent': self.delete_virtual_network() self.results['state']['status'] = 'Deleted' return self.results def create_or_update_vnet(self, vnet): try: poller = self.network_client.virtual_networks.create_or_update(self.resource_group, self.name, vnet) new_vnet = self.get_poller_result(poller) except Exception as exc: self.fail("Error creating or updating virtual network {0} - {1}".format(self.name, str(exc))) return virtual_network_to_dict(new_vnet) def delete_virtual_network(self): try: poller = self.network_client.virtual_networks.delete(self.resource_group, self.name) result = self.get_poller_result(poller) except Exception as exc: self.fail("Error deleting virtual network {0} - {1}".format(self.name, str(exc))) return result def main(): AzureRMVirtualNetwork() if __name__ == '__main__': main()
gpl-3.0
xiaoxq/apollo
modules/tools/mapshow/libs/subplot_traj_speed.py
3
3099
#!/usr/bin/env python3 ############################################################################### # Copyright 2017 The Apollo Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ############################################################################### from matplotlib import cm as cmx from matplotlib import colors as mcolors import matplotlib.pyplot as plt class TrajSpeedSubplot: def __init__(self, ax): self.ax = ax self.speed_lines = [] self.speed_lines_size = 30 self.colors = [] self.init_colors() # self.colors = ['b','r', 'y', 'k'] for i in range(self.speed_lines_size): line, = ax.plot( [0], [0], c=self.colors[i % len(self.colors)], ls="-", marker='', lw=3, alpha=0.8) self.speed_lines.append(line) ax.set_xlabel("t (second)") # ax.set_xlim([-2, 10]) ax.set_ylim([-1, 25]) self.ax.autoscale_view() # self.ax.relim() ax.set_ylabel("speed (m/s)") ax.set_title("PLANNING SPEED") self.set_visible(False) def init_colors(self): self.colors = [] values = list(range(self.speed_lines_size)) jet = plt.get_cmap('brg') color_norm = mcolors.Normalize(vmin=0, vmax=values[-1]) scalar_map = cmx.ScalarMappable(norm=color_norm, cmap=jet) for val in values: color_val = scalar_map.to_rgba(val) self.colors.append(color_val) def set_visible(self, visible): for line in self.speed_lines: line.set_visible(visible) def show(self, planning): planning.traj_data_lock.acquire() for i in range(len(planning.traj_speed_t_history)): if i >= self.speed_lines_size: print("WARNING: 
number of path lines is more than " + str(self.speed_lines_size)) continue speed_line = self.speed_lines[self.speed_lines_size - i - 1] speed_line.set_xdata(planning.traj_speed_t_history[i]) speed_line.set_ydata(planning.traj_speed_v_history[i]) # speed_line.set_xdata([1,2,3,4]) # speed_line.set_ydata([1,2,3,4]) # speed_line.set_label(name[0:5]) speed_line.set_visible(True) # self.ax.legend(loc="upper left", borderaxespad=0., ncol=5) # self.ax.axis('equal') planning.traj_data_lock.release() self.ax.autoscale_view() self.ax.relim()
apache-2.0
lmprice/ansible
lib/ansible/modules/cloud/vmware/vmware_portgroup.py
10
20821
#!/usr/bin/python # -*- coding: utf-8 -*- # Copyright: (c) 2015, Joseph Callen <jcallen () csc.com> # Copyright: (c) 2017-18, Ansible Project # Copyright: (c) 2017-18, Abhijeet Kasurde <akasurde@redhat.com> # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = { 'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community' } DOCUMENTATION = ''' --- module: vmware_portgroup short_description: Create a VMware portgroup description: - Create a VMware portgroup on given host/s or hosts of given cluster version_added: 2.0 author: - Joseph Callen (@jcpowermac) - Russell Teague (@mtnbikenc) - Abhijeet Kasurde (@Akasurde) <akasurde@redhat.com> notes: - Tested on vSphere 5.5, 6.5 requirements: - "python >= 2.6" - PyVmomi options: switch_name: description: - vSwitch to modify. required: True portgroup_name: description: - Portgroup name to add. required: True vlan_id: description: - VLAN ID to assign to portgroup. required: True network_policy: description: - Network policy specifies layer 2 security settings for a portgroup such as promiscuous mode, where guest adapter listens to all the packets, MAC address changes and forged transmits. - Dict which configures the different security values for portgroup. - 'Valid attributes are:' - '- C(promiscuous_mode) (bool): indicates whether promiscuous mode is allowed. (default: false)' - '- C(forged_transmits) (bool): indicates whether forged transmits are allowed. (default: false)' - '- C(mac_changes) (bool): indicates whether mac changes are allowed. (default: false)' required: False version_added: "2.2" default: { mac_changes: false, promiscuous_mode: false, forged_transmits: false, } teaming_policy: description: - Dictionary which configures the different teaming values for portgroup. 
- 'Valid attributes are:' - '- C(load_balance_policy) (string): Network adapter teaming policy. (default: loadbalance_srcid)' - ' - choices: [ loadbalance_ip, loadbalance_srcmac, loadbalance_srcid, failover_explicit ]' - '- C(inbound_policy) (bool): Indicate whether or not the teaming policy is applied to inbound frames as well. (default: False)' - '- C(notify_switches) (bool): Indicate whether or not to notify the physical switch if a link fails. (default: True)' - '- C(rolling_order) (bool): Indicate whether or not to use a rolling policy when restoring links. (default: False)' required: False version_added: '2.6' default: { 'notify_switches': True, 'load_balance_policy': 'loadbalance_srcid', 'inbound_policy': False, 'rolling_order': False } cluster_name: description: - Name of cluster name for host membership. - Portgroup will be created on all hosts of the given cluster. - This option is required if C(hosts) is not specified. version_added: "2.5" hosts: description: - List of name of host or hosts on which portgroup needs to be added. - This option is required if C(cluster_name) is not specified. aliases: [ esxi_hostname ] version_added: "2.5" state: description: - Determines if the portgroup should be present or not. 
choices: - 'present' - 'absent' version_added: '2.5' default: present extends_documentation_fragment: vmware.documentation ''' EXAMPLES = r''' - name: Add Management Network VM Portgroup vmware_portgroup: hostname: esxi_hostname username: esxi_username password: esxi_password switch_name: vswitch_name portgroup_name: portgroup_name vlan_id: vlan_id - name: Add Portgroup with Promiscuous Mode Enabled vmware_portgroup: hostname: esxi_hostname username: esxi_username password: esxi_password switch_name: vswitch_name portgroup_name: portgroup_name network_policy: promiscuous_mode: True - name: Add Management Network VM Portgroup to specific hosts vmware_portgroup: hostname: vCenter_hostname username: esxi_username password: esxi_password hosts: [esxi_hostname_one] switch_name: vswitch_name portgroup_name: portgroup_name vlan_id: vlan_id - name: Add Management Network VM Portgroup to all hosts in a cluster vmware_portgroup: hostname: vCenter_hostname username: esxi_username password: esxi_password cluster_name: rh_engineering switch_name: vswitch_name portgroup_name: portgroup_name vlan_id: vlan_id - name: Remove Management Network VM Portgroup to all hosts in a cluster vmware_portgroup: hostname: vCenter_hostname username: esxi_username password: esxi_password cluster_name: rh_engineering switch_name: vswitch_name portgroup_name: portgroup_name vlan_id: vlan_id state: absent - name: Add Portgroup with teaming policy vmware_portgroup: hostname: esxi_hostname username: esxi_username password: esxi_password switch_name: vswitch_name portgroup_name: portgroup_name teaming_policy: load_balance_policy: 'failover_explicit' inbound_policy: True register: teaming_result ''' RETURN = r''' result: description: metadata about the portgroup returned: always type: dict sample: { "esxi01.example.com": { "portgroup_name": "pg0010", "switch_name": "vswitch_0001", "vlan_id": 1 } } ''' try: from pyVmomi import vim, vmodl except ImportError: pass from ansible.module_utils.basic import 
AnsibleModule from ansible.module_utils.vmware import PyVmomi, vmware_argument_spec from ansible.module_utils._text import to_native class PyVmomiHelper(PyVmomi): def __init__(self, module): super(PyVmomiHelper, self).__init__(module) hosts = self.params['hosts'] cluster = self.params['cluster_name'] self.portgroup_name = self.params['portgroup_name'] self.switch_name = self.params['switch_name'] self.vlan_id = self.params['vlan_id'] self.promiscuous_mode = self.params['network_policy'].get('promiscuous_mode') self.forged_transmits = self.params['network_policy'].get('forged_transmits') self.mac_changes = self.params['network_policy'].get('mac_changes') self.network_policy = self.create_network_policy() self.state = self.params['state'] self.host_obj_list = self.get_all_host_objs(cluster_name=cluster, esxi_host_name=hosts) def process_state(self): """ Function to manage state """ if self.state == 'present': self.add_hosts_port_group() elif self.state == 'absent': self.remove_hosts_port_group() # Get def get_port_group_by_name(self, host_system, portgroup_name, vswitch_name): """ Function to get specific port group by given name Args: host_system: Name of Host System portgroup_name: Name of Port Group vswitch_name: Name of vSwitch Returns: List of port groups by given specifications """ pgs_list = self.get_all_port_groups_by_host(host_system=host_system) desired_pgs = [] for pg in pgs_list: if pg.spec.name == portgroup_name and pg.spec.vswitchName == vswitch_name: desired_pgs.append(pg) return desired_pgs @staticmethod def check_network_policy_diff(current_policy, desired_policy): """ Function to find difference between existing network policy and user given network policy Args: current_policy: Current network policy desired_policy: User defined network policy Returns: True if difference found, False if not. 
""" ret = False if (current_policy.security.allowPromiscuous != desired_policy.security.allowPromiscuous) or \ (current_policy.security.forgedTransmits != desired_policy.security.forgedTransmits) or \ (current_policy.security.macChanges != desired_policy.security.macChanges) or \ (current_policy.nicTeaming.policy != desired_policy.nicTeaming.policy) or \ (current_policy.nicTeaming.reversePolicy != desired_policy.nicTeaming.reversePolicy) or \ (current_policy.nicTeaming.notifySwitches != desired_policy.nicTeaming.notifySwitches) or \ (current_policy.nicTeaming.rollingOrder != desired_policy.nicTeaming.rollingOrder): ret = True return ret # Add def add_hosts_port_group(self): """ Function to add port group to given hosts """ results = dict(changed=False, result=dict()) host_change_list = [] for host in self.host_obj_list: change = False results['result'][host.name] = dict(portgroup_name=self.portgroup_name, vlan_id=self.vlan_id, switch_name=self.switch_name) change = self.create_host_port_group(host, self.portgroup_name, self.vlan_id, self.switch_name, self.network_policy) host_change_list.append(change) if any(host_change_list): results['changed'] = True self.module.exit_json(**results) def create_host_port_group(self, host_system, portgroup_name, vlan_id, vswitch_name, network_policy): """ Function to create/update portgroup on given host using portgroup specifications Args: host_system: Name of Host System portgroup_name: Name of Portgroup vlan_id: The VLAN ID for ports using this port group. 
vswitch_name: Name of vSwitch Name network_policy: Network policy object """ desired_pgs = self.get_port_group_by_name(host_system=host_system, portgroup_name=portgroup_name, vswitch_name=vswitch_name) port_group = vim.host.PortGroup.Config() port_group.spec = vim.host.PortGroup.Specification() port_changed = False if not desired_pgs: # Add new portgroup port_group.spec.name = portgroup_name port_group.spec.vlanId = vlan_id port_group.spec.vswitchName = vswitch_name port_group.spec.policy = network_policy try: host_system.configManager.networkSystem.AddPortGroup(portgrp=port_group.spec) port_changed = True except vim.fault.AlreadyExists as e: # To match with other vmware_* modules if portgroup # exists, we proceed, as user may want idempotency pass except vim.fault.NotFound as not_found: self.module.fail_json(msg="Failed to add Portgroup as vSwitch" " was not found: %s" % to_native(not_found.msg)) except vim.fault.HostConfigFault as host_config_fault: self.module.fail_json(msg="Failed to add Portgroup due to host system" " configuration failure : %s" % to_native(host_config_fault.msg)) except vmodl.fault.InvalidArgument as invalid_argument: self.module.fail_json(msg="Failed to add Portgroup as VLAN id was not" " correct as per specifications: %s" % to_native(invalid_argument.msg)) except Exception as generic_exception: self.module.fail_json(msg="Failed to add Portgroup due to generic" " exception : %s" % to_native(generic_exception)) else: # Change portgroup port_group.spec.name = portgroup_name port_group.spec.vlanId = vlan_id port_group.spec.policy = network_policy port_group.spec.vswitchName = desired_pgs[0].spec.vswitchName if self.check_network_policy_diff(desired_pgs[0].spec.policy, network_policy) or \ desired_pgs[0].spec.vlanId != vlan_id: port_changed = True try: host_system.configManager.networkSystem.UpdatePortGroup(pgName=self.portgroup_name, portgrp=port_group.spec) except vim.fault.AlreadyExists as e: # To match with other vmware_* modules if 
portgroup # exists, we proceed, as user may want idempotency pass except vim.fault.NotFound as not_found: self.module.fail_json(msg="Failed to update Portgroup as" " vSwitch was not found: %s" % to_native(not_found.msg)) except vim.fault.HostConfigFault as host_config_fault: self.module.fail_json(msg="Failed to update Portgroup due to host" " system configuration failure : %s" % to_native(host_config_fault.msg)) except vmodl.fault.InvalidArgument as invalid_argument: self.module.fail_json(msg="Failed to update Portgroup as VLAN id was not" " correct as per specifications: %s" % to_native(invalid_argument.msg)) except Exception as generic_exception: self.module.fail_json(msg="Failed to update Portgroup due to generic" " exception : %s" % to_native(generic_exception)) return port_changed def create_network_policy(self): """ Function to create Network policy Returns: Network policy object """ security_policy = vim.host.NetworkPolicy.SecurityPolicy() if self.promiscuous_mode: security_policy.allowPromiscuous = self.promiscuous_mode if self.forged_transmits: security_policy.forgedTransmits = self.forged_transmits if self.mac_changes: security_policy.macChanges = self.mac_changes # Teaming Policy teaming_policy = vim.host.NetworkPolicy.NicTeamingPolicy() teaming_policy.policy = self.module.params['teaming_policy']['load_balance_policy'] teaming_policy.reversePolicy = self.module.params['teaming_policy']['inbound_policy'] teaming_policy.notifySwitches = self.module.params['teaming_policy']['notify_switches'] teaming_policy.rollingOrder = self.module.params['teaming_policy']['rolling_order'] network_policy = vim.host.NetworkPolicy(nicTeaming=teaming_policy, security=security_policy) return network_policy def remove_hosts_port_group(self): """ Function to remove port group from given host """ results = dict(changed=False, result=dict()) host_change_list = [] for host in self.host_obj_list: change = False results['result'][host.name] = 
dict(portgroup_name=self.portgroup_name, vlan_id=self.vlan_id, switch_name=self.switch_name) change = self.remove_host_port_group(host_system=host, portgroup_name=self.portgroup_name, vswitch_name=self.switch_name) host_change_list.append(change) if any(host_change_list): results['changed'] = True self.module.exit_json(**results) def remove_host_port_group(self, host_system, portgroup_name, vswitch_name): """ Function to remove port group depending upon host system, port group name and vswitch name Args: host_system: Name of Host System portgroup_name: Name of Portgroup vswitch_name: Name of vSwitch """ changed = False desired_pgs = self.get_port_group_by_name(host_system=host_system, portgroup_name=portgroup_name, vswitch_name=vswitch_name) if desired_pgs: try: host_system.configManager.networkSystem.RemovePortGroup(pgName=self.portgroup_name) changed = True except vim.fault.NotFound as not_found: self.module.fail_json(msg="Failed to remove Portgroup as it was" " not found: %s" % to_native(not_found.msg)) except vim.fault.ResourceInUse as resource_in_use: self.module.fail_json(msg="Failed to remove Portgroup as it is" " in use: %s" % to_native(resource_in_use.msg)) except vim.fault.HostConfigFault as host_config_fault: self.module.fail_json(msg="Failed to remove Portgroup due to configuration" " failures: %s" % to_native(host_config_fault.msg)) except Exception as generic_exception: self.module.fail_json(msg="Failed to remove Portgroup due to generic" " exception : %s" % to_native(generic_exception)) return changed def main(): argument_spec = vmware_argument_spec() argument_spec.update(dict( portgroup_name=dict(required=True, type='str'), switch_name=dict(required=True, type='str'), vlan_id=dict(required=True, type='int'), hosts=dict(type='list', aliases=['esxi_hostname']), cluster_name=dict(type='str'), state=dict(type='str', choices=['present', 'absent'], default='present'), network_policy=dict(type='dict', options=dict( promiscuous_mode=dict(type='bool'), 
forged_transmits=dict(type='bool'), mac_changes=dict(type='bool'), ), default=dict( promiscuous_mode=False, forged_transmits=False, mac_changes=False, ) ), teaming_policy=dict( type='dict', options=dict( inbound_policy=dict(type='bool', default=False), notify_switches=dict(type='bool', default=True), rolling_order=dict(type='bool', default=False), load_balance_policy=dict(type='str', default='loadbalance_srcid', choices=[ 'loadbalance_ip', 'loadbalance_srcmac', 'loadbalance_srcid', 'failover_explicit', ], ) ), default=dict( inbound_policy=False, notify_switches=True, rolling_order=False, load_balance_policy='loadbalance_srcid', ), ), ) ) module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=False, required_one_of=[ ['cluster_name', 'hosts'], ] ) try: pyv = PyVmomiHelper(module) pyv.process_state() except vmodl.RuntimeFault as runtime_fault: module.fail_json(msg=to_native(runtime_fault.msg)) except vmodl.MethodFault as method_fault: module.fail_json(msg=to_native(method_fault.msg)) except Exception as e: module.fail_json(msg=to_native(e)) if __name__ == '__main__': main()
gpl-3.0
pozdnyakov/chromium-crosswalk
chrome/test/functional/multiprofile.py
7
14471
#!/usr/bin/env python # Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import re import pyauto_functional import pyauto class MultiprofileTest(pyauto.PyUITest): """Tests for Multi-Profile / Multi-users""" _RESTORE_STARTUP_URL_VALUE = 4 _RESTORE_LASTOPEN_URL_VALUE = 1 _RESTORE_DEFAULT_URL_VALUE = 0 def Debug(self): """Test method for experimentation. This method will not run automatically. """ while True: raw_input('Hit <enter> to dump info.. ') self.pprint(self.GetMultiProfileInfo()) def _GetSearchEngineWithKeyword(self, keyword, windex=0): """Get search engine info and return an element that matches keyword. Args: keyword: Search engine keyword field. windex: The window index, default is 0. Returns: A search engine info dict or None. """ match_list = ([x for x in self.GetSearchEngineInfo(windex=windex) if x['keyword'] == keyword]) if match_list: return match_list[0] return None def _SetPreferences(self, dict, windex=0): """Sets preferences settings. Args: _dict: Dictionary of key preferences and its value to be set. windex: The window index, defaults to 0 (the first window). """ for key in dict.iterkeys(): self.SetPrefs(key, dict[key], windex=windex) def _SetStartUpPage(self, url, windex=0): """Set start up page. Args: url: URL of the page to be set as start up page. windex: The window index, default is 0. """ _dict = {pyauto.kURLsToRestoreOnStartup: [url], pyauto.kRestoreOnStartup: self._RESTORE_STARTUP_URL_VALUE} self._SetPreferences(_dict, windex=windex) prefs_info = self.GetPrefsInfo(windex=windex).Prefs( pyauto.kURLsToRestoreOnStartup) self.assertTrue(url in prefs_info) def _SetHomePage(self, url, windex=0): """Create new profile and set home page. Args: url: URL of the page to be set as home page windex: The window index, default is 0. 
""" _dict = {pyauto.kHomePage: url, pyauto.kHomePageIsNewTabPage: False, pyauto.kShowHomeButton: True, pyauto.kRestoreOnStartup: self._RESTORE_DEFAULT_URL_VALUE} self._SetPreferences(_dict, windex=windex) self.assertTrue(url in self.GetPrefsInfo(windex=windex).Prefs(pyauto.kHomePage)) def _SetSessionRestoreURLs(self, set_restore, windex=0): """Create new profile and set home page. Args: set_restore: Value of action of start up. windex: The window index, default is 0. """ self.NavigateToURL('http://www.google.com/', windex) self.AppendTab(pyauto.GURL('http://news.google.com/'), windex) num_tabs = self.GetTabCount(windex) dict = {pyauto.kRestoreOnStartup: set_restore} self._SetPreferences(dict, windex=windex) def _AddSearchEngine(self, title, keyword, url, windex=0): """Add search engine. Args: title: Name for search engine. keyword: Keyword, used to initiate a custom search from omnibox. url: URL template for this search engine's query. windex: The window index, default is 0. """ self.AddSearchEngine(title, keyword, url, windex=windex) name = self._GetSearchEngineWithKeyword(keyword, windex=windex) self.assertTrue(name) def _AssertStartUpPage(self, url, profile='Default'): """Asserts start up page for given profile. Args: url: URL of the page to be set as start up page profile: The profile name, defaults to 'Default'. """ self.AppendBrowserLaunchSwitch('--profile-directory=' + profile) self.RestartBrowser(clear_profile=False) info = self.GetBrowserInfo() self.assertEqual(url, info['windows'][0]['tabs'][0]['url'].rstrip('/')) self.assertTrue(url in self.GetPrefsInfo().Prefs(pyauto.kURLsToRestoreOnStartup)) def _AssertHomePage(self, url, profile='Default'): """Asserts home page for given profile. Args: url: URL of the page to be set as home page profile: The profile name, defaults to 'Dafault'. 
""" self.AppendBrowserLaunchSwitch('--profile-directory=' + profile) self.RestartBrowser(clear_profile=False) self.assertTrue(url in self.GetPrefsInfo().Prefs(pyauto.kHomePage)) def _AssertDefaultSearchEngine(self, search_engine, profile='Default'): """Asserts default search engine for given profile. Args: search_engine: Name of default search engine. profile: The profile name, defaults to 'Default'. """ self.AppendBrowserLaunchSwitch('--profile-directory=' + profile) self.RestartBrowser(clear_profile=False) name = self._GetSearchEngineWithKeyword(search_engine) self.assertTrue(name['is_default']) self.SetOmniboxText('test search') self.OmniboxAcceptInput() self.assertTrue(re.search(search_engine, self.GetActiveTabURL().spec())) def _AssertSessionRestore(self, url_list, set_restore, num_tabs=1, profile='Default'): """Asserts urls when session is set to restored or set default. Args: url_list: List of URL to be restored. set_restore: Value of action of start up. num_tabs: Number of tabs to be restored, default is 1. profile: The profile name, defaults to 'Default'. """ self.AppendBrowserLaunchSwitch('--profile-directory=' + profile) self.RestartBrowser(clear_profile=False) self.assertEqual(num_tabs, self.GetTabCount()) self.assertEqual(self.GetPrefsInfo().Prefs(pyauto.kRestoreOnStartup), set_restore) tab_index = 0 while (tab_index < num_tabs): self.ActivateTab(tab_index) self.assertEqual(url_list[tab_index], self.GetActiveTabURL().spec()) tab_index += 1 def testBasic(self): """Multi-profile windows can open.""" self.assertEqual(1, self.GetBrowserWindowCount()) self.assertTrue(self.GetMultiProfileInfo()['enabled'], msg='Multi-profile is not enabled') self.OpenNewBrowserWindowWithNewProfile() # Verify multi-profile info. multi_profile = self.GetMultiProfileInfo() self.assertEqual(2, len(multi_profile['profiles'])) new_profile = multi_profile['profiles'][1] self.assertTrue(new_profile['name']) # Verify browser windows. 
self.assertEqual(2, self.GetBrowserWindowCount(), msg='New browser window did not open') info = self.GetBrowserInfo() new_profile_window = info['windows'][1] self.assertEqual('Profile 1', new_profile_window['profile_path']) self.assertEqual(1, len(new_profile_window['tabs'])) self.assertEqual('chrome://newtab/', new_profile_window['tabs'][0]['url']) def test20NewProfiles(self): """Verify we can create 20 new profiles.""" for index in range(1, 21): self.OpenNewBrowserWindowWithNewProfile() multi_profile = self.GetMultiProfileInfo() self.assertEqual(index + 1, len(multi_profile['profiles']), msg='Expected %d profiles after adding %d new users. Got %d' % ( index + 1, index, len(multi_profile['profiles']))) def testStartUpPageOptionInMultiProfile(self): """Test startup page for Multi-profile windows.""" self.assertTrue(self.GetMultiProfileInfo()['enabled'], msg='Multi-profile is not enabled') # Launch browser with new Profile 1, set startup page to 'www.google.com'. self.OpenNewBrowserWindowWithNewProfile() self._SetStartUpPage('http://www.google.com', windex=1) # Launch browser with new Profile 2, set startup page to 'www.yahoo.com'. self.OpenNewBrowserWindowWithNewProfile() # Verify start up page for Profile 2 is still newtab page. info = self.GetBrowserInfo() self.assertEqual('chrome://newtab/', info['windows'][2]['tabs'][0]['url']) self._SetStartUpPage('http://www.yahoo.com', windex=2) # Exit Profile 1 / Profile 2 self.CloseBrowserWindow(2) self.CloseBrowserWindow(1) # Relaunch Browser with Profile 2, verify startup page. self._AssertStartUpPage('http://www.yahoo.com', profile='Profile 2') # Relaunch Browser with Profile 1, verify startup page. self._AssertStartUpPage('http://www.google.com', profile='Profile 1') def testHomePageOptionMultiProfile(self): """Test Home page for Multi-profile windows.""" self.assertTrue(self.GetMultiProfileInfo()['enabled'], msg='Multi-profile is not enabled') # Launch browser with new Profile 1, set homepage to 'www.google.com'. 
self.OpenNewBrowserWindowWithNewProfile() self._SetHomePage('http://www.google.com', windex=1) # Launch browser with new Profile 2, set homepage to 'www.yahoo.com'. self.OpenNewBrowserWindowWithNewProfile() self._SetHomePage('http://www.yahoo.com', windex=2) # Exit Profile 1 / Profile 2 self.CloseBrowserWindow(2) self.CloseBrowserWindow(1) # Relaunch Browser with Profile 2, verify startup page. self._AssertHomePage('http://www.yahoo.com', profile='Profile 2') # Relaunch Browser with Profile 1, verify startup page. self._AssertHomePage('http://www.google.com', profile='Profile 1') def testSessionRestoreInMultiProfile(self): """Test session restore preference for Multi-profile windows.""" self.assertTrue(self.GetMultiProfileInfo()['enabled'], msg='Multi-profile is not enabled') # Launch browser with new Profile 1, set pref to restore session on # startup. self.OpenNewBrowserWindowWithNewProfile() self._SetSessionRestoreURLs(self._RESTORE_LASTOPEN_URL_VALUE, windex=1) # Launch browser with new Profile 2, do not set session restore pref. self.OpenNewBrowserWindowWithNewProfile() self._SetSessionRestoreURLs(self._RESTORE_DEFAULT_URL_VALUE, windex=2) # Exit Profile 1 / Profile 2 self.CloseBrowserWindow(2) self.CloseBrowserWindow(1) # Relaunch Browser with Profile 1, verify session restores on startup. url_list = ['http://www.google.com/', 'http://news.google.com/'] self._AssertSessionRestore(url_list, self._RESTORE_LASTOPEN_URL_VALUE, num_tabs=2, profile='Profile 1') # Relaunch Browser with Profile 2, verify session does not get restored. 
url_list = ['chrome://newtab/'] self._AssertSessionRestore(url_list, self._RESTORE_DEFAULT_URL_VALUE, num_tabs=1, profile='Profile 2') def testInstantSearchInMultiProfile(self): """Test instant search for Multi-profile windows.""" self.assertTrue(self.GetMultiProfileInfo()['enabled'], msg='Multi-profile is not enabled') # Launch browser with new Profile 1, enable instant search self.OpenNewBrowserWindowWithNewProfile() self.SetPrefs(pyauto.kSearchInstantEnabled, True, windex=1) prefs_info = self.GetPrefsInfo(windex=1) self.assertTrue(prefs_info.Prefs(pyauto.kSearchInstantEnabled)) # Launch browser with new Profile 2. self.OpenNewBrowserWindowWithNewProfile() # Exit Profile 1 / Profile 2 self.CloseBrowserWindow(2) self.CloseBrowserWindow(1) # Relaunch Browser with Profile 1, verify instant search is enabled. self.AppendBrowserLaunchSwitch('--profile-directory=Profile 1') self.RestartBrowser(clear_profile=False) self.assertTrue(self.GetPrefsInfo().Prefs(pyauto.kSearchInstantEnabled)) # Relaunch Browser with Profile 2, verify instant search is disabled. self.AppendBrowserLaunchSwitch('--profile-directory=Profile 2') self.RestartBrowser(clear_profile=False) self.assertFalse(self.GetPrefsInfo().Prefs(pyauto.kSearchInstantEnabled)) def testMakeSearchEngineDefaultInMultiprofile(self): """Test adding and making a search engine default for Multi-profiles.""" self.assertTrue(self.GetMultiProfileInfo()['enabled'], msg='Multi-profile is not enabled') # Launch browser with new Profile 1, add search engine to 'Hulu'. self.OpenNewBrowserWindowWithNewProfile() self._AddSearchEngine('Hulu', 'hulu.com', 'http://www.hulu.com/search?query=%s&ref=os&src={referrer:source?}', 1) self.MakeSearchEngineDefault('hulu.com', windex=1) # Launch browser with new Profile 2, add search engine to 'Youtube'. 
self.OpenNewBrowserWindowWithNewProfile() self._AddSearchEngine('YouTube Video Search', 'youtube.com', 'http://www.youtube.com/results?search_query=%s&page={startPage?}'+ '&utm_source=opensearch', 2) self.MakeSearchEngineDefault('youtube.com', windex=2) # Exit Profile 1 / Profile 2 self.CloseBrowserWindow(2) self.CloseBrowserWindow(1) # Relaunch Browser with Profile 1, verify default search engine as 'Hulu'. self._AssertDefaultSearchEngine('hulu.com', profile='Profile 1') # Relaunch Browser with Profile 2, verify default search engine as # 'Youtube'. self._AssertDefaultSearchEngine('youtube.com', profile='Profile 2') def testDeleteSearchEngineInMultiprofile(self): """Test adding then deleting a search engine for Multi-profiles.""" self.assertTrue(self.GetMultiProfileInfo()['enabled'], msg='Multi-profile is not enabled') # Launch browser with new Profile 1, add 'foo.com' as new search engine. self.OpenNewBrowserWindowWithNewProfile() self._AddSearchEngine('foo', 'foo.com', 'http://foo/?q=%s', windex=1) # Launch browser with new Profile 2, add 'foo.com' as new search engine. self.OpenNewBrowserWindowWithNewProfile() self._AddSearchEngine('foo', 'foo.com', 'http://foo/?q=%s', windex=2) # Delete search engine 'foo.com' from Profile 1 and exit. self.DeleteSearchEngine('foo.com', windex=1) self.CloseBrowserWindow(2) self.CloseBrowserWindow(1) # Relaunch Browser with Profile 1, verify search engine 'foo.com' # is deleted. self.AppendBrowserLaunchSwitch('--profile-directory=Profile 1') self.RestartBrowser(clear_profile=False) foo = self._GetSearchEngineWithKeyword('foo.com') self.assertFalse(foo) # Relaunch Browser with Profile 2, verify search engine 'foo.com' # is not deleted. self.AppendBrowserLaunchSwitch('--profile-directory=Profile 2') self.RestartBrowser(clear_profile=False) foo = self._GetSearchEngineWithKeyword('foo.com') self.assertTrue(foo) if __name__ == '__main__': pyauto_functional.Main()
bsd-3-clause
mancoast/CPythonPyc_test
fail/320_test_wave.py
64
1875
from test.support import TESTFN, run_unittest import os import wave import struct import unittest nchannels = 2 sampwidth = 2 framerate = 8000 nframes = 100 class TestWave(unittest.TestCase): def setUp(self): self.f = None def tearDown(self): if self.f is not None: self.f.close() try: os.remove(TESTFN) except OSError: pass def test_it(self, test_rounding=False): self.f = wave.open(TESTFN, 'wb') self.f.setnchannels(nchannels) self.f.setsampwidth(sampwidth) if test_rounding: self.f.setframerate(framerate - 0.1) else: self.f.setframerate(framerate) self.f.setnframes(nframes) output = b'\0' * nframes * nchannels * sampwidth self.f.writeframes(output) self.f.close() self.f = wave.open(TESTFN, 'rb') self.assertEqual(nchannels, self.f.getnchannels()) self.assertEqual(sampwidth, self.f.getsampwidth()) self.assertEqual(framerate, self.f.getframerate()) self.assertEqual(nframes, self.f.getnframes()) self.assertEqual(self.f.readframes(nframes), output) def test_fractional_framerate(self): """ Addresses [ 1512791 ] module wave does no rounding Floating point framerates should be rounded, rather than truncated. """ self.test_it(test_rounding=True) def test_issue7681(self): self.f = wave.open(TESTFN, 'wb') self.f.setnchannels(nchannels) self.f.setsampwidth(sampwidth) self.f.setframerate(framerate) # Don't call setnframes, make _write_header divide to figure it out output = b'\0' * nframes * nchannels * sampwidth self.f.writeframes(output) def test_main(): run_unittest(TestWave) if __name__ == '__main__': test_main()
gpl-3.0
canavandl/bokeh
bokeh/server/views/bbauth.py
29
2919
from __future__ import absolute_import import logging from functools import wraps from flask import abort, jsonify from ..app import bokeh_app from ..models import docs from ...exceptions import AuthenticationException logger = logging.getLogger(__name__) def handle_auth_error(func): """Decorator wraps a function and watches for AuthenticationException If one is thrown, log and abort 401 instead """ @wraps(func) def wrapper(*args, **kwargs): try: return func(*args, **kwargs) except AuthenticationException as e: logger.exception(e) return abort(401) return wrapper def check_read_authentication(func): @wraps(func) def wrapper(docid, *args, **kwargs): if bokeh_app.authentication.can_read_doc(docid): return func(docid, *args, **kwargs) else: abort(401) return wrapper def check_write_authentication(func): @wraps(func) def wrapper(docid, *args, **kwargs): if bokeh_app.authentication.can_write_doc(docid): return func(docid, *args, **kwargs) else: abort(401) return wrapper @bokeh_app.route('/bokeh/login', methods=['GET']) def login_get(): ''' Log in a user from a form. :status 200: render login view ''' return bokeh_app.authentication.login_get() @bokeh_app.route('/bokeh/login', methods=['POST']) def login_post(): ''' Log in user from a submission. :status 200: if API flag set, log in status :status 302: if API flag not set, redirect to index on success, to login on failue ''' return bokeh_app.authentication.login_post() @bokeh_app.route('/bokeh/loginfromapikey', methods=['GET']) def login_from_apikey(): ''' Log in a user from an API key. :status 302: redirect to index on success, to login on failure ''' return bokeh_app.authentication.login_from_apikey() @bokeh_app.route('/bokeh/register', methods=['GET']) def register_get(): ''' Register a new user via a view. :status 200: render registration form ''' return bokeh_app.authentication.register_get() @bokeh_app.route('/bokeh/register', methods=['POST']) def register_post(): ''' Register a new user via a submission. 
:status 200: registration result ''' return bokeh_app.authentication.register_post() @bokeh_app.route('/bokeh/logout') def logout(): ''' Log out the current user. :status 302: redirect to index ''' return bokeh_app.authentication.logout() @bokeh_app.route('/bokeh/<docid>/publish', methods=['POST']) def publish(docid): #bokehuser = bokeh_app.current_user() doc = docs.Doc.load(bokeh_app.servermodel_storage, docid) if not bokeh_app.authentication.can_write_doc(docid): return abort(401) doc.published = True doc.save(bokeh_app.servermodel_storage) return jsonify(status='success')
bsd-3-clause
luoqii/repo
git_config.py
18
18791
# # Copyright (C) 2008 The Android Open Source Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import cPickle import os import re import subprocess import sys try: import threading as _threading except ImportError: import dummy_threading as _threading import time import urllib2 from signal import SIGTERM from urllib2 import urlopen, HTTPError from error import GitError, UploadError from trace import Trace from git_command import GitCommand from git_command import ssh_sock from git_command import terminate_ssh_clients R_HEADS = 'refs/heads/' R_TAGS = 'refs/tags/' ID_RE = re.compile('^[0-9a-f]{40}$') REVIEW_CACHE = dict() def IsId(rev): return ID_RE.match(rev) def _key(name): parts = name.split('.') if len(parts) < 2: return name.lower() parts[ 0] = parts[ 0].lower() parts[-1] = parts[-1].lower() return '.'.join(parts) class GitConfig(object): _ForUser = None @classmethod def ForUser(cls): if cls._ForUser is None: cls._ForUser = cls(file = os.path.expanduser('~/.gitconfig')) return cls._ForUser @classmethod def ForRepository(cls, gitdir, defaults=None): return cls(file = os.path.join(gitdir, 'config'), defaults = defaults) def __init__(self, file, defaults=None, pickleFile=None): self.file = file self.defaults = defaults self._cache_dict = None self._section_dict = None self._remotes = {} self._branches = {} if pickleFile is None: self._pickle = os.path.join( os.path.dirname(self.file), '.repopickle_' + os.path.basename(self.file)) else: self._pickle = pickleFile def 
ClearCache(self): if os.path.exists(self._pickle): os.remove(self._pickle) self._cache_dict = None self._section_dict = None self._remotes = {} self._branches = {} def Has(self, name, include_defaults = True): """Return true if this configuration file has the key. """ if _key(name) in self._cache: return True if include_defaults and self.defaults: return self.defaults.Has(name, include_defaults = True) return False def GetBoolean(self, name): """Returns a boolean from the configuration file. None : The value was not defined, or is not a boolean. True : The value was set to true or yes. False: The value was set to false or no. """ v = self.GetString(name) if v is None: return None v = v.lower() if v in ('true', 'yes'): return True if v in ('false', 'no'): return False return None def GetString(self, name, all=False): """Get the first value for a key, or None if it is not defined. This configuration file is used first, if the key is not defined or all = True then the defaults are also searched. """ try: v = self._cache[_key(name)] except KeyError: if self.defaults: return self.defaults.GetString(name, all = all) v = [] if not all: if v: return v[0] return None r = [] r.extend(v) if self.defaults: r.extend(self.defaults.GetString(name, all = True)) return r def SetString(self, name, value): """Set the value(s) for a key. Only this configuration file is modified. The supplied value should be either a string, or a list of strings (to store multiple values). 
""" key = _key(name) try: old = self._cache[key] except KeyError: old = [] if value is None: if old: del self._cache[key] self._do('--unset-all', name) elif isinstance(value, list): if len(value) == 0: self.SetString(name, None) elif len(value) == 1: self.SetString(name, value[0]) elif old != value: self._cache[key] = list(value) self._do('--replace-all', name, value[0]) for i in xrange(1, len(value)): self._do('--add', name, value[i]) elif len(old) != 1 or old[0] != value: self._cache[key] = [value] self._do('--replace-all', name, value) def GetRemote(self, name): """Get the remote.$name.* configuration values as an object. """ try: r = self._remotes[name] except KeyError: r = Remote(self, name) self._remotes[r.name] = r return r def GetBranch(self, name): """Get the branch.$name.* configuration values as an object. """ try: b = self._branches[name] except KeyError: b = Branch(self, name) self._branches[b.name] = b return b def GetSubSections(self, section): """List all subsection names matching $section.*.* """ return self._sections.get(section, set()) def HasSection(self, section, subsection = ''): """Does at least one key in section.subsection exist? 
""" try: return subsection in self._sections[section] except KeyError: return False @property def _sections(self): d = self._section_dict if d is None: d = {} for name in self._cache.keys(): p = name.split('.') if 2 == len(p): section = p[0] subsect = '' else: section = p[0] subsect = '.'.join(p[1:-1]) if section not in d: d[section] = set() d[section].add(subsect) self._section_dict = d return d @property def _cache(self): if self._cache_dict is None: self._cache_dict = self._Read() return self._cache_dict def _Read(self): d = self._ReadPickle() if d is None: d = self._ReadGit() self._SavePickle(d) return d def _ReadPickle(self): try: if os.path.getmtime(self._pickle) \ <= os.path.getmtime(self.file): os.remove(self._pickle) return None except OSError: return None try: Trace(': unpickle %s', self.file) fd = open(self._pickle, 'rb') try: return cPickle.load(fd) finally: fd.close() except EOFError: os.remove(self._pickle) return None except IOError: os.remove(self._pickle) return None except cPickle.PickleError: os.remove(self._pickle) return None def _SavePickle(self, cache): try: fd = open(self._pickle, 'wb') try: cPickle.dump(cache, fd, cPickle.HIGHEST_PROTOCOL) finally: fd.close() except IOError: if os.path.exists(self._pickle): os.remove(self._pickle) except cPickle.PickleError: if os.path.exists(self._pickle): os.remove(self._pickle) def _ReadGit(self): """ Read configuration data from git. This internal method populates the GitConfig cache. 
""" c = {} d = self._do('--null', '--list') if d is None: return c for line in d.rstrip('\0').split('\0'): if '\n' in line: key, val = line.split('\n', 1) else: key = line val = None if key in c: c[key].append(val) else: c[key] = [val] return c def _do(self, *args): command = ['config', '--file', self.file] command.extend(args) p = GitCommand(None, command, capture_stdout = True, capture_stderr = True) if p.Wait() == 0: return p.stdout else: GitError('git config %s: %s' % (str(args), p.stderr)) class RefSpec(object): """A Git refspec line, split into its components: forced: True if the line starts with '+' src: Left side of the line dst: Right side of the line """ @classmethod def FromString(cls, rs): lhs, rhs = rs.split(':', 2) if lhs.startswith('+'): lhs = lhs[1:] forced = True else: forced = False return cls(forced, lhs, rhs) def __init__(self, forced, lhs, rhs): self.forced = forced self.src = lhs self.dst = rhs def SourceMatches(self, rev): if self.src: if rev == self.src: return True if self.src.endswith('/*') and rev.startswith(self.src[:-1]): return True return False def DestMatches(self, ref): if self.dst: if ref == self.dst: return True if self.dst.endswith('/*') and ref.startswith(self.dst[:-1]): return True return False def MapSource(self, rev): if self.src.endswith('/*'): return self.dst[:-1] + rev[len(self.src) - 1:] return self.dst def __str__(self): s = '' if self.forced: s += '+' if self.src: s += self.src if self.dst: s += ':' s += self.dst return s _master_processes = [] _master_keys = set() _ssh_master = True _master_keys_lock = None def init_ssh(): """Should be called once at the start of repo to init ssh master handling. At the moment, all we do is to create our lock. """ global _master_keys_lock assert _master_keys_lock is None, "Should only call init_ssh once" _master_keys_lock = _threading.Lock() def _open_ssh(host, port=None): global _ssh_master # Acquire the lock. 
This is needed to prevent opening multiple masters for # the same host when we're running "repo sync -jN" (for N > 1) _and_ the # manifest <remote fetch="ssh://xyz"> specifies a different host from the # one that was passed to repo init. _master_keys_lock.acquire() try: # Check to see whether we already think that the master is running; if we # think it's already running, return right away. if port is not None: key = '%s:%s' % (host, port) else: key = host if key in _master_keys: return True if not _ssh_master \ or 'GIT_SSH' in os.environ \ or sys.platform in ('win32', 'cygwin'): # failed earlier, or cygwin ssh can't do this # return False # We will make two calls to ssh; this is the common part of both calls. command_base = ['ssh', '-o','ControlPath %s' % ssh_sock(), host] if port is not None: command_base[1:1] = ['-p',str(port)] # Since the key wasn't in _master_keys, we think that master isn't running. # ...but before actually starting a master, we'll double-check. This can # be important because we can't tell that that 'git@myhost.com' is the same # as 'myhost.com' where "User git" is setup in the user's ~/.ssh/config file. check_command = command_base + ['-O','check'] try: Trace(': %s', ' '.join(check_command)) check_process = subprocess.Popen(check_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE) check_process.communicate() # read output, but ignore it... isnt_running = check_process.wait() if not isnt_running: # Our double-check found that the master _was_ infact running. Add to # the list of keys. _master_keys.add(key) return True except Exception: # Ignore excpetions. We we will fall back to the normal command and print # to the log there. 
pass command = command_base[:1] + \ ['-M', '-N'] + \ command_base[1:] try: Trace(': %s', ' '.join(command)) p = subprocess.Popen(command) except Exception, e: _ssh_master = False print >>sys.stderr, \ '\nwarn: cannot enable ssh control master for %s:%s\n%s' \ % (host,port, str(e)) return False _master_processes.append(p) _master_keys.add(key) time.sleep(1) return True finally: _master_keys_lock.release() def close_ssh(): global _master_keys_lock terminate_ssh_clients() for p in _master_processes: try: os.kill(p.pid, SIGTERM) p.wait() except OSError: pass del _master_processes[:] _master_keys.clear() d = ssh_sock(create=False) if d: try: os.rmdir(os.path.dirname(d)) except OSError: pass # We're done with the lock, so we can delete it. _master_keys_lock = None URI_SCP = re.compile(r'^([^@:]*@?[^:/]{1,}):') URI_ALL = re.compile(r'^([a-z][a-z+]*)://([^@/]*@?[^/]*)/') def _preconnect(url): m = URI_ALL.match(url) if m: scheme = m.group(1) host = m.group(2) if ':' in host: host, port = host.split(':') else: port = None if scheme in ('ssh', 'git+ssh', 'ssh+git'): return _open_ssh(host, port) return False m = URI_SCP.match(url) if m: host = m.group(1) return _open_ssh(host) return False class Remote(object): """Configuration options related to a remote. """ def __init__(self, config, name): self._config = config self.name = name self.url = self._Get('url') self.review = self._Get('review') self.projectname = self._Get('projectname') self.fetch = map(lambda x: RefSpec.FromString(x), self._Get('fetch', all=True)) self._review_protocol = None def _InsteadOf(self): globCfg = GitConfig.ForUser() urlList = globCfg.GetSubSections('url') longest = "" longestUrl = "" for url in urlList: key = "url." 
+ url + ".insteadOf" insteadOfList = globCfg.GetString(key, all=True) for insteadOf in insteadOfList: if self.url.startswith(insteadOf) \ and len(insteadOf) > len(longest): longest = insteadOf longestUrl = url if len(longest) == 0: return self.url return self.url.replace(longest, longestUrl, 1) def PreConnectFetch(self): connectionUrl = self._InsteadOf() return _preconnect(connectionUrl) @property def ReviewProtocol(self): if self._review_protocol is None: if self.review is None: return None u = self.review if not u.startswith('http:') and not u.startswith('https:'): u = 'http://%s' % u if u.endswith('/Gerrit'): u = u[:len(u) - len('/Gerrit')] if not u.endswith('/ssh_info'): if not u.endswith('/'): u += '/' u += 'ssh_info' if u in REVIEW_CACHE: info = REVIEW_CACHE[u] self._review_protocol = info[0] self._review_host = info[1] self._review_port = info[2] else: try: info = urlopen(u).read() if info == 'NOT_AVAILABLE': raise UploadError('%s: SSH disabled' % self.review) if '<' in info: # Assume the server gave us some sort of HTML # response back, like maybe a login page. 
# raise UploadError('%s: Cannot parse response' % u) self._review_protocol = 'ssh' self._review_host = info.split(" ")[0] self._review_port = info.split(" ")[1] except urllib2.URLError, e: raise UploadError('%s: %s' % (self.review, e.reason[1])) except HTTPError, e: if e.code == 404: self._review_protocol = 'http-post' self._review_host = None self._review_port = None else: raise UploadError('Upload over ssh unavailable') REVIEW_CACHE[u] = ( self._review_protocol, self._review_host, self._review_port) return self._review_protocol def SshReviewUrl(self, userEmail): if self.ReviewProtocol != 'ssh': return None username = self._config.GetString('review.%s.username' % self.review) if username is None: username = userEmail.split("@")[0] return 'ssh://%s@%s:%s/%s' % ( username, self._review_host, self._review_port, self.projectname) def ToLocal(self, rev): """Convert a remote revision string to something we have locally. """ if IsId(rev): return rev if rev.startswith(R_TAGS): return rev if not rev.startswith('refs/'): rev = R_HEADS + rev for spec in self.fetch: if spec.SourceMatches(rev): return spec.MapSource(rev) raise GitError('remote %s does not have %s' % (self.name, rev)) def WritesTo(self, ref): """True if the remote stores to the tracking ref. """ for spec in self.fetch: if spec.DestMatches(ref): return True return False def ResetFetch(self, mirror=False): """Set the fetch refspec to its default value. """ if mirror: dst = 'refs/heads/*' else: dst = 'refs/remotes/%s/*' % self.name self.fetch = [RefSpec(True, 'refs/heads/*', dst)] def Save(self): """Save this remote to the configuration. 
""" self._Set('url', self.url) self._Set('review', self.review) self._Set('projectname', self.projectname) self._Set('fetch', map(lambda x: str(x), self.fetch)) def _Set(self, key, value): key = 'remote.%s.%s' % (self.name, key) return self._config.SetString(key, value) def _Get(self, key, all=False): key = 'remote.%s.%s' % (self.name, key) return self._config.GetString(key, all = all) class Branch(object): """Configuration options related to a single branch. """ def __init__(self, config, name): self._config = config self.name = name self.merge = self._Get('merge') r = self._Get('remote') if r: self.remote = self._config.GetRemote(r) else: self.remote = None @property def LocalMerge(self): """Convert the merge spec to a local name. """ if self.remote and self.merge: return self.remote.ToLocal(self.merge) return None def Save(self): """Save this branch back into the configuration. """ if self._config.HasSection('branch', self.name): if self.remote: self._Set('remote', self.remote.name) else: self._Set('remote', None) self._Set('merge', self.merge) else: fd = open(self._config.file, 'ab') try: fd.write('[branch "%s"]\n' % self.name) if self.remote: fd.write('\tremote = %s\n' % self.remote.name) if self.merge: fd.write('\tmerge = %s\n' % self.merge) finally: fd.close() def _Set(self, key, value): key = 'branch.%s.%s' % (self.name, key) return self._config.SetString(key, value) def _Get(self, key, all=False): key = 'branch.%s.%s' % (self.name, key) return self._config.GetString(key, all = all)
apache-2.0
SDSG-Invenio/invenio
invenio/modules/jsonalchemy/testsuite/functions/sync_authors.py
33
1722
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2014 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.

from invenio.modules.jsonalchemy.jsonext.functions.util_merge_fields_info_list \
    import util_merge_fields_info_list


def sync_authors(self, field_name, connected_field, action):  # pylint: disable=W0613
    """Keep ``authors`` and its derived fields consistent on assignment.

    Reacts only when ``action`` is ``'set'`` (i.e. ``__setitem__`` or a
    similar mutation on the record).  Writing ``authors`` refreshes
    ``_first_author`` and ``_additional_authors``; writing either derived
    field rebuilds ``authors`` from both of them.

    NOTE(review): ``exclude=['connect']`` is passed on every write —
    presumably so the connected-field machinery does not re-trigger this
    hook recursively; confirm against the JSONAlchemy reader.
    """
    if action != 'set':
        return
    if field_name == 'authors' and self.get('authors'):
        # Split the full author list: head goes to _first_author,
        # the (possibly empty) tail to _additional_authors.
        self.__setitem__('_first_author', self['authors'][0],
                         exclude=['connect'])
        if self['authors'][1:]:
            self.__setitem__('_additional_authors', self['authors'][1:],
                             exclude=['connect'])
        return
    if field_name in ('_first_author', '_additional_authors'):
        # One of the partial fields changed: rebuild the combined list.
        self.__setitem__(
            'authors',
            util_merge_fields_info_list(
                self, ['_first_author', '_additional_authors']),
            exclude=['connect'])
gpl-2.0
jkchen2/JshBot-plugins
data_converter/data_converter.py
1
2914
import discord
from jshbot import utilities, data, configurations, plugins, logger
from jshbot.exceptions import BotException, ConfiguredBotException
from jshbot.commands import (
    Command, SubCommand, Shortcut, ArgTypes, Attachment, Arg, Opt, MessageTypes, Response)

__version__ = '0.1.0'
CBException = ConfiguredBotException('0.3 to 0.4 plugin')


@plugins.command_spawner
def get_commands(bot):
    """Expose the hidden, owner-level 'convertdata' command."""
    return [Command('convertdata', hidden=True, elevated_level=3)]


async def get_response(bot, context):
    """Run the 0.3 -> 0.4 data conversion for every guild the bot can see."""
    for guild in bot.guilds:
        convert_core(bot, guild)
        if 'tags.py' in bot.plugins:
            convert_tags(bot, guild)
    return Response("Converted.")


def convert_core(bot, guild):
    """Migrate the legacy 'base' data of one guild into the 'core' namespace.

    Skips guilds that already have 'core' data.  String IDs from 0.3 are
    converted to the integer IDs that 0.4 (discord.py rewrite) expects.
    """
    if data.get(bot, 'core', None, guild_id=guild.id):
        logger.warn("Guild %s (%s) already had core converted", guild.name, guild.id)
        return
    legacy = data.get(bot, 'base', None, guild_id=guild.id, default={})
    if 'disabled' in legacy:
        # TODO: Iterate through toggled commands
        pass
    if 'blocked' in legacy:
        # Blocked users were stored as string IDs; 0.4 uses ints.
        legacy['blocked'] = [int(user_id) for user_id in legacy['blocked']]
    if 'muted_channels' in legacy:
        legacy['muted_channels'] = [int(channel_id) for channel_id in legacy['muted_channels']]
    if 'moderators' in legacy:
        # Moderator data is not carried over to 0.4.
        del legacy['moderators']
    if legacy:
        for entry_key, entry_value in legacy.items():
            data.add(bot, 'core', entry_key, entry_value, guild_id=guild.id)
        data.remove(bot, 'base', None, guild_id=guild.id)


def convert_tags(bot, guild):
    """Rewrite one guild's legacy tag dictionaries as 0.4 database rows."""
    if not data.get(bot, 'tags.py', 'tags', guild_id=guild.id):
        logger.warn("Guild %s (%s) already had tags converted", guild.name, guild.id)
        return
    tags = data.get(bot, 'tags.py', 'tags', guild_id=guild.id, default={})
    add_tag = bot.plugins['tags.py']._add_tag
    # Row layout expected by _add_tag:
    # key, value, length, volume, name, flags, author, hits,
    # created, last_used, last_used_by, complex, extra
    for tag_key, tag_data in tags.items():
        row = [
            tag_key,
            tag_data['value'],
            tag_data['length'],
            tag_data['volume'],
            tag_data['name'],
            tag_data['flags'],
            int(tag_data['author']),
            tag_data['hits'],
            int(tag_data['created']),
            int(tag_data['last_used']),
            None,  # last_used_by is unknown for legacy tags
            {},    # complex
            {}     # extra
        ]
        add_tag(bot, row, guild.id)
    data.remove(bot, 'tags.py', 'tags', guild_id=guild.id, safe=True)
mit
ShoRit/shipping-costs-sample
v2/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.py
328
32532
from __future__ import absolute_import, division, unicode_literals from pip._vendor.six import text_type, binary_type from pip._vendor.six.moves import http_client, urllib import codecs import re from pip._vendor import webencodings from .constants import EOF, spaceCharacters, asciiLetters, asciiUppercase from .constants import ReparseException from . import _utils from io import StringIO try: from io import BytesIO except ImportError: BytesIO = StringIO # Non-unicode versions of constants for use in the pre-parser spaceCharactersBytes = frozenset([item.encode("ascii") for item in spaceCharacters]) asciiLettersBytes = frozenset([item.encode("ascii") for item in asciiLetters]) asciiUppercaseBytes = frozenset([item.encode("ascii") for item in asciiUppercase]) spacesAngleBrackets = spaceCharactersBytes | frozenset([b">", b"<"]) invalid_unicode_no_surrogate = "[\u0001-\u0008\u000B\u000E-\u001F\u007F-\u009F\uFDD0-\uFDEF\uFFFE\uFFFF\U0001FFFE\U0001FFFF\U0002FFFE\U0002FFFF\U0003FFFE\U0003FFFF\U0004FFFE\U0004FFFF\U0005FFFE\U0005FFFF\U0006FFFE\U0006FFFF\U0007FFFE\U0007FFFF\U0008FFFE\U0008FFFF\U0009FFFE\U0009FFFF\U000AFFFE\U000AFFFF\U000BFFFE\U000BFFFF\U000CFFFE\U000CFFFF\U000DFFFE\U000DFFFF\U000EFFFE\U000EFFFF\U000FFFFE\U000FFFFF\U0010FFFE\U0010FFFF]" # noqa if _utils.supports_lone_surrogates: # Use one extra step of indirection and create surrogates with # eval. Not using this indirection would introduce an illegal # unicode literal on platforms not supporting such lone # surrogates. 
assert invalid_unicode_no_surrogate[-1] == "]" and invalid_unicode_no_surrogate.count("]") == 1 invalid_unicode_re = re.compile(invalid_unicode_no_surrogate[:-1] + eval('"\\uD800-\\uDFFF"') + # pylint:disable=eval-used "]") else: invalid_unicode_re = re.compile(invalid_unicode_no_surrogate) non_bmp_invalid_codepoints = set([0x1FFFE, 0x1FFFF, 0x2FFFE, 0x2FFFF, 0x3FFFE, 0x3FFFF, 0x4FFFE, 0x4FFFF, 0x5FFFE, 0x5FFFF, 0x6FFFE, 0x6FFFF, 0x7FFFE, 0x7FFFF, 0x8FFFE, 0x8FFFF, 0x9FFFE, 0x9FFFF, 0xAFFFE, 0xAFFFF, 0xBFFFE, 0xBFFFF, 0xCFFFE, 0xCFFFF, 0xDFFFE, 0xDFFFF, 0xEFFFE, 0xEFFFF, 0xFFFFE, 0xFFFFF, 0x10FFFE, 0x10FFFF]) ascii_punctuation_re = re.compile("[\u0009-\u000D\u0020-\u002F\u003A-\u0040\u005B-\u0060\u007B-\u007E]") # Cache for charsUntil() charsUntilRegEx = {} class BufferedStream(object): """Buffering for streams that do not have buffering of their own The buffer is implemented as a list of chunks on the assumption that joining many strings will be slow since it is O(n**2) """ def __init__(self, stream): self.stream = stream self.buffer = [] self.position = [-1, 0] # chunk number, offset def tell(self): pos = 0 for chunk in self.buffer[:self.position[0]]: pos += len(chunk) pos += self.position[1] return pos def seek(self, pos): assert pos <= self._bufferedBytes() offset = pos i = 0 while len(self.buffer[i]) < offset: offset -= len(self.buffer[i]) i += 1 self.position = [i, offset] def read(self, bytes): if not self.buffer: return self._readStream(bytes) elif (self.position[0] == len(self.buffer) and self.position[1] == len(self.buffer[-1])): return self._readStream(bytes) else: return self._readFromBuffer(bytes) def _bufferedBytes(self): return sum([len(item) for item in self.buffer]) def _readStream(self, bytes): data = self.stream.read(bytes) self.buffer.append(data) self.position[0] += 1 self.position[1] = len(data) return data def _readFromBuffer(self, bytes): remainingBytes = bytes rv = [] bufferIndex = self.position[0] bufferOffset = self.position[1] while 
bufferIndex < len(self.buffer) and remainingBytes != 0: assert remainingBytes > 0 bufferedData = self.buffer[bufferIndex] if remainingBytes <= len(bufferedData) - bufferOffset: bytesToRead = remainingBytes self.position = [bufferIndex, bufferOffset + bytesToRead] else: bytesToRead = len(bufferedData) - bufferOffset self.position = [bufferIndex, len(bufferedData)] bufferIndex += 1 rv.append(bufferedData[bufferOffset:bufferOffset + bytesToRead]) remainingBytes -= bytesToRead bufferOffset = 0 if remainingBytes: rv.append(self._readStream(remainingBytes)) return b"".join(rv) def HTMLInputStream(source, **kwargs): # Work around Python bug #20007: read(0) closes the connection. # http://bugs.python.org/issue20007 if (isinstance(source, http_client.HTTPResponse) or # Also check for addinfourl wrapping HTTPResponse (isinstance(source, urllib.response.addbase) and isinstance(source.fp, http_client.HTTPResponse))): isUnicode = False elif hasattr(source, "read"): isUnicode = isinstance(source.read(0), text_type) else: isUnicode = isinstance(source, text_type) if isUnicode: encodings = [x for x in kwargs if x.endswith("_encoding")] if encodings: raise TypeError("Cannot set an encoding with a unicode input, set %r" % encodings) return HTMLUnicodeInputStream(source, **kwargs) else: return HTMLBinaryInputStream(source, **kwargs) class HTMLUnicodeInputStream(object): """Provides a unicode stream of characters to the HTMLTokenizer. This class takes care of character encoding and removing or replacing incorrect byte-sequences and also provides column and line tracking. """ _defaultChunkSize = 10240 def __init__(self, source): """Initialises the HTMLInputStream. HTMLInputStream(source, [encoding]) -> Normalized stream from source for use by html5lib. source can be either a file-object, local filename or a string. The optional encoding parameter must be a string that indicates the encoding. 
If specified, that encoding will be used, regardless of any BOM or later declaration (such as in a meta element) """ if not _utils.supports_lone_surrogates: # Such platforms will have already checked for such # surrogate errors, so no need to do this checking. self.reportCharacterErrors = None elif len("\U0010FFFF") == 1: self.reportCharacterErrors = self.characterErrorsUCS4 else: self.reportCharacterErrors = self.characterErrorsUCS2 # List of where new lines occur self.newLines = [0] self.charEncoding = (lookupEncoding("utf-8"), "certain") self.dataStream = self.openStream(source) self.reset() def reset(self): self.chunk = "" self.chunkSize = 0 self.chunkOffset = 0 self.errors = [] # number of (complete) lines in previous chunks self.prevNumLines = 0 # number of columns in the last line of the previous chunk self.prevNumCols = 0 # Deal with CR LF and surrogates split over chunk boundaries self._bufferedCharacter = None def openStream(self, source): """Produces a file object from source. source can be either a file object, local filename or a string. """ # Already a file object if hasattr(source, 'read'): stream = source else: stream = StringIO(source) return stream def _position(self, offset): chunk = self.chunk nLines = chunk.count('\n', 0, offset) positionLine = self.prevNumLines + nLines lastLinePos = chunk.rfind('\n', 0, offset) if lastLinePos == -1: positionColumn = self.prevNumCols + offset else: positionColumn = offset - (lastLinePos + 1) return (positionLine, positionColumn) def position(self): """Returns (line, col) of the current position in the stream.""" line, col = self._position(self.chunkOffset) return (line + 1, col) def char(self): """ Read one character from the stream or queue if available. Return EOF when EOF is reached. 
""" # Read a new chunk from the input stream if necessary if self.chunkOffset >= self.chunkSize: if not self.readChunk(): return EOF chunkOffset = self.chunkOffset char = self.chunk[chunkOffset] self.chunkOffset = chunkOffset + 1 return char def readChunk(self, chunkSize=None): if chunkSize is None: chunkSize = self._defaultChunkSize self.prevNumLines, self.prevNumCols = self._position(self.chunkSize) self.chunk = "" self.chunkSize = 0 self.chunkOffset = 0 data = self.dataStream.read(chunkSize) # Deal with CR LF and surrogates broken across chunks if self._bufferedCharacter: data = self._bufferedCharacter + data self._bufferedCharacter = None elif not data: # We have no more data, bye-bye stream return False if len(data) > 1: lastv = ord(data[-1]) if lastv == 0x0D or 0xD800 <= lastv <= 0xDBFF: self._bufferedCharacter = data[-1] data = data[:-1] if self.reportCharacterErrors: self.reportCharacterErrors(data) # Replace invalid characters data = data.replace("\r\n", "\n") data = data.replace("\r", "\n") self.chunk = data self.chunkSize = len(data) return True def characterErrorsUCS4(self, data): for _ in range(len(invalid_unicode_re.findall(data))): self.errors.append("invalid-codepoint") def characterErrorsUCS2(self, data): # Someone picked the wrong compile option # You lose skip = False for match in invalid_unicode_re.finditer(data): if skip: continue codepoint = ord(match.group()) pos = match.start() # Pretty sure there should be endianness issues here if _utils.isSurrogatePair(data[pos:pos + 2]): # We have a surrogate pair! 
char_val = _utils.surrogatePairToCodepoint(data[pos:pos + 2]) if char_val in non_bmp_invalid_codepoints: self.errors.append("invalid-codepoint") skip = True elif (codepoint >= 0xD800 and codepoint <= 0xDFFF and pos == len(data) - 1): self.errors.append("invalid-codepoint") else: skip = False self.errors.append("invalid-codepoint") def charsUntil(self, characters, opposite=False): """ Returns a string of characters from the stream up to but not including any character in 'characters' or EOF. 'characters' must be a container that supports the 'in' method and iteration over its characters. """ # Use a cache of regexps to find the required characters try: chars = charsUntilRegEx[(characters, opposite)] except KeyError: if __debug__: for c in characters: assert(ord(c) < 128) regex = "".join(["\\x%02x" % ord(c) for c in characters]) if not opposite: regex = "^%s" % regex chars = charsUntilRegEx[(characters, opposite)] = re.compile("[%s]+" % regex) rv = [] while True: # Find the longest matching prefix m = chars.match(self.chunk, self.chunkOffset) if m is None: # If nothing matched, and it wasn't because we ran out of chunk, # then stop if self.chunkOffset != self.chunkSize: break else: end = m.end() # If not the whole chunk matched, return everything # up to the part that didn't match if end != self.chunkSize: rv.append(self.chunk[self.chunkOffset:end]) self.chunkOffset = end break # If the whole remainder of the chunk matched, # use it all and read the next chunk rv.append(self.chunk[self.chunkOffset:]) if not self.readChunk(): # Reached EOF break r = "".join(rv) return r def unget(self, char): # Only one character is allowed to be ungotten at once - it must # be consumed again before any further call to unget if char is not None: if self.chunkOffset == 0: # unget is called quite rarely, so it's a good idea to do # more work here if it saves a bit of work in the frequently # called char and charsUntil. 
# So, just prepend the ungotten character onto the current # chunk: self.chunk = char + self.chunk self.chunkSize += 1 else: self.chunkOffset -= 1 assert self.chunk[self.chunkOffset] == char class HTMLBinaryInputStream(HTMLUnicodeInputStream): """Provides a unicode stream of characters to the HTMLTokenizer. This class takes care of character encoding and removing or replacing incorrect byte-sequences and also provides column and line tracking. """ def __init__(self, source, override_encoding=None, transport_encoding=None, same_origin_parent_encoding=None, likely_encoding=None, default_encoding="windows-1252", useChardet=True): """Initialises the HTMLInputStream. HTMLInputStream(source, [encoding]) -> Normalized stream from source for use by html5lib. source can be either a file-object, local filename or a string. The optional encoding parameter must be a string that indicates the encoding. If specified, that encoding will be used, regardless of any BOM or later declaration (such as in a meta element) """ # Raw Stream - for unicode objects this will encode to utf-8 and set # self.charEncoding as appropriate self.rawStream = self.openStream(source) HTMLUnicodeInputStream.__init__(self, self.rawStream) # Encoding Information # Number of bytes to use when looking for a meta element with # encoding information self.numBytesMeta = 1024 # Number of bytes to use when using detecting encoding using chardet self.numBytesChardet = 100 # Things from args self.override_encoding = override_encoding self.transport_encoding = transport_encoding self.same_origin_parent_encoding = same_origin_parent_encoding self.likely_encoding = likely_encoding self.default_encoding = default_encoding # Determine encoding self.charEncoding = self.determineEncoding(useChardet) assert self.charEncoding[0] is not None # Call superclass self.reset() def reset(self): self.dataStream = self.charEncoding[0].codec_info.streamreader(self.rawStream, 'replace') HTMLUnicodeInputStream.reset(self) def 
openStream(self, source): """Produces a file object from source. source can be either a file object, local filename or a string. """ # Already a file object if hasattr(source, 'read'): stream = source else: stream = BytesIO(source) try: stream.seek(stream.tell()) except: # pylint:disable=bare-except stream = BufferedStream(stream) return stream def determineEncoding(self, chardet=True): # BOMs take precedence over everything # This will also read past the BOM if present charEncoding = self.detectBOM(), "certain" if charEncoding[0] is not None: return charEncoding # If we've been overriden, we've been overriden charEncoding = lookupEncoding(self.override_encoding), "certain" if charEncoding[0] is not None: return charEncoding # Now check the transport layer charEncoding = lookupEncoding(self.transport_encoding), "certain" if charEncoding[0] is not None: return charEncoding # Look for meta elements with encoding information charEncoding = self.detectEncodingMeta(), "tentative" if charEncoding[0] is not None: return charEncoding # Parent document encoding charEncoding = lookupEncoding(self.same_origin_parent_encoding), "tentative" if charEncoding[0] is not None and not charEncoding[0].name.startswith("utf-16"): return charEncoding # "likely" encoding charEncoding = lookupEncoding(self.likely_encoding), "tentative" if charEncoding[0] is not None: return charEncoding # Guess with chardet, if available if chardet: try: from chardet.universaldetector import UniversalDetector except ImportError: pass else: buffers = [] detector = UniversalDetector() while not detector.done: buffer = self.rawStream.read(self.numBytesChardet) assert isinstance(buffer, bytes) if not buffer: break buffers.append(buffer) detector.feed(buffer) detector.close() encoding = lookupEncoding(detector.result['encoding']) self.rawStream.seek(0) if encoding is not None: return encoding, "tentative" # Try the default encoding charEncoding = lookupEncoding(self.default_encoding), "tentative" if 
charEncoding[0] is not None: return charEncoding # Fallback to html5lib's default if even that hasn't worked return lookupEncoding("windows-1252"), "tentative" def changeEncoding(self, newEncoding): assert self.charEncoding[1] != "certain" newEncoding = lookupEncoding(newEncoding) if newEncoding is None: return if newEncoding.name in ("utf-16be", "utf-16le"): newEncoding = lookupEncoding("utf-8") assert newEncoding is not None elif newEncoding == self.charEncoding[0]: self.charEncoding = (self.charEncoding[0], "certain") else: self.rawStream.seek(0) self.charEncoding = (newEncoding, "certain") self.reset() raise ReparseException("Encoding changed from %s to %s" % (self.charEncoding[0], newEncoding)) def detectBOM(self): """Attempts to detect at BOM at the start of the stream. If an encoding can be determined from the BOM return the name of the encoding otherwise return None""" bomDict = { codecs.BOM_UTF8: 'utf-8', codecs.BOM_UTF16_LE: 'utf-16le', codecs.BOM_UTF16_BE: 'utf-16be', codecs.BOM_UTF32_LE: 'utf-32le', codecs.BOM_UTF32_BE: 'utf-32be' } # Go to beginning of file and read in 4 bytes string = self.rawStream.read(4) assert isinstance(string, bytes) # Try detecting the BOM using bytes from the string encoding = bomDict.get(string[:3]) # UTF-8 seek = 3 if not encoding: # Need to detect UTF-32 before UTF-16 encoding = bomDict.get(string) # UTF-32 seek = 4 if not encoding: encoding = bomDict.get(string[:2]) # UTF-16 seek = 2 # Set the read position past the BOM if one was found, otherwise # set it to the start of the stream if encoding: self.rawStream.seek(seek) return lookupEncoding(encoding) else: self.rawStream.seek(0) return None def detectEncodingMeta(self): """Report the encoding declared by the meta element """ buffer = self.rawStream.read(self.numBytesMeta) assert isinstance(buffer, bytes) parser = EncodingParser(buffer) self.rawStream.seek(0) encoding = parser.getEncoding() if encoding is not None and encoding.name in ("utf-16be", "utf-16le"): encoding = 
lookupEncoding("utf-8") return encoding class EncodingBytes(bytes): """String-like object with an associated position and various extra methods If the position is ever greater than the string length then an exception is raised""" def __new__(self, value): assert isinstance(value, bytes) return bytes.__new__(self, value.lower()) def __init__(self, value): # pylint:disable=unused-argument self._position = -1 def __iter__(self): return self def __next__(self): p = self._position = self._position + 1 if p >= len(self): raise StopIteration elif p < 0: raise TypeError return self[p:p + 1] def next(self): # Py2 compat return self.__next__() def previous(self): p = self._position if p >= len(self): raise StopIteration elif p < 0: raise TypeError self._position = p = p - 1 return self[p:p + 1] def setPosition(self, position): if self._position >= len(self): raise StopIteration self._position = position def getPosition(self): if self._position >= len(self): raise StopIteration if self._position >= 0: return self._position else: return None position = property(getPosition, setPosition) def getCurrentByte(self): return self[self.position:self.position + 1] currentByte = property(getCurrentByte) def skip(self, chars=spaceCharactersBytes): """Skip past a list of characters""" p = self.position # use property for the error-checking while p < len(self): c = self[p:p + 1] if c not in chars: self._position = p return c p += 1 self._position = p return None def skipUntil(self, chars): p = self.position while p < len(self): c = self[p:p + 1] if c in chars: self._position = p return c p += 1 self._position = p return None def matchBytes(self, bytes): """Look for a sequence of bytes at the start of a string. If the bytes are found return True and advance the position to the byte after the match. 
Otherwise return False and leave the position alone""" p = self.position data = self[p:p + len(bytes)] rv = data.startswith(bytes) if rv: self.position += len(bytes) return rv def jumpTo(self, bytes): """Look for the next sequence of bytes matching a given sequence. If a match is found advance the position to the last byte of the match""" newPosition = self[self.position:].find(bytes) if newPosition > -1: # XXX: This is ugly, but I can't see a nicer way to fix this. if self._position == -1: self._position = 0 self._position += (newPosition + len(bytes) - 1) return True else: raise StopIteration class EncodingParser(object): """Mini parser for detecting character encoding from meta elements""" def __init__(self, data): """string - the data to work on for encoding detection""" self.data = EncodingBytes(data) self.encoding = None def getEncoding(self): methodDispatch = ( (b"<!--", self.handleComment), (b"<meta", self.handleMeta), (b"</", self.handlePossibleEndTag), (b"<!", self.handleOther), (b"<?", self.handleOther), (b"<", self.handlePossibleStartTag)) for _ in self.data: keepParsing = True for key, method in methodDispatch: if self.data.matchBytes(key): try: keepParsing = method() break except StopIteration: keepParsing = False break if not keepParsing: break return self.encoding def handleComment(self): """Skip over comments""" return self.data.jumpTo(b"-->") def handleMeta(self): if self.data.currentByte not in spaceCharactersBytes: # if we have <meta not followed by a space so just keep going return True # We have a valid meta element we want to search for attributes hasPragma = False pendingEncoding = None while True: # Try to find the next attribute after the current position attr = self.getAttribute() if attr is None: return True else: if attr[0] == b"http-equiv": hasPragma = attr[1] == b"content-type" if hasPragma and pendingEncoding is not None: self.encoding = pendingEncoding return False elif attr[0] == b"charset": tentativeEncoding = attr[1] codec = 
lookupEncoding(tentativeEncoding) if codec is not None: self.encoding = codec return False elif attr[0] == b"content": contentParser = ContentAttrParser(EncodingBytes(attr[1])) tentativeEncoding = contentParser.parse() if tentativeEncoding is not None: codec = lookupEncoding(tentativeEncoding) if codec is not None: if hasPragma: self.encoding = codec return False else: pendingEncoding = codec def handlePossibleStartTag(self): return self.handlePossibleTag(False) def handlePossibleEndTag(self): next(self.data) return self.handlePossibleTag(True) def handlePossibleTag(self, endTag): data = self.data if data.currentByte not in asciiLettersBytes: # If the next byte is not an ascii letter either ignore this # fragment (possible start tag case) or treat it according to # handleOther if endTag: data.previous() self.handleOther() return True c = data.skipUntil(spacesAngleBrackets) if c == b"<": # return to the first step in the overall "two step" algorithm # reprocessing the < byte data.previous() else: # Read all attributes attr = self.getAttribute() while attr is not None: attr = self.getAttribute() return True def handleOther(self): return self.data.jumpTo(b">") def getAttribute(self): """Return a name,value pair for the next attribute in the stream, if one is found, or None""" data = self.data # Step 1 (skip chars) c = data.skip(spaceCharactersBytes | frozenset([b"/"])) assert c is None or len(c) == 1 # Step 2 if c in (b">", None): return None # Step 3 attrName = [] attrValue = [] # Step 4 attribute name while True: if c == b"=" and attrName: break elif c in spaceCharactersBytes: # Step 6! 
c = data.skip() break elif c in (b"/", b">"): return b"".join(attrName), b"" elif c in asciiUppercaseBytes: attrName.append(c.lower()) elif c is None: return None else: attrName.append(c) # Step 5 c = next(data) # Step 7 if c != b"=": data.previous() return b"".join(attrName), b"" # Step 8 next(data) # Step 9 c = data.skip() # Step 10 if c in (b"'", b'"'): # 10.1 quoteChar = c while True: # 10.2 c = next(data) # 10.3 if c == quoteChar: next(data) return b"".join(attrName), b"".join(attrValue) # 10.4 elif c in asciiUppercaseBytes: attrValue.append(c.lower()) # 10.5 else: attrValue.append(c) elif c == b">": return b"".join(attrName), b"" elif c in asciiUppercaseBytes: attrValue.append(c.lower()) elif c is None: return None else: attrValue.append(c) # Step 11 while True: c = next(data) if c in spacesAngleBrackets: return b"".join(attrName), b"".join(attrValue) elif c in asciiUppercaseBytes: attrValue.append(c.lower()) elif c is None: return None else: attrValue.append(c) class ContentAttrParser(object): def __init__(self, data): assert isinstance(data, bytes) self.data = data def parse(self): try: # Check if the attr name is charset # otherwise return self.data.jumpTo(b"charset") self.data.position += 1 self.data.skip() if not self.data.currentByte == b"=": # If there is no = sign keep looking for attrs return None self.data.position += 1 self.data.skip() # Look for an encoding between matching quote marks if self.data.currentByte in (b'"', b"'"): quoteMark = self.data.currentByte self.data.position += 1 oldPosition = self.data.position if self.data.jumpTo(quoteMark): return self.data[oldPosition:self.data.position] else: return None else: # Unquoted value oldPosition = self.data.position try: self.data.skipUntil(spaceCharactersBytes) return self.data[oldPosition:self.data.position] except StopIteration: # Return the whole remaining value return self.data[oldPosition:] except StopIteration: return None def lookupEncoding(encoding): """Return the python codec name 
corresponding to an encoding or None if the string doesn't correspond to a valid encoding.""" if isinstance(encoding, binary_type): try: encoding = encoding.decode("ascii") except UnicodeDecodeError: return None if encoding is not None: try: return webencodings.lookup(encoding) except AttributeError: return None else: return None
apache-2.0
Matt-Deacalion/django
django/core/files/uploadhandler.py
335
6876
""" Base file upload handler classes, and the built-in concrete subclasses """ from __future__ import unicode_literals from io import BytesIO from django.conf import settings from django.core.files.uploadedfile import ( InMemoryUploadedFile, TemporaryUploadedFile, ) from django.utils.encoding import python_2_unicode_compatible from django.utils.module_loading import import_string __all__ = [ 'UploadFileException', 'StopUpload', 'SkipFile', 'FileUploadHandler', 'TemporaryFileUploadHandler', 'MemoryFileUploadHandler', 'load_handler', 'StopFutureHandlers' ] class UploadFileException(Exception): """ Any error having to do with uploading files. """ pass @python_2_unicode_compatible class StopUpload(UploadFileException): """ This exception is raised when an upload must abort. """ def __init__(self, connection_reset=False): """ If ``connection_reset`` is ``True``, Django knows will halt the upload without consuming the rest of the upload. This will cause the browser to show a "connection reset" error. """ self.connection_reset = connection_reset def __str__(self): if self.connection_reset: return 'StopUpload: Halt current upload.' else: return 'StopUpload: Consume request data, then halt.' class SkipFile(UploadFileException): """ This exception is raised by an upload handler that wants to skip a given file. """ pass class StopFutureHandlers(UploadFileException): """ Upload handers that have handled a file and do not want future handlers to run should raise this exception instead of returning None. """ pass class FileUploadHandler(object): """ Base class for streaming upload handlers. """ chunk_size = 64 * 2 ** 10 # : The default chunk size is 64 KB. def __init__(self, request=None): self.file_name = None self.content_type = None self.content_length = None self.charset = None self.content_type_extra = None self.request = request def handle_raw_input(self, input_data, META, content_length, boundary, encoding=None): """ Handle the raw input from the client. 
Parameters: :input_data: An object that supports reading via .read(). :META: ``request.META``. :content_length: The (integer) value of the Content-Length header from the client. :boundary: The boundary from the Content-Type header. Be sure to prepend two '--'. """ pass def new_file(self, field_name, file_name, content_type, content_length, charset=None, content_type_extra=None): """ Signal that a new file has been started. Warning: As with any data from the client, you should not trust content_length (and sometimes won't even get it). """ self.field_name = field_name self.file_name = file_name self.content_type = content_type self.content_length = content_length self.charset = charset self.content_type_extra = content_type_extra def receive_data_chunk(self, raw_data, start): """ Receive data from the streamed upload parser. ``start`` is the position in the file of the chunk. """ raise NotImplementedError('subclasses of FileUploadHandler must provide a receive_data_chunk() method') def file_complete(self, file_size): """ Signal that a file has completed. File size corresponds to the actual size accumulated by all the chunks. Subclasses should return a valid ``UploadedFile`` object. """ raise NotImplementedError('subclasses of FileUploadHandler must provide a file_complete() method') def upload_complete(self): """ Signal that the upload is complete. Subclasses should perform cleanup that is necessary for this handler. """ pass class TemporaryFileUploadHandler(FileUploadHandler): """ Upload handler that streams data into a temporary file. """ def __init__(self, *args, **kwargs): super(TemporaryFileUploadHandler, self).__init__(*args, **kwargs) def new_file(self, *args, **kwargs): """ Create the file object to append to as data is coming in. 
""" super(TemporaryFileUploadHandler, self).new_file(*args, **kwargs) self.file = TemporaryUploadedFile(self.file_name, self.content_type, 0, self.charset, self.content_type_extra) def receive_data_chunk(self, raw_data, start): self.file.write(raw_data) def file_complete(self, file_size): self.file.seek(0) self.file.size = file_size return self.file class MemoryFileUploadHandler(FileUploadHandler): """ File upload handler to stream uploads into memory (used for small files). """ def handle_raw_input(self, input_data, META, content_length, boundary, encoding=None): """ Use the content_length to signal whether or not this handler should be in use. """ # Check the content-length header to see if we should # If the post is too large, we cannot use the Memory handler. if content_length > settings.FILE_UPLOAD_MAX_MEMORY_SIZE: self.activated = False else: self.activated = True def new_file(self, *args, **kwargs): super(MemoryFileUploadHandler, self).new_file(*args, **kwargs) if self.activated: self.file = BytesIO() raise StopFutureHandlers() def receive_data_chunk(self, raw_data, start): """ Add the data to the BytesIO file. """ if self.activated: self.file.write(raw_data) else: return raw_data def file_complete(self, file_size): """ Return a file object if we're activated. """ if not self.activated: return self.file.seek(0) return InMemoryUploadedFile( file=self.file, field_name=self.field_name, name=self.file_name, content_type=self.content_type, size=file_size, charset=self.charset, content_type_extra=self.content_type_extra ) def load_handler(path, *args, **kwargs): """ Given a path to a handler, return an instance of that handler. E.g.:: >>> from django.http import HttpRequest >>> request = HttpRequest() >>> load_handler('django.core.files.uploadhandler.TemporaryFileUploadHandler', request) <TemporaryFileUploadHandler object at 0x...> """ return import_string(path)(*args, **kwargs)
bsd-3-clause
s-i-newton/clang-hook
tests/testFilter.py
1
1183
from unittest import TestCase
from lib_hook.filter import Filter
from lib_hook.stage import Stage


class TestFilter(TestCase):
    """Unit tests for lib_hook.filter.Filter matching behaviour."""

    def testFilter(self):
        """Exercise a lookaround-mode and a group-mode filter on sample lines."""
        # Lookaround-mode filter: extracts an identifier following "Hello: ".
        lookaround_config = {
            "name": "test",
            "pattern": "(?<=Hello: )[A-Za-z_][A-Za-z_0-9]*",
            "stages": ["opt"],
            "type": "string",
            "mode": "lookaround",
            "summary": "append",
        }
        # Group-mode filter: extracts an integer from the named group "bla".
        group_config = {
            "name": "test",
            "pattern": "Hello: (?P<bla>[0-9]+)",
            "stages": ["opt"],
            "type": "int",
            "mode": "group",
            "group": "bla",
            "summary": "append",
        }
        lookaround_filter = Filter(lookaround_config)
        group_filter = Filter(group_config)

        # (filter, input line, expected result) — None means "no match".
        cases = (
            (lookaround_filter, "Hello: bla", "bla"),
            (lookaround_filter, "Hello: bla truc", "bla"),
            (lookaround_filter, "Hello: 123", None),
            (group_filter, "Hello: bla", None),
            (group_filter, "Hello: 123", 123),
            (group_filter, "Hello: 123 456", 123),
        )
        for flt, line, expected in cases:
            self.assertEqual(flt.search(line, Stage.Opt), expected)
apache-2.0
mach0/QGIS
tests/src/python/test_qgsserver_locale_override.py
45
2755
# -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsServer Locale Override Options.

From build dir, run: ctest -R PyQgsServerLocaleOverride -V

.. note:: This test needs env vars to be set before the server is
configured for the first time, for this reason it cannot run as a test
case of another server test.

.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.

"""
__author__ = 'Alessandro Pasotti'
__date__ = '01/04/2019'
__copyright__ = 'Copyright 2019, The QGIS Project'

import os

# Needed on Qt 5 so that the serialization of XML is consistent among all
# executions
os.environ['QT_HASH_SEED'] = '1'

from utilities import (
    unitTestDataPath,
)
from qgis.testing import unittest
from test_qgsserver_wms import TestQgsServerWMSTestBase
from qgis.core import QgsProject, QgsFontUtils
from qgis.server import QgsServer


class TestQgsServerWMSLocaleOverride(TestQgsServerWMSTestBase):
    """QGIS Server WMS Tests for GetFeatureInfo request"""

    # Set to True to re-generate reference files for this class
    regenerate_reference = False

    def setUp(self):
        """Create the server instance"""
        self.fontFamily = QgsFontUtils.standardTestFontFamily()
        QgsFontUtils.loadStandardTestFonts(['All'])

        self.testdata_path = unitTestDataPath('qgis_server') + '/'

        d = unitTestDataPath('qgis_server') + '/'
        self.projectPath = os.path.join(d, "project.qgs")

        # Clean env just to be sure
        env_vars = ['QUERY_STRING', 'QGIS_PROJECT_FILE']
        for ev in env_vars:
            try:
                del os.environ[ev]
            except KeyError:
                pass

        # The locale override must be in the environment BEFORE the server is
        # constructed below — the server reads it once at configuration time
        # (see the module docstring).
        # NOTE(review): 'EN_us' has unusual casing for a locale id — presumably
        # intentional to test case-insensitive handling; confirm upstream.
        os.environ['QGIS_SERVER_OVERRIDE_SYSTEM_LOCALE'] = 'EN_us'
        # '0' disables thousands-group separators in server numeric output.
        os.environ['QGIS_SERVER_SHOW_GROUP_SEPARATOR'] = '0'
        self.server = QgsServer()

    def testGetFeatureInfoThousandSeparator(self):
        # With the separator disabled above, GetFeatureInfo output for the
        # "thousands" layer must match the stored text/xml reference exactly.
        self.wms_request_compare('GetFeatureInfo',
                                 '&layers=testlayer_thousands&styles=&' +
                                 'info_format=text%2Fxml&transparent=true&' +
                                 'width=600&height=400&srs=EPSG%3A3857&bbox=913190.6389747962%2C' +
                                 '5606005.488876367%2C913235.426296057%2C5606035.347090538&' +
                                 'query_layers=testlayer_thousands&X=190&Y=320',
                                 'wms_getfeatureinfo-thousands-text-xml',
                                 project='test_project_gfi_thousands.qgs',)


if __name__ == '__main__':
    unittest.main()
gpl-2.0
a9261/flexx
flexx/ui/_label.py
20
1517
""" Simple example:

.. UIExample:: 50

    label = ui.Label(text='This is a label')

Interactive example:

.. UIExample:: 50

    from flexx import app, ui, react

    class Example(ui.Widget):

        def init(self):
            with ui.HBox():
                self.but = ui.Button(text='Push me')
                self.label = ui.Label(flex=1, text='This is a label. ')

        class JS:
            @react.connect('but.mouse_down')
            def _add_label_text(self, down):
                if down:
                    self.label.text(self.label.text() + 'Yes it is. ')

"""

from .. import react
from . import Widget


class Label(Widget):
    """ Widget to show text/html.
    """

    # Minimal default styling for the label's DOM node.
    CSS = ".flx-label { border: 0px solid #454; }"

    @react.input
    def text(v=''):
        """ The text on the label.

        Reactive input signal: rejects non-string values with ValueError.
        """
        # todo: use react.check_str() or something?
        if not isinstance(v, str):
            raise ValueError('Text input must be a string.')
        return v

    class JS:
        # NOTE: this inner class is written in Python syntax but is transpiled
        # by flexx to JavaScript and runs in the browser; `this`, `document`
        # and `flexx` are resolved client-side, not in Python.

        def _js_create_node(self):
            # Create the backing <div> and attach it to the document body.
            # todo: allow setting a placeholder DOM element, or any widget parent
            this.node = document.createElement('div')
            #this.node.className = this.cssClassName
            flexx.get('body').appendChild(this.node);
            # this.node.innerHTML = 'a label'
            # super()._init()

        @react.connect('text')
        def _text_changed(self, text):
            # Mirror the 'text' signal into the DOM whenever it changes.
            this.node.innerHTML = text
bsd-2-clause
msimacek/samba
python/samba/tests/auth.py
43
1048
# Unix SMB/CIFS implementation.
# Copyright (C) Jelmer Vernooij <jelmer@samba.org> 2007
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
#

"""Tests for the Auth Python bindings.

Note that this just tests the bindings work. It does not intend to test
the functionality, that's already done in other tests.
"""

from samba import auth
import samba.tests


class AuthTests(samba.tests.TestCase):
    """Smoke tests for the samba.auth binding layer."""

    def test_system_session(self):
        # Constructing a system session must not raise; deeper functional
        # coverage lives in the other test suites.
        session = auth.system_session()
gpl-3.0
m4ns0ur/grumpy
third_party/stdlib/rfc822.py
5
33558
"""RFC 2822 message manipulation. Note: This is only a very rough sketch of a full RFC-822 parser; in particular the tokenizing of addresses does not adhere to all the quoting rules. Note: RFC 2822 is a long awaited update to RFC 822. This module should conform to RFC 2822, and is thus mis-named (it's not worth renaming it). Some effort at RFC 2822 updates have been made, but a thorough audit has not been performed. Consider any RFC 2822 non-conformance to be a bug. RFC 2822: http://www.faqs.org/rfcs/rfc2822.html RFC 822 : http://www.faqs.org/rfcs/rfc822.html (obsolete) Directions for use: To create a Message object: first open a file, e.g.: fp = open(file, 'r') You can use any other legal way of getting an open file object, e.g. use sys.stdin or call os.popen(). Then pass the open file object to the Message() constructor: m = Message(fp) This class can work with any input object that supports a readline method. If the input object has seek and tell capability, the rewindbody method will work; also illegal lines will be pushed back onto the input stream. If the input object lacks seek but has an `unread' method that can push back a line of input, Message will use that to push back illegal lines. Thus this class can be used to parse messages coming from a buffered stream. The optional `seekable' argument is provided as a workaround for certain stdio libraries in which tell() discards buffered data before discovering that the lseek() system call doesn't work. For maximum portability, you should set the seekable argument to zero to prevent that initial \code{tell} when passing in an unseekable object such as a file object created from a socket object. If it is 1 on entry -- which it is by default -- the tell() method of the open file object is called once; if this raises an exception, seekable is reset to 0. For other nonzero values of seekable, this test is not made. 
To get the text of a particular header there are several methods: str = m.getheader(name) str = m.getrawheader(name) where name is the name of the header, e.g. 'Subject'. The difference is that getheader() strips the leading and trailing whitespace, while getrawheader() doesn't. Both functions retain embedded whitespace (including newlines) exactly as they are specified in the header, and leave the case of the text unchanged. For addresses and address lists there are functions realname, mailaddress = m.getaddr(name) list = m.getaddrlist(name) where the latter returns a list of (realname, mailaddr) tuples. There is also a method time = m.getdate(name) which parses a Date-like field and returns a time-compatible tuple, i.e. a tuple such as returned by time.localtime() or accepted by time.mktime(). See the class definition for lower level access methods. There are also some utility functions here. """ # Cleanup and extensions by Eric S. Raymond <esr@thyrsus.com> import time from warnings import warnpy3k warnpy3k("in 3.x, rfc822 has been removed in favor of the email package", stacklevel=2) __all__ = ["Message","AddressList","parsedate","parsedate_tz","mktime_tz"] _blanklines = ('\r\n', '\n') # Optimization for islast() class Message(object): """Represents a single RFC 2822-compliant message.""" def __init__(self, fp, seekable = 1): """Initialize the class instance and read the headers.""" if seekable == 1: # Exercise tell() to make sure it works # (and then assume seek() works, too) try: fp.tell() except (AttributeError, IOError): seekable = 0 self.fp = fp self.seekable = seekable self.startofheaders = None self.startofbody = None # if self.seekable: try: self.startofheaders = self.fp.tell() except IOError: self.seekable = 0 # self.readheaders() # if self.seekable: try: self.startofbody = self.fp.tell() except IOError: self.seekable = 0 def rewindbody(self): """Rewind the file to the start of the body (if seekable).""" if not self.seekable: raise IOError, "unseekable 
file" self.fp.seek(self.startofbody) def readheaders(self): """Read header lines. Read header lines up to the entirely blank line that terminates them. The (normally blank) line that ends the headers is skipped, but not included in the returned list. If a non-header line ends the headers, (which is an error), an attempt is made to backspace over it; it is never included in the returned list. The variable self.status is set to the empty string if all went well, otherwise it is an error message. The variable self.headers is a completely uninterpreted list of lines contained in the header (so printing them will reproduce the header exactly as it appears in the file). """ self.dict = {} self.unixfrom = '' self.headers = lst = [] self.status = '' headerseen = "" firstline = 1 startofline = unread = tell = None if hasattr(self.fp, 'unread'): unread = self.fp.unread elif self.seekable: tell = self.fp.tell while 1: if tell: try: startofline = tell() except IOError: startofline = tell = None self.seekable = 0 line = self.fp.readline() if not line: self.status = 'EOF in headers' break # Skip unix From name time lines if firstline and line.startswith('From '): self.unixfrom = self.unixfrom + line continue firstline = 0 if headerseen and line[0] in ' \t': # It's a continuation line. lst.append(line) x = (self.dict[headerseen] + "\n " + line.strip()) self.dict[headerseen] = x.strip() continue elif self.iscomment(line): # It's a comment. Ignore it. continue elif self.islast(line): # Note! No pushback here! The delimiter line gets eaten. break headerseen = self.isheader(line) if headerseen: # It's a legal header line, save it. lst.append(line) self.dict[headerseen] = line[len(headerseen)+1:].strip() continue elif headerseen is not None: # An empty header name. These aren't allowed in HTTP, but it's # probably a benign mistake. Don't add the header, just keep # going. continue else: # It's not a header line; throw it back and stop here. 
if not self.dict: self.status = 'No headers' else: self.status = 'Non-header line where header expected' # Try to undo the read. if unread: unread(line) elif tell: self.fp.seek(startofline) else: self.status = self.status + '; bad seek' break def isheader(self, line): """Determine whether a given line is a legal header. This method should return the header name, suitably canonicalized. You may override this method in order to use Message parsing on tagged data in RFC 2822-like formats with special header formats. """ i = line.find(':') if i > -1: return line[:i].lower() return None def islast(self, line): """Determine whether a line is a legal end of RFC 2822 headers. You may override this method if your application wants to bend the rules, e.g. to strip trailing whitespace, or to recognize MH template separators ('--------'). For convenience (e.g. for code reading from sockets) a line consisting of \\r\\n also matches. """ return line in _blanklines def iscomment(self, line): """Determine whether a line should be skipped entirely. You may override this method in order to use Message parsing on tagged data in RFC 2822-like formats that support embedded comments or free-text data. """ return False def getallmatchingheaders(self, name): """Find all header lines matching a given header name. Look through the list of headers and find all lines matching a given header name (and their continuation lines). A list of the lines is returned, without interpretation. If the header does not occur, an empty list is returned. If the header occurs multiple times, all occurrences are returned. Case is not important in the header name. """ name = name.lower() + ':' n = len(name) lst = [] hit = 0 for line in self.headers: if line[:n].lower() == name: hit = 1 elif not line[:1].isspace(): hit = 0 if hit: lst.append(line) return lst def getfirstmatchingheader(self, name): """Get the first header line matching name. 
This is similar to getallmatchingheaders, but it returns only the first matching header (and its continuation lines). """ name = name.lower() + ':' n = len(name) lst = [] hit = 0 for line in self.headers: if hit: if not line[:1].isspace(): break elif line[:n].lower() == name: hit = 1 if hit: lst.append(line) return lst def getrawheader(self, name): """A higher-level interface to getfirstmatchingheader(). Return a string containing the literal text of the header but with the keyword stripped. All leading, trailing and embedded whitespace is kept in the string, however. Return None if the header does not occur. """ lst = self.getfirstmatchingheader(name) if not lst: return None lst[0] = lst[0][len(name) + 1:] return ''.join(lst) def getheader(self, name, default=None): """Get the header value for a name. This is the normal interface: it returns a stripped version of the header value for a given header name, or None if it doesn't exist. This uses the dictionary version which finds the *last* such header. """ return self.dict.get(name.lower(), default) get = getheader def getheaders(self, name): """Get all values for a header. This returns a list of values for headers given more than once; each value in the result list is stripped in the same way as the result of getheader(). If the header is not given, return an empty list. """ result = [] current = '' have_header = 0 for s in self.getallmatchingheaders(name): if s[0].isspace(): if current: current = "%s\n %s" % (current, s.strip()) else: current = s.strip() else: if have_header: result.append(current) current = s[s.find(":") + 1:].strip() have_header = 1 if have_header: result.append(current) return result def getaddr(self, name): """Get a single address from a header, as a tuple. An example return value: ('Guido van Rossum', 'guido@cwi.nl') """ # New, by Ben Escoto alist = self.getaddrlist(name) if alist: return alist[0] else: return (None, None) def getaddrlist(self, name): """Get a list of addresses from a header. 
Retrieves a list of addresses from a header, where each address is a tuple as returned by getaddr(). Scans all named headers, so it works properly with multiple To: or Cc: headers for example. """ raw = [] for h in self.getallmatchingheaders(name): if h[0] in ' \t': raw.append(h) else: if raw: raw.append(', ') i = h.find(':') if i > 0: addr = h[i+1:] raw.append(addr) alladdrs = ''.join(raw) a = AddressList(alladdrs) return a.addresslist def getdate(self, name): """Retrieve a date field from a header. Retrieves a date field from the named header, returning a tuple compatible with time.mktime(). """ try: data = self[name] except KeyError: return None return parsedate(data) def getdate_tz(self, name): """Retrieve a date field from a header as a 10-tuple. The first 9 elements make up a tuple compatible with time.mktime(), and the 10th is the offset of the poster's time zone from GMT/UTC. """ try: data = self[name] except KeyError: return None return parsedate_tz(data) # Access as a dictionary (only finds *last* header of each type): def __len__(self): """Get the number of headers in a message.""" return len(self.dict) def __getitem__(self, name): """Get a specific header, as from a dictionary.""" return self.dict[name.lower()] def __setitem__(self, name, value): """Set the value of a header. Note: This is not a perfect inversion of __getitem__, because any changed headers get stuck at the end of the raw-headers list rather than where the altered header was. 
""" del self[name] # Won't fail if it doesn't exist self.dict[name.lower()] = value text = name + ": " + value for line in text.split("\n"): self.headers.append(line + "\n") def __delitem__(self, name): """Delete all occurrences of a specific header, if it is present.""" name = name.lower() if not name in self.dict: return del self.dict[name] name = name + ':' n = len(name) lst = [] hit = 0 for i in range(len(self.headers)): line = self.headers[i] if line[:n].lower() == name: hit = 1 elif not line[:1].isspace(): hit = 0 if hit: lst.append(i) for i in reversed(lst): del self.headers[i] def setdefault(self, name, default=""): lowername = name.lower() if lowername in self.dict: return self.dict[lowername] else: text = name + ": " + default for line in text.split("\n"): self.headers.append(line + "\n") self.dict[lowername] = default return default def has_key(self, name): """Determine whether a message contains the named header.""" return name.lower() in self.dict def __contains__(self, name): """Determine whether a message contains the named header.""" return name.lower() in self.dict def __iter__(self): return iter(self.dict) def keys(self): """Get all of a message's header field names.""" return self.dict.keys() def values(self): """Get all of a message's header field values.""" return self.dict.values() def items(self): """Get all of a message's headers. Returns a list of name, value tuples. """ return self.dict.items() def __str__(self): return ''.join(self.headers) # Utility functions # ----------------- # XXX Should fix unquote() and quote() to be really conformant. # XXX The inverses of the parse functions may also be useful. 
def unquote(s): """Remove quotes from a string.""" if len(s) > 1: if s.startswith('"') and s.endswith('"'): return s[1:-1].replace('\\\\', '\\').replace('\\"', '"') if s.startswith('<') and s.endswith('>'): return s[1:-1] return s def quote(s): """Add quotes around a string.""" return s.replace('\\', '\\\\').replace('"', '\\"') def parseaddr(address): """Parse an address into a (realname, mailaddr) tuple.""" a = AddressList(address) lst = a.addresslist if not lst: return (None, None) return lst[0] class AddrlistClass(object): """Address parser class by Ben Escoto. To understand what this class does, it helps to have a copy of RFC 2822 in front of you. http://www.faqs.org/rfcs/rfc2822.html Note: this class interface is deprecated and may be removed in the future. Use rfc822.AddressList instead. """ def __init__(self, field): """Initialize a new instance. `field' is an unparsed address header field, containing one or more addresses. """ self.specials = '()<>@,:;.\"[]' self.pos = 0 self.LWS = ' \t' self.CR = '\r\n' self.atomends = self.specials + self.LWS + self.CR # Note that RFC 2822 now specifies `.' as obs-phrase, meaning that it # is obsolete syntax. RFC 2822 requires that we recognize obsolete # syntax, so allow dots in phrases. self.phraseends = self.atomends.replace('.', '') self.field = field self.commentlist = [] def gotonext(self): """Parse up to the start of the next address.""" while self.pos < len(self.field): if self.field[self.pos] in self.LWS + '\n\r': self.pos = self.pos + 1 elif self.field[self.pos] == '(': self.commentlist.append(self.getcomment()) else: break def getaddrlist(self): """Parse all addresses. Returns a list containing all of the addresses. 
""" result = [] ad = self.getaddress() while ad: result += ad ad = self.getaddress() return result def getaddress(self): """Parse the next address.""" self.commentlist = [] self.gotonext() oldpos = self.pos oldcl = self.commentlist plist = self.getphraselist() self.gotonext() returnlist = [] if self.pos >= len(self.field): # Bad email address technically, no domain. if plist: returnlist = [(' '.join(self.commentlist), plist[0])] elif self.field[self.pos] in '.@': # email address is just an addrspec # this isn't very efficient since we start over self.pos = oldpos self.commentlist = oldcl addrspec = self.getaddrspec() returnlist = [(' '.join(self.commentlist), addrspec)] elif self.field[self.pos] == ':': # address is a group returnlist = [] fieldlen = len(self.field) self.pos += 1 while self.pos < len(self.field): self.gotonext() if self.pos < fieldlen and self.field[self.pos] == ';': self.pos += 1 break returnlist = returnlist + self.getaddress() elif self.field[self.pos] == '<': # Address is a phrase then a route addr routeaddr = self.getrouteaddr() if self.commentlist: returnlist = [(' '.join(plist) + ' (' + \ ' '.join(self.commentlist) + ')', routeaddr)] else: returnlist = [(' '.join(plist), routeaddr)] else: if plist: returnlist = [(' '.join(self.commentlist), plist[0])] elif self.field[self.pos] in self.specials: self.pos += 1 self.gotonext() if self.pos < len(self.field) and self.field[self.pos] == ',': self.pos += 1 return returnlist def getrouteaddr(self): """Parse a route address (Return-path value). This method just skips all the route stuff and returns the addrspec. 
""" if self.field[self.pos] != '<': return expectroute = 0 self.pos += 1 self.gotonext() adlist = "" while self.pos < len(self.field): if expectroute: self.getdomain() expectroute = 0 elif self.field[self.pos] == '>': self.pos += 1 break elif self.field[self.pos] == '@': self.pos += 1 expectroute = 1 elif self.field[self.pos] == ':': self.pos += 1 else: adlist = self.getaddrspec() self.pos += 1 break self.gotonext() return adlist def getaddrspec(self): """Parse an RFC 2822 addr-spec.""" aslist = [] self.gotonext() while self.pos < len(self.field): if self.field[self.pos] == '.': aslist.append('.') self.pos += 1 elif self.field[self.pos] == '"': aslist.append('"%s"' % self.getquote()) elif self.field[self.pos] in self.atomends: break else: aslist.append(self.getatom()) self.gotonext() if self.pos >= len(self.field) or self.field[self.pos] != '@': return ''.join(aslist) aslist.append('@') self.pos += 1 self.gotonext() return ''.join(aslist) + self.getdomain() def getdomain(self): """Get the complete domain name from an address.""" sdlist = [] while self.pos < len(self.field): if self.field[self.pos] in self.LWS: self.pos += 1 elif self.field[self.pos] == '(': self.commentlist.append(self.getcomment()) elif self.field[self.pos] == '[': sdlist.append(self.getdomainliteral()) elif self.field[self.pos] == '.': self.pos += 1 sdlist.append('.') elif self.field[self.pos] in self.atomends: break else: sdlist.append(self.getatom()) return ''.join(sdlist) def getdelimited(self, beginchar, endchars, allowcomments = 1): """Parse a header fragment delimited by special characters. `beginchar' is the start character for the fragment. If self is not looking at an instance of `beginchar' then getdelimited returns the empty string. `endchars' is a sequence of allowable end-delimiting characters. Parsing stops when one of these is encountered. If `allowcomments' is non-zero, embedded RFC 2822 comments are allowed within the parsed fragment. 
""" if self.field[self.pos] != beginchar: return '' slist = [''] quote = 0 self.pos += 1 while self.pos < len(self.field): if quote == 1: slist.append(self.field[self.pos]) quote = 0 elif self.field[self.pos] in endchars: self.pos += 1 break elif allowcomments and self.field[self.pos] == '(': slist.append(self.getcomment()) continue # have already advanced pos from getcomment elif self.field[self.pos] == '\\': quote = 1 else: slist.append(self.field[self.pos]) self.pos += 1 return ''.join(slist) def getquote(self): """Get a quote-delimited fragment from self's field.""" return self.getdelimited('"', '"\r', 0) def getcomment(self): """Get a parenthesis-delimited fragment from self's field.""" return self.getdelimited('(', ')\r', 1) def getdomainliteral(self): """Parse an RFC 2822 domain-literal.""" return '[%s]' % self.getdelimited('[', ']\r', 0) def getatom(self, atomends=None): """Parse an RFC 2822 atom. Optional atomends specifies a different set of end token delimiters (the default is to use self.atomends). This is used e.g. in getphraselist() since phrase endings must not include the `.' (which is legal in phrases).""" atomlist = [''] if atomends is None: atomends = self.atomends while self.pos < len(self.field): if self.field[self.pos] in atomends: break else: atomlist.append(self.field[self.pos]) self.pos += 1 return ''.join(atomlist) def getphraselist(self): """Parse a sequence of RFC 2822 phrases. A phrase is a sequence of words, which are in turn either RFC 2822 atoms or quoted-strings. Phrases are canonicalized by squeezing all runs of continuous whitespace into one space. 
""" plist = [] while self.pos < len(self.field): if self.field[self.pos] in self.LWS: self.pos += 1 elif self.field[self.pos] == '"': plist.append(self.getquote()) elif self.field[self.pos] == '(': self.commentlist.append(self.getcomment()) elif self.field[self.pos] in self.phraseends: break else: plist.append(self.getatom(self.phraseends)) return plist class AddressList(AddrlistClass): """An AddressList encapsulates a list of parsed RFC 2822 addresses.""" def __init__(self, field): AddrlistClass.__init__(self, field) if field: self.addresslist = self.getaddrlist() else: self.addresslist = [] def __len__(self): return len(self.addresslist) def __str__(self): return ", ".join(map(dump_address_pair, self.addresslist)) def __add__(self, other): # Set union newaddr = AddressList(None) newaddr.addresslist = self.addresslist[:] for x in other.addresslist: if not x in self.addresslist: newaddr.addresslist.append(x) return newaddr def __iadd__(self, other): # Set union, in-place for x in other.addresslist: if not x in self.addresslist: self.addresslist.append(x) return self def __sub__(self, other): # Set difference newaddr = AddressList(None) for x in self.addresslist: if not x in other.addresslist: newaddr.addresslist.append(x) return newaddr def __isub__(self, other): # Set difference, in-place for x in other.addresslist: if x in self.addresslist: self.addresslist.remove(x) return self def __getitem__(self, index): # Make indexing, slices, and 'in' work return self.addresslist[index] def dump_address_pair(pair): """Dump a (name, address) pair in a canonicalized form.""" if pair[0]: return '"' + pair[0] + '" <' + pair[1] + '>' else: return pair[1] # Parse a date field _monthnames = ['jan', 'feb', 'mar', 'apr', 'may', 'jun', 'jul', 'aug', 'sep', 'oct', 'nov', 'dec', 'january', 'february', 'march', 'april', 'may', 'june', 'july', 'august', 'september', 'october', 'november', 'december'] _daynames = ['mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun'] # The timezone table does 
not include the military time zones defined # in RFC822, other than Z. According to RFC1123, the description in # RFC822 gets the signs wrong, so we can't rely on any such time # zones. RFC1123 recommends that numeric timezone indicators be used # instead of timezone names. _timezones = {'UT':0, 'UTC':0, 'GMT':0, 'Z':0, 'AST': -400, 'ADT': -300, # Atlantic (used in Canada) 'EST': -500, 'EDT': -400, # Eastern 'CST': -600, 'CDT': -500, # Central 'MST': -700, 'MDT': -600, # Mountain 'PST': -800, 'PDT': -700 # Pacific } def parsedate_tz(data): """Convert a date string to a time tuple. Accounts for military timezones. """ if not data: return None data = data.split() if data[0][-1] in (',', '.') or data[0].lower() in _daynames: # There's a dayname here. Skip it del data[0] else: # no space after the "weekday,"? i = data[0].rfind(',') if i >= 0: data[0] = data[0][i+1:] if len(data) == 3: # RFC 850 date, deprecated stuff = data[0].split('-') if len(stuff) == 3: data = stuff + data[1:] if len(data) == 4: s = data[3] i = s.find('+') if i > 0: data[3:] = [s[:i], s[i+1:]] else: data.append('') # Dummy tz if len(data) < 5: return None data = data[:5] [dd, mm, yy, tm, tz] = data mm = mm.lower() if not mm in _monthnames: dd, mm = mm, dd.lower() if not mm in _monthnames: return None mm = _monthnames.index(mm)+1 if mm > 12: mm = mm - 12 if dd[-1] == ',': dd = dd[:-1] i = yy.find(':') if i > 0: yy, tm = tm, yy if yy[-1] == ',': yy = yy[:-1] if not yy[0].isdigit(): yy, tz = tz, yy if tm[-1] == ',': tm = tm[:-1] tm = tm.split(':') if len(tm) == 2: [thh, tmm] = tm tss = '0' elif len(tm) == 3: [thh, tmm, tss] = tm else: return None try: yy = int(yy) dd = int(dd) thh = int(thh) tmm = int(tmm) tss = int(tss) except ValueError: return None tzoffset = None tz = tz.upper() if tz in _timezones: tzoffset = _timezones[tz] else: try: tzoffset = int(tz) except ValueError: pass # Convert a timezone offset into seconds ; -0500 -> -18000 if tzoffset: if tzoffset < 0: tzsign = -1 tzoffset = -tzoffset 
else: tzsign = 1 tzoffset = tzsign * ( (tzoffset//100)*3600 + (tzoffset % 100)*60) return (yy, mm, dd, thh, tmm, tss, 0, 1, 0, tzoffset) def parsedate(data): """Convert a time string to a time tuple.""" t = parsedate_tz(data) if t is None: return t return t[:9] def mktime_tz(data): """Turn a 10-tuple as returned by parsedate_tz() into a UTC timestamp.""" if data[9] is None: # No zone info, so localtime is better assumption than GMT return time.mktime(data[:8] + (-1,)) else: t = time.mktime(data[:8] + (0,)) return t - data[9] - time.timezone def formatdate(timeval=None): """Returns time format preferred for Internet standards. Sun, 06 Nov 1994 08:49:37 GMT ; RFC 822, updated by RFC 1123 According to RFC 1123, day and month names must always be in English. If not for that, this code could use strftime(). It can't because strftime() honors the locale and could generate non-English names. """ if timeval is None: timeval = time.time() timeval = time.gmtime(timeval) return "%s, %02d %s %04d %02d:%02d:%02d GMT" % ( ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun")[timeval[6]], timeval[2], ("Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec")[timeval[1]-1], timeval[0], timeval[3], timeval[4], timeval[5]) # When used as script, run a small test program. # The first command line argument must be a filename containing one # message in RFC-822 format. 
if __name__ == '__main__': import sys, os file = os.path.join(os.environ['HOME'], 'Mail/inbox/1') if sys.argv[1:]: file = sys.argv[1] f = open(file, 'r') m = Message(f) print 'From:', m.getaddr('from') print 'To:', m.getaddrlist('to') print 'Subject:', m.getheader('subject') print 'Date:', m.getheader('date') date = m.getdate_tz('date') tz = date[-1] date = time.localtime(mktime_tz(date)) if date: print 'ParsedDate:', time.asctime(date), hhmmss = tz hhmm, ss = divmod(hhmmss, 60) hh, mm = divmod(hhmm, 60) print "%+03d%02d" % (hh, mm), if ss: print ".%02d" % ss, print else: print 'ParsedDate:', None m.rewindbody() n = 0 while f.readline(): n += 1 print 'Lines:', n print '-'*70 print 'len =', len(m) if 'Date' in m: print 'Date =', m['Date'] if 'X-Nonsense' in m: pass print 'keys =', m.keys() print 'values =', m.values() print 'items =', m.items()
apache-2.0
kobejean/tensorflow
tensorflow/python/kernel_tests/resource_variable_ops_test.py
1
41301
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for tensorflow.ops.resource_variable_ops.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import copy import gc import os import pickle import numpy as np from tensorflow.python.eager import context from tensorflow.python.framework import constant_op from tensorflow.python.framework import dtypes from tensorflow.python.framework import errors from tensorflow.python.framework import ops from tensorflow.python.framework import test_util from tensorflow.python.ops import array_ops from tensorflow.python.ops import control_flow_ops from tensorflow.python.ops import init_ops from tensorflow.python.ops import math_ops from tensorflow.python.ops import resource_variable_ops from tensorflow.python.ops import state_ops from tensorflow.python.ops import variable_scope from tensorflow.python.ops import variables from tensorflow.python.platform import test from tensorflow.python.training import momentum from tensorflow.python.training import saver from tensorflow.python.training import training_util from tensorflow.python.util import compat class ResourceVariableOpsTest(test_util.TensorFlowTestCase): def tearDown(self): gc.collect() # This will only contain uncollectable garbage, i.e. 
reference cycles # involving objects with __del__ defined. self.assertEqual(0, len(gc.garbage)) def testHandleDtypeShapeMatch(self): with self.cached_session(): handle = resource_variable_ops.var_handle_op(dtype=dtypes.int32, shape=[]) with self.assertRaises(ValueError): resource_variable_ops.assign_variable_op( handle, constant_op.constant(0.0, dtype=dtypes.float32)).run() with self.assertRaises(ValueError): resource_variable_ops.assign_variable_op(handle, constant_op.constant( [0], dtype=dtypes.int32)).run() resource_variable_ops.assign_variable_op(handle, constant_op.constant( 0, dtype=dtypes.int32)).run() def testGPUInt64(self): if not context.context().num_gpus(): return with context.eager_mode(), context.device("gpu:0"): v = resource_variable_ops.ResourceVariable(1, dtype=dtypes.int64) self.assertAllEqual(1, v.numpy()) def testEagerNameNotIdentity(self): with context.eager_mode(): v0 = resource_variable_ops.ResourceVariable(1.0, name="a") v1 = resource_variable_ops.ResourceVariable(2.0, name="a") self.assertAllEqual(v0.numpy(), 1.0) self.assertAllEqual(v1.numpy(), 2.0) def testEagerNameNotNeeded(self): with context.eager_mode(): v0 = resource_variable_ops.ResourceVariable(1.0) self.assertAllEqual(v0.numpy(), 1.0) def testReadVariableDtypeMismatchEager(self): with context.eager_mode(): handle = resource_variable_ops.var_handle_op( dtype=dtypes.int32, shape=[1], name="foo") resource_variable_ops.assign_variable_op(handle, 1) with self.assertRaisesRegexp(errors.InvalidArgumentError, "Trying to read variable with wrong dtype. 
" "Expected float got int32."): _ = resource_variable_ops.read_variable_op(handle, dtype=dtypes.float32) def testEagerInitializedValue(self): with context.eager_mode(): variable = resource_variable_ops.ResourceVariable(1.0, name="eager-init") self.assertAllEqual(variable.numpy(), 1.0) self.assertAllEqual(variable.initialized_value().numpy(), 1.0) def testEagerBool(self): with context.eager_mode(): v = resource_variable_ops.ResourceVariable(False, name="bool_test") self.assertAllEqual(bool(v), False) def testEagerDeepCopy(self): with context.eager_mode(): init_value = np.ones((4, 4, 4)) variable = resource_variable_ops.ResourceVariable(init_value, name="init") copied_variable = copy.deepcopy(variable) copied_variable.assign(4 * np.ones((4, 4, 4))) # Copying the variable should create a new underlying tensor with distinct # values. self.assertFalse(np.allclose(variable.numpy(), copied_variable.numpy())) def testGraphDeepCopy(self): with self.cached_session(): init_value = np.ones((4, 4, 4)) variable = resource_variable_ops.ResourceVariable(init_value, name="init") with self.assertRaises(NotImplementedError): copy.deepcopy(variable) @test_util.run_in_graph_and_eager_modes def testStridedSliceAssign(self): v = resource_variable_ops.ResourceVariable([1.0, 2.0]) self.evaluate(variables.global_variables_initializer()) self.evaluate(v[0].assign(2.0)) self.assertAllEqual(self.evaluate(v), [2.0, 2.0]) def testDifferentAssignGraph(self): with ops.Graph().as_default(): v = resource_variable_ops.ResourceVariable(1.0) ops.reset_default_graph() v.assign(2.0) # Note: this fails if we run convert_to_tensor on not the # variable graph. 
def testFetchHandle(self): with self.cached_session(): handle = resource_variable_ops.var_handle_op( dtype=dtypes.int32, shape=[1], name="foo") self.assertGreater(len(handle.eval()), 0) def testCachedValueReadBeforeWrite(self): with self.cached_session() as sess: v = resource_variable_ops.ResourceVariable(0.0, caching_device="cpu:0") sess.run(v.initializer) value, _ = sess.run([v, v.assign_add(1.0)]) self.assertAllEqual(value, 0.0) def testAssignVariableDtypeMismatchEager(self): with context.eager_mode(): handle = resource_variable_ops.var_handle_op( dtype=dtypes.int32, shape=[1], name="foo") resource_variable_ops.assign_variable_op( handle, constant_op.constant([1])) with self.assertRaisesRegexp(errors.InvalidArgumentError, "Trying to assign variable with wrong " "dtype. Expected int32 got float."): resource_variable_ops.assign_variable_op( handle, constant_op.constant([1.], dtype=dtypes.float32)) def testUnprintableHandle(self): with context.eager_mode(): handle = resource_variable_ops.var_handle_op( dtype=dtypes.int32, shape=[1], name="foo") self.assertIn("<unprintable>", str(handle)) self.assertIn("<unprintable>", repr(handle)) @test_util.run_in_graph_and_eager_modes def testDtypeSurvivesIdentity(self): handle = resource_variable_ops.var_handle_op(dtype=dtypes.int32, shape=[]) id_handle = array_ops.identity(handle) self.evaluate(resource_variable_ops.assign_variable_op( id_handle, constant_op.constant(0, dtype=dtypes.int32))) def testUnreadOpName(self): v = resource_variable_ops.ResourceVariable(1.0) self.assertNotEqual(v.name, v.assign_add(1.0).name) @test_util.run_in_graph_and_eager_modes def testCreateRead(self): handle = resource_variable_ops.var_handle_op(dtype=dtypes.int32, shape=[]) self.evaluate(resource_variable_ops.assign_variable_op( handle, constant_op.constant(1, dtype=dtypes.int32))) value = self.evaluate( resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)) self.assertAllEqual(1, value) @test_util.run_in_graph_and_eager_modes def 
testManyAssigns(self): handle = resource_variable_ops.var_handle_op(dtype=dtypes.int32, shape=[]) create = resource_variable_ops.assign_variable_op( handle, constant_op.constant(1, dtype=dtypes.int32)) with ops.control_dependencies([create]): first_read = resource_variable_ops.read_variable_op( handle, dtype=dtypes.int32) with ops.control_dependencies([first_read]): write = resource_variable_ops.assign_variable_op( handle, constant_op.constant(2, dtype=dtypes.int32)) with ops.control_dependencies([write]): second_read = resource_variable_ops.read_variable_op( handle, dtype=dtypes.int32) f, s = self.evaluate([first_read, second_read]) self.assertEqual(f, 1) self.assertEqual(s, 2) @test_util.run_in_graph_and_eager_modes def testAssignAdd(self): handle = resource_variable_ops.var_handle_op(dtype=dtypes.int32, shape=[]) self.evaluate(resource_variable_ops.assign_variable_op( handle, constant_op.constant(1, dtype=dtypes.int32))) self.evaluate(resource_variable_ops.assign_add_variable_op( handle, constant_op.constant(1, dtype=dtypes.int32))) read = self.evaluate( resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)) self.assertEqual(read, 2) @test_util.run_in_graph_and_eager_modes def testScatterAdd(self): handle = resource_variable_ops.var_handle_op( dtype=dtypes.int32, shape=[1, 1]) self.evaluate( resource_variable_ops.assign_variable_op( handle, constant_op.constant([[1]], dtype=dtypes.int32))) self.evaluate( resource_variable_ops.resource_scatter_add( handle, [0], constant_op.constant([[2]], dtype=dtypes.int32))) read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32) self.assertEqual(self.evaluate(read), [[3]]) @test_util.run_in_graph_and_eager_modes def testScatterSub(self): handle = resource_variable_ops.var_handle_op( dtype=dtypes.int32, shape=[1, 1]) self.evaluate( resource_variable_ops.assign_variable_op( handle, constant_op.constant([[1]], dtype=dtypes.int32))) self.evaluate( resource_variable_ops.resource_scatter_sub( handle, 
[0], constant_op.constant([[2]], dtype=dtypes.int32))) read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32) self.assertEqual(self.evaluate(read), [[-1]]) @test_util.run_in_graph_and_eager_modes def testScatterMul(self): handle = resource_variable_ops.var_handle_op( dtype=dtypes.int32, shape=[1, 1]) self.evaluate( resource_variable_ops.assign_variable_op( handle, constant_op.constant([[1]], dtype=dtypes.int32))) self.evaluate( resource_variable_ops.resource_scatter_mul( handle, [0], constant_op.constant([[5]], dtype=dtypes.int32))) read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32) self.assertEqual(self.evaluate(read), [[5]]) def testEagerPickle(self): with context.eager_mode(): tmp_dir = self.get_temp_dir() fname = os.path.join(tmp_dir, "var.pickle") with open(fname, "wb") as f: v = resource_variable_ops.ResourceVariable(10.0) pickle.dump(v, f) with open(fname, "rb") as f: v = pickle.load(f) self.assertAllEqual(v.numpy(), 10.0) @test_util.run_in_graph_and_eager_modes def testScatterDiv(self): handle = resource_variable_ops.var_handle_op( dtype=dtypes.int32, shape=[1, 1]) self.evaluate( resource_variable_ops.assign_variable_op( handle, constant_op.constant([[6]], dtype=dtypes.int32))) self.evaluate( resource_variable_ops.resource_scatter_div( handle, [0], constant_op.constant([[3]], dtype=dtypes.int32))) read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32) self.assertEqual(self.evaluate(read), [[2]]) def testUseResource(self): v = variables.VariableV1(1.0, use_resource=True) self.assertTrue(isinstance(v, resource_variable_ops.ResourceVariable)) def testEagerNoUseResource(self): with context.eager_mode(): v = variables.Variable(1.0) self.assertTrue(isinstance(v, resource_variable_ops.ResourceVariable)) @test_util.run_in_graph_and_eager_modes def testScatterMin(self): with ops.device("cpu:0"): handle = resource_variable_ops.var_handle_op( dtype=dtypes.int32, shape=[1, 1]) self.evaluate( 
resource_variable_ops.assign_variable_op(handle, constant_op.constant( [[6]], dtype=dtypes.int32))) self.evaluate( resource_variable_ops.resource_scatter_min(handle, [0], constant_op.constant( [[3]], dtype=dtypes.int32))) read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32) self.assertEqual(self.evaluate(read), [[3]]) def testMetagraph(self): with ops.Graph().as_default(): with variable_scope.variable_scope("foo", use_resource=True): a = variable_scope.get_variable("a", initializer=10.0) momentum.MomentumOptimizer( learning_rate=0.001, momentum=0.1).minimize( a, colocate_gradients_with_ops=True, global_step=training_util.get_or_create_global_step()) graph = ops.get_default_graph() meta_graph_def = saver.export_meta_graph(graph=graph) with ops.Graph().as_default(): saver.import_meta_graph(meta_graph_def, import_scope="") meta_graph_two = saver.export_meta_graph(graph=graph) self.assertEqual(meta_graph_def, meta_graph_two) @test_util.run_in_graph_and_eager_modes def testScatterMax(self): handle = resource_variable_ops.var_handle_op( dtype=dtypes.int32, shape=[1, 1]) self.evaluate( resource_variable_ops.assign_variable_op( handle, constant_op.constant([[6]], dtype=dtypes.int32))) self.evaluate( resource_variable_ops.resource_scatter_max( handle, [0], constant_op.constant([[3]], dtype=dtypes.int32))) read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32) self.assertEqual(self.evaluate(read), [[6]]) @test_util.run_in_graph_and_eager_modes def testScatterAddScalar(self): handle = resource_variable_ops.var_handle_op( dtype=dtypes.int32, shape=[1, 1]) self.evaluate( resource_variable_ops.assign_variable_op( handle, constant_op.constant([[1]], dtype=dtypes.int32))) self.evaluate( resource_variable_ops.resource_scatter_add( handle, [0], constant_op.constant(2, dtype=dtypes.int32))) read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32) self.assertEqual(self.evaluate(read), [[3]]) 
@test_util.run_in_graph_and_eager_modes def testScatterSubScalar(self): handle = resource_variable_ops.var_handle_op( dtype=dtypes.int32, shape=[1, 1]) self.evaluate( resource_variable_ops.assign_variable_op( handle, constant_op.constant([[1]], dtype=dtypes.int32))) self.evaluate( resource_variable_ops.resource_scatter_sub( handle, [0], constant_op.constant(2, dtype=dtypes.int32))) read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32) self.assertEqual(self.evaluate(read), [[-1]]) @test_util.run_in_graph_and_eager_modes def testScatterMulScalar(self): handle = resource_variable_ops.var_handle_op( dtype=dtypes.int32, shape=[1, 1]) self.evaluate( resource_variable_ops.assign_variable_op( handle, constant_op.constant([[1]], dtype=dtypes.int32))) self.evaluate( resource_variable_ops.resource_scatter_mul( handle, [0], constant_op.constant(5, dtype=dtypes.int32))) read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32) self.assertEqual(self.evaluate(read), [[5]]) @test_util.run_in_graph_and_eager_modes def testScatterDivScalar(self): handle = resource_variable_ops.var_handle_op( dtype=dtypes.int32, shape=[1, 1]) self.evaluate( resource_variable_ops.assign_variable_op( handle, constant_op.constant([[6]], dtype=dtypes.int32))) self.evaluate( resource_variable_ops.resource_scatter_div( handle, [0], constant_op.constant(3, dtype=dtypes.int32))) read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32) self.assertEqual(self.evaluate(read), [[2]]) @test_util.run_in_graph_and_eager_modes def testScatterMinScalar(self): handle = resource_variable_ops.var_handle_op( dtype=dtypes.int32, shape=[1, 1]) self.evaluate( resource_variable_ops.assign_variable_op( handle, constant_op.constant([[6]], dtype=dtypes.int32))) self.evaluate( resource_variable_ops.resource_scatter_min( handle, [0], constant_op.constant(3, dtype=dtypes.int32))) read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32) 
self.assertEqual(self.evaluate(read), [[3]]) @test_util.run_in_graph_and_eager_modes def testScatterMaxScalar(self): handle = resource_variable_ops.var_handle_op( dtype=dtypes.int32, shape=[1, 1]) self.evaluate( resource_variable_ops.assign_variable_op( handle, constant_op.constant([[6]], dtype=dtypes.int32))) self.evaluate( resource_variable_ops.resource_scatter_max( handle, [0], constant_op.constant(3, dtype=dtypes.int32))) read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32) self.assertEqual(self.evaluate(read), [[6]]) def testScatterUpdateString(self): handle = resource_variable_ops.var_handle_op( dtype=dtypes.string, shape=[1, 1]) self.evaluate(resource_variable_ops.assign_variable_op( handle, constant_op.constant([["a"]], dtype=dtypes.string))) self.evaluate(resource_variable_ops.resource_scatter_update( handle, [0], constant_op.constant([["b"]], dtype=dtypes.string))) read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.string) self.assertEqual(compat.as_bytes(self.evaluate(read)[0][0]), compat.as_bytes("b")) def testScatterUpdateStringScalar(self): handle = resource_variable_ops.var_handle_op( dtype=dtypes.string, shape=[1, 1]) self.evaluate( resource_variable_ops.assign_variable_op(handle, constant_op.constant( [["a"]], dtype=dtypes.string))) self.evaluate( resource_variable_ops.resource_scatter_update(handle, [0], constant_op.constant( "b", dtype=dtypes.string))) read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.string) self.assertEqual( compat.as_bytes(self.evaluate(read)[0][0]), compat.as_bytes("b")) # TODO(alive): get this to work in Eager mode. 
def testGPU(self): with self.test_session(use_gpu=True): abc = variable_scope.get_variable( "abc", shape=[1], initializer=init_ops.ones_initializer(), use_resource=True) self.evaluate(variables.global_variables_initializer()) self.assertEqual( self.evaluate( resource_variable_ops.var_is_initialized_op(abc.handle)), True) def testScatterBool(self): with context.eager_mode(): ref = resource_variable_ops.ResourceVariable( [False, True, False], trainable=False) indices = math_ops.range(3) updates = constant_op.constant([True, True, True]) state_ops.scatter_update(ref, indices, updates) self.assertAllEqual(ref.read_value(), [True, True, True]) @test_util.run_in_graph_and_eager_modes def testConstraintArg(self): constraint = lambda x: x v = resource_variable_ops.ResourceVariable( initial_value=lambda: 1, constraint=constraint, name="var0") self.assertEqual(v.constraint, constraint) constraint = 0 with self.assertRaises(ValueError): v = resource_variable_ops.ResourceVariable( initial_value=lambda: 1, constraint=constraint, name="var1") # TODO(alive): how should this work in Eager mode? 
def testInitFn(self): with self.cached_session(): v = resource_variable_ops.ResourceVariable( initial_value=lambda: 1, dtype=dtypes.float32) self.assertEqual(v.handle.op.colocation_groups(), v.initializer.inputs[1].op.colocation_groups()) def testHandleNumpy(self): with context.eager_mode(): with self.assertRaises(ValueError): resource_variable_ops.ResourceVariable( 1.0, name="handle-numpy").handle.numpy() def testCountUpTo(self): with context.eager_mode(): v = resource_variable_ops.ResourceVariable(0, name="upto") self.assertAllEqual(v.count_up_to(1), 0) with self.assertRaises(errors.OutOfRangeError): v.count_up_to(1) def testCountUpToFunction(self): with context.eager_mode(): v = resource_variable_ops.ResourceVariable(0, name="upto") self.assertAllEqual(state_ops.count_up_to(v, 1), 0) with self.assertRaises(errors.OutOfRangeError): state_ops.count_up_to(v, 1) @test_util.run_in_graph_and_eager_modes def testInitFnDtype(self): v = resource_variable_ops.ResourceVariable( initial_value=lambda: 1, dtype=dtypes.float32, name="var0") self.assertEqual(dtypes.float32, v.value().dtype) @test_util.run_in_graph_and_eager_modes def testInitFnNoDtype(self): v = resource_variable_ops.ResourceVariable(initial_value=lambda: 1, name="var2") self.assertEqual(dtypes.int32, v.value().dtype) @test_util.run_in_graph_and_eager_modes def testInitializeAllVariables(self): v = resource_variable_ops.ResourceVariable(1, dtype=dtypes.float32, name="var0") self.evaluate(variables.global_variables_initializer()) self.assertEqual(1.0, self.evaluate(v.value())) @test_util.run_in_graph_and_eager_modes def testOperatorOverload(self): v = resource_variable_ops.ResourceVariable(1.0, name="var0") self.evaluate(variables.global_variables_initializer()) self.assertEqual(2.0, self.evaluate(v + v)) @test_util.run_in_graph_and_eager_modes def testAssignMethod(self): v = resource_variable_ops.ResourceVariable(1.0, name="var0") self.evaluate(variables.global_variables_initializer()) 
self.evaluate(v.assign(2.0)) self.assertEqual(2.0, self.evaluate(v.value())) # Tests for the 'read_value' argument: assign_with_read = v.assign(3.0, read_value=True) self.assertEqual(3.0, self.evaluate(assign_with_read)) assign_without_read = v.assign(4.0, read_value=False) if context.executing_eagerly(): self.assertIsNone(assign_without_read) else: self.assertIsInstance(assign_without_read, ops.Operation) self.evaluate(assign_without_read) self.assertEqual(4.0, self.evaluate(v.value())) @test_util.run_in_graph_and_eager_modes def testLoad(self): v = resource_variable_ops.ResourceVariable(1.0, name="var0") self.evaluate(variables.global_variables_initializer()) v.load(2.0) self.assertEqual(2.0, self.evaluate(v.value())) def testVariableDefInitializedInstances(self): with ops.Graph().as_default(), self.cached_session() as sess: v_def = resource_variable_ops.ResourceVariable( initial_value=constant_op.constant(3.0)).to_proto() with ops.Graph().as_default(), self.cached_session() as sess: # v describes a VariableDef-based variable without an initial value. v = resource_variable_ops.ResourceVariable(variable_def=v_def) self.assertEqual(3.0, sess.run(v.initialized_value())) # initialized_value should not rerun the initializer_op if the variable # has already been initialized elsewhere. sess.run(v.assign(1.0)) self.assertEqual(1.0, v.initialized_value().eval()) v_def.ClearField("initial_value_name") with ops.Graph().as_default(), self.cached_session() as sess: # Restoring a legacy VariableDef proto that does not have # initial_value_name set should still work. v = resource_variable_ops.ResourceVariable(variable_def=v_def) # We should also be able to re-export the variable to a new meta graph. self.assertProtoEquals(v_def, v.to_proto()) # But attempts to use initialized_value will result in errors. 
with self.assertRaises(ValueError): sess.run(v.initialized_value()) def testTrainableInProto(self): with ops.Graph().as_default(): non_trainable_variable = resource_variable_ops.ResourceVariable( trainable=False, initial_value=constant_op.constant(10.0)) self.assertEqual( False, resource_variable_ops.ResourceVariable( variable_def=non_trainable_variable.to_proto()) .trainable) trainable_variable = resource_variable_ops.ResourceVariable( trainable=True, initial_value=constant_op.constant(10.0)) self.assertEqual( True, resource_variable_ops.ResourceVariable( variable_def=trainable_variable.to_proto()) .trainable) @test_util.run_in_graph_and_eager_modes def testSparseRead(self): init_value = np.reshape(np.arange(np.power(4, 3)), (4, 4, 4)) v = resource_variable_ops.ResourceVariable( constant_op.constant(init_value, dtype=dtypes.int32), name="var3") self.evaluate(variables.global_variables_initializer()) value = self.evaluate(v.sparse_read([0, 3, 1, 2])) self.assertAllEqual(init_value[[0, 3, 1, 2], ...], value) def testToFromProto(self): with self.cached_session(): v = resource_variable_ops.ResourceVariable(1.0) variables.global_variables_initializer().run() w = resource_variable_ops.ResourceVariable.from_proto(v.to_proto()) self.assertEquals(2, math_ops.add(w, 1).eval()) self.assertEquals(v._handle, w._handle) self.assertEquals(v._graph_element, w._graph_element) @test_util.run_in_graph_and_eager_modes def testAssignAddMethod(self): v = resource_variable_ops.ResourceVariable(1.0, name="var0") self.evaluate(variables.global_variables_initializer()) self.evaluate(v.assign_add(1.0)) self.assertEqual(2.0, self.evaluate(v.value())) # Tests for the 'read_value' argument: assign_with_read = v.assign_add(1.0, read_value=True) self.assertEqual(3.0, self.evaluate(assign_with_read)) assign_without_read = v.assign_add(1.0, read_value=False) if context.executing_eagerly(): self.assertIsNone(assign_without_read) else: self.assertIsInstance(assign_without_read, ops.Operation) 
self.evaluate(assign_without_read) self.assertEqual(4.0, self.evaluate(v.value())) @test_util.run_in_graph_and_eager_modes def testAssignSubMethod(self): v = resource_variable_ops.ResourceVariable(3.0, name="var0") self.evaluate(variables.global_variables_initializer()) self.evaluate(v.assign_sub(1.0)) self.assertEqual(2.0, self.evaluate(v.value())) # Tests for the 'read_value' argument: assign_with_read = v.assign_sub(1.0, read_value=True) self.assertEqual(1.0, self.evaluate(assign_with_read)) assign_without_read = v.assign_sub(1.0, read_value=False) if context.executing_eagerly(): self.assertIsNone(assign_without_read) else: self.assertIsInstance(assign_without_read, ops.Operation) self.evaluate(assign_without_read) self.assertEqual(0.0, self.evaluate(v.value())) @test_util.run_in_graph_and_eager_modes def testDestroyResource(self): v = resource_variable_ops.ResourceVariable(3.0, name="var0") self.evaluate(variables.global_variables_initializer()) self.assertEqual(3.0, self.evaluate(v.value())) self.evaluate(resource_variable_ops.destroy_resource_op(v.handle)) with self.assertRaises(errors.FailedPreconditionError): self.evaluate(v.value()) # Handle to a resource not actually created. 
handle = resource_variable_ops.var_handle_op(dtype=dtypes.int32, shape=[]) # Should raise no exception self.evaluate(resource_variable_ops.destroy_resource_op( handle, ignore_lookup_error=True)) def testAssignDifferentShapes(self): with self.cached_session() as sess, variable_scope.variable_scope( "foo", use_resource=True): var = variable_scope.get_variable("x", shape=[1, 1], dtype=dtypes.float32) placeholder = array_ops.placeholder(dtypes.float32) assign = var.assign(placeholder) sess.run( [assign], feed_dict={placeholder: np.zeros(shape=[2, 2], dtype=np.float32)}) def testAssignDifferentShapesEager(self): with context.eager_mode(): with variable_scope.variable_scope("foo"): var = variable_scope.get_variable("x", shape=[1, 1], dtype=dtypes.float32) with self.assertRaisesRegexp(ValueError, "Shapes.*and.*are incompatible"): assign = var.assign(np.zeros(shape=[2, 2])) self.evaluate(assign) def testDtypeAfterFromProto(self): v = resource_variable_ops.ResourceVariable(2.0) w = resource_variable_ops.ResourceVariable.from_proto(v.to_proto()) self.assertIsInstance(w.dtype, dtypes.DType) self.assertEqual(v.dtype, w.dtype) # TODO(alive): get caching to work in eager mode. 
def testCachingDevice(self): with ops.device("/job:server/task:1"): v = resource_variable_ops.ResourceVariable( 2.0, caching_device="/job:localhost") self.assertEqual("/job:localhost", v.value().device) with self.assertRaises(ValueError): _ = v.value().op.get_attr("_class") with ops.colocate_with(v.op): w = resource_variable_ops.ResourceVariable( 2.0, caching_device="/job:localhost") self.assertEqual("/job:localhost", w.value().device) with self.assertRaises(ValueError): _ = w.value().op.get_attr("_class") def testSharedName(self): with self.cached_session(): v = resource_variable_ops.ResourceVariable(300.0, name="var4") variables.global_variables_initializer().run() w = resource_variable_ops.var_handle_op( dtype=v.dtype.base_dtype, shape=v.get_shape(), shared_name="var4", # Needed in Eager since we get a unique container name by default. container=ops.get_default_graph()._container) w_read = resource_variable_ops.read_variable_op(w, v.dtype.base_dtype) self.assertEqual(300.0, w_read.eval()) x = resource_variable_ops.var_handle_op( dtype=v.dtype.base_dtype, shape=v.get_shape(), shared_name="var5", container=ops.get_default_graph()._container) with self.assertRaisesOpError("Resource .*/var5/.* does not exist"): resource_variable_ops.read_variable_op(x, v.dtype.base_dtype).eval() def testSharedNameWithNamescope(self): with self.cached_session(): with ops.name_scope("foo"): v = resource_variable_ops.ResourceVariable(300.0, name="var6") self.assertEqual("foo/var6", v._shared_name) # pylint: disable=protected-access self.assertEqual("foo/var6:0", v.name) self.evaluate(variables.global_variables_initializer()) w = resource_variable_ops.var_handle_op( dtype=v.dtype.base_dtype, shape=v.get_shape(), shared_name="foo/var6", # Needed in Eager since we get a unique container name by default. 
container=ops.get_default_graph()._container) w_read = resource_variable_ops.read_variable_op(w, v.dtype.base_dtype) self.assertEqual(300.0, self.evaluate(w_read)) @test_util.run_in_graph_and_eager_modes def testShape(self): v = resource_variable_ops.ResourceVariable( name="var4", initial_value=array_ops.ones(shape=[10, 20, 35])) self.assertEqual("(10, 20, 35)", str(v.shape)) self.assertEqual("(10, 20, 35)", str(v.get_shape())) self.assertEqual("(10, 20, 35)", str(v.value().shape)) self.assertEqual("(3, 20, 35)", str(v.sparse_read([0, 1, 2]).shape)) if not context.executing_eagerly(): self.assertEqual( "<unknown>", str(v.sparse_read(array_ops.placeholder(dtypes.int32)).shape)) def testSetInitialValue(self): with self.cached_session(): # Initialize variable with a value different from the initial value passed # in the constructor. v = resource_variable_ops.ResourceVariable(2.0) v.initializer.run(feed_dict={v.initial_value: 3.0}) self.assertEqual(3.0, v.value().eval()) def testControlFlowInitialization(self): """Expects an error if an initializer is in a control-flow scope.""" def cond(i, _): return i < 10 def body(i, _): zero = array_ops.zeros([], dtype=dtypes.int32) v = resource_variable_ops.ResourceVariable(initial_value=zero) return (i + 1, v.read_value()) with self.assertRaisesRegexp(ValueError, "inside a control-flow"): control_flow_ops.while_loop(cond, body, [0, 0]) def testVariableEager(self): with context.eager_mode(): init = array_ops.ones(shape=[10, 20, 35], dtype=dtypes.int32) constraint = lambda x: x with ops.name_scope("foo"): v = resource_variable_ops.ResourceVariable( name="var7", initial_value=init, caching_device="cpu:0", constraint=constraint) # Test properties self.assertEqual(dtypes.int32, v.dtype) self.assertEqual("foo/var7:0", v.name) self.assertAllEqual([10, 20, 35], v.shape.as_list()) self.assertTrue(isinstance(v.handle, ops.EagerTensor)) self.assertEqual(constraint, v.constraint) self.assertAllEqual(init.numpy(), v.read_value().numpy()) 
self.assertAllEqual(init.numpy(), v.value().numpy()) # Callable init. callable_init = lambda: init * 2 v2 = resource_variable_ops.ResourceVariable( initial_value=callable_init, name="var7") self.assertEqual("var7:0", v2.name) self.assertAllEqual(2 * init.numpy(), v2.read_value().numpy()) # Test assign_add. new_v2_val = v2.assign_add(v.read_value()) self.assertAllEqual(v.read_value().numpy() * 3, new_v2_val.numpy()) # Test assign_sub. new_v2_val = v2.assign_sub(v.read_value()) self.assertAllEqual(v.read_value().numpy() * 2, new_v2_val.numpy()) # Test assign. v2.assign(v.read_value()) self.assertAllEqual(v.read_value().numpy(), v2.read_value().numpy()) # Test load v2.load(2 * v.read_value()) self.assertAllEqual(2 * v.read_value().numpy(), v2.read_value().numpy()) # Test convert_to_tensor t = ops.convert_to_tensor(v) self.assertAllEqual(t.numpy(), v.read_value().numpy()) # Test operations self.assertAllEqual((v * 2).numpy(), (v + v).numpy()) def testContainerEager(self): with context.eager_mode(): v1 = resource_variable_ops.ResourceVariable(initial_value=lambda: 1, name="same") with ops.container("different"): v2 = resource_variable_ops.ResourceVariable(initial_value=lambda: 0, name="same") v2.assign(2) self.assertEqual(1, v1.read_value().numpy()) self.assertEqual(2, v2.read_value().numpy()) def testDestruction(self): with context.eager_mode(): var = resource_variable_ops.ResourceVariable(initial_value=1.0, name="var8") var_handle = var._handle del var with self.assertRaisesRegexp(errors.NotFoundError, r"Resource .* does not exist."): resource_variable_ops.destroy_resource_op(var_handle, ignore_lookup_error=False) def testScatterUpdate(self): with context.eager_mode(): v = resource_variable_ops.ResourceVariable([1.0, 2.0], name="update") state_ops.scatter_update(v, [1], [3.0]) self.assertAllEqual([1.0, 3.0], v.numpy()) def testScatterAddStateOps(self): with context.eager_mode(): v = resource_variable_ops.ResourceVariable([1.0, 2.0], name="add") 
state_ops.scatter_add(v, [1], [3]) self.assertAllEqual([1.0, 5.0], v.numpy()) def testScatterSubStateOps(self): with context.eager_mode(): v = resource_variable_ops.ResourceVariable([1.0, 2.0], name="sub") state_ops.scatter_sub(v, [1], [3]) self.assertAllEqual([1.0, -1.0], v.numpy()) def testScatterNdAddStateOps(self): with context.eager_mode(): v = resource_variable_ops.ResourceVariable( [1, 1, 1, 1, 1, 1, 1, 1], dtype=dtypes.float32, name="add") indices = constant_op.constant([[4], [3], [1], [7]], dtype=dtypes.int32) updates = constant_op.constant([9, 10, 11, 12], dtype=dtypes.float32) expected = np.array([1, 12, 1, 11, 10, 1, 1, 13]) state_ops.scatter_nd_add(v, indices, updates) self.assertAllClose(expected, v.numpy()) def testScatterUpdateCast(self): with context.eager_mode(): v = resource_variable_ops.ResourceVariable([1.0, 2.0], name="update") state_ops.scatter_update(v, [1], [3]) self.assertAllEqual([1.0, 3.0], v.numpy()) @test_util.run_in_graph_and_eager_modes def testScatterUpdateInvalidArgs(self): v = resource_variable_ops.ResourceVariable([0, 1, 2, 3], name="update") # The exact error and message differ between graph construction (where the # error is realized during shape inference at graph construction time) and # eager execution (where the error is realized during kernel execution). 
with self.assertRaisesRegexp(Exception, r"shape.*2.*3"): state_ops.scatter_update(v, [0, 1], [0, 1, 2]) class _MixedPrecisionVariableTest(test_util.TensorFlowTestCase): @test_util.run_in_graph_and_eager_modes() def test_dense_var_to_tensor_read_dtype_same_as_var_dtype(self): # read_dtype is same as dtype v = resource_variable_ops.ResourceVariable(1.0, dtype=dtypes.float32) v = resource_variable_ops._MixedPrecisionVariable(v, dtypes.float32) if not context.executing_eagerly(): v.initializer.run() # dtype is not read_dtype, return NotImplemented self.assertEqual( NotImplemented, v._dense_var_to_tensor(dtype=dtypes.float16)) self.assertEqual(NotImplemented, v._dense_var_to_tensor(dtype=dtypes.float16, as_ref=True)) # as_ref is False t = v._dense_var_to_tensor(as_ref=False) self.assertTrue(isinstance(t, ops.Tensor)) self.assertEqual(t.dtype, dtypes.float32) self.assertEqual(self.evaluate(t), 1.0) t = v._dense_var_to_tensor(dtype=dtypes.float32, as_ref=False) self.assertTrue(isinstance(t, ops.Tensor)) self.assertEqual(t.dtype, dtypes.float32) self.assertEqual(self.evaluate(t), 1.0) # as_ref is True self.assertEqual(NotImplemented, v._dense_var_to_tensor(as_ref=True)) self.assertEqual(NotImplemented, v._dense_var_to_tensor(dtype=dtypes.float32, as_ref=True)) @test_util.run_in_graph_and_eager_modes() def test_dense_var_to_tensor_read_dtype_different_from_var_dtype(self): # read_dtype is different from dtype v = resource_variable_ops.ResourceVariable(1.0, dtype=dtypes.float32) v = resource_variable_ops._MixedPrecisionVariable(v, dtypes.float16) if not context.executing_eagerly(): v.initializer.run() # as_ref is False t = v._dense_var_to_tensor(as_ref=False) self.assertTrue(isinstance(t, ops.Tensor)) self.assertEqual(t.dtype, dtypes.float16) self.assertEqual(self.evaluate(t), 1.0) t = v._dense_var_to_tensor(dtype=dtypes.float16, as_ref=False) self.assertTrue(isinstance(t, ops.Tensor)) self.assertEqual(t.dtype, dtypes.float16) self.assertEqual(self.evaluate(t), 1.0) # as_ref 
is True self.assertEqual(NotImplemented, v._dense_var_to_tensor(as_ref=True)) self.assertEqual(NotImplemented, v._dense_var_to_tensor(dtype=dtypes.float16, as_ref=True)) if __name__ == "__main__": test.main()
apache-2.0
idncom/odoo
addons/mass_mailing/models/mass_mailing_stats.py
222
5066
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2013-today OpenERP SA (<http://www.openerp.com>) # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/> # ############################################################################## from openerp.osv import osv, fields class MailMailStats(osv.Model): """ MailMailStats models the statistics collected about emails. Those statistics are stored in a separated model and table to avoid bloating the mail_mail table with statistics values. This also allows to delete emails send with mass mailing without loosing the statistics about them. """ _name = 'mail.mail.statistics' _description = 'Email Statistics' _rec_name = 'message_id' _order = 'message_id' _columns = { 'mail_mail_id': fields.many2one('mail.mail', 'Mail', ondelete='set null', select=True), 'mail_mail_id_int': fields.integer( 'Mail ID (tech)', help='ID of the related mail_mail. This field is an integer field because' 'the related mail_mail can be deleted separately from its statistics.' 'However the ID is needed for several action and controllers.' 
), 'message_id': fields.char('Message-ID'), 'model': fields.char('Document model'), 'res_id': fields.integer('Document ID'), # campaign / wave data 'mass_mailing_id': fields.many2one( 'mail.mass_mailing', 'Mass Mailing', ondelete='set null', ), 'mass_mailing_campaign_id': fields.related( 'mass_mailing_id', 'mass_mailing_campaign_id', type='many2one', ondelete='set null', relation='mail.mass_mailing.campaign', string='Mass Mailing Campaign', store=True, readonly=True, ), # Bounce and tracking 'scheduled': fields.datetime('Scheduled', help='Date when the email has been created'), 'sent': fields.datetime('Sent', help='Date when the email has been sent'), 'exception': fields.datetime('Exception', help='Date of technical error leading to the email not being sent'), 'opened': fields.datetime('Opened', help='Date when the email has been opened the first time'), 'replied': fields.datetime('Replied', help='Date when this email has been replied for the first time.'), 'bounced': fields.datetime('Bounced', help='Date when this email has bounced.'), } _defaults = { 'scheduled': fields.datetime.now, } def create(self, cr, uid, values, context=None): if 'mail_mail_id' in values: values['mail_mail_id_int'] = values['mail_mail_id'] res = super(MailMailStats, self).create(cr, uid, values, context=context) return res def _get_ids(self, cr, uid, ids=None, mail_mail_ids=None, mail_message_ids=None, domain=None, context=None): if not ids and mail_mail_ids: base_domain = [('mail_mail_id_int', 'in', mail_mail_ids)] elif not ids and mail_message_ids: base_domain = [('message_id', 'in', mail_message_ids)] else: base_domain = [('id', 'in', ids or [])] if domain: base_domain = ['&'] + domain + base_domain return self.search(cr, uid, base_domain, context=context) def set_opened(self, cr, uid, ids=None, mail_mail_ids=None, mail_message_ids=None, context=None): stat_ids = self._get_ids(cr, uid, ids, mail_mail_ids, mail_message_ids, [('opened', '=', False)], context) self.write(cr, uid, stat_ids, 
{'opened': fields.datetime.now()}, context=context) return stat_ids def set_replied(self, cr, uid, ids=None, mail_mail_ids=None, mail_message_ids=None, context=None): stat_ids = self._get_ids(cr, uid, ids, mail_mail_ids, mail_message_ids, [('replied', '=', False)], context) self.write(cr, uid, stat_ids, {'replied': fields.datetime.now()}, context=context) return stat_ids def set_bounced(self, cr, uid, ids=None, mail_mail_ids=None, mail_message_ids=None, context=None): stat_ids = self._get_ids(cr, uid, ids, mail_mail_ids, mail_message_ids, [('bounced', '=', False)], context) self.write(cr, uid, stat_ids, {'bounced': fields.datetime.now()}, context=context) return stat_ids
agpl-3.0
kbogert/falconunity
external_libs/libnifalcon/compily_buildd/python/get_version.py
2
1570
#!/usr/bin/env python import subprocess import re import os import sys from optparse import OptionParser def proc(cmd,cwd): stdout = subprocess.Popen(cmd, cwd=cwd, shell=True, stdout=subprocess.PIPE).communicate()[0] if stdout: stdout.strip() return stdout def get_version(cwd): if not os.path.isdir(cwd): raise ValueError("Specified directory '%s' is not a directory" % cwd) head = proc('git rev-parse --verify HEAD', cwd) if head: tag = proc('git name-rev --tags HEAD', cwd) if re.search('^HEAD\s+(.*~[0-9]*|undefined)$', tag): ver = 'git-'+head[:8] else: ver = tag.split('/')[1].split('^')[0] if proc('git diff-index HEAD', cwd): ver += '-dirty' else: ver = 'UNDEF' #version was somehow getting a newline in it, return stripped to remove? return ver.strip() version_file = """#include "version.h" const char COMPILE_TIME[] = __DATE__ " " __TIME__; const char VERSION[] = "%s"; """ if __name__ == '__main__': parser = OptionParser() parser.add_option('-f', '--file', dest='file', action='store', default='', help='File to output version information to') parser.add_option('-d', '--dir', dest='dir', action='store', default='', help='Git Repo Directory to get version from') (options, args) = parser.parse_args() if not options.file or not options.dir: parser.print_help() sys.exit(1) f = open(options.file, "w") f.write(version_file % (get_version(options.dir))) f.close()
lgpl-3.0
tomwhartung/tomhartung.com
Site/Site/urls.py
1
1215
"""Site URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/2.1/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: path('', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.urls import include, path 2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) """ from django.contrib import admin from django.conf.urls import * """ When upgrading to django 2.0+, we can use the path method: https://docs.djangoproject.com/en/2.0/ref/urls/#path from django.urls import include, path The routes we are using are quite simple, and we already have a version of this file that uses the path method: ./urls-uses_path-save_for_2.0.py More: https://stackoverflow.com/questions/47947673/is-it-better-to-use-path-or-url-in-urls-py-for-django-2-0 """ urlpatterns = [ url(r'^admin/', admin.site.urls), url(r'', include('content.urls')), ]
gpl-2.0
KapecK/or-tools
examples/data/nonogram_regular/nonogram_car.py
74
1201
# Copyright 2010 Hakan Kjellerstrand hakank@bonetmail.com # # Licensed under the Apache License, Version 2.0 (the 'License'); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an 'AS IS' BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # Problem from ECLiPSe # http:#eclipse.crosscoreop.com/eclipse/examples/nono.ecl.txt # Problem n3 ( http:#www.pro.or.jp/~fuji/java/puzzle/nonogram/index-eng.html ) # 'Car' # rows = 10; row_rule_len = 4; row_rules = [ [0,0,0,4], [0,1,1,6], [0,1,1,6], [0,1,1,6], [0,0,4,9], [0,0,1,1], [0,0,1,1], [0,2,7,2], [1,1,1,1], [0,0,2,2] ] cols = 15; col_rule_len = 2; col_rules = [ [0,4], [1,2], [1,1], [5,1], [1,2], [1,1], [5,1], [1,1], [4,1], [4,1], [4,2], [4,1], [4,1], [4,2], [0,4] ]
apache-2.0
stiandre/sdhash-integration
external/tools/build/v2/util/order.py
49
4201
# Copyright (C) 2003 Vladimir Prus # Use, modification, and distribution is subject to the Boost Software # License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy # at http://www.boost.org/LICENSE_1_0.txt) class Order: """Allows ordering arbitrary objects with regard to arbitrary binary relation. The primary use case is the gcc toolset, which is sensitive to library order: if library 'a' uses symbols from library 'b', then 'a' must be present before 'b' on the linker's command line. This requirement can be lifted for gcc with GNU ld, but for gcc with Solaris LD (and for Solaris toolset as well), the order always matters. So, we need to store order requirements and then order libraries according to them. It it not possible to use dependency graph as order requirements. What we need is "use symbols" relationship while dependency graph provides "needs to be updated" relationship. For example:: lib a : a.cpp b; lib b ; For static linking, the 'a' library need not depend on 'b'. However, it still should come before 'b' on the command line. """ def __init__ (self): self.constraints_ = [] def add_pair (self, first, second): """ Adds the constraint that 'first' should precede 'second'. """ self.constraints_.append ((first, second)) def order (self, objects): """ Given a list of objects, reorder them so that the constains specified by 'add_pair' are satisfied. The algorithm was adopted from an awk script by Nikita Youshchenko (yoush at cs dot msu dot su) """ # The algorithm used is the same is standard transitive closure, # except that we're not keeping in-degree for all vertices, but # rather removing edges. result = [] if not objects: return result constraints = self.__eliminate_unused_constraits (objects) # Find some library that nobody depends upon and add it to # the 'result' array. 
obj = None while objects: new_objects = [] while objects: obj = objects [0] if self.__has_no_dependents (obj, constraints): # Emulate break ; new_objects.extend (objects [1:]) objects = [] else: new_objects.append (obj) obj = None objects = objects [1:] if not obj: raise BaseException ("Circular order dependencies") # No problem with placing first. result.append (obj) # Remove all containts where 'obj' comes first, # since they are already satisfied. constraints = self.__remove_satisfied (constraints, obj) # Add the remaining objects for further processing # on the next iteration objects = new_objects return result def __eliminate_unused_constraits (self, objects): """ Eliminate constraints which mention objects not in 'objects'. In graph-theory terms, this is finding subgraph induced by ordered vertices. """ result = [] for c in self.constraints_: if c [0] in objects and c [1] in objects: result.append (c) return result def __has_no_dependents (self, obj, constraints): """ Returns true if there's no constraint in 'constraints' where 'obj' comes second. """ failed = False while constraints and not failed: c = constraints [0] if c [1] == obj: failed = True constraints = constraints [1:] return not failed def __remove_satisfied (self, constraints, obj): result = [] for c in constraints: if c [0] != obj: result.append (c) return result
apache-2.0
anatol/namcap
Namcap/rules/perllocal.py
4
1144
# # namcap rules - perllocal # Copyright (C) 2003-2009 Jason Chu <jason@archlinux.org> # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # from Namcap.ruleclass import * class package(TarballRule): name = "perllocal" description = "Verifies the absence of perllocal.pod." def analyze(self, pkginfo, tar): j = 'perllocal.pod' for i in tar.getnames(): if i[-len(j):] == j: self.errors.append(("perllocal-pod-present %s", i)) # vim: set ts=4 sw=4 noet:
gpl-2.0
nrwahl2/ansible
lib/ansible/plugins/callback/syslog_json.py
44
2802
# Make coding more python3-ish from __future__ import (absolute_import, division, print_function) __metaclass__ = type import os import json import logging import logging.handlers import socket from ansible.plugins.callback import CallbackBase class CallbackModule(CallbackBase): """ logs ansible-playbook and ansible runs to a syslog server in json format make sure you have in ansible.cfg: callback_plugins = <path_to_callback_plugins_folder> and put the plugin in <path_to_callback_plugins_folder> This plugin makes use of the following environment variables: SYSLOG_SERVER (optional): defaults to localhost SYSLOG_PORT (optional): defaults to 514 SYSLOG_FACILITY (optional): defaults to user """ CALLBACK_VERSION = 2.0 CALLBACK_TYPE = 'aggregate' CALLBACK_NAME = 'syslog_json' CALLBACK_NEEDS_WHITELIST = True def __init__(self): super(CallbackModule, self).__init__() self.logger = logging.getLogger('ansible logger') self.logger.setLevel(logging.DEBUG) self.handler = logging.handlers.SysLogHandler( address=(os.getenv('SYSLOG_SERVER', 'localhost'), int(os.getenv('SYSLOG_PORT', 514))), facility=os.getenv('SYSLOG_FACILITY', logging.handlers.SysLogHandler.LOG_USER) ) self.logger.addHandler(self.handler) self.hostname = socket.gethostname() def runner_on_failed(self, host, res, ignore_errors=False): self.logger.error('%s ansible-command: task execution FAILED; host: %s; message: %s' % (self.hostname, host, self._dump_results(res))) def runner_on_ok(self, host, res): self.logger.info('%s ansible-command: task execution OK; host: %s; message: %s' % (self.hostname, host, self._dump_results(res))) def runner_on_skipped(self, host, item=None): self.logger.info('%s ansible-command: task execution SKIPPED; host: %s; message: %s' % (self.hostname, host, 'skipped')) def runner_on_unreachable(self, host, res): self.logger.error('%s ansible-command: task execution UNREACHABLE; host: %s; message: %s' % (self.hostname, host, self._dump_results(res))) def runner_on_async_failed(self, host, 
res, jid): self.logger.error('%s ansible-command: task execution FAILED; host: %s; message: %s' % (self.hostname, host, self._dump_results(res))) def playbook_on_import_for_host(self, host, imported_file): self.logger.info('%s ansible-command: playbook IMPORTED; host: %s; message: imported file %s' % (self.hostname, host, imported_file)) def playbook_on_not_import_for_host(self, host, missing_file): self.logger.info('%s ansible-command: playbook NOT IMPORTED; host: %s; message: missing file %s' % (self.hostname, host, missing_file))
gpl-3.0
TsinghuaX/edx-platform
common/lib/xmodule/xmodule/modulestore/tests/test_publish.py
2
8729
""" Test the publish code (primary causing orphans) """ import uuid import mock import unittest import datetime import random from xmodule.modulestore.inheritance import InheritanceMixin from xmodule.modulestore.mongo import MongoModuleStore, DraftMongoModuleStore from xmodule.modulestore import Location from xmodule.fields import Date from xmodule.modulestore.exceptions import ItemNotFoundError from xmodule.modulestore.mongo.draft import DIRECT_ONLY_CATEGORIES class TestPublish(unittest.TestCase): """ Test the publish code (primary causing orphans) """ # Snippet of what would be in the django settings envs file db_config = { 'host': 'localhost', 'db': 'test_xmodule', } modulestore_options = { 'default_class': 'xmodule.raw_module.RawDescriptor', 'fs_root': '', 'render_template': mock.Mock(return_value=""), 'xblock_mixins': (InheritanceMixin,) } def setUp(self): self.db_config['collection'] = 'modulestore{0}'.format(uuid.uuid4().hex) self.old_mongo = MongoModuleStore(self.db_config, **self.modulestore_options) self.draft_mongo = DraftMongoModuleStore(self.db_config, **self.modulestore_options) self.addCleanup(self.tear_down_mongo) self.course_location = None def tear_down_mongo(self): # old_mongo doesn't give a db attr, but all of the dbs are the same and draft and pub use same collection dbref = self.old_mongo.collection.database dbref.drop_collection(self.old_mongo.collection) dbref.connection.close() def _create_item(self, category, name, data, metadata, parent_category, parent_name, runtime): """ Create the item in either draft or direct based on category and attach to its parent. 
""" location = self.course_location.replace(category=category, name=name) if category in DIRECT_ONLY_CATEGORIES: mongo = self.old_mongo else: mongo = self.draft_mongo mongo.create_and_save_xmodule(location, data, metadata, runtime) if isinstance(data, basestring): fields = {'data': data} else: fields = data.copy() fields.update(metadata) if parent_name: # add child to parent in mongo parent_location = self.course_location.replace(category=parent_category, name=parent_name) parent = self.draft_mongo.get_item(parent_location) parent.children.append(location.url()) if parent_category in DIRECT_ONLY_CATEGORIES: mongo = self.old_mongo else: mongo = self.draft_mongo mongo.update_children(parent_location, parent.children) def _create_course(self): """ Create the course, publish all verticals * some detached items """ date_proxy = Date() metadata = { 'start': date_proxy.to_json(datetime.datetime(2000, 3, 13, 4)), 'display_name': 'Migration test course', } data = { 'wiki_slug': 'test_course_slug' } fields = metadata.copy() fields.update(data) self.course_location = Location('i4x', 'test_org', 'test_course', 'course', 'runid') self.old_mongo.create_and_save_xmodule(self.course_location, data, metadata) runtime = self.draft_mongo.get_item(self.course_location).runtime self._create_item('chapter', 'Chapter1', {}, {'display_name': 'Chapter 1'}, 'course', 'runid', runtime) self._create_item('chapter', 'Chapter2', {}, {'display_name': 'Chapter 2'}, 'course', 'runid', runtime) self._create_item('vertical', 'Vert1', {}, {'display_name': 'Vertical 1'}, 'chapter', 'Chapter1', runtime) self._create_item('vertical', 'Vert2', {}, {'display_name': 'Vertical 2'}, 'chapter', 'Chapter1', runtime) self._create_item('html', 'Html1', "<p>Goodbye</p>", {'display_name': 'Parented Html'}, 'vertical', 'Vert1', runtime) self._create_item( 'discussion', 'Discussion1', "discussion discussion_category=\"Lecture 1\" discussion_id=\"a08bfd89b2aa40fa81f2c650a9332846\" discussion_target=\"Lecture 
1\"/>\n", { "discussion_category": "Lecture 1", "discussion_target": "Lecture 1", "display_name": "Lecture 1 Discussion", "discussion_id": "a08bfd89b2aa40fa81f2c650a9332846" }, 'vertical', 'Vert1', runtime ) self._create_item('html', 'Html2', "<p>Hellow</p>", {'display_name': 'Hollow Html'}, 'vertical', 'Vert1', runtime) self._create_item( 'discussion', 'Discussion2', "discussion discussion_category=\"Lecture 2\" discussion_id=\"b08bfd89b2aa40fa81f2c650a9332846\" discussion_target=\"Lecture 2\"/>\n", { "discussion_category": "Lecture 2", "discussion_target": "Lecture 2", "display_name": "Lecture 2 Discussion", "discussion_id": "b08bfd89b2aa40fa81f2c650a9332846" }, 'vertical', 'Vert2', runtime ) self._create_item('static_tab', 'staticuno', "<p>tab</p>", {'display_name': 'Tab uno'}, None, None, runtime) self._create_item('about', 'overview', "<p>overview</p>", {}, None, None, runtime) self._create_item('course_info', 'updates', "<ol><li><h2>Sep 22</h2><p>test</p></li></ol>", {}, None, None, runtime) def _xmodule_recurse(self, item, action): """ Applies action depth-first down tree and to item last. A copy of cms.djangoapps.contentstore.views.helpers._xmodule_recurse to reproduce its use and behavior outside of django. """ for child in item.get_children(): self._xmodule_recurse(child, action) action(item) def test_publish_draft_delete(self): """ To reproduce a bug (STUD-811) publish a vertical, convert to draft, delete a child, move a child, publish. See if deleted and moved children still is connected or exists in db (bug was disconnected but existed) """ self._create_course() userid = random.getrandbits(32) location = self.course_location.replace(category='vertical', name='Vert1') item = self.draft_mongo.get_item(location, 2) self._xmodule_recurse( item, lambda i: self.draft_mongo.publish(i.location, userid) ) # verify status item = self.draft_mongo.get_item(location, 0) self.assertFalse(getattr(item, 'is_draft', False), "Item was published. 
Draft should not exist") # however, children are still draft, but I'm not sure that's by design # convert back to draft self.draft_mongo.convert_to_draft(location) # both draft and published should exist draft_vert = self.draft_mongo.get_item(location, 0) self.assertTrue(getattr(draft_vert, 'is_draft', False), "Item was converted to draft but doesn't say so") item = self.old_mongo.get_item(location, 0) self.assertFalse(getattr(item, 'is_draft', False), "Published item doesn't say so") # delete the discussion (which oddly is not in draft mode) location = self.course_location.replace(category='discussion', name='Discussion1') self.draft_mongo.delete_item(location) # remove pointer from draft vertical (verify presence first to ensure process is valid) self.assertIn(location.url(), draft_vert.children) draft_vert.children.remove(location.url()) # move the other child other_child_loc = self.course_location.replace(category='html', name='Html2') draft_vert.children.remove(other_child_loc.url()) other_vert = self.draft_mongo.get_item(self.course_location.replace(category='vertical', name='Vert2'), 0) other_vert.children.append(other_child_loc.url()) self.draft_mongo.update_children(draft_vert.location, draft_vert.children) self.draft_mongo.update_children(other_vert.location, other_vert.children) # publish self._xmodule_recurse( draft_vert, lambda i: self.draft_mongo.publish(i.location, userid) ) item = self.old_mongo.get_item(draft_vert.location, 0) self.assertNotIn(location.url(), item.children) with self.assertRaises(ItemNotFoundError): self.draft_mongo.get_item(location) self.assertNotIn(other_child_loc.url(), item.children) self.assertTrue(self.draft_mongo.has_item(None, other_child_loc), "Oops, lost moved item")
agpl-3.0
kennho/oppia
core/platform/search/gae_search_services.py
4
12354
# coding: utf-8 # # Copyright 2014 The Oppia Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the 'License'); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an 'AS-IS' BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Provides search services.""" import datetime import logging import numbers import feconf from google.appengine.api import search as gae_search DEFAULT_NUM_RETRIES = 3 class SearchFailureError(Exception): """This error is raised when a search operation fails. The original_exception will point to what went wrong inside the gae sdk. Other platform implementations should have a similar way of revealing platform specific errors.""" def __init__(self, original_exception=None): super(SearchFailureError, self).__init__( '%s: %s' % (type(original_exception), original_exception.message)) self.original_exception = original_exception def add_documents_to_index(documents, index, retries=DEFAULT_NUM_RETRIES): """Adds a document to an index. Args: - documents: a list of documents. Each document should be a dictionary. Every key in the document is a field name, and the corresponding value will be the field's value. If there is a key named 'id', its value will be used as the document's id. If there is a key named 'rank', its value will be used as the document's rank. By default, search results are returned ordered by descending rank. If there is a key named 'language_code', its value will be used as the document's language. Otherwise, feconf.DEFAULT_LANGUAGE_CODE is used. - index: the name of the index to insert the document into, a string. 
- retries: the number of times to retry inserting the documents. Returns: returns a list of document ids of the documents that were added. Raises: - SearchFailureError: raised when the indexing fails. If it fails for any document, none will be inserted. - ValueError: raised when invalid values are given. """ if not isinstance(index, basestring): raise ValueError( 'Index must be the unicode/str name of an index, got %s' % type(index)) index = gae_search.Index(index) gae_docs = [_dict_to_search_document(d) for d in documents] try: logging.debug('adding the following docs to index %s: %s', index.name, documents) results = index.put(gae_docs, deadline=5) except gae_search.PutError as e: logging.exception('PutError raised.') if retries > 1: for res in e.results: if res.code == gae_search.OperationResult.TRANSIENT_ERROR: new_retries = retries - 1 logging.debug('%d tries left, retrying.' % (new_retries)) return add_documents_to_index( documents=documents, index=index.name, retries=new_retries) # At this pint, either we don't have any tries left, or none of the # results has a transient error code. 
raise SearchFailureError(e) return [r.id for r in results] def _dict_to_search_document(d): if not isinstance(d, dict): raise ValueError('document should be a dictionary, got %s' % type(d)) doc_id = d.get('id') rank = d.get('rank') language_code = d.get('language_code') fields = [] for key, value in d.iteritems(): if key not in ['id', 'rank']: fields += _make_fields(key, value) doc = gae_search.Document( doc_id=doc_id, fields=fields, rank=rank, language=language_code) return doc def _make_fields(key, value): if isinstance(value, list): _validate_list(key, value) return [_make_fields(key, v)[0] for v in value] if isinstance(value, basestring): return [gae_search.TextField(name=key, value=value)] if isinstance(value, numbers.Number): return [gae_search.NumberField(name=key, value=value)] if isinstance(value, datetime.datetime) or isinstance( value, datetime.date): return [gae_search.DateField(name=key, value=value)] raise ValueError( 'Value for document field %s should be a (unicode) string, numeric ' 'type, datetime.date, datetime.datetime or list of such types, got %s' % (key, type(value))) def _validate_list(key, value): """Validates a list to be included as document fields. The key is just passed in to make better error messages.""" for ind, element in enumerate(value): if not (isinstance(element, basestring) or isinstance(element, datetime.date) or isinstance(element, datetime.datetime) or isinstance(element, numbers.Number)): raise ValueError( 'All values of a multi-valued field must be numbers, strings, ' 'date or datetime instances, The %dth value for field %s has' ' type %s.' % (ind, key, type(element))) def delete_documents_from_index( doc_ids, index, retries=DEFAULT_NUM_RETRIES): """Deletes documents from an index. Args: - doc_ids: a list of document ids of documents to be deleted from the index. - index: the name of the index to delete the document from, a string. - retries: the number of times to retry deleting the documents. 
Raises: - SearchFailureError: raised when the deletion fails. If it fails for any document, none will be deleted. """ if not isinstance(index, basestring): raise ValueError( 'Index must be the unicode/str name of an index, got %s' % type(index)) for ind, doc_id in enumerate(doc_ids): if not isinstance(doc_id, basestring): raise ValueError('all doc_ids must be string, got %s at index %d' % (type(doc_id), ind)) index = gae_search.Index(index) try: logging.debug('Attempting to delete documents from index %s, ids: %s' % (index.name, ', '.join(doc_ids))) index.delete(doc_ids, deadline=5) except gae_search.DeleteError as e: logging.exception('Something went wrong during deletion.') if retries > 1: for res in e.results: if res.code == gae_search.OperationResult.TRANSIENT_ERROR: new_retries = retries - 1 logging.debug('%d tries left, retrying.' % (new_retries)) delete_documents_from_index( doc_ids=doc_ids, index=index.name, retries=new_retries) return raise SearchFailureError(e) def clear_index(index_name): """Clears an index completely. WARNING: This does all the clearing in-request, and may therefore fail if there are too many entries in the index. Args: - index: the name of the index to delete the document from, a string. """ index = gae_search.Index(index_name) while True: doc_ids = [ document.doc_id for document in index.get_range(ids_only=True)] if not doc_ids: break index.delete(doc_ids) def search(query_string, index, cursor=None, limit=feconf.GALLERY_PAGE_SIZE, sort='', ids_only=False, retries=DEFAULT_NUM_RETRIES): """Searches for documents in an index. Args: - query_string: the search query. The syntax used is described here: https://developers.google.com/appengine/docs/python/search/query_strings - index: the name of the index to search. - cursor: a cursor string, as returned by this function. Pass this in to get the next 'page' of results. Leave as None to start at the beginning. - sort: a string indicating how to sort results. 
This should be a string of space separated values. Each value should start with a '+' or a '-' character indicating whether to sort in ascending or descending order respectively. This character should be followed by a field name to sort on. - limit: the maximum number of documents to return. - ids_only: whether to only return document ids. - retries: the number of times to retry searching the index. Returns: returns a tuple with two elements: - a list of dictionaries representing search documents. If ids_only is True, this will be a list of strings, doc_ids. - a cursor that you can pass back in to get the next page of results. This wil be a web safe string that you can use in urls. It will be None if there is no next page. """ sort_options = None if cursor is None: gae_cursor = gae_search.Cursor() else: gae_cursor = gae_search.Cursor(web_safe_string=cursor) if sort: expr = _string_to_sort_expressions(sort) sort_options = gae_search.SortOptions(expr) options = gae_search.QueryOptions( limit=limit, cursor=gae_cursor, ids_only=ids_only, sort_options=sort_options) try: query = gae_search.Query(query_string, options=options) except gae_search.QueryError as e: # This can happen for query strings like "NOT" or a string that # contains backslashes. logging.exception('Could not parse query string %s' % query_string) return [], None index = gae_search.Index(index) try: logging.debug('attempting a search with query %s' % query) results = index.search(query) except gae_search.TransientError as e: logging.exception('something went wrong while searching.') if retries > 1: logging.debug('%d attempts left, retrying...' 
% (retries - 1)) return search( query_string, index.name, cursor=cursor, limit=limit, sort=sort, ids_only=ids_only, retries=retries - 1) else: raise SearchFailureError(e) result_cursor_str = None if results.cursor: result_cursor_str = results.cursor.web_safe_string if ids_only: result_docs = [doc.doc_id for doc in results.results] else: result_docs = [ _search_document_to_dict(doc) for doc in results.results] return result_docs, result_cursor_str def _string_to_sort_expressions(input_string): sort_expressions = [] s_tokens = input_string.split() for expression in s_tokens: if expression.startswith('+'): direction = gae_search.SortExpression.ASCENDING elif expression.startswith('-'): direction = gae_search.SortExpression.DESCENDING else: raise ValueError( 'Fields in the sort expression must start with "+"' ' or "-" to indicate sort direction.' ' The field %s has no such indicator' ' in expression "%s".' % (expression, input_string)) sort_expressions.append( gae_search.SortExpression(expression[1:], direction)) return sort_expressions def get_document_from_index(doc_id, index): """Returns a document with a give doc_id(s) from the index. args: - doc_id: a doc_id as a string - index: the name of an index, a string. returns - the requested document as a dict """ index = gae_search.Index(index) return _search_document_to_dict(index.get(doc_id)) def _search_document_to_dict(doc): d = {'id': doc.doc_id, 'language_code': doc.language, 'rank': doc.rank} for field in doc.fields: d[field.name] = field.value return d
apache-2.0
zamattiac/SHARE
share/normalize/v1_push.py
1
4809
import re from share.normalize import ctx, tools from share.normalize.parsers import Parser from share.normalize.normalizer import Normalizer THE_REGEX = re.compile(r'(^the\s|\sthe\s)') class WorkIdentifier(Parser): uri = ctx class AgentIdentifier(Parser): uri = ctx class IsAffiliatedWith(Parser): # Moved below Agent definition to resolve cyclical references # related = tools.Delegate(OrgAgent) pass class Agent(Parser): schema = tools.GuessAgentType(ctx.name) name = ctx.name related_agents = tools.Map( tools.Delegate(IsAffiliatedWith), tools.Try(ctx.affiliation) ) identifiers = tools.Map( tools.Delegate(AgentIdentifier), tools.Map( tools.IRI(), tools.Try(ctx.sameAs), tools.Try(ctx.email) ) ) class Extra: givenName = tools.Try(ctx.givenName) familyName = tools.Try(ctx.familyName) additonalName = tools.Try(ctx.additionalName) name = tools.Try(ctx.name) class OrgAgent(Agent): schema = tools.GuessAgentType(ctx.name, default='organization') IsAffiliatedWith.related = tools.Delegate(OrgAgent) class Creator(Parser): agent = tools.Delegate(Agent, ctx) cited_as = ctx.name order_cited = ctx('index') class Publisher(Parser): agent = tools.Delegate(OrgAgent, ctx) cited_as = ctx.name class FundingAgent(Parser): schema = tools.GuessAgentType(ctx.sponsorName, default='organization') name = ctx.sponsorName identifiers = tools.Map( tools.Delegate(AgentIdentifier), tools.IRI(tools.Try(ctx.sponsorIdentifier)) ) class Award(Parser): name = ctx.awardName uri = tools.IRI(tools.Try(ctx.awardIdentifier)) class ThroughAwards(Parser): award = tools.Delegate(Award, ctx) class Funder(Parser): agent = tools.Delegate(FundingAgent, ctx.sponsor) cited_as = ctx.sponsor.sponsorName awards = tools.Map( tools.Delegate(ThroughAwards), tools.Try(ctx.award) ) class Tag(Parser): name = ctx class ThroughTags(Parser): tag = tools.Delegate(Tag, ctx) class Subject(Parser): name = ctx class ThroughSubjects(Parser): subject = tools.Delegate(Subject, ctx) class CreativeWork(Parser): title = ctx.title 
description = tools.Try(ctx.description) is_deleted = tools.RunPython('_is_deleted', tools.Try(ctx.otherProperties)) date_updated = tools.ParseDate(tools.Try(ctx.providerUpdatedDateTime)) rights = tools.Join(tools.Try(ctx.licenses.uri)) # Note: this is only taking the first language in the case of multiple languages language = tools.ParseLanguage( tools.Try(ctx.languages[0]), ) related_agents = tools.Concat( tools.Map( tools.Delegate(Creator), tools.Try(ctx.contributors) ), tools.Map( tools.Delegate(Publisher), tools.Try(ctx.publisher) ), tools.Map( tools.Delegate(Funder), tools.Try(ctx.sponsorships) ) ) identifiers = tools.Map( tools.Delegate(WorkIdentifier), tools.Map( tools.IRI(), tools.RunPython( 'unique', tools.Concat( tools.Try(ctx.uris.canonicalUri), tools.Try(ctx.uris.providerUris), tools.Try(ctx.uris.descriptorUris), tools.Try(ctx.uris.objectUris) ) ) ) ) subjects = tools.Map( tools.Delegate(ThroughSubjects), tools.Subjects( tools.Try(ctx.subjects) ) ) tags = tools.Map( tools.Delegate(ThroughTags), tools.Try(ctx.tags), tools.Try(ctx.subjects) ) class Extra: """ Fields that are combined in the base parser are relisted as singular elements that match their original entry to preserve raw data structure. """ freeToRead = tools.Try(ctx.freeToRead) languages = tools.Try(ctx.languages) licenses = tools.Try(ctx.licenses) otherProperties = tools.Try(ctx.otherProperties) publisher = tools.Try(ctx.publisher) subjects = tools.Try(ctx.subjects) sponsorships = tools.Try(ctx.sponsorships) tags = tools.Try(ctx.tags) uris = tools.Try(ctx.uris) version = tools.Try(ctx.version) def unique(self, items): return list(sorted(set(items))) def _is_deleted(self, properties): for prop in properties or []: if prop['name'] == 'status': return 'deleted' in prop['properties'].get('status', []) return False class V1Normalizer(Normalizer): root_parser = CreativeWork
apache-2.0
Southpaw-TACTIC/Team
src/python/Lib/site-packages/win32com/server/dispatcher.py
1
9715
"""Dispatcher Please see policy.py for a discussion on dispatchers and policies """ import pythoncom, traceback, win32api from sys import exc_info # from win32com.server.exception import IsCOMServerException from win32com.util import IIDToInterfaceName import win32com class DispatcherBase: """ The base class for all Dispatchers. This dispatcher supports wrapping all operations in exception handlers, and all the necessary delegation to the policy. This base class supports the printing of "unexpected" exceptions. Note, however, that exactly where the output of print goes may not be useful! A derived class may provide additional semantics for this. """ def __init__(self, policyClass, object): self.policy = policyClass(object) # The logger we should dump to. If None, we should send to the # default location (typically 'print') self.logger = getattr(win32com, "logger", None) # Note the "return self._HandleException_()" is purely to stop pychecker # complaining - _HandleException_ will itself raise an exception for the # pythoncom framework, so the result will never be seen. 
def _CreateInstance_(self, clsid, reqIID): try: self.policy._CreateInstance_(clsid, reqIID) return pythoncom.WrapObject(self, reqIID) except: return self._HandleException_() def _QueryInterface_(self, iid): try: return self.policy._QueryInterface_(iid) except: return self._HandleException_() def _Invoke_(self, dispid, lcid, wFlags, args): try: return self.policy._Invoke_(dispid, lcid, wFlags, args) except: return self._HandleException_() def _GetIDsOfNames_(self, names, lcid): try: return self.policy._GetIDsOfNames_(names, lcid) except: return self._HandleException_() def _GetTypeInfo_(self, index, lcid): try: return self.policy._GetTypeInfo_(index, lcid) except: return self._HandleException_() def _GetTypeInfoCount_(self): try: return self.policy._GetTypeInfoCount_() except: return self._HandleException_() def _GetDispID_(self, name, fdex): try: return self.policy._GetDispID_(name, fdex) except: return self._HandleException_() def _InvokeEx_(self, dispid, lcid, wFlags, args, kwargs, serviceProvider): try: return self.policy._InvokeEx_(dispid, lcid, wFlags, args, kwargs, serviceProvider) except: return self._HandleException_() def _DeleteMemberByName_(self, name, fdex): try: return self.policy._DeleteMemberByName_(name, fdex) except: return self._HandleException_() def _DeleteMemberByDispID_(self, id): try: return self.policy._DeleteMemberByDispID_(id) except: return self._HandleException_() def _GetMemberProperties_(self, id, fdex): try: return self.policy._GetMemberProperties_(id, fdex) except: return self._HandleException_() def _GetMemberName_(self, dispid): try: return self.policy._GetMemberName_(dispid) except: return self._HandleException_() def _GetNextDispID_(self, fdex, flags): try: return self.policy._GetNextDispID_(fdex, flags) except: return self._HandleException_() def _GetNameSpaceParent_(self): try: return self.policy._GetNameSpaceParent_() except: return self._HandleException_() def _HandleException_(self): """Called whenever an exception is 
raised. Default behaviour is to print the exception. """ # If not a COM exception, print it for the developer. if not IsCOMServerException(): if self.logger is not None: self.logger.exception("pythoncom server error") else: traceback.print_exc() # But still raise it for the framework. reraise() def _trace_(self, *args): if self.logger is not None: record = " ".join(map(str, args)) self.logger.debug(record) else: for arg in args[:-1]: print arg, print args[-1] class DispatcherTrace(DispatcherBase): """A dispatcher, which causes a 'print' line for each COM function called. """ def _QueryInterface_(self, iid): rc = DispatcherBase._QueryInterface_(self, iid) if not rc: self._trace_("in %s._QueryInterface_ with unsupported IID %s (%s)" % (`self.policy._obj_`, IIDToInterfaceName(iid),iid)) return rc def _GetIDsOfNames_(self, names, lcid): self._trace_("in _GetIDsOfNames_ with '%s' and '%d'\n" % (names, lcid)) return DispatcherBase._GetIDsOfNames_(self, names, lcid) def _GetTypeInfo_(self, index, lcid): self._trace_("in _GetTypeInfo_ with index=%d, lcid=%d\n" % (index, lcid)) return DispatcherBase._GetTypeInfo_(self, index, lcid) def _GetTypeInfoCount_(self): self._trace_("in _GetTypeInfoCount_\n") return DispatcherBase._GetTypeInfoCount_(self) def _Invoke_(self, dispid, lcid, wFlags, args): self._trace_("in _Invoke_ with", dispid, lcid, wFlags, args) return DispatcherBase._Invoke_(self, dispid, lcid, wFlags, args) def _GetDispID_(self, name, fdex): self._trace_("in _GetDispID_ with", name, fdex) return DispatcherBase._GetDispID_(self, name, fdex) def _InvokeEx_(self, dispid, lcid, wFlags, args, kwargs, serviceProvider): self._trace_("in %r._InvokeEx_-%s%r [%x,%s,%r]" % (self.policy._obj_, dispid, args, wFlags, lcid, serviceProvider)) return DispatcherBase._InvokeEx_(self, dispid, lcid, wFlags, args, kwargs, serviceProvider) def _DeleteMemberByName_(self, name, fdex): self._trace_("in _DeleteMemberByName_ with", name, fdex) return DispatcherBase._DeleteMemberByName_(self, 
name, fdex) def _DeleteMemberByDispID_(self, id): self._trace_("in _DeleteMemberByDispID_ with", id) return DispatcherBase._DeleteMemberByDispID_(self, id) def _GetMemberProperties_(self, id, fdex): self._trace_("in _GetMemberProperties_ with", id, fdex) return DispatcherBase._GetMemberProperties_(self, id, fdex) def _GetMemberName_(self, dispid): self._trace_("in _GetMemberName_ with", dispid) return DispatcherBase._GetMemberName_(self, dispid) def _GetNextDispID_(self, fdex, flags): self._trace_("in _GetNextDispID_ with", fdex, flags) return DispatcherBase._GetNextDispID_(self, fdex, flags) def _GetNameSpaceParent_(self): self._trace_("in _GetNameSpaceParent_") return DispatcherBase._GetNameSpaceParent_(self) class DispatcherWin32trace(DispatcherTrace): """A tracing dispatcher that sends its output to the win32trace remote collector. """ def __init__(self, policyClass, object): DispatcherTrace.__init__(self, policyClass, object) if self.logger is None: # If we have no logger, setup our output. import win32traceutil # Sets up everything. self._trace_("Object with win32trace dispatcher created (object=%s)" % `object`) class DispatcherOutputDebugString(DispatcherTrace): """A tracing dispatcher that sends its output to win32api.OutputDebugString """ def _trace_(self, *args): for arg in args[:-1]: win32api.OutputDebugString(str(arg)+" ") win32api.OutputDebugString(str(args[-1])+"\n") class DispatcherWin32dbg(DispatcherBase): """A source-level debugger dispatcher A dispatcher which invokes the debugger as an object is instantiated, or when an unexpected exception occurs. Requires Pythonwin. """ def __init__(self, policyClass, ob): # No one uses this, and it just causes py2exe to drag all of # pythonwin in. #import pywin.debugger pywin.debugger.brk() print "The DispatcherWin32dbg dispatcher is deprecated!" print "Please let me know if this is a problem." 
print "Uncomment the relevant lines in dispatcher.py to re-enable" # DEBUGGER Note - You can either: # * Hit Run and wait for a (non Exception class) exception to occur! # * Set a breakpoint and hit run. # * Step into the object creation (a few steps away!) DispatcherBase.__init__(self, policyClass, ob) def _HandleException_(self): """ Invoke the debugger post mortem capability """ # Save details away. typ, val, tb = exc_info() #import pywin.debugger, pywin.debugger.dbgcon debug = 0 try: raise typ, val except Exception: # AARG - What is this Exception??? # Use some inside knowledge to borrow a Debugger option which dictates if we # stop at "expected" exceptions. debug = pywin.debugger.GetDebugger().get_option(pywin.debugger.dbgcon.OPT_STOP_EXCEPTIONS) except: debug = 1 if debug: try: pywin.debugger.post_mortem(tb, typ, val) # The original exception except: traceback.print_exc() # But still raise it. del tb reraise() def reraise(): """Handy function for re-raising errors. Note: storing a traceback in a local variable can introduce reference loops if you aren't careful. Specifically, that local variable should not be within an execution context contained with the traceback. By using a utility function, we ensure that our local variable holding the traceback is not referenced by the traceback itself. """ t, v, tb = exc_info() raise t, v, tb try: import win32trace DefaultDebugDispatcher = DispatcherWin32trace except ImportError: # no win32trace module - just use a print based one. DefaultDebugDispatcher = DispatcherTrace
epl-1.0
jrrembert/django
django/contrib/gis/gdal/prototypes/errcheck.py
586
4229
""" This module houses the error-checking routines used by the GDAL ctypes prototypes. """ from ctypes import c_void_p, string_at from django.contrib.gis.gdal.error import ( GDALException, SRSException, check_err, ) from django.contrib.gis.gdal.libgdal import lgdal from django.utils import six # Helper routines for retrieving pointers and/or values from # arguments passed in by reference. def arg_byref(args, offset=-1): "Returns the pointer argument's by-reference value." return args[offset]._obj.value def ptr_byref(args, offset=-1): "Returns the pointer argument passed in by-reference." return args[offset]._obj # ### String checking Routines ### def check_const_string(result, func, cargs, offset=None, cpl=False): """ Similar functionality to `check_string`, but does not free the pointer. """ if offset: check_err(result, cpl=cpl) ptr = ptr_byref(cargs, offset) return ptr.value else: return result def check_string(result, func, cargs, offset=-1, str_result=False): """ Checks the string output returned from the given function, and frees the string pointer allocated by OGR. The `str_result` keyword may be used when the result is the string pointer, otherwise the OGR error code is assumed. The `offset` keyword may be used to extract the string pointer passed in by-reference at the given slice offset in the function arguments. """ if str_result: # For routines that return a string. ptr = result if not ptr: s = None else: s = string_at(result) else: # Error-code return specified. check_err(result) ptr = ptr_byref(cargs, offset) # Getting the string value s = ptr.value # Correctly freeing the allocated memory behind GDAL pointer # with the VSIFree routine. if ptr: lgdal.VSIFree(ptr) return s # ### DataSource, Layer error-checking ### # ### Envelope checking ### def check_envelope(result, func, cargs, offset=-1): "Checks a function that returns an OGR Envelope by reference." 
env = ptr_byref(cargs, offset) return env # ### Geometry error-checking routines ### def check_geom(result, func, cargs): "Checks a function that returns a geometry." # OGR_G_Clone may return an integer, even though the # restype is set to c_void_p if isinstance(result, six.integer_types): result = c_void_p(result) if not result: raise GDALException('Invalid geometry pointer returned from "%s".' % func.__name__) return result def check_geom_offset(result, func, cargs, offset=-1): "Chcks the geometry at the given offset in the C parameter list." check_err(result) geom = ptr_byref(cargs, offset=offset) return check_geom(geom, func, cargs) # ### Spatial Reference error-checking routines ### def check_srs(result, func, cargs): if isinstance(result, six.integer_types): result = c_void_p(result) if not result: raise SRSException('Invalid spatial reference pointer returned from "%s".' % func.__name__) return result # ### Other error-checking routines ### def check_arg_errcode(result, func, cargs, cpl=False): """ The error code is returned in the last argument, by reference. Check its value with `check_err` before returning the result. """ check_err(arg_byref(cargs), cpl=cpl) return result def check_errcode(result, func, cargs, cpl=False): """ Check the error code returned (c_int). """ check_err(result, cpl=cpl) def check_pointer(result, func, cargs): "Makes sure the result pointer is valid." if isinstance(result, six.integer_types): result = c_void_p(result) if result: return result else: raise GDALException('Invalid pointer returned from "%s"' % func.__name__) def check_str_arg(result, func, cargs): """ This is for the OSRGet[Angular|Linear]Units functions, which require that the returned string pointer not be freed. This returns both the double and string values. """ dbl = result ptr = cargs[-1]._obj return dbl, ptr.value.decode()
bsd-3-clause
UltronAI/Deep-Learning
CS231n/reference/CS231n-master/assignment3/cs231n/fast_layers.py
1
9560
import numpy as np

try:
    from cs231n.im2col_cython import col2im_cython, im2col_cython
    from cs231n.im2col_cython import col2im_6d_cython
except ImportError:
    # Python 3 fix: the original used Python 2 print statements, which are
    # syntax errors under Python 3.
    print('run the following from the cs231n directory and try again:')
    print('python setup.py build_ext --inplace')
    print('You may also need to restart your iPython kernel')

try:
    # Pure-Python im2col fallback.  Guarded so that the numpy-only pooling
    # routines below stay usable even when the cs231n package is not on the
    # path; the im2col-based routines will then raise NameError when called.
    from cs231n.im2col import *
except ImportError:
    pass


def conv_forward_im2col(x, w, b, conv_param):
    """
    A fast implementation of the forward pass for a convolutional layer
    based on im2col and col2im.
    """
    N, C, H, W = x.shape
    num_filters, _, filter_height, filter_width = w.shape
    stride, pad = conv_param['stride'], conv_param['pad']

    # Check dimensions
    assert (W + 2 * pad - filter_width) % stride == 0, 'width does not work'
    assert (H + 2 * pad - filter_height) % stride == 0, 'height does not work'

    # Create output.  Floor division (//) keeps the spatial sizes ints under
    # Python 3; the original relied on Python 2 integer `/`.
    out_height = (H + 2 * pad - filter_height) // stride + 1
    out_width = (W + 2 * pad - filter_width) // stride + 1
    out = np.zeros((N, num_filters, out_height, out_width), dtype=x.dtype)

    # x_cols = im2col_indices(x, w.shape[2], w.shape[3], pad, stride)
    x_cols = im2col_cython(x, w.shape[2], w.shape[3], pad, stride)
    res = w.reshape((w.shape[0], -1)).dot(x_cols) + b.reshape(-1, 1)

    out = res.reshape(w.shape[0], out.shape[2], out.shape[3], x.shape[0])
    out = out.transpose(3, 0, 1, 2)

    cache = (x, w, b, conv_param, x_cols)
    return out, cache


def conv_forward_strides(x, w, b, conv_param):
    """
    Convolution forward pass built on a strided view of the padded input, so
    the whole convolution becomes one large matrix multiply.
    """
    N, C, H, W = x.shape
    F, _, HH, WW = w.shape
    stride, pad = conv_param['stride'], conv_param['pad']

    # Check dimensions
    #assert (W + 2 * pad - WW) % stride == 0, 'width does not work'
    #assert (H + 2 * pad - HH) % stride == 0, 'height does not work'

    # Pad the input
    p = pad
    x_padded = np.pad(x, ((0, 0), (0, 0), (p, p), (p, p)), mode='constant')

    # Figure out output dimensions (// for int sizes under Python 3)
    H += 2 * pad
    W += 2 * pad
    out_h = (H - HH) // stride + 1
    out_w = (W - WW) // stride + 1

    # Perform an im2col operation by picking clever strides
    shape = (C, HH, WW, N, out_h, out_w)
    strides = (H * W, W, 1, C * H * W, stride * W, stride)
    strides = x.itemsize * np.array(strides)
    x_stride = np.lib.stride_tricks.as_strided(x_padded,
                                               shape=shape, strides=strides)
    x_cols = np.ascontiguousarray(x_stride)
    x_cols.shape = (C * HH * WW, N * out_h * out_w)

    # Now all our convolutions are a big matrix multiply
    res = w.reshape(F, -1).dot(x_cols) + b.reshape(-1, 1)

    # Reshape the output
    res.shape = (F, N, out_h, out_w)
    out = res.transpose(1, 0, 2, 3)

    # Be nice and return a contiguous array
    # The old version of conv_forward_fast doesn't do this, so for a fair
    # comparison we won't either
    out = np.ascontiguousarray(out)

    cache = (x, w, b, conv_param, x_cols)
    return out, cache


def conv_backward_strides(dout, cache):
    """Backward pass matching conv_forward_strides; uses col2im_6d_cython."""
    x, w, b, conv_param, x_cols = cache
    stride, pad = conv_param['stride'], conv_param['pad']

    N, C, H, W = x.shape
    F, _, HH, WW = w.shape
    _, _, out_h, out_w = dout.shape

    db = np.sum(dout, axis=(0, 2, 3))

    dout_reshaped = dout.transpose(1, 0, 2, 3).reshape(F, -1)
    dw = dout_reshaped.dot(x_cols.T).reshape(w.shape)

    dx_cols = w.reshape(F, -1).T.dot(dout_reshaped)
    dx_cols.shape = (C, HH, WW, N, out_h, out_w)
    dx = col2im_6d_cython(dx_cols, N, C, H, W, HH, WW, pad, stride)

    return dx, dw, db


def conv_backward_im2col(dout, cache):
    """
    A fast implementation of the backward pass for a convolutional layer
    based on im2col and col2im.
    """
    x, w, b, conv_param, x_cols = cache
    stride, pad = conv_param['stride'], conv_param['pad']

    db = np.sum(dout, axis=(0, 2, 3))

    num_filters, _, filter_height, filter_width = w.shape
    dout_reshaped = dout.transpose(1, 2, 3, 0).reshape(num_filters, -1)
    dw = dout_reshaped.dot(x_cols.T).reshape(w.shape)

    dx_cols = w.reshape(num_filters, -1).T.dot(dout_reshaped)
    # dx = col2im_indices(dx_cols, x.shape, filter_height, filter_width, pad, stride)
    dx = col2im_cython(dx_cols, x.shape[0], x.shape[1], x.shape[2], x.shape[3],
                       filter_height, filter_width, pad, stride)

    return dx, dw, db


conv_forward_fast = conv_forward_strides
conv_backward_fast = conv_backward_strides


def max_pool_forward_fast(x, pool_param):
    """
    A fast implementation of the forward pass for a max pooling layer.

    This chooses between the reshape method and the im2col method. If the pooling
    regions are square and tile the input image, then we can use the reshape
    method which is very fast. Otherwise we fall back on the im2col method, which
    is not much faster than the naive method.
    """
    N, C, H, W = x.shape
    pool_height, pool_width = pool_param['pool_height'], pool_param['pool_width']
    stride = pool_param['stride']

    same_size = pool_height == pool_width == stride
    tiles = H % pool_height == 0 and W % pool_width == 0
    if same_size and tiles:
        out, reshape_cache = max_pool_forward_reshape(x, pool_param)
        cache = ('reshape', reshape_cache)
    else:
        out, im2col_cache = max_pool_forward_im2col(x, pool_param)
        cache = ('im2col', im2col_cache)
    return out, cache


def max_pool_backward_fast(dout, cache):
    """
    A fast implementation of the backward pass for a max pooling layer.

    This switches between the reshape method and the im2col method depending on
    which method was used to generate the cache.
    """
    method, real_cache = cache
    if method == 'reshape':
        return max_pool_backward_reshape(dout, real_cache)
    elif method == 'im2col':
        return max_pool_backward_im2col(dout, real_cache)
    else:
        raise ValueError('Unrecognized method "%s"' % method)


def max_pool_forward_reshape(x, pool_param):
    """
    A fast implementation of the forward pass for the max pooling layer that uses
    some clever reshaping.

    This can only be used for square pooling regions that tile the input.
    """
    N, C, H, W = x.shape
    pool_height, pool_width = pool_param['pool_height'], pool_param['pool_width']
    stride = pool_param['stride']
    assert pool_height == pool_width == stride, 'Invalid pool params'
    assert H % pool_height == 0
    assert W % pool_height == 0
    # // so reshape dimensions are ints on Python 3 (Py2 `/` was int division)
    x_reshaped = x.reshape(N, C, H // pool_height, pool_height,
                           W // pool_width, pool_width)
    out = x_reshaped.max(axis=3).max(axis=4)

    cache = (x, x_reshaped, out)
    return out, cache


def max_pool_backward_reshape(dout, cache):
    """
    A fast implementation of the backward pass for the max pooling layer that
    uses some clever broadcasting and reshaping.

    This can only be used if the forward pass was computed using
    max_pool_forward_reshape.

    NOTE: If there are multiple argmaxes, the division by the mask count below
    splits the upstream gradient equally among all argmax elements, which gives
    a valid subgradient (at a ~40% performance cost versus assigning the full
    gradient to every argmax).
    """
    x, x_reshaped, out = cache

    dx_reshaped = np.zeros_like(x_reshaped)
    out_newaxis = out[:, :, :, np.newaxis, :, np.newaxis]
    mask = (x_reshaped == out_newaxis)
    dout_newaxis = dout[:, :, :, np.newaxis, :, np.newaxis]
    dout_broadcast, _ = np.broadcast_arrays(dout_newaxis, dx_reshaped)
    dx_reshaped[mask] = dout_broadcast[mask]
    dx_reshaped /= np.sum(mask, axis=(3, 5), keepdims=True)
    dx = dx_reshaped.reshape(x.shape)

    return dx


def max_pool_forward_im2col(x, pool_param):
    """
    An implementation of the forward pass for max pooling based on im2col.

    This isn't much faster than the naive version, so it should be avoided if
    possible.
    """
    N, C, H, W = x.shape
    pool_height, pool_width = pool_param['pool_height'], pool_param['pool_width']
    stride = pool_param['stride']

    assert (H - pool_height) % stride == 0, 'Invalid height'
    assert (W - pool_width) % stride == 0, 'Invalid width'

    out_height = (H - pool_height) // stride + 1
    out_width = (W - pool_width) // stride + 1

    x_split = x.reshape(N * C, 1, H, W)
    x_cols = im2col(x_split, pool_height, pool_width, padding=0, stride=stride)
    x_cols_argmax = np.argmax(x_cols, axis=0)
    x_cols_max = x_cols[x_cols_argmax, np.arange(x_cols.shape[1])]
    out = x_cols_max.reshape(out_height, out_width, N, C).transpose(2, 3, 0, 1)

    cache = (x, x_cols, x_cols_argmax, pool_param)
    return out, cache


def max_pool_backward_im2col(dout, cache):
    """
    An implementation of the backward pass for max pooling based on im2col.

    This isn't much faster than the naive version, so it should be avoided if
    possible.
    """
    x, x_cols, x_cols_argmax, pool_param = cache
    N, C, H, W = x.shape
    pool_height, pool_width = pool_param['pool_height'], pool_param['pool_width']
    stride = pool_param['stride']

    dout_reshaped = dout.transpose(2, 3, 0, 1).flatten()
    dx_cols = np.zeros_like(x_cols)
    dx_cols[x_cols_argmax, np.arange(dx_cols.shape[1])] = dout_reshaped
    dx = col2im_indices(dx_cols, (N * C, 1, H, W), pool_height, pool_width,
                        padding=0, stride=stride)
    dx = dx.reshape(x.shape)

    return dx
mit
mgit-at/ansible
lib/ansible/modules/network/fortios/fortios_address.py
57
10231
#!/usr/bin/python
#
# Ansible module to manage IP addresses on fortios devices
# (c) 2016, Benjamin Jolivot <bjolivot@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import absolute_import, division, print_function
__metaclass__ = type


ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}

DOCUMENTATION = """
---
module: fortios_address
version_added: "2.4"
author: "Benjamin Jolivot (@bjolivot)"
short_description: Manage fortios firewall address objects
description:
  - This module provide management of firewall addresses on FortiOS devices.
extends_documentation_fragment: fortios
options:
  state:
    description:
      - Specifies if address need to be added or deleted.
    required: true
    choices: ['present', 'absent']
  name:
    description:
      - Name of the address to add or delete.
    required: true
  type:
    description:
      - Type of the address.
    choices: ['iprange', 'fqdn', 'ipmask', 'geography']
  value:
    description:
      - Address value, based on type.
        If type=fqdn, something like www.google.com.
        If type=ipmask, you can use simple ip (192.168.0.1), ip+mask
        (192.168.0.1 255.255.255.0) or CIDR (192.168.0.1/32).
  start_ip:
    description:
      - First ip in range (used only with type=iprange).
  end_ip:
    description:
      - Last ip in range (used only with type=iprange).
  country:
    description:
      - 2 letter country code (like FR).
  interface:
    description:
      - interface name the address apply to.
    default: any
  comment:
    description:
      - free text to describe address.
notes:
  - This module requires netaddr python library.
"""

EXAMPLES = """
- name: Register french addresses
  fortios_address:
    host: 192.168.0.254
    username: admin
    password: p4ssw0rd
    state: present
    name: "fromfrance"
    type: geography
    country: FR
    comment: "French geoip address"

- name: Register some fqdn
  fortios_address:
    host: 192.168.0.254
    username: admin
    password: p4ssw0rd
    state: present
    name: "Ansible"
    type: fqdn
    value: www.ansible.com
    comment: "Ansible website"

- name: Register google DNS
  fortios_address:
    host: 192.168.0.254
    username: admin
    password: p4ssw0rd
    state: present
    name: "google_dns"
    type: ipmask
    value: 8.8.8.8
"""

RETURN = """
firewall_address_config:
  description: full firewall addresses config string.
  returned: always
  type: string
change_string:
  description: The commands executed by the module.
  returned: only if config changed
  type: string
"""

from ansible.module_utils.network.fortios.fortios import fortios_argument_spec, fortios_required_if
from ansible.module_utils.network.fortios.fortios import backup, AnsibleFortios
from ansible.module_utils.basic import AnsibleModule


# check for netaddr lib
try:
    from netaddr import IPNetwork
    HAS_NETADDR = True
except ImportError:
    # Catch only ImportError (was a bare except that could hide real bugs).
    HAS_NETADDR = False


# define valid country list for GEOIP address type
FG_COUNTRY_LIST = (
    'ZZ', 'A1', 'A2', 'O1', 'AD', 'AE', 'AF', 'AG', 'AI', 'AL',
    'AM', 'AN', 'AO', 'AP', 'AQ', 'AR', 'AS', 'AT', 'AU', 'AW',
    'AX', 'AZ', 'BA', 'BB', 'BD', 'BE', 'BF', 'BG', 'BH', 'BI',
    'BJ', 'BL', 'BM', 'BN', 'BO', 'BQ', 'BR', 'BS', 'BT', 'BV',
    'BW', 'BY', 'BZ', 'CA', 'CC', 'CD', 'CF', 'CG', 'CH', 'CI',
    'CK', 'CL', 'CM', 'CN', 'CO', 'CR', 'CU', 'CV', 'CW', 'CX',
    'CY', 'CZ', 'DE', 'DJ', 'DK', 'DM', 'DO', 'DZ', 'EC', 'EE',
    'EG', 'EH', 'ER', 'ES', 'ET', 'EU', 'FI', 'FJ', 'FK', 'FM',
    'FO', 'FR', 'GA', 'GB', 'GD', 'GE', 'GF', 'GG', 'GH', 'GI',
    'GL', 'GM', 'GN', 'GP', 'GQ', 'GR', 'GS', 'GT', 'GU', 'GW',
    'GY', 'HK', 'HM', 'HN', 'HR', 'HT', 'HU', 'ID', 'IE', 'IL',
    'IM', 'IN', 'IO', 'IQ', 'IR', 'IS', 'IT', 'JE', 'JM', 'JO',
    'JP', 'KE', 'KG', 'KH', 'KI', 'KM', 'KN', 'KP', 'KR', 'KW',
    'KY', 'KZ', 'LA', 'LB', 'LC', 'LI', 'LK', 'LR', 'LS', 'LT',
    'LU', 'LV', 'LY', 'MA', 'MC', 'MD', 'ME', 'MF', 'MG', 'MH',
    'MK', 'ML', 'MM', 'MN', 'MO', 'MP', 'MQ', 'MR', 'MS', 'MT',
    'MU', 'MV', 'MW', 'MX', 'MY', 'MZ', 'NA', 'NC', 'NE', 'NF',
    'NG', 'NI', 'NL', 'NO', 'NP', 'NR', 'NU', 'NZ', 'OM', 'PA',
    'PE', 'PF', 'PG', 'PH', 'PK', 'PL', 'PM', 'PN', 'PR', 'PS',
    'PT', 'PW', 'PY', 'QA', 'RE', 'RO', 'RS', 'RU', 'RW', 'SA',
    'SB', 'SC', 'SD', 'SE', 'SG', 'SH', 'SI', 'SJ', 'SK', 'SL',
    'SM', 'SN', 'SO', 'SR', 'SS', 'ST', 'SV', 'SX', 'SY', 'SZ',
    'TC', 'TD', 'TF', 'TG', 'TH', 'TJ', 'TK', 'TL', 'TM', 'TN',
    'TO', 'TR', 'TT', 'TV', 'TW', 'TZ', 'UA', 'UG', 'UM', 'US',
    'UY', 'UZ', 'VA', 'VC', 'VE', 'VG', 'VI', 'VN', 'VU', 'WF',
    'WS', 'YE', 'YT', 'ZA', 'ZM', 'ZW'
)


def get_formated_ipaddr(input_ip):
    """
    Format given ip address string to fortigate format (ip netmask)

    Args:
        * **input_ip** (string) : string representing ip address
        accepted format:
          - ip netmask  (ex: 192.168.0.10 255.255.255.0)
          - ip (ex: 192.168.0.10)
          - CIDR (ex: 192.168.0.10/24)

    Returns:
        formated ip if ip is valid (ex: "192.168.0.10 255.255.255.0")
        False if ip is not valid
    """
    try:
        if " " in input_ip:
            # ip netmask format; IPNetwork() is called purely to validate
            # each half and raises on malformed input.
            str_ip, str_netmask = input_ip.split(" ")
            IPNetwork(str_ip)
            IPNetwork(str_netmask)
            return "%s %s" % (str_ip, str_netmask)
        else:
            ip = IPNetwork(input_ip)
            return "%s %s" % (str(ip.ip), str(ip.netmask))
    except Exception:
        # netaddr raises AddrFormatError on bad addresses and split() can
        # raise ValueError; any failure means "not a valid address".
        # (Was a bare except; the unreachable trailing `return False` that
        # followed the try/except has been removed.)
        return False


def main():
    """Module entry point: validate parameters and apply the address config."""
    argument_spec = dict(
        state=dict(required=True, choices=['present', 'absent']),
        name=dict(required=True),
        type=dict(choices=['iprange', 'fqdn', 'ipmask', 'geography'], default='ipmask'),
        value=dict(),
        start_ip=dict(),
        end_ip=dict(),
        country=dict(),
        interface=dict(default='any'),
        comment=dict(),
    )

    # merge argument_spec from module_utils/fortios.py
    argument_spec.update(fortios_argument_spec)

    # Load module
    module = AnsibleModule(
        argument_spec=argument_spec,
        required_if=fortios_required_if,
        supports_check_mode=True,
    )
    result = dict(changed=False)

    if not HAS_NETADDR:
        module.fail_json(msg='Could not import the python library netaddr required by this module')

    # check params
    if module.params['state'] == 'absent':
        if module.params['type'] != "ipmask":
            module.fail_json(msg='Invalid argument type=%s when state=absent' % module.params['type'])
        if module.params['value'] is not None:
            module.fail_json(msg='Invalid argument `value` when state=absent')
        if module.params['start_ip'] is not None:
            module.fail_json(msg='Invalid argument `start_ip` when state=absent')
        if module.params['end_ip'] is not None:
            module.fail_json(msg='Invalid argument `end_ip` when state=absent')
        if module.params['country'] is not None:
            module.fail_json(msg='Invalid argument `country` when state=absent')
        if module.params['interface'] != "any":
            module.fail_json(msg='Invalid argument `interface` when state=absent')
        if module.params['comment'] is not None:
            module.fail_json(msg='Invalid argument `comment` when state=absent')
    else:
        # state=present
        # validate IP
        if module.params['type'] == "ipmask":
            formated_ip = get_formated_ipaddr(module.params['value'])
            if formated_ip is not False:
                # reuse the already-computed value (was computed twice)
                module.params['value'] = formated_ip
            else:
                module.fail_json(msg="Bad ip address format")

        # validate country
        if module.params['type'] == "geography":
            if module.params['country'] not in FG_COUNTRY_LIST:
                module.fail_json(msg="Invalid country argument, need to be in `diagnose firewall ipgeo country-list`")

        # validate iprange
        if module.params['type'] == "iprange":
            if module.params['start_ip'] is None:
                module.fail_json(msg="Missing argument 'start_ip' when type is iprange")
            if module.params['end_ip'] is None:
                module.fail_json(msg="Missing argument 'end_ip' when type is iprange")

    # init forti object
    fortigate = AnsibleFortios(module)

    # Config path
    config_path = 'firewall address'

    # load config
    fortigate.load_config(config_path)

    # Absent State
    if module.params['state'] == 'absent':
        fortigate.candidate_config[config_path].del_block(module.params['name'])

    # Present state
    if module.params['state'] == 'present':
        # define address params
        new_addr = fortigate.get_empty_configuration_block(module.params['name'], 'edit')

        if module.params['comment'] is not None:
            new_addr.set_param('comment', '"%s"' % (module.params['comment']))

        if module.params['type'] == 'iprange':
            new_addr.set_param('type', 'iprange')
            new_addr.set_param('start-ip', module.params['start_ip'])
            new_addr.set_param('end-ip', module.params['end_ip'])

        if module.params['type'] == 'geography':
            new_addr.set_param('type', 'geography')
            new_addr.set_param('country', '"%s"' % (module.params['country']))

        if module.params['interface'] != 'any':
            new_addr.set_param('associated-interface', '"%s"' % (module.params['interface']))

        if module.params['value'] is not None:
            if module.params['type'] == 'fqdn':
                new_addr.set_param('type', 'fqdn')
                new_addr.set_param('fqdn', '"%s"' % (module.params['value']))
            if module.params['type'] == 'ipmask':
                new_addr.set_param('subnet', module.params['value'])

        # add the new address object to the device
        fortigate.add_block(module.params['name'], new_addr)

    # Apply changes (check mode is managed directly by the fortigate object)
    fortigate.apply_changes()


if __name__ == '__main__':
    main()
gpl-3.0