repo_name
stringlengths
5
100
path
stringlengths
4
294
copies
stringclasses
990 values
size
stringlengths
4
7
content
stringlengths
666
1M
license
stringclasses
15 values
kyvinh/home-assistant
homeassistant/components/media_player/mpchc.py
11
5002
""" Support to interface with the MPC-HC Web API. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/media_player.mpchc/ """ import logging import re import requests import voluptuous as vol from homeassistant.components.media_player import ( SUPPORT_VOLUME_MUTE, SUPPORT_PAUSE, SUPPORT_STOP, SUPPORT_NEXT_TRACK, SUPPORT_PREVIOUS_TRACK, SUPPORT_VOLUME_STEP, SUPPORT_PLAY, MediaPlayerDevice, PLATFORM_SCHEMA) from homeassistant.const import ( STATE_OFF, STATE_IDLE, STATE_PAUSED, STATE_PLAYING, CONF_NAME, CONF_HOST, CONF_PORT) import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) DEFAULT_NAME = 'MPC-HC' DEFAULT_PORT = 13579 SUPPORT_MPCHC = SUPPORT_VOLUME_MUTE | SUPPORT_PAUSE | SUPPORT_STOP | \ SUPPORT_PREVIOUS_TRACK | SUPPORT_NEXT_TRACK | SUPPORT_VOLUME_STEP | \ SUPPORT_PLAY PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, }) # pylint: disable=unused-argument def setup_platform(hass, config, add_devices, discovery_info=None): """Setup the MPC-HC platform.""" name = config.get(CONF_NAME) url = '{}:{}'.format(config.get(CONF_HOST), config.get(CONF_PORT)) add_devices([MpcHcDevice(name, url)]) class MpcHcDevice(MediaPlayerDevice): """Representation of a MPC-HC server.""" def __init__(self, name, url): """Initialize the MPC-HC device.""" self._name = name self._url = url self.update() def update(self): """Get the latest details.""" self._player_variables = dict() try: response = requests.get('{}/variables.html'.format(self._url), data=None, timeout=3) mpchc_variables = re.findall(r'<p id="(.+?)">(.+?)</p>', response.text) self._player_variables = dict() for var in mpchc_variables: self._player_variables[var[0]] = var[1].lower() except requests.exceptions.RequestException: _LOGGER.error("Could not connect to MPC-HC at: %s", self._url) def 
_send_command(self, command_id): """Send a command to MPC-HC via its window message ID.""" try: params = {"wm_command": command_id} requests.get("{}/command.html".format(self._url), params=params, timeout=3) except requests.exceptions.RequestException: _LOGGER.error("Could not send command %d to MPC-HC at: %s", command_id, self._url) @property def name(self): """Return the name of the device.""" return self._name @property def state(self): """Return the state of the device.""" state = self._player_variables.get('statestring', None) if state is None: return STATE_OFF if state == 'playing': return STATE_PLAYING elif state == 'paused': return STATE_PAUSED else: return STATE_IDLE @property def media_title(self): """Title of current playing media.""" return self._player_variables.get('file', None) @property def volume_level(self): """Volume level of the media player (0..1).""" return int(self._player_variables.get('volumelevel', 0)) / 100.0 @property def is_volume_muted(self): """Boolean if volume is currently muted.""" return self._player_variables.get('muted', '0') == '1' @property def media_duration(self): """Duration of current playing media in seconds.""" duration = self._player_variables.get('durationstring', "00:00:00").split(':') return \ int(duration[0]) * 3600 + \ int(duration[1]) * 60 + \ int(duration[2]) @property def supported_features(self): """Flag media player features that are supported.""" return SUPPORT_MPCHC def volume_up(self): """Volume up the media player.""" self._send_command(907) def volume_down(self): """Volume down media player.""" self._send_command(908) def mute_volume(self, mute): """Mute the volume.""" self._send_command(909) def media_play(self): """Send play command.""" self._send_command(887) def media_pause(self): """Send pause command.""" self._send_command(888) def media_stop(self): """Send stop command.""" self._send_command(890) def media_next_track(self): """Send next track command.""" self._send_command(921) def 
media_previous_track(self): """Send previous track command.""" self._send_command(920)
apache-2.0
ychen820/microblog
y/google-cloud-sdk/platform/gsutil/gslib/tests/test_parallel_cp.py
16
9892
# -*- coding: utf-8 -*- # Copyright 2010 Google Inc. All Rights Reserved. # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. """Tests for parallel uploads ported from gsutil naming tests. Currently, the mock storage service is not thread-safe and therefore not suitable for multiprocess/multithreaded testing. Since parallel composite uploads necessarily create at least one worker thread outside of main, these tests are present in this file as temporary (slower) integration tests to provide validation for parallel composite uploads until a thread-safe mock storage service rewrite. Tests for relative paths are not included as integration_testcase does not support modifying the current working directory. 
""" import os import gslib.tests.testcase as testcase from gslib.tests.util import ObjectToURI as suri from gslib.tests.util import PerformsFileToObjectUpload from gslib.util import Retry class TestParallelCp(testcase.GsUtilIntegrationTestCase): """Unit tests for gsutil naming logic.""" @PerformsFileToObjectUpload def testCopyingTopLevelFileToBucket(self): """Tests copying one top-level file to a bucket.""" src_file = self.CreateTempFile(file_name='f0') dst_bucket_uri = self.CreateBucket() self.RunGsUtil(['cp', src_file, suri(dst_bucket_uri)]) lines = self.AssertNObjectsInBucket(dst_bucket_uri, 1) self.assertEqual(suri(dst_bucket_uri, 'f0'), lines[0]) @PerformsFileToObjectUpload def testCopyingMultipleFilesToBucket(self): """Tests copying multiple files to a bucket.""" src_file0 = self.CreateTempFile(file_name='f0') src_file1 = self.CreateTempFile(file_name='f1') dst_bucket_uri = self.CreateBucket() self.RunGsUtil(['cp', src_file0, src_file1, suri(dst_bucket_uri)]) lines = self.AssertNObjectsInBucket(dst_bucket_uri, 2) self.assertEqual(suri(dst_bucket_uri, 'f0'), lines[0]) self.assertEqual(suri(dst_bucket_uri, 'f1'), lines[1]) @PerformsFileToObjectUpload def testCopyingNestedFileToBucketSubdir(self): """Tests copying a nested file to a bucket subdir. Tests that we correctly translate local FS-specific delimiters ('\' on Windows) to bucket delimiter (/). """ tmpdir = self.CreateTempDir() subdir = os.path.join(tmpdir, 'subdir') os.mkdir(subdir) src_file = self.CreateTempFile(tmpdir=tmpdir, file_name='obj', contents='') dst_bucket_uri = self.CreateBucket() # Make an object under subdir so next copy will treat subdir as a subdir. 
self.RunGsUtil(['cp', src_file, suri(dst_bucket_uri, 'subdir/a')]) self.RunGsUtil(['cp', src_file, suri(dst_bucket_uri, 'subdir')]) lines = self.AssertNObjectsInBucket(dst_bucket_uri, 2) self.assertEqual(suri(dst_bucket_uri, 'subdir/a'), lines[0]) self.assertEqual(suri(dst_bucket_uri, 'subdir/obj'), lines[1]) @PerformsFileToObjectUpload def testCopyingAbsolutePathDirToBucket(self): """Tests recursively copying absolute path directory to a bucket.""" dst_bucket_uri = self.CreateBucket() src_dir_root = self.CreateTempDir(test_files=[ 'f0', 'f1', 'f2.txt', ('dir0', 'dir1', 'nested')]) self.RunGsUtil(['cp', '-R', src_dir_root, suri(dst_bucket_uri)]) src_tmpdir = os.path.split(src_dir_root)[1] lines = self.AssertNObjectsInBucket(dst_bucket_uri, 4) self.assertEqual(suri(dst_bucket_uri, src_tmpdir, 'dir0', 'dir1', 'nested'), lines[0]) self.assertEqual(suri(dst_bucket_uri, src_tmpdir, 'f0'), lines[1]) self.assertEqual(suri(dst_bucket_uri, src_tmpdir, 'f1'), lines[2]) self.assertEqual(suri(dst_bucket_uri, src_tmpdir, 'f2.txt'), lines[3]) @PerformsFileToObjectUpload def testCopyingDirContainingOneFileToBucket(self): """Tests copying a directory containing 1 file to a bucket. We test this case to ensure that correct bucket handling isn't dependent on the copy being treated as a multi-source copy. 
""" dst_bucket_uri = self.CreateBucket() src_dir = self.CreateTempDir(test_files=[('dir0', 'dir1', 'foo')]) self.RunGsUtil(['cp', '-R', os.path.join(src_dir, 'dir0', 'dir1'), suri(dst_bucket_uri)]) lines = self.AssertNObjectsInBucket(dst_bucket_uri, 1) self.assertEqual(suri(dst_bucket_uri, 'dir1', 'foo'), lines[0]) @PerformsFileToObjectUpload def testCopyingFileToObjectWithConsecutiveSlashes(self): """Tests copying a file to an object containing consecutive slashes.""" src_file = self.CreateTempFile(file_name='f0') dst_bucket_uri = self.CreateBucket() self.RunGsUtil(['cp', src_file, suri(dst_bucket_uri) + '//obj']) lines = self.AssertNObjectsInBucket(dst_bucket_uri, 1) self.assertEqual(suri(dst_bucket_uri) + '//obj', lines[0]) @PerformsFileToObjectUpload def testCopyingObjsAndFilesToBucket(self): """Tests copying objects and files to a bucket.""" src_bucket_uri = self.CreateBucket() self.CreateObject(src_bucket_uri, object_name='f1', contents='foo') src_dir = self.CreateTempDir(test_files=['f2']) dst_bucket_uri = self.CreateBucket() self.RunGsUtil(['cp', '-R', suri(src_bucket_uri, '**'), '%s%s**' % (src_dir, os.sep), suri(dst_bucket_uri)]) lines = self.AssertNObjectsInBucket(dst_bucket_uri, 2) self.assertEqual(suri(dst_bucket_uri, 'f1'), lines[0]) self.assertEqual(suri(dst_bucket_uri, 'f2'), lines[1]) @PerformsFileToObjectUpload def testCopyingSubdirRecursiveToNonexistentSubdir(self): """Tests copying a directory with a single file recursively to a bucket. The file should end up in a new bucket subdirectory with the file's directory structure starting below the recursive copy point, as in Unix cp. Example: filepath: dir1/dir2/foo cp -r dir1 dir3 Results in dir3/dir2/foo being created. 
""" src_dir = self.CreateTempDir() self.CreateTempFile(tmpdir=src_dir + '/dir1/dir2', file_name='foo') dst_bucket_uri = self.CreateBucket() self.RunGsUtil(['cp', '-R', src_dir + '/dir1', suri(dst_bucket_uri, 'dir3')]) lines = self.AssertNObjectsInBucket(dst_bucket_uri, 1) self.assertEqual(suri(dst_bucket_uri, 'dir3/dir2/foo'), lines[0]) @PerformsFileToObjectUpload def testCopyingWildcardedFilesToBucketSubDir(self): """Tests copying wildcarded files to a bucket subdir.""" # Test with and without final slash on dest subdir. for final_dst_char in ('', '/'): dst_bucket_uri = self.CreateBucket() self.CreateObject(dst_bucket_uri, object_name='subdir0/existing', contents='foo') self.CreateObject(dst_bucket_uri, object_name='subdir1/existing', contents='foo') src_dir = self.CreateTempDir(test_files=['f0', 'f1', 'f2']) for i in range(2): self.RunGsUtil( ['cp', os.path.join(src_dir, 'f?'), suri(dst_bucket_uri, 'subdir%d' % i) + final_dst_char]) @Retry(AssertionError, tries=3, timeout_secs=1) def _Check1(): """Validate files were copied to the correct destinations.""" stdout = self.RunGsUtil(['ls', suri(dst_bucket_uri, 'subdir%d' % i, '**')], return_stdout=True) lines = stdout.split('\n') self.assertEqual(5, len(lines)) self.assertEqual(suri(dst_bucket_uri, 'subdir%d' % i, 'existing'), lines[0]) self.assertEqual(suri(dst_bucket_uri, 'subdir%d' % i, 'f0'), lines[1]) self.assertEqual(suri(dst_bucket_uri, 'subdir%d' % i, 'f1'), lines[2]) self.assertEqual(suri(dst_bucket_uri, 'subdir%d' % i, 'f2'), lines[3]) _Check1() @PerformsFileToObjectUpload def testCopyingOneNestedFileToBucketSubDir(self): """Tests copying one nested file to a bucket subdir.""" # Test with and without final slash on dest subdir. 
for final_dst_char in ('', '/'): dst_bucket_uri = self.CreateBucket() self.CreateObject(dst_bucket_uri, object_name='d0/placeholder', contents='foo') self.CreateObject(dst_bucket_uri, object_name='d1/placeholder', contents='foo') for i in range(2): src_dir = self.CreateTempDir(test_files=[('d3', 'd4', 'nested', 'f1')]) self.RunGsUtil(['cp', '-r', suri(src_dir, 'd3'), suri(dst_bucket_uri, 'd%d' % i) + final_dst_char]) lines = self.AssertNObjectsInBucket(dst_bucket_uri, 4) self.assertEqual(suri(dst_bucket_uri, 'd0', 'd3', 'd4', 'nested', 'f1'), lines[0]) self.assertEqual(suri(dst_bucket_uri, 'd0', 'placeholder'), lines[1]) self.assertEqual(suri(dst_bucket_uri, 'd1', 'd3', 'd4', 'nested', 'f1'), lines[2]) self.assertEqual(suri(dst_bucket_uri, 'd1', 'placeholder'), lines[3])
bsd-3-clause
jd-delatorre/professional-js
node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py
1825
17014
# Copyright (c) 2012 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """GYP backend that generates Eclipse CDT settings files. This backend DOES NOT generate Eclipse CDT projects. Instead, it generates XML files that can be imported into an Eclipse CDT project. The XML file contains a list of include paths and symbols (i.e. defines). Because a full .cproject definition is not created by this generator, it's not possible to properly define the include dirs and symbols for each file individually. Instead, one set of includes/symbols is generated for the entire project. This works fairly well (and is a vast improvement in general), but may still result in a few indexer issues here and there. This generator has no automated tests, so expect it to be broken. """ from xml.sax.saxutils import escape import os.path import subprocess import gyp import gyp.common import gyp.msvs_emulation import shlex import xml.etree.cElementTree as ET generator_wants_static_library_dependencies_adjusted = False generator_default_variables = { } for dirname in ['INTERMEDIATE_DIR', 'PRODUCT_DIR', 'LIB_DIR', 'SHARED_LIB_DIR']: # Some gyp steps fail if these are empty(!), so we convert them to variables generator_default_variables[dirname] = '$' + dirname for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME', 'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT', 'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX', 'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX', 'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX', 'CONFIGURATION_NAME']: generator_default_variables[unused] = '' # Include dirs will occasionally use the SHARED_INTERMEDIATE_DIR variable as # part of the path when dealing with generated headers. This value will be # replaced dynamically for each configuration. 
generator_default_variables['SHARED_INTERMEDIATE_DIR'] = \ '$SHARED_INTERMEDIATE_DIR' def CalculateVariables(default_variables, params): generator_flags = params.get('generator_flags', {}) for key, val in generator_flags.items(): default_variables.setdefault(key, val) flavor = gyp.common.GetFlavor(params) default_variables.setdefault('OS', flavor) if flavor == 'win': # Copy additional generator configuration data from VS, which is shared # by the Eclipse generator. import gyp.generator.msvs as msvs_generator generator_additional_non_configuration_keys = getattr(msvs_generator, 'generator_additional_non_configuration_keys', []) generator_additional_path_sections = getattr(msvs_generator, 'generator_additional_path_sections', []) gyp.msvs_emulation.CalculateCommonVariables(default_variables, params) def CalculateGeneratorInputInfo(params): """Calculate the generator specific info that gets fed to input (called by gyp).""" generator_flags = params.get('generator_flags', {}) if generator_flags.get('adjust_static_libraries', False): global generator_wants_static_library_dependencies_adjusted generator_wants_static_library_dependencies_adjusted = True def GetAllIncludeDirectories(target_list, target_dicts, shared_intermediate_dirs, config_name, params, compiler_path): """Calculate the set of include directories to be used. Returns: A list including all the include_dir's specified for every target followed by any include directories that were added as cflag compiler options. """ gyp_includes_set = set() compiler_includes_list = [] # Find compiler's default include dirs. if compiler_path: command = shlex.split(compiler_path) command.extend(['-E', '-xc++', '-v', '-']) proc = subprocess.Popen(args=command, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) output = proc.communicate()[1] # Extract the list of include dirs from the output, which has this format: # ... # #include "..." 
search starts here: # #include <...> search starts here: # /usr/include/c++/4.6 # /usr/local/include # End of search list. # ... in_include_list = False for line in output.splitlines(): if line.startswith('#include'): in_include_list = True continue if line.startswith('End of search list.'): break if in_include_list: include_dir = line.strip() if include_dir not in compiler_includes_list: compiler_includes_list.append(include_dir) flavor = gyp.common.GetFlavor(params) if flavor == 'win': generator_flags = params.get('generator_flags', {}) for target_name in target_list: target = target_dicts[target_name] if config_name in target['configurations']: config = target['configurations'][config_name] # Look for any include dirs that were explicitly added via cflags. This # may be done in gyp files to force certain includes to come at the end. # TODO(jgreenwald): Change the gyp files to not abuse cflags for this, and # remove this. if flavor == 'win': msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags) cflags = msvs_settings.GetCflags(config_name) else: cflags = config['cflags'] for cflag in cflags: if cflag.startswith('-I'): include_dir = cflag[2:] if include_dir not in compiler_includes_list: compiler_includes_list.append(include_dir) # Find standard gyp include dirs. if config.has_key('include_dirs'): include_dirs = config['include_dirs'] for shared_intermediate_dir in shared_intermediate_dirs: for include_dir in include_dirs: include_dir = include_dir.replace('$SHARED_INTERMEDIATE_DIR', shared_intermediate_dir) if not os.path.isabs(include_dir): base_dir = os.path.dirname(target_name) include_dir = base_dir + '/' + include_dir include_dir = os.path.abspath(include_dir) gyp_includes_set.add(include_dir) # Generate a list that has all the include dirs. 
all_includes_list = list(gyp_includes_set) all_includes_list.sort() for compiler_include in compiler_includes_list: if not compiler_include in gyp_includes_set: all_includes_list.append(compiler_include) # All done. return all_includes_list def GetCompilerPath(target_list, data, options): """Determine a command that can be used to invoke the compiler. Returns: If this is a gyp project that has explicit make settings, try to determine the compiler from that. Otherwise, see if a compiler was specified via the CC_target environment variable. """ # First, see if the compiler is configured in make's settings. build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0]) make_global_settings_dict = data[build_file].get('make_global_settings', {}) for key, value in make_global_settings_dict: if key in ['CC', 'CXX']: return os.path.join(options.toplevel_dir, value) # Check to see if the compiler was specified as an environment variable. for key in ['CC_target', 'CC', 'CXX']: compiler = os.environ.get(key) if compiler: return compiler return 'gcc' def GetAllDefines(target_list, target_dicts, data, config_name, params, compiler_path): """Calculate the defines for a project. Returns: A dict that includes explict defines declared in gyp files along with all of the default defines that the compiler uses. """ # Get defines declared in the gyp files. 
all_defines = {} flavor = gyp.common.GetFlavor(params) if flavor == 'win': generator_flags = params.get('generator_flags', {}) for target_name in target_list: target = target_dicts[target_name] if flavor == 'win': msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags) extra_defines = msvs_settings.GetComputedDefines(config_name) else: extra_defines = [] if config_name in target['configurations']: config = target['configurations'][config_name] target_defines = config['defines'] else: target_defines = [] for define in target_defines + extra_defines: split_define = define.split('=', 1) if len(split_define) == 1: split_define.append('1') if split_define[0].strip() in all_defines: # Already defined continue all_defines[split_define[0].strip()] = split_define[1].strip() # Get default compiler defines (if possible). if flavor == 'win': return all_defines # Default defines already processed in the loop above. if compiler_path: command = shlex.split(compiler_path) command.extend(['-E', '-dM', '-']) cpp_proc = subprocess.Popen(args=command, cwd='.', stdin=subprocess.PIPE, stdout=subprocess.PIPE) cpp_output = cpp_proc.communicate()[0] cpp_lines = cpp_output.split('\n') for cpp_line in cpp_lines: if not cpp_line.strip(): continue cpp_line_parts = cpp_line.split(' ', 2) key = cpp_line_parts[1] if len(cpp_line_parts) >= 3: val = cpp_line_parts[2] else: val = '1' all_defines[key] = val return all_defines def WriteIncludePaths(out, eclipse_langs, include_dirs): """Write the includes section of a CDT settings export file.""" out.write(' <section name="org.eclipse.cdt.internal.ui.wizards.' 
\ 'settingswizards.IncludePaths">\n') out.write(' <language name="holder for library settings"></language>\n') for lang in eclipse_langs: out.write(' <language name="%s">\n' % lang) for include_dir in include_dirs: out.write(' <includepath workspace_path="false">%s</includepath>\n' % include_dir) out.write(' </language>\n') out.write(' </section>\n') def WriteMacros(out, eclipse_langs, defines): """Write the macros section of a CDT settings export file.""" out.write(' <section name="org.eclipse.cdt.internal.ui.wizards.' \ 'settingswizards.Macros">\n') out.write(' <language name="holder for library settings"></language>\n') for lang in eclipse_langs: out.write(' <language name="%s">\n' % lang) for key in sorted(defines.iterkeys()): out.write(' <macro><name>%s</name><value>%s</value></macro>\n' % (escape(key), escape(defines[key]))) out.write(' </language>\n') out.write(' </section>\n') def GenerateOutputForConfig(target_list, target_dicts, data, params, config_name): options = params['options'] generator_flags = params.get('generator_flags', {}) # build_dir: relative path from source root to our output files. # e.g. "out/Debug" build_dir = os.path.join(generator_flags.get('output_dir', 'out'), config_name) toplevel_build = os.path.join(options.toplevel_dir, build_dir) # Ninja uses out/Debug/gen while make uses out/Debug/obj/gen as the # SHARED_INTERMEDIATE_DIR. Include both possible locations. 
shared_intermediate_dirs = [os.path.join(toplevel_build, 'obj', 'gen'), os.path.join(toplevel_build, 'gen')] GenerateCdtSettingsFile(target_list, target_dicts, data, params, config_name, os.path.join(toplevel_build, 'eclipse-cdt-settings.xml'), options, shared_intermediate_dirs) GenerateClasspathFile(target_list, target_dicts, options.toplevel_dir, toplevel_build, os.path.join(toplevel_build, 'eclipse-classpath.xml')) def GenerateCdtSettingsFile(target_list, target_dicts, data, params, config_name, out_name, options, shared_intermediate_dirs): gyp.common.EnsureDirExists(out_name) with open(out_name, 'w') as out: out.write('<?xml version="1.0" encoding="UTF-8"?>\n') out.write('<cdtprojectproperties>\n') eclipse_langs = ['C++ Source File', 'C Source File', 'Assembly Source File', 'GNU C++', 'GNU C', 'Assembly'] compiler_path = GetCompilerPath(target_list, data, options) include_dirs = GetAllIncludeDirectories(target_list, target_dicts, shared_intermediate_dirs, config_name, params, compiler_path) WriteIncludePaths(out, eclipse_langs, include_dirs) defines = GetAllDefines(target_list, target_dicts, data, config_name, params, compiler_path) WriteMacros(out, eclipse_langs, defines) out.write('</cdtprojectproperties>\n') def GenerateClasspathFile(target_list, target_dicts, toplevel_dir, toplevel_build, out_name): '''Generates a classpath file suitable for symbol navigation and code completion of Java code (such as in Android projects) by finding all .java and .jar files used as action inputs.''' gyp.common.EnsureDirExists(out_name) result = ET.Element('classpath') def AddElements(kind, paths): # First, we need to normalize the paths so they are all relative to the # toplevel dir. 
rel_paths = set() for path in paths: if os.path.isabs(path): rel_paths.add(os.path.relpath(path, toplevel_dir)) else: rel_paths.add(path) for path in sorted(rel_paths): entry_element = ET.SubElement(result, 'classpathentry') entry_element.set('kind', kind) entry_element.set('path', path) AddElements('lib', GetJavaJars(target_list, target_dicts, toplevel_dir)) AddElements('src', GetJavaSourceDirs(target_list, target_dicts, toplevel_dir)) # Include the standard JRE container and a dummy out folder AddElements('con', ['org.eclipse.jdt.launching.JRE_CONTAINER']) # Include a dummy out folder so that Eclipse doesn't use the default /bin # folder in the root of the project. AddElements('output', [os.path.join(toplevel_build, '.eclipse-java-build')]) ET.ElementTree(result).write(out_name) def GetJavaJars(target_list, target_dicts, toplevel_dir): '''Generates a sequence of all .jars used as inputs.''' for target_name in target_list: target = target_dicts[target_name] for action in target.get('actions', []): for input_ in action['inputs']: if os.path.splitext(input_)[1] == '.jar' and not input_.startswith('$'): if os.path.isabs(input_): yield input_ else: yield os.path.join(os.path.dirname(target_name), input_) def GetJavaSourceDirs(target_list, target_dicts, toplevel_dir): '''Generates a sequence of all likely java package root directories.''' for target_name in target_list: target = target_dicts[target_name] for action in target.get('actions', []): for input_ in action['inputs']: if (os.path.splitext(input_)[1] == '.java' and not input_.startswith('$')): dir_ = os.path.dirname(os.path.join(os.path.dirname(target_name), input_)) # If there is a parent 'src' or 'java' folder, navigate up to it - # these are canonical package root names in Chromium. This will # break if 'src' or 'java' exists in the package structure. This # could be further improved by inspecting the java file for the # package name if this proves to be too fragile in practice. 
parent_search = dir_ while os.path.basename(parent_search) not in ['src', 'java']: parent_search, _ = os.path.split(parent_search) if not parent_search or parent_search == toplevel_dir: # Didn't find a known root, just return the original path yield dir_ break else: yield parent_search def GenerateOutput(target_list, target_dicts, data, params): """Generate an XML settings file that can be imported into a CDT project.""" if params['options'].generator_output: raise NotImplementedError("--generator_output not implemented for eclipse") user_config = params.get('generator_flags', {}).get('config', None) if user_config: GenerateOutputForConfig(target_list, target_dicts, data, params, user_config) else: config_names = target_dicts[target_list[0]]['configurations'].keys() for config_name in config_names: GenerateOutputForConfig(target_list, target_dicts, data, params, config_name)
mit
mollstam/UnrealPy
UnrealPyEmbed/Development/Python/2015.08.07-Python2710-x64-Source-vs2015/Python27/Source/boto-2.38.0/tests/unit/awslambda/__init__.py
586
1123
# Copyright (c) 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. #
mit
prateekgupta3991/justforlearn
languages/pt.py
42
4720
# coding: utf8 { '"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"update" é uma expressão opcional como "field1=\'newvalue\'". Não pode actualizar ou eliminar os resultados de um JOIN', '%Y-%m-%d': '%Y-%m-%d', '%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S', '%s rows deleted': '%s linhas eliminadas', '%s rows updated': '%s linhas actualizadas', 'About': 'About', 'Author Reference Auth User': 'Author Reference Auth User', 'Author Reference Auth User.username': 'Author Reference Auth User.username', 'Available databases and tables': 'bases de dados e tabelas disponíveis', 'Cannot be empty': 'não pode ser vazio', 'Category Create': 'Category Create', 'Category Select': 'Category Select', 'Check to delete': 'seleccione para eliminar', 'Comment Create': 'Comment Create', 'Comment Select': 'Comment Select', 'Content': 'Content', 'Controller': 'Controlador', 'Copyright': 'Direitos de cópia', 'Created By': 'Created By', 'Created On': 'Created On', 'Current request': 'pedido currente', 'Current response': 'resposta currente', 'Current session': 'sessão currente', 'DB Model': 'Modelo de BD', 'Database': 'Base de dados', 'Delete:': 'Eliminar:', 'Edit': 'Editar', 'Edit This App': 'Edite esta aplicação', 'Edit current record': 'Edição de registo currente', 'Email': 'Email', 'First Name': 'First Name', 'For %s #%s': 'For %s #%s', 'Hello World': 'Olá Mundo', 'Import/Export': 'Importar/Exportar', 'Index': 'Índice', 'Internal State': 'Estado interno', 'Invalid Query': 'Consulta Inválida', 'Last Name': 'Last Name', 'Layout': 'Esboço', 'Main Menu': 'Menu Principal', 'Menu Model': 'Menu do Modelo', 'Modified By': 'Modified By', 'Modified On': 'Modified On', 'Name': 'Name', 'New Record': 'Novo Registo', 'No Data': 'No Data', 'No databases in this application': 'Não há bases de dados nesta aplicação', 'Password': 'Password', 'Post Create': 'Post Create', 'Post Select': 'Post Select', 'Powered by': 'Suportado por', 'Query:': 
'Interrogação:', 'Replyto Reference Post': 'Replyto Reference Post', 'Rows in table': 'Linhas numa tabela', 'Rows selected': 'Linhas seleccionadas', 'Stylesheet': 'Folha de estilo', 'Sure you want to delete this object?': 'Tem a certeza que deseja eliminar este objecto?', 'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': 'A "query" é uma condição do tipo "db.table1.field1==\'value\'". Algo como "db.table1.field1==db.table2.field2" resultaria num SQL JOIN.', 'Title': 'Title', 'Update:': 'Actualização:', 'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Utilize (...)&(...) para AND, (...)|(...) para OR, e ~(...) para NOT para construir interrogações mais complexas.', 'Username': 'Username', 'View': 'Vista', 'Welcome %s': 'Bem-vindo(a) %s', 'Welcome to Gluonization': 'Bem vindo ao Web2py', 'Welcome to web2py': 'Bem-vindo(a) ao web2py', 'When': 'When', 'appadmin is disabled because insecure channel': 'appadmin está desactivada pois o canal é inseguro', 'cache': 'cache', 'change password': 'alterar palavra-chave', 'Online examples': 'Exemplos online', 'Administrative interface': 'Painel administrativo', 'create new category': 'create new category', 'create new comment': 'create new comment', 'create new post': 'create new post', 'customize me!': 'Personaliza-me!', 'data uploaded': 'informação enviada', 'database': 'base de dados', 'database %s select': 'selecção de base de dados %s', 'db': 'bd', 'design': 'design', 'done!': 'concluído!', 'edit category': 'edit category', 'edit comment': 'edit comment', 'edit post': 'edit post', 'edit profile': 'Editar perfil', 'export as csv file': 'exportar como ficheiro csv', 'insert new': 'inserir novo', 'insert new %s': 'inserir novo %s', 'invalid request': 'Pedido Inválido', 'login': 'login', 'logout': 'logout', 'new record inserted': 'novo registo inserido', 'next 100 rows': 'próximas 100 
linhas', 'or import from csv file': 'ou importe a partir de ficheiro csv', 'previous 100 rows': '100 linhas anteriores', 'record': 'registo', 'record does not exist': 'registo inexistente', 'record id': 'id de registo', 'register': 'register', 'search category': 'search category', 'search comment': 'search comment', 'search post': 'search post', 'select category': 'select category', 'select comment': 'select comment', 'select post': 'select post', 'selected': 'seleccionado(s)', 'show category': 'show category', 'show comment': 'show comment', 'show post': 'show post', 'state': 'estado', 'table': 'tabela', 'unable to parse csv file': 'não foi possível carregar ficheiro csv', }
mit
foobacca/django-cms
cms/plugins/file/migrations/0001_initial.py
11
3123
# -*- coding: utf-8 -*- import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding model 'File' db.create_table('cmsplugin_file', ( ('cmsplugin_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['cms.CMSPlugin'], unique=True, primary_key=True)), ('file', self.gf('django.db.models.fields.files.FileField')(max_length=100)), ('title', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)), )) db.send_create_signal('file', ['File']) def backwards(self, orm): # Deleting model 'File' db.delete_table('cmsplugin_file') models = { 'cms.cmsplugin': { 'Meta': {'object_name': 'CMSPlugin'}, 'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), 'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}), 'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}), 'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}), 'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}), 'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}), 'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}), 'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}), 'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}), 'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}) }, 'cms.placeholder': { 'Meta': {'object_name': 'Placeholder'}, 'default_width': 
('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'slot': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}) }, 'file.file': { 'Meta': {'object_name': 'File', 'db_table': "'cmsplugin_file'", '_ormbases': ['cms.CMSPlugin']}, 'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}), 'file': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}), 'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}) } } complete_apps = ['file']
bsd-3-clause
shaunbrady/boto
boto/cloudfront/identity.py
170
4483
# Copyright (c) 2006-2009 Mitch Garnaat http://garnaat.org/ # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. 
import uuid


class OriginAccessIdentity(object):
    """A CloudFront origin access identity (OAI) together with its ETag.

    Instances are populated by boto's SAX response handler through the
    ``startElement``/``endElement`` callbacks while parsing CloudFront
    API responses.
    """

    def __init__(self, connection=None, config=None, id='',
                 s3_user_id='', comment=''):
        self.connection = connection
        self.config = config
        self.id = id
        self.s3_user_id = s3_user_id
        self.comment = comment
        # ETag of the most recently fetched/stored config; CloudFront
        # requires it for conditional updates and deletes.
        self.etag = None

    def startElement(self, name, attrs, connection):
        # Delegate parsing of the nested config element to a fresh
        # OriginAccessIdentityConfig instance; all other elements are
        # handled by endElement.
        if name != 'CloudFrontOriginAccessIdentityConfig':
            return None
        self.config = OriginAccessIdentityConfig()
        return self.config

    def endElement(self, name, value, connection):
        if name == 'Id':
            self.id = value
        elif name == 'S3CanonicalUserId':
            self.s3_user_id = value
        elif name == 'Comment':
            self.comment = value
        else:
            # Unknown tags become plain attributes.
            setattr(self, name, value)

    def update(self, comment=None):
        """Store a copy of the current config, optionally re-commented."""
        replacement = OriginAccessIdentityConfig(self.connection,
                                                 self.config.caller_reference,
                                                 self.config.comment)
        if comment is not None:
            replacement.comment = comment
        self.etag = self.connection.set_origin_identity_config(self.id,
                                                               self.etag,
                                                               replacement)
        self.config = replacement

    def delete(self):
        return self.connection.delete_origin_access_identity(self.id,
                                                             self.etag)

    def uri(self):
        """Return the origin URI string used to reference this OAI."""
        return 'origin-access-identity/cloudfront/%s' % self.id


class OriginAccessIdentityConfig(object):
    """The mutable configuration (caller reference + comment) of an OAI."""

    def __init__(self, connection=None, caller_reference='', comment=''):
        self.connection = connection
        # The caller reference uniquely identifies a creation request;
        # generate one when the caller did not supply it.
        if caller_reference:
            self.caller_reference = caller_reference
        else:
            self.caller_reference = str(uuid.uuid4())
        self.comment = comment

    def to_xml(self):
        """Serialize to the CloudFront 2009-09-09 XML representation."""
        pieces = [
            '<?xml version="1.0" encoding="UTF-8"?>\n',
            '<CloudFrontOriginAccessIdentityConfig xmlns="http://cloudfront.amazonaws.com/doc/2009-09-09/">\n',
            ' <CallerReference>%s</CallerReference>\n' % self.caller_reference,
        ]
        # Comment is optional in the schema; omit the element when empty.
        if self.comment:
            pieces.append(' <Comment>%s</Comment>\n' % self.comment)
        pieces.append('</CloudFrontOriginAccessIdentityConfig>\n')
        return ''.join(pieces)

    def startElement(self, name, attrs, connection):
        return None

    def endElement(self, name, value, connection):
        if name == 'Comment':
            self.comment = value
        elif name == 'CallerReference':
            self.caller_reference = value
        else:
            setattr(self, name, value)


class OriginAccessIdentitySummary(object):
    """One entry from a CloudFront list-origin-access-identities response."""

    def __init__(self, connection=None, id='', s3_user_id='', comment=''):
        self.connection = connection
        self.id = id
        self.s3_user_id = s3_user_id
        self.comment = comment
        self.etag = None

    def startElement(self, name, attrs, connection):
        return None

    def endElement(self, name, value, connection):
        if name == 'Id':
            self.id = value
        elif name == 'S3CanonicalUserId':
            self.s3_user_id = value
        elif name == 'Comment':
            self.comment = value
        else:
            setattr(self, name, value)

    def get_origin_access_identity(self):
        """Fetch the full OriginAccessIdentity for this summary entry."""
        return self.connection.get_origin_access_identity_info(self.id)
mit
giannitedesco/ircnukes
nukes/player.py
1
3727
# Copyright (c) 2007 Gianni Tedesco
# Released under the terms of the GNU GPL v2 or later
#
# Player class

from globals import *

class player:
    "A game participant: a hand of cards, a queued card stack, a population count and a life-cycle state."

    def __init__(self, name):
        self.name = name
        self.hand = []
        self.population = 0
        self.card_stack = []
        self.weapon = None
        self.game = None
        self.state = PLAYER_STATE_ALIVE
        self.missturns = 0

    def __str__(self):
        return "player(%s)" % self.name

    def __repr__(self):
        return "player('%s')" % self.name

    def __card_by_idx(self, idx):
        "Remove and return the hand card at position idx."
        if idx < 0 or idx >= len(self.hand):
            raise IllegalMoveError(self.game, self, "Bad Card Index: %d" % idx)
        assert len(self.card_stack) <= CARD_STACK_LEN
        return self.hand.pop(idx)

    def __card_by_name(self, name):
        "Remove and return the first hand card whose str() or repr() matches name."
        wanted = name.lower()
        for pos in range(len(self.hand)):
            card = self.hand[pos]
            if repr(card).lower() == wanted or str(card).lower() == wanted:
                return self.hand.pop(pos)
        raise IllegalMoveError(self.game, self, "Card %s not found" % name)

    def kill(self, suicide=False):
        "Take this player out of the game, or into final retaliation."
        if self.state == PLAYER_STATE_DEAD:
            return
        if self.game == None:
            return
        # A player killed (not by suicide) while the game is at war gets
        # a final-retaliation turn instead of dying outright.
        if suicide == False and self.game.state() == GAME_STATE_WAR:
            self.state = PLAYER_STATE_RETALIATE
        else:
            self.state = PLAYER_STATE_DEAD
        self.cards_to_hand()
        self.game.player_dead(self)

    def cards_to_hand(self):
        "Return every queued card, and any held weapon, to the hand."
        self.hand.extend(self.card_stack)
        self.card_stack = []
        if self.weapon != None:
            self.hand.append(self.weapon)
        self.weapon = None

    def flip_card(self, tgt):
        "Flip card to take a turn in the game"
        if self.state != PLAYER_STATE_ALIVE:
            raise IllegalMoveError(self.game, self, "You're not alive!")
        if self.game.cur != self:
            raise IllegalMoveError(self.game, self, "Not your turn")
        if len(self.card_stack) != CARD_STACK_LEN:
            raise IllegalMoveError(self.game, self, "Cards not queued")
        card = self.card_stack.pop(0)
        try:
            card.dequeue(self.game, self, tgt)
        except:
            # Restore the card so a failed play does not lose it.
            self.card_stack.insert(0, card)
            raise

    def use_card(self, arg, tgt):
        "Play a card, by index or by name, during final retaliation."
        if self.game.cur != self:
            raise IllegalMoveError(self.game, self, "Not your turn")
        if self.state != PLAYER_STATE_RETALIATE:
            raise IllegalMoveError(self.game, self,
                                   "It's not final retaliation!")
        try:
            card = self.__card_by_idx(int(arg))
        except ValueError:
            card = self.__card_by_name(arg)
        try:
            card.dequeue(self.game, self, tgt)
        except:
            # Restore the card so a failed play does not lose it.
            self.hand.append(card)
            raise

    def queue_card(self, arg):
        "Move a card, selected by index or by name, on to the play queue."
        if len(self.card_stack) == CARD_STACK_LEN:
            raise IllegalMoveError(self.game, self, "Queue Full")
        if self.state != PLAYER_STATE_ALIVE:
            raise IllegalMoveError(self.game, self,
                                   "Cannot queue cards when dead")
        try:
            card = self.__card_by_idx(int(arg))
        except ValueError:
            card = self.__card_by_name(arg)
        self.card_stack.append(card)
        if len(self.card_stack) == CARD_STACK_LEN and \
           self.game.state() == GAME_STATE_INIT:
            self.game.game_msg("%s is ready" % self.name)
        return card

    def pwn(self, pwnage):
        "Decrement population"
        lost = min(self.population, pwnage)
        self.population = self.population - lost
        if self.population == 0:
            self.kill()

    def transfer_population(self, converts, tgt):
        "Transfer population to another player"
        if self.state != PLAYER_STATE_ALIVE:
            raise IllegalMoveError(self.game, self,
                                   "Cannot transfer population from dead enemy")
        moved = min(self.population, converts)
        self.population = self.population - moved
        tgt.population = tgt.population + moved
        if self.population == 0:
            self.kill()
gpl-2.0
takeshineshiro/django
django/contrib/gis/geos/base.py
437
1280
from ctypes import c_void_p

from django.contrib.gis.geos.error import GEOSException


class GEOSBase(object):
    """
    Base object for GEOS objects that has a pointer access property
    that controls access to the underlying C pointer.
    """
    # The underlying C pointer, NULL until explicitly assigned.
    _ptr = None

    # Pointer type that assignments to ``ptr`` must be compatible with.
    ptr_type = c_void_p

    def _get_ptr(self):
        # Refuse to hand out a NULL pointer -- passing NULL into the
        # GEOS routines would be very bad.
        if not self._ptr:
            raise GEOSException('NULL GEOS %s pointer encountered.' %
                                self.__class__.__name__)
        return self._ptr

    def _set_ptr(self, ptr):
        # Accept only None (NULL) or a pointer of the compatible type.
        if ptr is not None and not isinstance(ptr, self.ptr_type):
            raise TypeError('Incompatible pointer type')
        self._ptr = ptr

    # Reading ``ptr`` raises on NULL, which keeps the C library from
    # ever being handed an invalid memory location.
    ptr = property(_get_ptr, _set_ptr)
bsd-3-clause
LaunchlabAU/launchlab-django-utils
tests/test_formatters.py
1
1359
from decimal import Decimal

from django.test import TestCase

from launchlab_django_utils.formatters.currency import currency_display


class CurrencyFormatterTestCase(TestCase):
    """Exercise currency_display() over ints, Decimals and option flags."""

    def test_nonzero_cents_int(self):
        # 1234 (cents) renders with the cents part kept.
        self.assertEqual(currency_display(1234), '$12.34')

    def test_nonzero_cents_decimal(self):
        self.assertEqual(currency_display(Decimal('1234')), '$12.34')

    def test_zero_cents_int_show_complete_true(self):
        # show_complete=True keeps a trailing ".00".
        self.assertEqual(currency_display(1400, show_complete=True), '$14.00')

    def test_zero_cents_int_show_complete_false(self):
        # show_complete=False drops a zero cents part entirely.
        self.assertEqual(currency_display(1400, show_complete=False), '$14')

    def test_zero_cents_decimal_show_complete_true(self):
        self.assertEqual(
            currency_display(Decimal('1400'), show_complete=True), '$14.00')

    def test_zero_cents_decimal_show_complete_false(self):
        self.assertEqual(
            currency_display(Decimal('1400'), show_complete=False), '$14')

    def test_include_sign_true(self):
        self.assertEqual(currency_display(123, include_sign=True), '$1.23')

    def test_include_sign_false(self):
        # include_sign=False omits the leading dollar sign.
        self.assertEqual(currency_display(123, include_sign=False), '1.23')
mit
calee88/ParlAI
parlai/agents/remote_agent/agents.py
2
6850
# Copyright (c) 2017-present, Facebook, Inc. # All rights reserved. # This source code is licensed under the BSD-style license found in the # LICENSE file in the root directory of this source tree. An additional grant # of patent rights can be found in the PATENTS file in the same directory. from parlai.core.agents import Agent, create_agent_from_shared import copy import numpy as np import json import subprocess import zmq class RemoteAgent(Agent): """Agent which connects over ZMQ to a paired agent. The other agent is launched using the command line options set via `add_cmdline_args`.""" @staticmethod def add_cmdline_args(argparser): argparser.add_arg( '--port', default=5555, help='first port to connect to for remote agents') argparser.add_arg( '--remote-cmd', required=True, help='command to launch paired agent') argparser.add_arg( '--remote-args', help='optional arguments to pass to paired agent') def __init__(self, opt, shared=None): """Runs subprocess command to set up remote partner. Only run the subprocess command once: if using multiple threads, tell the partner how many paired agents to set up so that they can manage the multithreading effectively in their environment. (We don't run subprocess.Popen for each thread.) """ if shared and 'port' in shared: self.port = shared['port'] self.opt = copy.deepcopy(shared['opt']) else: if 'port' in opt: self.port = opt['port'] else: raise RuntimeError('You need to run RemoteAgent.' + 'add_cmdline_args(argparser) before ' + 'calling this class to set up options.') self.process = subprocess.Popen( '{cmd} {port} {numthreads} {args}'.format( cmd=opt['remote_cmd'], port=opt['port'], numthreads=opt['numthreads'], args=opt.get('remote_args', '') ).split() ) self.opt = copy.deepcopy(opt) self.connect() def connect(self): """Connect to ZMQ socket as client. 
Requires package zmq.""" context = zmq.Context() self.socket = context.socket(zmq.REQ) self.socket.setsockopt(zmq.LINGER, 1) self.socket.connect('tcp://localhost:{0}'.format(self.port)) print('python thread connected to ' + 'tcp://localhost:{0}'.format(self.port)) def act(self): """Send message to paired agent listening over zmq.""" if 'image' in self.observation: # can't json serialize images self.observation.pop('image', None) text = json.dumps(self.observation) self.socket.send_unicode(text) reply = self.socket.recv_unicode() return json.loads(reply) def share(self): """Increments port to use when using remote agents in Hogwild mode.""" if not hasattr(self, 'lastport'): self.lastport = self.port shared = {} shared['port'] = self.lastport + 1 shared['class'] = type(self) shared['opt'] = self.opt self.lastport += 1 return shared def shutdown(self): """Shut down paired listener with <END> signal.""" if hasattr(self, 'socket'): try: self.socket.send_unicode('<END>', zmq.NOBLOCK) except zmq.error.ZMQError: # may need to listen first try: self.socket.recv_unicode(zmq.NOBLOCK) self.socket.send_unicode('<END>', zmq.NOBLOCK) except zmq.error.ZMQError: # paired process is probably dead already pass if hasattr(self, 'process'): # try to let the subprocess clean up, but don't wait too long try: self.process.communicate(timeout=1) except subprocess.TimeoutExpired: self.process.kill() class ParsedRemoteAgent(RemoteAgent): """Same as the regular remote agent, except that this agent converts all text into vectors using its dictionary before sending them. 
""" def __init__(self, opt, shared=None): if 'dictionary_agent' in shared: # use this first--maybe be overriding an original dictionary self.dict = create_agent_from_shared(shared['dictionary_agent']) elif 'dictionary' in shared: # otherwise use this dictionary self.dict = shared['dictionary'] else: raise RuntimeError('ParsedRemoteAgent needs a dictionary to parse' + ' text with--pass in a dictionary using shared' + '["dictionary"] or pass in the arguments to ' + 'instantiate one using shared["dictionary_args' + '"] = (class, options, shared).') super().__init__(opt, shared) def act(self): parsed = {} for k, v in self.observation.items(): if type(v) == str: # We split on newlines because we don't treat them as charactes # in the default dictionary but our receiving agent might want # to know where the newlines are in the text block. parsed[k] = self.parse(v, split_lines=True) else: # not a string, see if it's an iterable of strings try: # if there are newlines in the label, it's part of the label parsed[k] = [self.parse(s) for s in v] except TypeError: # oops, it's not. just pass it on. parsed[k] = v super().observe(parsed) reply = super().act() unparsed = {} for k, v in reply.items(): # TODO(ahm): this fails if remote agent sends anything other than # vectors, which means that pretty much only .text will work unparsed[k] = self.parse(v) return unparsed def parse(self, s, split_lines=False): """Returns a parsed (list of indices) version of a string s. Optionally return list of vectors for each line in the string in case you need to know where those are. """ if split_lines: return [self.dict.parse(line, vec_type=list) for line in s.split('\n')] else: return self.dict.parse(s, vec_type=list) def share(self): shared = super().share() shared['dictionary_agent'] = self.dict.share() return shared
bsd-3-clause
nharraud/invenio-upgrader
invenio_upgrader/upgrades/invenio_2013_09_02_new_bibARXIVPDF.py
5
1418
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2013 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.

import warnings

from invenio.legacy.dbquery import run_sql
from invenio.utils.text import wait_for_user

# Upgrade recipes this one must run after.
depends_on = ['invenio_release_1_1_0']


def info():
    """Return the one-line description of this upgrade recipe."""
    return "New pdfchecker (bibARXIVPDF) table"


def do_upgrade():
    """Create the bibARXIVPDF table used by the arXiv PDF checker.

    Idempotent: uses CREATE TABLE IF NOT EXISTS so re-running is safe.
    """
    run_sql("""CREATE TABLE IF NOT EXISTS bibARXIVPDF ( id_bibrec mediumint(8) unsigned NOT NULL, status ENUM('ok', 'missing') NOT NULL, date_harvested datetime NOT NULL, version tinyint(2) NOT NULL, PRIMARY KEY (id_bibrec), KEY status (status) ) ENGINE=MyISAM""")


def estimate():
    """Return the estimated running time of the upgrade, in seconds."""
    return 1
gpl-2.0
tareqalayan/ansible
lib/ansible/module_utils/facts/network/base.py
144
2402
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from ansible.module_utils.facts.collector import BaseFactCollector


class Network:
    """Generic base class for per-platform network fact gathering.

    Platform implementations subclass this and are expected to define:
    - interfaces: a list of interface names
    - interface_<name>: a dict of ipv4, ipv6 and mac address information

    Every subclass must also set ``platform``.
    """
    platform = 'Generic'

    # FIXME: remove load_on_init when we can
    def __init__(self, module, load_on_init=False):
        self.module = module

    # TODO: more or less abstract/NotImplemented
    def populate(self, collected_facts=None):
        # Base implementation gathers nothing; platforms override this.
        return {}


class NetworkCollector(BaseFactCollector):
    """Fact collector that delegates to the platform's Network class."""
    # MAYBE: we could try to build this based on the arch specific
    # implementation of Network() or its kin
    name = 'network'
    _fact_class = Network
    _fact_ids = set(['interfaces',
                     'default_ipv4',
                     'default_ipv6',
                     'all_ipv4_addresses',
                     'all_ipv6_addresses'])

    # Hex IPv6 scope identifiers mapped to human-readable scope names.
    IPV6_SCOPE = {'0': 'global',
                  '10': 'host',
                  '20': 'link',
                  '40': 'admin',
                  '50': 'site',
                  '80': 'organization'}

    def collect(self, module=None, collected_facts=None):
        # Without an AnsibleModule there is nothing to interrogate.
        if not module:
            return {}
        network_facts = self._fact_class(module)
        return network_facts.populate(collected_facts=collected_facts or {})
gpl-3.0
ReflexBow/ghost
tools/perf/scripts/python/net_dropmonitor.py
4235
1554
# Monitor the system for dropped packets and proudce a report of drop locations and counts import os import sys sys.path.append(os.environ['PERF_EXEC_PATH'] + \ '/scripts/python/Perf-Trace-Util/lib/Perf/Trace') from perf_trace_context import * from Core import * from Util import * drop_log = {} kallsyms = [] def get_kallsyms_table(): global kallsyms try: f = open("/proc/kallsyms", "r") linecount = 0 for line in f: linecount = linecount+1 f.seek(0) except: return j = 0 for line in f: loc = int(line.split()[0], 16) name = line.split()[2] j = j +1 if ((j % 100) == 0): print "\r" + str(j) + "/" + str(linecount), kallsyms.append({ 'loc': loc, 'name' : name}) print "\r" + str(j) + "/" + str(linecount) kallsyms.sort() return def get_sym(sloc): loc = int(sloc) for i in kallsyms: if (i['loc'] >= loc): return (i['name'], i['loc']-loc) return (None, 0) def print_drop_table(): print "%25s %25s %25s" % ("LOCATION", "OFFSET", "COUNT") for i in drop_log.keys(): (sym, off) = get_sym(i) if sym == None: sym = i print "%25s %25s %25s" % (sym, off, drop_log[i]) def trace_begin(): print "Starting trace (Ctrl-C to dump results)" def trace_end(): print "Gathering kallsyms data" get_kallsyms_table() print_drop_table() # called from perf, when it finds a correspoinding event def skb__kfree_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr, protocol, location): slocation = str(location) try: drop_log[slocation] = drop_log[slocation] + 1 except: drop_log[slocation] = 1
gpl-2.0
inonit/wagtail
wagtail/tests/customuser/migrations/0001_initial.py
4
3821
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('auth', '0001_initial'), ] operations = [ migrations.CreateModel( name='CustomUser', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('password', models.CharField(max_length=128, verbose_name='password')), ( 'last_login', models.DateTimeField(null=True, verbose_name='last login', blank=True) ), ('is_superuser', models.BooleanField( default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status' )), ('username', models.CharField(unique=True, max_length=100)), ('email', models.EmailField(max_length=255, blank=True)), ('is_staff', models.BooleanField(default=True)), ('is_active', models.BooleanField(default=True)), ('first_name', models.CharField(max_length=50, blank=True)), ('last_name', models.CharField(max_length=50, blank=True)), ( 'groups', models.ManyToManyField( related_query_name='user', related_name='user_set', to='auth.Group', blank=True, help_text=( "The groups this user belongs to. " "A user will get all permissions granted to each of their groups." 
), verbose_name='groups' ) ), ( 'user_permissions', models.ManyToManyField( to='auth.Permission', verbose_name='user permissions', help_text='Specific permissions for this user.', related_name='user_set', blank=True, related_query_name='user' ) ) ], options={ 'abstract': False, }, bases=(models.Model,), ), migrations.CreateModel( name='EmailUser', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('password', models.CharField(max_length=128, verbose_name='password')), ( 'last_login', models.DateTimeField(null=True, verbose_name='last login', blank=True) ), ('email', models.EmailField(unique=True, max_length=255)), ('is_staff', models.BooleanField(default=True)), ('is_active', models.BooleanField(default=True)), ('first_name', models.CharField(max_length=50, blank=True)), ('last_name', models.CharField(max_length=50, blank=True)), ('is_superuser', models.BooleanField(default=False)), ('groups', models.ManyToManyField(related_name='+', to='auth.Group', blank=True)), ('user_permissions', models.ManyToManyField(related_name='+', to='auth.Permission', blank=True)), ], options={ 'abstract': False, }, bases=(models.Model,), ), ]
bsd-3-clause
nilsFK/py-privatekonomi
py_privatekonomi/core/parsers/avanza_parser.py
1
1212
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import from __future__ import unicode_literals import py_privatekonomi.core.parser import re import csv from py_privatekonomi.core.parsers.csv_parser import CsvParser class AvanzaParser(py_privatekonomi.core.parser.Parser): def __init__(self): super(AvanzaParser, self).__init__("avanza") def parse(self, contents, options): # skip headers contents = contents[1:] subformatters = [ "transaction_transaction_date", "account_name", "transaction_type_name", "security_provider_name", "transaction_security_amount", "transaction_security_rate", "transaction_amount", "transaction_data_courtage", "currency_code", "transaction_data_ISIN" ] if options['filetype'] in ['csv', 'empty']: opts = { 'delimiter' : str(';'), 'quoting' : csv.QUOTE_NONE } rows = CsvParser().parse(contents, opts=opts) return (rows, subformatters) else: return (contents, subformatters)
mit
OmarIthawi/edx-platform
lms/djangoapps/dashboard/tests/test_support.py
33
4665
""" Tests for support dashboard """ from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase from django.test.client import Client from django.test.utils import override_settings from django.contrib.auth.models import Permission from shoppingcart.models import CertificateItem, Order from courseware.tests.tests import TEST_DATA_MONGO_MODULESTORE from student.models import CourseEnrollment from course_modes.models import CourseMode from student.tests.factories import UserFactory from xmodule.modulestore.tests.factories import CourseFactory import datetime @override_settings( MODULESTORE=TEST_DATA_MONGO_MODULESTORE ) class RefundTests(ModuleStoreTestCase): """ Tests for the manual refund page """ def setUp(self): self.course = CourseFactory.create( org='testorg', number='run1', display_name='refundable course' ) self.course_id = self.course.location.course_key self.client = Client() self.admin = UserFactory.create( username='test_admin', email='test_admin+support@edx.org', password='foo' ) self.admin.user_permissions.add(Permission.objects.get(codename='change_courseenrollment')) self.client.login(username=self.admin.username, password='foo') self.student = UserFactory.create( username='student', email='student+refund@edx.org' ) self.course_mode = CourseMode.objects.get_or_create(course_id=self.course_id, mode_slug='verified')[0] self.order = None self.form_pars = {'course_id': str(self.course_id), 'user': self.student.email} def tearDown(self): self.course_mode.delete() Order.objects.filter(user=self.student).delete() def _enroll(self, purchase=True): # pylint: disable=C0111 CourseEnrollment.enroll(self.student, self.course_id, self.course_mode.mode_slug) if purchase: self.order = Order.get_cart_for_user(self.student) CertificateItem.add_to_order(self.order, self.course_id, 1, self.course_mode.mode_slug) self.order.purchase() self.course_mode.expiration_datetime = datetime.datetime(1983, 4, 6) self.course_mode.save() def test_support_access(self): 
response = self.client.get('/support/') self.assertTrue(response.status_code, 200) self.assertContains(response, 'Manual Refund') response = self.client.get('/support/refund/') self.assertTrue(response.status_code, 200) # users without the permission can't access support self.admin.user_permissions.clear() response = self.client.get('/support/') self.assertTrue(response.status_code, 302) response = self.client.get('/support/refund/') self.assertTrue(response.status_code, 302) def test_bad_courseid(self): response = self.client.post('/support/refund/', {'course_id': 'foo', 'user': self.student.email}) self.assertContains(response, 'Invalid course id') def test_bad_user(self): response = self.client.post('/support/refund/', {'course_id': str(self.course_id), 'user': 'unknown@foo.com'}) self.assertContains(response, 'User not found') def test_not_refundable(self): self._enroll() self.course_mode.expiration_datetime = datetime.datetime(2033, 4, 6) self.course_mode.save() response = self.client.post('/support/refund/', self.form_pars) self.assertContains(response, 'not past the refund window') def test_no_order(self): self._enroll(purchase=False) response = self.client.post('/support/refund/', self.form_pars) self.assertContains(response, 'No order found for %s' % self.student.username) def test_valid_order(self): self._enroll() response = self.client.post('/support/refund/', self.form_pars) self.assertContains(response, "About to refund this order") self.assertContains(response, "enrolled") self.assertContains(response, "CertificateItem Status") def test_do_refund(self): self._enroll() pars = self.form_pars pars['confirmed'] = 'true' response = self.client.post('/support/refund/', pars) self.assertTrue(response.status_code, 302) response = self.client.get(response.get('location')) # pylint: disable=E1103 self.assertContains(response, "Unenrolled %s from" % self.student) self.assertContains(response, "Refunded 1 for order id") 
self.assertFalse(CourseEnrollment.is_enrolled(self.student, self.course_id))
agpl-3.0
kenshay/ImageScripter
ProgramData/SystemFiles/Python/Lib/site-packages/pylint/checkers/misc.py
4
3221
# Copyright (c) 2003-2016 LOGILAB S.A. (Paris, FRANCE).
# http://www.logilab.fr/ -- mailto:contact@logilab.fr
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING

"""Check source code is ascii only or has an encoding declaration (PEP 263)"""

# pylint: disable=W0511

import re

import six

from pylint.interfaces import IRawChecker
from pylint.checkers import BaseChecker

MSGS = {
    'W0511': ('%s', 'fixme',
              'Used when a warning note as FIXME or XXX is detected.'),
    'W0512': ('Cannot decode using encoding "%s", unexpected byte at position %d',
              'invalid-encoded-data',
              'Used when a source line cannot be decoded using the specified '
              'source file encoding.',
              {'maxversion': (3, 0)}),
}


class EncodingChecker(BaseChecker):
    """checks for:
    * warning notes in the code like FIXME, XXX
    * encoding issues.
    """
    __implements__ = IRawChecker

    # configuration section name
    name = 'miscellaneous'
    msgs = MSGS

    options = (('notes',
                {'type': 'csv', 'metavar': '<comma separated values>',
                 'default': ('FIXME', 'XXX', 'TODO'),
                 'help': ('List of note tags to take in consideration, '
                          'separated by a comma.')}),)

    def _check_note(self, notes, lineno, line):
        """Emit 'fixme' when *line* carries one of the configured note tags."""
        # Cheap substring scan before running the regular expression: on
        # pathologically long lines the regex is far too slow to apply
        # unconditionally.
        if not any(tag in line for tag in self.config.notes):
            return
        match = notes.search(line)
        if match:
            self.add_message('fixme', args=line[match.start(1):-1],
                             line=lineno)

    def _check_encoding(self, lineno, line, file_encoding):
        """Decode *line*; report 'invalid-encoded-data' and return None on failure."""
        try:
            return six.text_type(line, file_encoding)
        except UnicodeDecodeError as exc:
            self.add_message('invalid-encoded-data', line=lineno,
                             args=(file_encoding, exc.args[2]))

    def process_module(self, module):
        """inspect the source file to find encoding problem or fixmes like
        notes
        """
        if self.config.notes:
            notes = re.compile(
                r'.*?#\s*(%s)(:*\s*.+)' % "|".join(self.config.notes))
        else:
            notes = None
        encoding = module.file_encoding if module.file_encoding else 'ascii'
        with module.stream() as stream:
            for lineno, raw_line in enumerate(stream):
                decoded = self._check_encoding(lineno + 1, raw_line, encoding)
                # Skip note-checking on lines that failed to decode.
                if decoded is not None and notes:
                    self._check_note(notes, lineno + 1, decoded)


def register(linter):
    """required method to auto register this checker"""
    linter.register_checker(EncodingChecker(linter))
gpl-3.0
rooshilp/CMPUT410Lab6
virt_env/virt1/lib/python2.7/site-packages/django/contrib/gis/gdal/prototypes/generation.py
100
3976
"""
 This module contains functions that generate ctypes prototypes for the
 GDAL routines.
"""
from ctypes import c_char_p, c_double, c_int, c_void_p
from django.contrib.gis.gdal.prototypes.errcheck import (
    check_arg_errcode, check_errcode, check_geom, check_geom_offset,
    check_pointer, check_srs, check_str_arg, check_string, check_const_string)


class gdal_char_p(c_char_p):
    # Distinct c_char_p subclass: lets the error-checking routine in
    # string_output() recognise GDAL-allocated strings whose memory it must
    # free (see the str_result branch below).
    pass


def double_output(func, argtypes, errcheck=False, strarg=False):
    "Generates a ctypes function that returns a double value."
    func.argtypes = argtypes
    func.restype = c_double
    if errcheck:
        func.errcheck = check_arg_errcode
    if strarg:
        # NOTE(review): if both flags are passed, strarg silently replaces
        # the errcheck handler set above -- callers appear to use one or the
        # other, but verify before relying on both.
        func.errcheck = check_str_arg
    return func


def geom_output(func, argtypes, offset=None):
    """
    Generates a function that returns a Geometry either by reference
    or directly (if the return_geom keyword is set to True).
    """
    # Setting the argument types
    func.argtypes = argtypes

    if not offset:
        # When a geometry pointer is directly returned.
        func.restype = c_void_p
        func.errcheck = check_geom
    else:
        # Error code returned, geometry is returned by-reference.
        func.restype = c_int

        # Closure binds the by-reference argument offset for this prototype.
        def geomerrcheck(result, func, cargs):
            return check_geom_offset(result, func, cargs, offset)
        func.errcheck = geomerrcheck

    return func


def int_output(func, argtypes):
    "Generates a ctypes function that returns an integer value."
    func.argtypes = argtypes
    func.restype = c_int
    return func


def srs_output(func, argtypes):
    """
    Generates a ctypes prototype for the given function with
    the given C arguments that returns a pointer to an OGR
    Spatial Reference System.
    """
    func.argtypes = argtypes
    func.restype = c_void_p
    func.errcheck = check_srs
    return func


def const_string_output(func, argtypes, offset=None, decoding=None):
    # Prototype for routines returning a const string: directly (c_char_p)
    # or, when ``offset`` is given, by-reference alongside an int status
    # code. ``decoding`` optionally decodes the raw bytes result.
    func.argtypes = argtypes
    if offset:
        func.restype = c_int
    else:
        func.restype = c_char_p

    def _check_const(result, func, cargs):
        res = check_const_string(result, func, cargs, offset=offset)
        if res and decoding:
            res = res.decode(decoding)
        return res
    func.errcheck = _check_const

    return func


def string_output(func, argtypes, offset=-1, str_result=False, decoding=None):
    """
    Generates a ctypes prototype for the given function with the
    given argument types that returns a string from a GDAL pointer.
    The `const` flag indicates whether the allocated pointer should be
    freed via the GDAL library routine VSIFree -- but only applies only when
    `str_result` is True.
    """
    func.argtypes = argtypes
    if str_result:
        # Use subclass of c_char_p so the error checking routine
        # can free the memory at the pointer's address.
        func.restype = gdal_char_p
    else:
        # Error code is returned
        func.restype = c_int

    # Dynamically defining our error-checking function with the
    # given offset.
    def _check_str(result, func, cargs):
        res = check_string(result, func, cargs,
                           offset=offset, str_result=str_result)
        if res and decoding:
            res = res.decode(decoding)
        return res
    func.errcheck = _check_str

    return func


def void_output(func, argtypes, errcheck=True):
    """
    For functions that don't only return an error code that needs
    to be examined.
    """
    if argtypes:
        func.argtypes = argtypes
    if errcheck:
        # `errcheck` keyword may be set to False for routines that
        # return void, rather than a status code.
        func.restype = c_int
        func.errcheck = check_errcode
    else:
        func.restype = None

    return func


def voidptr_output(func, argtypes):
    "For functions that return c_void_p."
    func.argtypes = argtypes
    func.restype = c_void_p
    func.errcheck = check_pointer
    return func
apache-2.0
abhishek-ch/hue
desktop/core/ext-py/Django-1.6.10/tests/generic_relations_regress/models.py
40
5583
from django.contrib.contenttypes import generic
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.db.models.deletion import ProtectedError

# Public names of this test-models module.
__all__ = ('Link', 'Place', 'Restaurant', 'Person', 'Address',
           'CharLink', 'TextLink', 'OddRelation1', 'OddRelation2',
           'Contact', 'Organization', 'Note', 'Company')


@python_2_unicode_compatible
class Link(models.Model):
    # Standard generic foreign key: (content_type, object_id) pair with an
    # integer object_id.
    content_type = models.ForeignKey(ContentType)
    object_id = models.PositiveIntegerField()
    content_object = generic.GenericForeignKey()

    def __str__(self):
        return "Link to %s id=%s" % (self.content_type, self.object_id)


@python_2_unicode_compatible
class Place(models.Model):
    name = models.CharField(max_length=100)
    links = generic.GenericRelation(Link)

    def __str__(self):
        return "Place: %s" % self.name


@python_2_unicode_compatible
class Restaurant(Place):
    # Multi-table inheritance child of Place.
    def __str__(self):
        return "Restaurant: %s" % self.name


@python_2_unicode_compatible
class Address(models.Model):
    street = models.CharField(max_length=80)
    city = models.CharField(max_length=50)
    state = models.CharField(max_length=2)
    zipcode = models.CharField(max_length=5)
    content_type = models.ForeignKey(ContentType)
    object_id = models.PositiveIntegerField()
    content_object = generic.GenericForeignKey()

    def __str__(self):
        return '%s %s, %s %s' % (self.street, self.city, self.state, self.zipcode)


@python_2_unicode_compatible
class Person(models.Model):
    # Non-AutoField primary key on the related side of a GenericRelation.
    account = models.IntegerField(primary_key=True)
    name = models.CharField(max_length=128)
    addresses = generic.GenericRelation(Address)

    def __str__(self):
        return self.name


class CharLink(models.Model):
    # Like Link, but object_id stored as a CharField.
    content_type = models.ForeignKey(ContentType)
    object_id = models.CharField(max_length=100)
    content_object = generic.GenericForeignKey()


class TextLink(models.Model):
    # Like Link, but object_id stored as a TextField.
    content_type = models.ForeignKey(ContentType)
    object_id = models.TextField()
    content_object = generic.GenericForeignKey()


class OddRelation1(models.Model):
    name = models.CharField(max_length=100)
    clinks = generic.GenericRelation(CharLink)


class OddRelation2(models.Model):
    name = models.CharField(max_length=100)
    tlinks = generic.GenericRelation(TextLink)


# models for test_q_object_or:
class Note(models.Model):
    content_type = models.ForeignKey(ContentType)
    object_id = models.PositiveIntegerField()
    content_object = generic.GenericForeignKey()
    note = models.TextField()


class Contact(models.Model):
    notes = generic.GenericRelation(Note)


class Organization(models.Model):
    name = models.CharField(max_length=255)
    contacts = models.ManyToManyField(Contact, related_name='organizations')


@python_2_unicode_compatible
class Company(models.Model):
    name = models.CharField(max_length=100)
    links = generic.GenericRelation(Link)

    def __str__(self):
        return "Company: %s" % self.name


# For testing #13085 fix, we also use Note model defined above
class Developer(models.Model):
    name = models.CharField(max_length=15)


@python_2_unicode_compatible
class Team(models.Model):
    name = models.CharField(max_length=15)
    members = models.ManyToManyField(Developer)

    def __str__(self):
        return "%s team" % self.name

    def __len__(self):
        # len(team) == number of members.
        return self.members.count()


class Guild(models.Model):
    name = models.CharField(max_length=15)
    members = models.ManyToManyField(Developer)

    def __nonzero__(self):
        # Python 2 truth protocol: a guild is truthy iff it has members.
        return self.members.count()


class Tag(models.Model):
    content_type = models.ForeignKey(ContentType, related_name='g_r_r_tags')
    object_id = models.CharField(max_length=15)
    content_object = generic.GenericForeignKey()
    label = models.CharField(max_length=15)


class Board(models.Model):
    # CharField primary key used as a generic-relation target.
    name = models.CharField(primary_key=True, max_length=15)


class SpecialGenericRelation(generic.GenericRelation):
    # GenericRelation subclass that counts save_form_data() calls so form
    # tests can assert it was invoked.
    def __init__(self, *args, **kwargs):
        super(SpecialGenericRelation, self).__init__(*args, **kwargs)
        self.editable = True
        self.save_form_data_calls = 0

    def save_form_data(self, *args, **kwargs):
        self.save_form_data_calls += 1


class HasLinks(models.Model):
    links = SpecialGenericRelation(Link)

    class Meta:
        abstract = True


class HasLinkThing(HasLinks):
    pass


class A(models.Model):
    flag = models.NullBooleanField()
    content_type = models.ForeignKey(ContentType)
    object_id = models.PositiveIntegerField()
    content_object = generic.GenericForeignKey('content_type', 'object_id')


class B(models.Model):
    a = generic.GenericRelation(A)

    class Meta:
        ordering = ('id',)


class C(models.Model):
    b = models.ForeignKey(B)

    class Meta:
        ordering = ('id',)


class D(models.Model):
    b = models.ForeignKey(B, null=True)

    class Meta:
        ordering = ('id',)


# Ticket #22998
class Node(models.Model):
    content_type = models.ForeignKey(ContentType)
    object_id = models.PositiveIntegerField()
    content = generic.GenericForeignKey('content_type', 'object_id')


class Content(models.Model):
    nodes = generic.GenericRelation(Node)
    related_obj = models.ForeignKey('Related', on_delete=models.CASCADE)


class Related(models.Model):
    pass


def prevent_deletes(sender, instance, **kwargs):
    # Signal handler making Node rows undeletable, so cascade-delete tests
    # can assert ProtectedError propagation.
    raise ProtectedError("Not allowed to delete.", [instance])

models.signals.pre_delete.connect(prevent_deletes, sender=Node)
apache-2.0
rcarrillocruz/ansible
lib/ansible/modules/network/fortios/fortios_ipv4_policy.py
21
11211
#!/usr/bin/python
#
# Ansible module to manage IPv4 policy objects in fortigate devices
# (c) 2017, Benjamin Jolivot <bjolivot@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#

ANSIBLE_METADATA = {'metadata_version': '1.0',
                    'status': ['preview'],
                    'supported_by': 'community'}

DOCUMENTATION = """
---
module: fortios_ipv4_policy
version_added: "2.3"
author: "Benjamin Jolivot (@bjolivot)"
short_description: Manage IPv4 policy objects on Fortinet FortiOS firewall devices
description:
  - This module provides management of firewall IPv4 policies on FortiOS devices.
extends_documentation_fragment: fortios
options:
  id:
    description:
      - "Policy ID.
        Warning: policy ID number is different than Policy sequence number.
        The policy ID is the number assigned at policy creation.
        The sequence number represents the order in which the Fortigate will evaluate the rule for policy enforcement,
        and also the order in which rules are listed in the GUI and CLI.
        These two numbers do not necessarily correlate: this module is based off policy ID.
        TIP: policy ID can be viewed in the GUI by adding 'ID' to the display columns"
    required: true
  state:
    description:
      - Specifies if policy I(id) need to be added or deleted.
    choices: ['present', 'absent']
    default: present
  src_intf:
    description:
      - Specifies source interface name.
    default: any
  dst_intf:
    description:
      - Specifies destination interface name.
    default: any
  src_addr:
    description:
      - Specifies source address (or group) object name(s). Required when I(state=present).
  src_addr_negate:
    description:
      - Negate source address param.
    default: false
    choices: ["true", "false"]
  dst_addr:
    description:
      - Specifies destination address (or group) object name(s). Required when I(state=present).
  dst_addr_negate:
    description:
      - Negate destination address param.
    default: false
    choices: ["true", "false"]
  policy_action:
    description:
      - Specifies accept or deny action policy. Required when I(state=present).
    choices: ['accept', 'deny']
    aliases: ['action']
  service:
    description:
      - "Specifies policy service(s), could be a list (ex: ['MAIL','DNS']). Required when I(state=present)."
    aliases:
      - services
  service_negate:
    description:
      - Negate policy service(s) defined in service value.
    default: false
    choices: ["true", "false"]
  schedule:
    description:
      - defines policy schedule.
    default: 'always'
  nat:
    description:
      - Enable or disable Nat.
    default: false
    choices: ["true", "false"]
  fixedport:
    description:
      - Use fixed port for nat.
    default: false
    choices: ["true", "false"]
  poolname:
    description:
      - Specifies NAT pool name.
  av_profile:
    description:
      - Specifies Antivirus profile name.
  webfilter_profile:
    description:
      - Specifies Webfilter profile name.
  ips_sensor:
    description:
      - Specifies IPS Sensor profile name.
  application_list:
    description:
      - Specifies Application Control name.
  logtraffic:
    version_added: "2.4"
    description:
      - Logs sessions that matched policy.
    default: utm
    choices: ['disable', 'utm', 'all']
  logtraffic_start:
    version_added: "2.4"
    description:
      - Logs beginning of session as well.
    default: false
    choices: ["true", "false"]
  comment:
    description:
      - free text to describe policy.
notes:
  - This module requires pyFG library.
"""

EXAMPLES = """
- name: Allow external DNS call
  fortios_ipv4_policy:
    host: 192.168.0.254
    username: admin
    password: password
    id: 42
    src_addr: internal_network
    dst_addr: all
    service: dns
    nat: True
    state: present
    policy_action: accept
    logtraffic: disable

- name: Public Web
  fortios_ipv4_policy:
    host: 192.168.0.254
    username: admin
    password: password
    id: 42
    src_addr: all
    dst_addr: webservers
    services:
      - http
      - https
    state: present
    policy_action: accept
"""

RETURN = """
firewall_address_config:
  description: full firewall addresses config string
  returned: always
  type: string
change_string:
  description: The commands executed by the module
  returned: only if config changed
  type: string
msg_error_list:
  description: "List of errors returned by CLI (use -vvv for better readability)."
  returned: only when error
  type: string
"""

from ansible.module_utils.fortios import fortios_argument_spec, fortios_required_if
from ansible.module_utils.fortios import backup, AnsibleFortios
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.pycompat24 import get_exception


def main():
    """Entry point: validate parameters, then create, update, or delete the
    requested IPv4 firewall policy on the FortiOS device via pyFG."""
    argument_spec = dict(
        comment = dict(type='str'),
        id = dict(type='int', required=True),
        src_intf = dict(default='any'),
        dst_intf = dict(default='any'),
        state = dict(choices=['present', 'absent'], default='present'),
        src_addr = dict(type='list'),
        dst_addr = dict(type='list'),
        src_addr_negate = dict(type='bool', default=False),
        dst_addr_negate = dict(type='bool', default=False),
        policy_action = dict(choices=['accept','deny'], aliases=['action']),
        service = dict(aliases=['services'], type='list'),
        service_negate = dict(type='bool', default=False),
        schedule = dict(type='str', default='always'),
        nat = dict(type='bool', default=False),
        fixedport = dict(type='bool', default=False),
        poolname = dict(type='str'),
        av_profile = dict(type='str'),
        webfilter_profile = dict(type='str'),
        ips_sensor = dict(type='str'),
        application_list = dict(type='str'),
        logtraffic = dict(choices=['disable','all','utm'], default='utm'),
        logtraffic_start = dict(type='bool', default=False),
    )

    #merge global required_if & argument_spec from module_utils/fortios.py
    argument_spec.update(fortios_argument_spec)

    # Address/action/service parameters are only mandatory when creating.
    ipv4_policy_required_if = [
        ['state', 'present', ['src_addr', 'dst_addr', 'policy_action', 'service']],
    ]

    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
        required_if=fortios_required_if + ipv4_policy_required_if ,
    )

    #init forti object
    fortigate = AnsibleFortios(module)

    #Security policies root path
    config_path = 'firewall policy'

    #test params
    #NAT related
    if not module.params['nat']:
        if module.params['poolname']:
            module.fail_json(msg='Poolname param requires NAT to be true.')
        if module.params['fixedport']:
            module.fail_json(msg='Fixedport param requires NAT to be true.')

    #log options
    if module.params['logtraffic_start']:
        if not module.params['logtraffic'] == 'all':
            module.fail_json(msg='Logtraffic_start param requires logtraffic to be set to "all".')

    #id must be str(int) for pyFG to work
    policy_id = str(module.params['id'])

    #load config
    fortigate.load_config(config_path)

    #Absent State
    if module.params['state'] == 'absent':
        fortigate.candidate_config[config_path].del_block(policy_id)

    #Present state
    elif module.params['state'] == 'present':
        new_policy = fortigate.get_empty_configuration_block(policy_id, 'edit')

        #src / dest / service / interfaces
        new_policy.set_param('srcintf', '"%s"' % (module.params['src_intf']))
        new_policy.set_param('dstintf', '"%s"' % (module.params['dst_intf']))

        # Multi-valued params are rendered as space-separated quoted names.
        new_policy.set_param('srcaddr', " ".join('"' + item + '"' for item in module.params['src_addr']))
        new_policy.set_param('dstaddr', " ".join('"' + item + '"' for item in module.params['dst_addr']))
        new_policy.set_param('service', " ".join('"' + item + '"' for item in module.params['service']))

        # negate src / dest / service
        if module.params['src_addr_negate']:
            new_policy.set_param('srcaddr-negate', 'enable')
        if module.params['dst_addr_negate']:
            new_policy.set_param('dstaddr-negate', 'enable')
        if module.params['service_negate']:
            new_policy.set_param('service-negate', 'enable')

        # action
        new_policy.set_param('action', '%s' % (module.params['policy_action']))

        #logging
        new_policy.set_param('logtraffic', '%s' % (module.params['logtraffic']))
        if module.params['logtraffic'] == 'all':
            if module.params['logtraffic_start']:
                new_policy.set_param('logtraffic-start', 'enable')
            else:
                new_policy.set_param('logtraffic-start', 'disable')

        # Schedule
        new_policy.set_param('schedule', '%s' % (module.params['schedule']))

        #NAT
        if module.params['nat']:
            new_policy.set_param('nat', 'enable')
            if module.params['fixedport']:
                new_policy.set_param('fixedport', 'enable')
            if module.params['poolname'] is not None:
                new_policy.set_param('ippool', 'enable')
                new_policy.set_param('poolname', '"%s"' % (module.params['poolname']))

        #security profiles:
        if module.params['av_profile'] is not None:
            new_policy.set_param('av-profile', '"%s"' % (module.params['av_profile']))
        if module.params['webfilter_profile'] is not None:
            new_policy.set_param('webfilter-profile', '"%s"' % (module.params['webfilter_profile']))
        if module.params['ips_sensor'] is not None:
            new_policy.set_param('ips-sensor', '"%s"' % (module.params['ips_sensor']))
        if module.params['application_list'] is not None:
            new_policy.set_param('application-list', '"%s"' % (module.params['application_list']))

        # comment
        if module.params['comment'] is not None:
            new_policy.set_param('comment', '"%s"' % (module.params['comment']))

        #add the new policy to the device
        fortigate.add_block(policy_id, new_policy)

    #Apply changes
    fortigate.apply_changes()


if __name__ == '__main__':
    main()
gpl-3.0
Argon-Zhou/django
tests/inline_formsets/tests.py
326
6656
from __future__ import unicode_literals

from django.forms.models import inlineformset_factory
from django.test import TestCase, skipUnlessDBFeature
from django.utils import six

from .models import Child, Parent, Poem, Poet, School


class DeletionTests(TestCase):
    def test_deletion(self):
        """Marking an existing inline form DELETE removes the object."""
        PoemFormSet = inlineformset_factory(Poet, Poem, can_delete=True, fields="__all__")
        poet = Poet.objects.create(name='test')
        poem = poet.poem_set.create(name='test poem')
        data = {
            'poem_set-TOTAL_FORMS': '1',
            'poem_set-INITIAL_FORMS': '1',
            'poem_set-MAX_NUM_FORMS': '0',
            'poem_set-0-id': str(poem.pk),
            'poem_set-0-poet': str(poet.pk),
            'poem_set-0-name': 'test',
            'poem_set-0-DELETE': 'on',
        }
        formset = PoemFormSet(data, instance=poet)
        # NOTE(review): save() runs before is_valid(); presumably save()
        # triggers validation internally -- confirm ordering is intentional.
        formset.save()
        self.assertTrue(formset.is_valid())
        self.assertEqual(Poem.objects.count(), 0)

    def test_add_form_deletion_when_invalid(self):
        """
        Make sure that an add form that is filled out, but marked for deletion
        doesn't cause validation errors.
        """
        PoemFormSet = inlineformset_factory(Poet, Poem, can_delete=True, fields="__all__")
        poet = Poet.objects.create(name='test')
        data = {
            'poem_set-TOTAL_FORMS': '1',
            'poem_set-INITIAL_FORMS': '0',
            'poem_set-MAX_NUM_FORMS': '0',
            'poem_set-0-id': '',
            'poem_set-0-poem': '1',
            'poem_set-0-name': 'x' * 1000,  # too long for the name field
        }
        formset = PoemFormSet(data, instance=poet)
        # Make sure this form doesn't pass validation.
        self.assertEqual(formset.is_valid(), False)
        self.assertEqual(Poem.objects.count(), 0)

        # Then make sure that it *does* pass validation and delete the object,
        # even though the data isn't actually valid.
        data['poem_set-0-DELETE'] = 'on'
        formset = PoemFormSet(data, instance=poet)
        self.assertEqual(formset.is_valid(), True)
        formset.save()
        self.assertEqual(Poem.objects.count(), 0)

    def test_change_form_deletion_when_invalid(self):
        """
        Make sure that a change form that is filled out, but marked for deletion
        doesn't cause validation errors.
        """
        PoemFormSet = inlineformset_factory(Poet, Poem, can_delete=True, fields="__all__")
        poet = Poet.objects.create(name='test')
        poem = poet.poem_set.create(name='test poem')
        data = {
            'poem_set-TOTAL_FORMS': '1',
            'poem_set-INITIAL_FORMS': '1',
            'poem_set-MAX_NUM_FORMS': '0',
            'poem_set-0-id': six.text_type(poem.id),
            'poem_set-0-poem': six.text_type(poem.id),
            'poem_set-0-name': 'x' * 1000,  # too long for the name field
        }
        formset = PoemFormSet(data, instance=poet)
        # Make sure this form doesn't pass validation.
        self.assertEqual(formset.is_valid(), False)
        self.assertEqual(Poem.objects.count(), 1)

        # Then make sure that it *does* pass validation and delete the object,
        # even though the data isn't actually valid.
        data['poem_set-0-DELETE'] = 'on'
        formset = PoemFormSet(data, instance=poet)
        self.assertEqual(formset.is_valid(), True)
        formset.save()
        self.assertEqual(Poem.objects.count(), 0)

    def test_save_new(self):
        """
        Make sure inlineformsets respect commit=False
        regression for #10750
        """
        # exclude some required field from the forms
        ChildFormSet = inlineformset_factory(School, Child, exclude=['father', 'mother'])
        school = School.objects.create(name='test')
        mother = Parent.objects.create(name='mother')
        father = Parent.objects.create(name='father')
        data = {
            'child_set-TOTAL_FORMS': '1',
            'child_set-INITIAL_FORMS': '0',
            'child_set-MAX_NUM_FORMS': '0',
            'child_set-0-name': 'child',
        }
        formset = ChildFormSet(data, instance=school)
        self.assertEqual(formset.is_valid(), True)
        # commit=False returns unsaved instances so the excluded required
        # FKs can be filled in before saving.
        objects = formset.save(commit=False)
        for obj in objects:
            obj.mother = mother
            obj.father = father
            obj.save()
        self.assertEqual(school.child_set.count(), 1)


class InlineFormsetFactoryTest(TestCase):
    def test_inline_formset_factory(self):
        """
        These should both work without a problem.
        """
        inlineformset_factory(Parent, Child, fk_name='mother', fields="__all__")
        inlineformset_factory(Parent, Child, fk_name='father', fields="__all__")

    def test_exception_on_unspecified_foreign_key(self):
        """
        Child has two ForeignKeys to Parent, so if we don't specify which one
        to use for the inline formset, we should get an exception.
        """
        six.assertRaisesRegex(
            self, ValueError,
            "'inline_formsets.Child' has more than one ForeignKey to 'inline_formsets.Parent'.",
            inlineformset_factory, Parent, Child
        )

    def test_fk_name_not_foreign_key_field_from_child(self):
        """
        If we specify fk_name, but it isn't a ForeignKey from the child model
        to the parent model, we should get an exception.
        """
        # NOTE(review): the second positional argument to assertRaises is a
        # message string where a callable is expected; verify this test
        # exercises the intended failure rather than a TypeError.
        self.assertRaises(
            Exception,
            "fk_name 'school' is not a ForeignKey to <class 'inline_formsets.models.Parent'>",
            inlineformset_factory, Parent, Child, fk_name='school'
        )

    def test_non_foreign_key_field(self):
        """
        If the field specified in fk_name is not a ForeignKey, we should get
        an exception.
        """
        six.assertRaisesRegex(
            self, ValueError,
            "'inline_formsets.Child' has no field named 'test'.",
            inlineformset_factory, Parent, Child, fk_name='test'
        )

    def test_any_iterable_allowed_as_argument_to_exclude(self):
        # Regression test for #9171.
        inlineformset_factory(
            Parent, Child, exclude=['school'], fk_name='mother'
        )

        inlineformset_factory(
            Parent, Child, exclude=('school',), fk_name='mother'
        )

    @skipUnlessDBFeature('allows_auto_pk_0')
    def test_zero_primary_key(self):
        # Regression test for #21472
        poet = Poet.objects.create(id=0, name='test')
        poet.poem_set.create(name='test poem')
        PoemFormSet = inlineformset_factory(Poet, Poem, fields="__all__", extra=0)
        formset = PoemFormSet(None, instance=poet)
        self.assertEqual(len(formset.forms), 1)
bsd-3-clause
gelanivishal/pychargify
api.py
2
19966
# -*- coding: utf-8 -*- ''' This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA Created on Nov 20, 2009 Author: Paul Trippett (paul@pyhub.com) ''' import httplib import base64 import datetime import iso8601 from itertools import chain from xml.dom import minidom try: import json except Exception, e: try: import simplejson as json except Exception, e: try: # For AppEngine users import django.utils.simplejson as json except Exception, e: print "No Json library found... Exiting." exit() class ChargifyError(Exception): """ A Chargify Releated error @license GNU General Public License """ pass class ChargifyUnAuthorized(ChargifyError): """ Returned when API authentication has failed. @license GNU General Public License """ pass class ChargifyForbidden(ChargifyError): """ Returned by valid endpoints in our application that have not been enabled for API use. @license GNU General Public License """ pass class ChargifyNotFound(ChargifyError): """ The requested resource was not found. @license GNU General Public License """ pass class ChargifyUnProcessableEntity(ChargifyError): """ Sent in response to a POST (create) or PUT (update) request that is invalid. 
@license GNU General Public License """ pass class ChargifyServerError(ChargifyError): """ Signals some other error @license GNU General Public License """ pass class ChargifyBase(object): """ The ChargifyBase class provides a common base for all classes in this module @license GNU General Public License """ __ignore__ = ['api_key', 'sub_domain', 'base_host', 'request_host', 'id', '__xmlnodename__'] api_key = '' sub_domain = '' base_host = '.chargify.com' request_host = '' id = None def __init__(self, apikey, subdomain): """ Initialize the Class with the API Key and SubDomain for Requests to the Chargify API """ self.api_key = apikey self.sub_domain = subdomain self.request_host = self.sub_domain + self.base_host def __get_xml_value(self, nodelist): """ Get the Text Value from an XML Node """ rc = "" for node in nodelist: if node.nodeType == node.TEXT_NODE: rc = rc + node.data return rc def __get_object_from_node(self, node, obj_type=''): """ Copy values from a node into a new Object """ if obj_type == '': constructor = globals()[self.__name__] else: constructor = globals()[obj_type] obj = constructor(self.api_key, self.sub_domain) for childnodes in node.childNodes: if childnodes.nodeType == 1 and not childnodes.nodeName == '': if childnodes.nodeName in self.__attribute_types__: obj.__setattr__(childnodes.nodeName, self._applyS(childnodes.toxml(), self.__attribute_types__[childnodes.nodeName], childnodes.nodeName)) else: node_value = self.__get_xml_value(childnodes.childNodes) if "type" in childnodes.attributes.keys(): node_type = childnodes.attributes["type"] if node_value: if node_type.nodeValue == 'datetime': node_value = datetime.datetime.fromtimestamp( iso8601.parse(node_value)) obj.__setattr__(childnodes.nodeName, node_value) return obj def fix_xml_encoding(self, xml): """ Chargify encodes non-ascii characters in CP1252. Decodes and re-encodes with xml characters. Strips out whitespace "text nodes". 
""" return unicode(''.join([i.strip() for i in xml.split('\n')])).encode( 'CP1252', 'replace').decode('utf-8', 'ignore').encode( 'ascii', 'xmlcharrefreplace') def _applyS(self, xml, obj_type, node_name): """ Apply the values of the passed xml data to the a class """ dom = minidom.parseString(self.fix_xml_encoding(xml)) nodes = dom.getElementsByTagName(node_name) if nodes.length == 1: return self.__get_object_from_node(nodes[0], obj_type) def _applyA(self, xml, obj_type, node_name): """ Apply the values of the passed data to a new class of the current type """ dom = minidom.parseString(self.fix_xml_encoding(xml)) nodes = dom.getElementsByTagName(node_name) objs = [] for node in nodes: objs.append(self.__get_object_from_node(node, obj_type)) return objs def _toxml(self, dom): """ Return a XML Representation of the object """ element = minidom.Element(self.__xmlnodename__) for property, value in self.__dict__.iteritems(): if not property in self.__ignore__: if property in self.__attribute_types__: element.appendChild(value._toxml(dom)) else: node = minidom.Element(property) node_txt = dom.createTextNode(str(value)) node.appendChild(node_txt) element.appendChild(node) return element def _get(self, url): """ Handle HTTP GET's to the API """ headers = { "Authorization": "Basic %s" % self._get_auth_string(), "User-Agent": "pyChargify", "Content-Type": 'text/xml' } r = httplib.HTTPSConnection(self.request_host) r.request('GET', url, None, headers) response = r.getresponse() # Unauthorized Error if response.status == 401: raise ChargifyUnAuthorized() # Forbidden Error elif response.status == 403: raise ChargifyForbidden() # Not Found Error elif response.status == 404: raise ChargifyNotFound() # Unprocessable Entity Error elif response.status == 422: raise ChargifyUnProcessableEntity() # Generic Server Errors elif response.status in [405, 500]: raise ChargifyServerError() return response.read() def _post(self, url, data): """ Handle HTTP POST's to the API """ return 
self._request('POST', url, data) def _put(self, url, data): """ Handle HTTP PUT's to the API """ return self._request('PUT', url, data) def _delete(self, url, data): """ Handle HTTP DELETE's to the API """ return self._request('DELETE', url, data) def _request(self, method, url, data=''): """ Handled the request and sends it to the server """ http = httplib.HTTPSConnection(self.request_host) http.putrequest(method, url) http.putheader("Authorization", "Basic %s" % self._get_auth_string()) http.putheader("User-Agent", "pychargify") http.putheader("Host", self.request_host) http.putheader("Accept", "application/xml") http.putheader("Content-Length", str(len(data))) http.putheader("Content-Type", 'text/xml; charset="UTF-8"') http.endheaders() print('sending: %s' % data) http.send(data) response = http.getresponse() # Unauthorized Error if response.status == 401: raise ChargifyUnAuthorized() # Forbidden Error elif response.status == 403: raise ChargifyForbidden() # Not Found Error elif response.status == 404: raise ChargifyNotFound() # Unprocessable Entity Error elif response.status == 422: raise ChargifyUnProcessableEntity() # Generic Server Errors elif response.status in [405, 500]: raise ChargifyServerError() return response.read() def _save(self, url, node_name): """ Save the object using the passed URL as the API end point """ dom = minidom.Document() dom.appendChild(self._toxml(dom)) request_made = { 'day': datetime.datetime.today().day, 'month': datetime.datetime.today().month, 'year': datetime.datetime.today().year } if self.id: obj = self._applyS(self._put('/' + url + '/' + self.id + '.xml', dom.toxml(encoding="utf-8")), self.__name__, node_name) if obj: if type(obj.updated_at) == datetime.datetime: if (obj.updated_at.day == request_made['day']) and \ (obj.updated_at.month == request_made['month']) and \ (obj.updated_at.year == request_made['year']): self.saved = True return (True, obj) return (False, obj) else: obj = self._applyS(self._post('/' + url + 
'.xml', dom.toxml(encoding="utf-8")), self.__name__, node_name) if obj: if type(obj.updated_at) == datetime.datetime: if (obj.updated_at.day == request_made['day']) and \ (obj.updated_at.month == request_made['month']) and \ (obj.updated_at.year == request_made['year']): return (True, obj) return (False, obj) def _get_auth_string(self): return base64.encodestring('%s:%s' % (self.api_key, 'x'))[:-1] class ChargifyCustomer(ChargifyBase): """ Represents Chargify Customers @license GNU General Public License """ __name__ = 'ChargifyCustomer' __attribute_types__ = {} __xmlnodename__ = 'customer' id = None first_name = '' last_name = '' email = '' organization = '' reference = '' created_at = None modified_at = None def __init__(self, apikey, subdomain, nodename=''): super(ChargifyCustomer, self).__init__(apikey, subdomain) if nodename: self.__xmlnodename__ = nodename def getAll(self): return self._applyA(self._get('/customers.xml'), self.__name__, 'customer') def getById(self, id): return self._applyS(self._get('/customers/' + str(id) + '.xml'), self.__name__, 'customer') def getByReference(self, reference): return self._applyS(self._get('/customers/lookup.xml?reference=' + str(reference)), self.__name__, 'customer') def getSubscriptions(self): obj = ChargifySubscription(self.api_key, self.sub_domain) return obj.getByCustomerId(self.id) def save(self): return self._save('customers', 'customer') class ChargifyProduct(ChargifyBase): """ Represents Chargify Products @license GNU General Public License """ __name__ = 'ChargifyProduct' __attribute_types__ = {} __xmlnodename__ = 'product' id = None price_in_cents = 0 name = '' handle = '' product_family = {} accounting_code = '' interval_unit = '' interval = 0 def __init__(self, apikey, subdomain, nodename=''): super(ChargifyProduct, self).__init__(apikey, subdomain) if nodename: self.__xmlnodename__ = nodename def getAll(self): return self._applyA(self._get('/products.xml'), self.__name__, 'product') def getById(self, id): 
return self._applyS(self._get('/products/' + str(id) + '.xml'), self.__name__, 'product') def getByHandle(self, handle): return self._applyS(self._get('/products/handle/' + str(handle) + '.xml'), self.__name__, 'product') def save(self): return self._save('products', 'product') def getPaymentPageUrl(self): return ('https://' + self.request_host + '/h/' + self.id + '/subscriptions/new') def getPriceInDollars(self): return round(float(self.price_in_cents) / 100, 2) def getFormattedPrice(self): return "$%.2f" % (self.getPriceInDollars()) class Usage(object): def __init__(self, id, memo, quantity): self.id = id self.quantity = int(quantity) self.memo = memo class ChargifySubscription(ChargifyBase): """ Represents Chargify Subscriptions @license GNU General Public License """ __name__ = 'ChargifySubscription' __attribute_types__ = { 'customer': 'ChargifyCustomer', 'product': 'ChargifyProduct', 'credit_card': 'ChargifyCreditCard' } __xmlnodename__ = 'subscription' id = None state = '' balance_in_cents = 0 current_period_started_at = None current_period_ends_at = None trial_started_at = None trial_ended_attrial_ended_at = None activated_at = None expires_at = None created_at = None updated_at = None customer = None product = None product_handle = '' credit_card = None def __init__(self, apikey, subdomain, nodename=''): super(ChargifySubscription, self).__init__(apikey, subdomain) if nodename: self.__xmlnodename__ = nodename def getAll(self): return self._applyA(self._get('/subscriptions.xml'), self.__name__, 'subscription') def createUsage(self, component_id, quantity, memo=None): """ Creates usage for the given component id. 
""" data = '''<?xml version="1.0" encoding="UTF-8"?><usage> <quantity>%d</quantity><memo>%s</memo></usage>''' % ( quantity, memo or "") dom = minidom.parseString(self.fix_xml_encoding( self._post('/subscriptions/%s/components/%d/usages.xml' % ( str(self.id), component_id), data))) return [Usage(*tuple(chain.from_iterable([[x.data for x in i.childNodes] or [None] for i in n.childNodes]))) for n in dom.getElementsByTagName('usage')] def getByCustomerId(self, customer_id): return self._applyA(self._get('/customers/' + str(customer_id) + '/subscriptions.xml'), self.__name__, 'subscription') def getBySubscriptionId(self, subscription_id): #Throws error if more than element is returned i, = self._applyA(self._get('/subscriptions/' + str(subscription_id) + '.xml'), self.__name__, 'subscription') return i def save(self): return self._save('subscriptions', 'subscription') def resetBalance(self): self._put("/subscriptions/" + self.id + "/reset_balance.xml", "") def reactivate(self): self._put("/subscriptions/" + self.id + "/reactivate.xml", "") def upgrade(self, toProductHandle): xml = """<?xml version="1.0" encoding="UTF-8"?> <subscription> <product_handle>%s</product_handle> </subscription>""" % (toProductHandle) #end improper indentation return self._applyS(self._put("/subscriptions/" + self.id + ".xml", xml), self.__name__, "subscription") def unsubscribe(self, message): xml = """<?xml version="1.0" encoding="UTF-8"?> <subscription> <cancellation_message> %s </cancellation_message> </subscription>""" % (message) self._delete("/subscriptions/" + self.id + ".xml", xml) class ChargifyCreditCard(ChargifyBase): """ Represents Chargify Credit Cards """ __name__ = 'ChargifyCreditCard' __attribute_types__ = {} __xmlnodename__ = 'credit_card_attributes' first_name = '' last_name = '' full_number = '' masked_card_number = '' expiration_month = '' expiration_year = '' cvv = '' type = '' billing_address = '' billing_city = '' billing_state = '' billing_zip = '' billing_country = '' 
zip = '' def __init__(self, apikey, subdomain, nodename=''): super(ChargifyCreditCard, self).__init__(apikey, subdomain) if nodename: self.__xmlnodename__ = nodename def save(self, subscription): path = "/subscriptions/%s.xml" % (subscription.id) data = u"""<?xml version="1.0" encoding="UTF-8"?> <subscription> <credit_card_attributes> <full_number>%s</full_number> <expiration_month>%s</expiration_month> <expiration_year>%s</expiration_year> <cvv>%s</cvv> <first_name>%s</first_name> <last_name>%s</last_name> <zip>%s</zip> </credit_card_attributes> </subscription>""" % (self.full_number, self.expiration_month, self.expiration_year, self.cvv, self.first_name, self.last_name, self.zip) # end improper indentation return self._applyS(self._put(path, data), self.__name__, "subscription") class ChargifyPostBack(ChargifyBase): """ Represents Chargify API Post Backs @license GNU General Public License """ subscriptions = [] def __init__(self, apikey, subdomain, postback_data): ChargifyBase.__init__(apikey, subdomain) if postback_data: self._process_postback_data(postback_data) def _process_postback_data(self, data): """ Process the Json array and fetches the Subscription Objects """ csub = ChargifySubscription(self.api_key, self.sub_domain) postdata_objects = json.loads(data) for obj in postdata_objects: self.subscriptions.append(csub.getBySubscriptionId(obj)) class Chargify: """ The Chargify class provides the main entry point to the Chargify API @license GNU General Public License """ api_key = '' sub_domain = '' def __init__(self, apikey=None, subdomain=None, cred_file=None): ''' We take either an api_key and sub_domain, or a path to a file with JSON that defines those two, or we throw an error.''' if self.api_key and self.sub_domain: self.api_key = apikey self.sub_domain = subdomain return elif cred_file: f = open(cred_file) credentials = json.loads(f.read()) self.api_key = credentials['api_key'] self.sub_domain = credentials['sub_domain'] return else: print "Need either 
an api_key and subdomain, or credential file. Exiting." exit() def Customer(self, nodename=''): return ChargifyCustomer(self.api_key, self.sub_domain, nodename) def Product(self, nodename=''): return ChargifyProduct(self.api_key, self.sub_domain, nodename) def Subscription(self, nodename=''): return ChargifySubscription(self.api_key, self.sub_domain, nodename) def CreditCard(self, nodename=''): return ChargifyCreditCard(self.api_key, self.sub_domain, nodename) def PostBack(self, postbackdata): return ChargifyPostBack(self.api_key, self.sub_domain, postbackdata)
gpl-2.0
haridsv/pip
pip/_vendor/html5lib/treewalkers/__init__.py
499
5766
"""A collection of modules for iterating through different kinds of tree, generating tokens identical to those produced by the tokenizer module. To create a tree walker for a new type of tree, you need to do implement a tree walker object (called TreeWalker by convention) that implements a 'serialize' method taking a tree as sole argument and returning an iterator generating tokens. """ from __future__ import absolute_import, division, unicode_literals __all__ = ["getTreeWalker", "pprint", "dom", "etree", "genshistream", "lxmletree", "pulldom"] import sys from .. import constants from ..utils import default_etree treeWalkerCache = {} def getTreeWalker(treeType, implementation=None, **kwargs): """Get a TreeWalker class for various types of tree with built-in support treeType - the name of the tree type required (case-insensitive). Supported values are: "dom" - The xml.dom.minidom DOM implementation "pulldom" - The xml.dom.pulldom event stream "etree" - A generic walker for tree implementations exposing an elementtree-like interface (known to work with ElementTree, cElementTree and lxml.etree). "lxml" - Optimized walker for lxml.etree "genshi" - a Genshi stream implementation - (Currently applies to the "etree" tree type only). A module implementing the tree type e.g. xml.etree.ElementTree or cElementTree.""" treeType = treeType.lower() if treeType not in treeWalkerCache: if treeType in ("dom", "pulldom"): name = "%s.%s" % (__name__, treeType) __import__(name) mod = sys.modules[name] treeWalkerCache[treeType] = mod.TreeWalker elif treeType == "genshi": from . import genshistream treeWalkerCache[treeType] = genshistream.TreeWalker elif treeType == "lxml": from . import lxmletree treeWalkerCache[treeType] = lxmletree.TreeWalker elif treeType == "etree": from . 
import etree if implementation is None: implementation = default_etree # XXX: NEVER cache here, caching is done in the etree submodule return etree.getETreeModule(implementation, **kwargs).TreeWalker return treeWalkerCache.get(treeType) def concatenateCharacterTokens(tokens): pendingCharacters = [] for token in tokens: type = token["type"] if type in ("Characters", "SpaceCharacters"): pendingCharacters.append(token["data"]) else: if pendingCharacters: yield {"type": "Characters", "data": "".join(pendingCharacters)} pendingCharacters = [] yield token if pendingCharacters: yield {"type": "Characters", "data": "".join(pendingCharacters)} def pprint(walker): """Pretty printer for tree walkers""" output = [] indent = 0 for token in concatenateCharacterTokens(walker): type = token["type"] if type in ("StartTag", "EmptyTag"): # tag name if token["namespace"] and token["namespace"] != constants.namespaces["html"]: if token["namespace"] in constants.prefixes: ns = constants.prefixes[token["namespace"]] else: ns = token["namespace"] name = "%s %s" % (ns, token["name"]) else: name = token["name"] output.append("%s<%s>" % (" " * indent, name)) indent += 2 # attributes (sorted for consistent ordering) attrs = token["data"] for (namespace, localname), value in sorted(attrs.items()): if namespace: if namespace in constants.prefixes: ns = constants.prefixes[namespace] else: ns = namespace name = "%s %s" % (ns, localname) else: name = localname output.append("%s%s=\"%s\"" % (" " * indent, name, value)) # self-closing if type == "EmptyTag": indent -= 2 elif type == "EndTag": indent -= 2 elif type == "Comment": output.append("%s<!-- %s -->" % (" " * indent, token["data"])) elif type == "Doctype": if token["name"]: if token["publicId"]: output.append("""%s<!DOCTYPE %s "%s" "%s">""" % (" " * indent, token["name"], token["publicId"], token["systemId"] if token["systemId"] else "")) elif token["systemId"]: output.append("""%s<!DOCTYPE %s "" "%s">""" % (" " * indent, token["name"], 
token["systemId"])) else: output.append("%s<!DOCTYPE %s>" % (" " * indent, token["name"])) else: output.append("%s<!DOCTYPE >" % (" " * indent,)) elif type == "Characters": output.append("%s\"%s\"" % (" " * indent, token["data"])) elif type == "SpaceCharacters": assert False, "concatenateCharacterTokens should have got rid of all Space tokens" else: raise ValueError("Unknown token type, %s" % type) return "\n".join(output)
mit
raghavtan/youtube-dl
youtube_dl/extractor/novamov.py
125
2664
from __future__ import unicode_literals import re from .common import InfoExtractor from ..compat import ( compat_urlparse, ) from ..utils import ( ExtractorError, ) class NovaMovIE(InfoExtractor): IE_NAME = 'novamov' IE_DESC = 'NovaMov' _VALID_URL_TEMPLATE = r'http://(?:(?:www\.)?%(host)s/(?:file|video)/|(?:(?:embed|www)\.)%(host)s/embed\.php\?(?:.*?&)?v=)(?P<id>[a-z\d]{13})' _VALID_URL = _VALID_URL_TEMPLATE % {'host': 'novamov\.com'} _HOST = 'www.novamov.com' _FILE_DELETED_REGEX = r'This file no longer exists on our servers!</h2>' _FILEKEY_REGEX = r'flashvars\.filekey="(?P<filekey>[^"]+)";' _TITLE_REGEX = r'(?s)<div class="v_tab blockborder rounded5" id="v_tab1">\s*<h3>([^<]+)</h3>' _DESCRIPTION_REGEX = r'(?s)<div class="v_tab blockborder rounded5" id="v_tab1">\s*<h3>[^<]+</h3><p>([^<]+)</p>' _TEST = { 'url': 'http://www.novamov.com/video/4rurhn9x446jj', 'md5': '7205f346a52bbeba427603ba10d4b935', 'info_dict': { 'id': '4rurhn9x446jj', 'ext': 'flv', 'title': 'search engine optimization', 'description': 'search engine optimization is used to rank the web page in the google search engine' }, 'skip': '"Invalid token" errors abound (in web interface as well as youtube-dl, there is nothing we can do about it.)' } def _real_extract(self, url): mobj = re.match(self._VALID_URL, url) video_id = mobj.group('id') page = self._download_webpage( 'http://%s/video/%s' % (self._HOST, video_id), video_id, 'Downloading video page') if re.search(self._FILE_DELETED_REGEX, page) is not None: raise ExtractorError('Video %s does not exist' % video_id, expected=True) filekey = self._search_regex(self._FILEKEY_REGEX, page, 'filekey') title = self._html_search_regex(self._TITLE_REGEX, page, 'title', fatal=False) description = self._html_search_regex(self._DESCRIPTION_REGEX, page, 'description', default='', fatal=False) api_response = self._download_webpage( 'http://%s/api/player.api.php?key=%s&file=%s' % (self._HOST, filekey, video_id), video_id, 'Downloading video api response') response = 
compat_urlparse.parse_qs(api_response) if 'error_msg' in response: raise ExtractorError('%s returned error: %s' % (self.IE_NAME, response['error_msg'][0]), expected=True) video_url = response['url'][0] return { 'id': video_id, 'url': video_url, 'title': title, 'description': description }
unlicense
sumanthha/fundafriend
django/contrib/staticfiles/management/commands/collectstatic.py
78
12781
from __future__ import with_statement import os import sys from optparse import make_option from django.core.files.storage import FileSystemStorage from django.core.management.base import CommandError, NoArgsCommand from django.utils.encoding import smart_str, smart_unicode from django.utils.datastructures import SortedDict from django.contrib.staticfiles import finders, storage class Command(NoArgsCommand): """ Command that allows to copy or symlink static files from different locations to the settings.STATIC_ROOT. """ option_list = NoArgsCommand.option_list + ( make_option('--noinput', action='store_false', dest='interactive', default=True, help="Do NOT prompt the user for input of any kind."), make_option('--no-post-process', action='store_false', dest='post_process', default=True, help="Do NOT post process collected files."), make_option('-i', '--ignore', action='append', default=[], dest='ignore_patterns', metavar='PATTERN', help="Ignore files or directories matching this glob-style " "pattern. Use multiple times to ignore more."), make_option('-n', '--dry-run', action='store_true', dest='dry_run', default=False, help="Do everything except modify the filesystem."), make_option('-c', '--clear', action='store_true', dest='clear', default=False, help="Clear the existing files using the storage " "before trying to copy or link the original file."), make_option('-l', '--link', action='store_true', dest='link', default=False, help="Create a symbolic link to each file instead of copying."), make_option('--no-default-ignore', action='store_false', dest='use_default_ignore_patterns', default=True, help="Don't ignore the common private glob-style patterns 'CVS', " "'.*' and '*~'."), ) help = "Collect static files in a single location." 
requires_model_validation = False def __init__(self, *args, **kwargs): super(NoArgsCommand, self).__init__(*args, **kwargs) self.copied_files = [] self.symlinked_files = [] self.unmodified_files = [] self.post_processed_files = [] self.storage = storage.staticfiles_storage try: self.storage.path('') except NotImplementedError: self.local = False else: self.local = True # Use ints for file times (ticket #14665), if supported if hasattr(os, 'stat_float_times'): os.stat_float_times(False) def set_options(self, **options): """ Set instance variables based on an options dict """ self.interactive = options['interactive'] self.verbosity = int(options.get('verbosity', 1)) self.symlink = options['link'] self.clear = options['clear'] self.dry_run = options['dry_run'] ignore_patterns = options['ignore_patterns'] if options['use_default_ignore_patterns']: ignore_patterns += ['CVS', '.*', '*~'] self.ignore_patterns = list(set(ignore_patterns)) self.post_process = options['post_process'] def collect(self): """ Perform the bulk of the work of collectstatic. Split off from handle_noargs() to facilitate testing. """ if self.symlink: if sys.platform == 'win32': raise CommandError("Symlinking is not supported by this " "platform (%s)." % sys.platform) if not self.local: raise CommandError("Can't symlink to a remote destination.") if self.clear: self.clear_dir('') if self.symlink: handler = self.link_file else: handler = self.copy_file found_files = SortedDict() for finder in finders.get_finders(): for path, storage in finder.list(self.ignore_patterns): # Prefix the relative path if the source storage contains it if getattr(storage, 'prefix', None): prefixed_path = os.path.join(storage.prefix, path) else: prefixed_path = path if prefixed_path not in found_files: found_files[prefixed_path] = (storage, path) handler(path, prefixed_path, storage) # Here we check if the storage backend has a post_process # method and pass it the list of modified files. 
if self.post_process and hasattr(self.storage, 'post_process'): processor = self.storage.post_process(found_files, dry_run=self.dry_run) for original_path, processed_path, processed in processor: if processed: self.log(u"Post-processed '%s' as '%s" % (original_path, processed_path), level=1) self.post_processed_files.append(original_path) else: self.log(u"Skipped post-processing '%s'" % original_path) return { 'modified': self.copied_files + self.symlinked_files, 'unmodified': self.unmodified_files, 'post_processed': self.post_processed_files, } def handle_noargs(self, **options): self.set_options(**options) # Warn before doing anything more. if (isinstance(self.storage, FileSystemStorage) and self.storage.location): destination_path = self.storage.location destination_display = ':\n\n %s' % destination_path else: destination_path = None destination_display = '.' if self.clear: clear_display = 'This will DELETE EXISTING FILES!' else: clear_display = 'This will overwrite existing files!' if self.interactive: confirm = raw_input(u""" You have requested to collect static files at the destination location as specified in your settings%s %s Are you sure you want to do this? 
Type 'yes' to continue, or 'no' to cancel: """ % (destination_display, clear_display)) if confirm != 'yes': raise CommandError("Collecting static files cancelled.") collected = self.collect() modified_count = len(collected['modified']) unmodified_count = len(collected['unmodified']) post_processed_count = len(collected['post_processed']) if self.verbosity >= 1: template = ("\n%(modified_count)s %(identifier)s %(action)s" "%(destination)s%(unmodified)s%(post_processed)s.\n") summary = template % { 'modified_count': modified_count, 'identifier': 'static file' + (modified_count != 1 and 's' or ''), 'action': self.symlink and 'symlinked' or 'copied', 'destination': (destination_path and " to '%s'" % destination_path or ''), 'unmodified': (collected['unmodified'] and ', %s unmodified' % unmodified_count or ''), 'post_processed': (collected['post_processed'] and ', %s post-processed' % post_processed_count or ''), } self.stdout.write(smart_str(summary)) def log(self, msg, level=2): """ Small log helper """ msg = smart_str(msg) if not msg.endswith("\n"): msg += "\n" if self.verbosity >= level: self.stdout.write(msg) def clear_dir(self, path): """ Deletes the given relative path using the destinatin storage backend. """ dirs, files = self.storage.listdir(path) for f in files: fpath = os.path.join(path, f) if self.dry_run: self.log(u"Pretending to delete '%s'" % smart_unicode(fpath), level=1) else: self.log(u"Deleting '%s'" % smart_unicode(fpath), level=1) self.storage.delete(fpath) for d in dirs: self.clear_dir(os.path.join(path, d)) def delete_file(self, path, prefixed_path, source_storage): """ Checks if the target file should be deleted if it already exists """ if self.storage.exists(prefixed_path): try: # When was the target file modified last time? 
target_last_modified = \ self.storage.modified_time(prefixed_path) except (OSError, NotImplementedError, AttributeError): # The storage doesn't support ``modified_time`` or failed pass else: try: # When was the source file modified last time? source_last_modified = source_storage.modified_time(path) except (OSError, NotImplementedError, AttributeError): pass else: # The full path of the target file if self.local: full_path = self.storage.path(prefixed_path) else: full_path = None # Skip the file if the source file is younger if target_last_modified >= source_last_modified: if not ((self.symlink and full_path and not os.path.islink(full_path)) or (not self.symlink and full_path and os.path.islink(full_path))): if prefixed_path not in self.unmodified_files: self.unmodified_files.append(prefixed_path) self.log(u"Skipping '%s' (not modified)" % path) return False # Then delete the existing file if really needed if self.dry_run: self.log(u"Pretending to delete '%s'" % path) else: self.log(u"Deleting '%s'" % path) self.storage.delete(prefixed_path) return True def link_file(self, path, prefixed_path, source_storage): """ Attempt to link ``path`` """ # Skip this file if it was already copied earlier if prefixed_path in self.symlinked_files: return self.log(u"Skipping '%s' (already linked earlier)" % path) # Delete the target file if needed or break if not self.delete_file(path, prefixed_path, source_storage): return # The full path of the source file source_path = source_storage.path(path) # Finally link the file if self.dry_run: self.log(u"Pretending to link '%s'" % source_path, level=1) else: self.log(u"Linking '%s'" % source_path, level=1) full_path = self.storage.path(prefixed_path) try: os.makedirs(os.path.dirname(full_path)) except OSError: pass os.symlink(source_path, full_path) if prefixed_path not in self.symlinked_files: self.symlinked_files.append(prefixed_path) def copy_file(self, path, prefixed_path, source_storage): """ Attempt to copy ``path`` with storage 
""" # Skip this file if it was already copied earlier if prefixed_path in self.copied_files: return self.log(u"Skipping '%s' (already copied earlier)" % path) # Delete the target file if needed or break if not self.delete_file(path, prefixed_path, source_storage): return # The full path of the source file source_path = source_storage.path(path) # Finally start copying if self.dry_run: self.log(u"Pretending to copy '%s'" % source_path, level=1) else: self.log(u"Copying '%s'" % source_path, level=1) if self.local: full_path = self.storage.path(prefixed_path) try: os.makedirs(os.path.dirname(full_path)) except OSError: pass with source_storage.open(path) as source_file: self.storage.save(prefixed_path, source_file) if not prefixed_path in self.copied_files: self.copied_files.append(prefixed_path)
bsd-3-clause
HaloExchange/HaloBitcoin
contrib/spendfrom/spendfrom.py
1
10215
#!/usr/bin/env python
#
# Use the raw transactions API to spend halobitcoins received on particular
# addresses, and send any change back to that same address.
#
# Example usage:
#  spendfrom.py  # Lists available funds
#  spendfrom.py --from=ADDRESS --to=ADDRESS --amount=11.00
#
# Assumes it will talk to a halobitcoind or Bitcoin-Qt running
# on localhost.
#
# Depends on jsonrpc
#

from decimal import *
import getpass
import math
import os
import os.path
import platform
import sys
import time
from jsonrpc import ServiceProxy, json

BASE_FEE=Decimal("0.001")

def check_json_precision():
    """Make sure json library being used does not lose precision converting HBTC values"""
    n = Decimal("20000000.00000003")
    satoshis = int(json.loads(json.dumps(float(n)))*1.0e8)
    if satoshis != 2000000000000003:
        raise RuntimeError("JSON encode/decode loses precision")

def determine_db_dir():
    """Return the default location of the halobitcoin data directory"""
    if platform.system() == "Darwin":
        return os.path.expanduser("~/Library/Application Support/HaloBitcoin/")
    elif platform.system() == "Windows":
        return os.path.join(os.environ['APPDATA'], "HaloBitcoin")
    return os.path.expanduser("~/.halobitcoin")

def read_halobitcoin_config(dbdir):
    """Read the halobitcoin.conf file from dbdir, returns dictionary of settings"""
    from ConfigParser import SafeConfigParser

    class FakeSecHead(object):
        """Wrap the conf file so ConfigParser sees a leading [all] section."""
        def __init__(self, fp):
            self.fp = fp
            self.sechead = '[all]\n'
        def readline(self):
            if self.sechead:
                try:
                    return self.sechead
                finally:
                    self.sechead = None
            else:
                s = self.fp.readline()
                # strip trailing comments, conf files allow '#'
                if s.find('#') != -1:
                    s = s[0:s.find('#')].strip() + "\n"
                return s

    config_parser = SafeConfigParser()
    config_parser.readfp(FakeSecHead(open(os.path.join(dbdir, "halobitcoin.conf"))))
    return dict(config_parser.items("all"))

def connect_JSON(config):
    """Connect to a halobitcoin JSON-RPC server"""
    testnet = config.get('testnet', '0')
    testnet = (int(testnet) > 0)  # 0/1 in config file, convert to True/False
    if 'rpcport' not in config:
        config['rpcport'] = 18332 if testnet else 8332
    connect = "http://%s:%s@127.0.0.1:%s"%(config['rpcuser'], config['rpcpassword'], config['rpcport'])
    try:
        result = ServiceProxy(connect)
        # ServiceProxy is lazy-connect, so send an RPC command mostly to catch connection errors,
        # but also make sure the halobitcoind we're talking to is/isn't testnet:
        if result.getmininginfo()['testnet'] != testnet:
            sys.stderr.write("RPC server at "+connect+" testnet setting mismatch\n")
            sys.exit(1)
        return result
    except:
        sys.stderr.write("Error connecting to RPC server at "+connect+"\n")
        sys.exit(1)

def unlock_wallet(halobitcoind):
    """Prompt for the wallet passphrase if needed; return True once unlocked."""
    info = halobitcoind.getinfo()
    if 'unlocked_until' not in info:
        return True  # wallet is not encrypted
    t = int(info['unlocked_until'])
    if t <= time.time():
        try:
            passphrase = getpass.getpass("Wallet is locked; enter passphrase: ")
            halobitcoind.walletpassphrase(passphrase, 5)
        except:
            sys.stderr.write("Wrong passphrase\n")
    info = halobitcoind.getinfo()
    return int(info['unlocked_until']) > time.time()

def list_available(halobitcoind):
    """Map each funded address to {'total', 'outputs', 'account'} from listunspent."""
    address_summary = dict()
    address_to_account = dict()
    for info in halobitcoind.listreceivedbyaddress(0):
        address_to_account[info["address"]] = info["account"]

    unspent = halobitcoind.listunspent(0)
    for output in unspent:
        # listunspent doesn't give addresses, so:
        rawtx = halobitcoind.getrawtransaction(output['txid'], 1)
        vout = rawtx["vout"][output['vout']]
        pk = vout["scriptPubKey"]

        # This code only deals with ordinary pay-to-halobitcoin-address
        # or pay-to-script-hash outputs right now; anything exotic is ignored.
        if pk["type"] != "pubkeyhash" and pk["type"] != "scripthash":
            continue

        address = pk["addresses"][0]
        if address in address_summary:
            address_summary[address]["total"] += vout["value"]
            address_summary[address]["outputs"].append(output)
        else:
            address_summary[address] = {
                "total": vout["value"],
                "outputs": [output],
                "account": address_to_account.get(address, "")
            }
    return address_summary

def select_coins(needed, inputs):
    """Greedily pick inputs until *needed* is covered; return (inputs, change)."""
    # Feel free to improve this, this is good enough for my simple needs:
    outputs = []
    have = Decimal("0.0")
    n = 0
    while have < needed and n < len(inputs):
        outputs.append({"txid": inputs[n]["txid"], "vout": inputs[n]["vout"]})
        have += inputs[n]["amount"]
        n += 1
    return (outputs, have - needed)

def create_tx(halobitcoind, fromaddresses, toaddress, amount, fee):
    """Build and sign a raw transaction; return its hex encoding."""
    all_coins = list_available(halobitcoind)

    total_available = Decimal("0.0")
    needed = amount + fee
    potential_inputs = []
    for addr in fromaddresses:
        if addr not in all_coins:
            continue
        potential_inputs.extend(all_coins[addr]["outputs"])
        total_available += all_coins[addr]["total"]

    if total_available < needed:
        sys.stderr.write("Error, only %f HBTC available, need %f\n" % (total_available, needed))
        sys.exit(1)

    #
    # Note:
    # Python's json/jsonrpc modules have inconsistent support for Decimal numbers.
    # Instead of wrestling with getting json.dumps() (used by jsonrpc) to encode
    # Decimals, I'm casting amounts to float before sending them to halobitcoind.
    #
    outputs = {toaddress: float(amount)}
    (inputs, change_amount) = select_coins(needed, potential_inputs)
    if change_amount > BASE_FEE:  # don't bother with zero or tiny change
        change_address = fromaddresses[-1]
        if change_address in outputs:
            outputs[change_address] += float(change_amount)
        else:
            outputs[change_address] = float(change_amount)

    rawtx = halobitcoind.createrawtransaction(inputs, outputs)
    signed_rawtx = halobitcoind.signrawtransaction(rawtx)
    if not signed_rawtx["complete"]:
        sys.stderr.write("signrawtransaction failed\n")
        sys.exit(1)
    txdata = signed_rawtx["hex"]
    return txdata

def compute_amount_in(halobitcoind, txinfo):
    """Sum the values of all inputs of a decoded transaction."""
    result = Decimal("0.0")
    for vin in txinfo['vin']:
        in_info = halobitcoind.getrawtransaction(vin['txid'], 1)
        vout = in_info['vout'][vin['vout']]
        result = result + vout['value']
    return result

def compute_amount_out(txinfo):
    """Sum the values of all outputs of a decoded transaction."""
    result = Decimal("0.0")
    for vout in txinfo['vout']:
        result = result + vout['value']
    return result

def sanity_test_fee(halobitcoind, txdata_hex, max_fee):
    """Exit with an error unless the transaction's implied fee looks sane."""
    class FeeError(RuntimeError):
        pass
    try:
        txinfo = halobitcoind.decoderawtransaction(txdata_hex)
        total_in = compute_amount_in(halobitcoind, txinfo)
        total_out = compute_amount_out(txinfo)
        # BUGFIX: 'fee' was referenced by the checks below but never defined,
        # so any large or tiny transaction crashed with a NameError here.
        fee = total_in - total_out
        if fee > max_fee:
            raise FeeError("Rejecting transaction, unreasonable fee of "+str(fee))

        tx_size = len(txdata_hex) // 2  # two hex digits per byte
        kb = tx_size // 1000  # integer division rounds down
        if kb > 1 and fee < BASE_FEE:
            raise FeeError("Rejecting no-fee transaction, larger than 1000 bytes")
        if total_in < 0.01 and fee < BASE_FEE:
            raise FeeError("Rejecting no-fee, tiny-amount transaction")
        # Exercise for the reader: compute transaction priority, and
        # warn if this is a very-low-priority transaction
    except FeeError as err:
        sys.stderr.write((str(err)+"\n"))
        sys.exit(1)

def main():
    """Parse options; either list funds or create/broadcast a spend."""
    import optparse

    parser = optparse.OptionParser(usage="%prog [options]")
    parser.add_option("--from", dest="fromaddresses", default=None,
                      help="addresses to get halobitcoins from")
    parser.add_option("--to", dest="to", default=None,
                      help="address to get send halobitcoins to")
    parser.add_option("--amount", dest="amount", default=None,
                      help="amount to send")
    parser.add_option("--fee", dest="fee", default="0.0",
                      help="fee to include")
    parser.add_option("--datadir", dest="datadir", default=determine_db_dir(),
                      help="location of halobitcoin.conf file with RPC username/password (default: %default)")
    parser.add_option("--testnet", dest="testnet", default=False, action="store_true",
                      help="Use the test network")
    parser.add_option("--dry_run", dest="dry_run", default=False, action="store_true",
                      help="Don't broadcast the transaction, just create and print the transaction data")

    (options, args) = parser.parse_args()

    check_json_precision()
    config = read_halobitcoin_config(options.datadir)
    if options.testnet:
        config['testnet'] = True
    halobitcoind = connect_JSON(config)

    if options.amount is None:
        address_summary = list_available(halobitcoind)
        for address, info in address_summary.iteritems():
            n_transactions = len(info['outputs'])
            if n_transactions > 1:
                print("%s %.8f %s (%d transactions)" % (address, info['total'], info['account'], n_transactions))
            else:
                print("%s %.8f %s" % (address, info['total'], info['account']))
    else:
        fee = Decimal(options.fee)
        amount = Decimal(options.amount)
        while unlock_wallet(halobitcoind) == False:
            pass  # Keep asking for passphrase until they get it right
        txdata = create_tx(halobitcoind, options.fromaddresses.split(","), options.to, amount, fee)
        sanity_test_fee(halobitcoind, txdata, amount*Decimal("0.01"))
        if options.dry_run:
            print(txdata)
        else:
            txid = halobitcoind.sendrawtransaction(txdata)
            print(txid)

if __name__ == '__main__':
    main()
mit
spinda/liquidhaskell
benchmarks/vector-algorithms-0.5.4.2/count.py
24
1603
#!/usr/bin/python
# used by count.sh
#
# Count LiquidHaskell annotations ({-@ ... @-}) in the Haskell source file
# named on the command line, print per-category totals, and dump the type
# specifications and "other" annotations into companion text files.

import re
import sys

# Closing marker of a LiquidHaskell annotation block.
SPEC_END = "@-}"

# Openers for the three reported categories.
OTHER_RE = '{-@ (type|measure|data|include|predicate|Decrease|Strict)'
QUALIF_RE = '{-@ qualif'
TYSPEC_RE = '{-@ (?!(type|measure|data|include|predicate|qualif|Decrease|Strict))'


def find_annotations(text, pattern):
    """Return every annotation in *text* whose opener matches *pattern*.

    Each result spans from the opener through its closing '@-}' marker.
    (If a closing marker is missing, str.find returns -1 and the slice
    degenerates, matching the historical behaviour.)
    """
    return [text[m.start():3 + text.find(SPEC_END, m.start())]
            for m in re.finditer(pattern, text)]


def line_total(specs):
    """Total number of source lines covered by *specs* (minimum 1 each)."""
    return sum(1 + s.count('\n') for s in specs)


def main():
    # BUGFIX: the original bound the file contents to the name `str`,
    # shadowing the builtin, never closed any file handle, and used
    # py2-only print statements at module import time.
    fname = sys.argv[1]
    with open(fname, 'r') as fh:
        text = fh.read()

    other = find_annotations(text, OTHER_RE)
    qualifs = find_annotations(text, QUALIF_RE)
    tyspecs = find_annotations(text, TYSPEC_RE)

    print("Type specifications:\t\t count = %d \t lines = %d"
          % (len(tyspecs), line_total(tyspecs)))
    print("Qualifiers :\t\t count = %d \t lines = %d"
          % (len(qualifs), line_total(qualifs)))
    print("Other Annotations :\t\t count = %d \t lines = %d"
          % (len(other), line_total(other)))

    # Dump the extracted annotations for later inspection.
    with open('_'.join(["tyspec", fname.replace('/', '_'), ".txt"]), 'w') as ftyspec:
        ftyspec.write('\n'.join(tyspecs))
    with open('_'.join(["other", fname.replace('/', '_'), ".txt"]), 'w') as fother:
        fother.write('\n'.join(other))


if __name__ == '__main__':
    main()
bsd-3-clause
Celelibi/syslinux
com32/cmenu/menugen.py
20
10693
#!/usr/bin/env python

import sys, re, getopt

class Menusystem:
    """Parse a .menu declaration file and emit C source from a template.

    The parser is a small state machine (system -> menu -> item): plain
    ATTRIBUTE=VALUE lines before any section configure the system, a
    [name] line starts a menu, and subsequent attributes describe either
    the menu or its entries (blank line separates entries).
    """

    # menu-entry "type" keyword -> C constant emitted into the template
    types = {"run"       : "OPT_RUN",
             "inactive"  : "OPT_INACTIVE",
             "checkbox"  : "OPT_CHECKBOX",
             "radiomenu" : "OPT_RADIOMENU",
             "sep"       : "OPT_SEP",
             "invisible" : "OPT_INVISIBLE",
             "radioitem" : "OPT_RADIOITEM",
             "exitmenu"  : "OPT_EXITMENU",
             "login"     : "login",  # special type
             "submenu"   : "OPT_SUBMENU"}

    entry_init = {"item": "",
                  "info": "",
                  "data": "",
                  "ipappend": 0,      # flag to send in case of PXELINUX
                  "helpid": 65535,    # 0xFFFF
                  "shortcut": "-1",
                  "state": 0,         # initial state of checkboxes
                  "argsmenu": "",     # name of menu containing arguments
                  "perms": "",        # permission required to execute this entry
                  "_updated": None,   # has this dictionary been updated
                  "type": "run"}

    menu_init = {"title": "",
                 "row": "0xFF",       # let system decide position
                 "col": "0xFF",
                 "_updated": None,
                 "name": ""}

    system_init = {"videomode": "0xFF",
                   "title": "Menu System",
                   "top": "1", "left": "1", "bot": "21", "right": "79",
                   "helpdir": "/isolinux/help",
                   "pwdfile": "", "pwdrow": "23",
                   "editrow": "23",
                   "skipcondn": "0",
                   "skipcmd": ".exit",
                   "startfile": "",
                   "onerrorcmd": ".repeat",
                   "exitcmd": ".exit",
                   "exitcmdroot": "",
                   "timeout": "600",
                   "timeoutcmd": ".beep",
                   "totaltimeout": "0",
                   "totaltimeoutcmd": ".wait"}

    # keyboard-state names usable in "skipcondn" -> C flag constants
    shift_flags = {"alt": "ALT_PRESSED",
                   "ctrl": "CTRL_PRESSED",
                   "shift": "SHIFT_PRESSED",
                   "caps": "CAPSLOCK_ON",
                   "num": "NUMLOCK_ON",
                   "ins": "INSERT_ON"}

    reqd_templates = ["item", "login", "menu", "system"]

    def __init__(self, template):
        self.state = "system"
        self.code_template_filename = template
        self.menus = []
        self.init_entry()
        self.init_menu()
        self.init_system()
        # human-readable lists of valid keywords, used in error messages
        self.vtypes = " OR ".join(list(self.types.keys()))
        self.vattrs = " OR ".join([x for x in list(self.entry.keys()) if x[0] != "_"])
        self.mattrs = " OR ".join([x for x in list(self.menu.keys()) if x[0] != "_"])

    def init_entry(self):
        """Reset the current entry to its defaults."""
        self.entry = self.entry_init.copy()

    def init_menu(self):
        """Reset the current menu header to its defaults."""
        self.menu = self.menu_init.copy()

    def init_system(self):
        """Reset the system section to its defaults."""
        self.system = self.system_init.copy()

    def add_menu(self, name):
        """Flush any pending entry and start a new menu called *name*."""
        self.add_item()
        self.init_menu()
        self.menu["name"] = name
        self.menu["_updated"] = 1
        self.menus.append((self.menu, []))

    def add_item(self):
        """Commit the pending menu header and/or entry to the menu list."""
        if self.menu["_updated"]:  # menu details have changed
            self.menus[-1][0].update(self.menu)
            self.init_menu()
        if self.entry["_updated"]:
            if not self.entry["info"]:
                self.entry["info"] = self.entry["data"]
            if not self.menus:
                print("Error before line %d" % self.lineno)
                print("REASON: menu must be declared before a menu item is declared")
                sys.exit(1)
            self.menus[-1][1].append(self.entry)
            self.init_entry()

    def set_item(self, name, value):
        """Validate and store an entry attribute; return "" or an error message."""
        if name not in self.entry:
            msg = ["Unknown attribute %s in line %d" % (name, self.lineno)]
            msg.append("REASON: Attribute must be one of %s" % self.vattrs)
            return "\n".join(msg)
        if name == "type" and value not in self.types:
            msg = ["Unrecognized type %s in line %d" % (value, self.lineno)]
            msg.append("REASON: Valid types are %s" % self.vtypes)
            return "\n".join(msg)
        if name == "shortcut":
            if (value != "-1") and not re.match("^[A-Za-z0-9]$", value):
                msg = ["Invalid shortcut char '%s' in line %d" % (value, self.lineno)]
                msg.append("REASON: Valid values are [A-Za-z0-9]")
                return "\n".join(msg)
            elif value != "-1":
                value = "'%s'" % value  # quote for emission as a C char literal
        elif name in ["state", "helpid", "ipappend"]:
            try:
                value = int(value)
            except:
                return "Value of %s in line %d must be an integer" % (name, self.lineno)
        self.entry[name] = value
        self.entry["_updated"] = 1
        return ""

    def set_menu(self, name, value):
        """Store a menu attribute; return "" or an error message."""
        if name not in self.menu:
            return "Error: Unknown keyword %s" % name
        self.menu[name] = value
        self.menu["_updated"] = 1
        return ""

    def set_system(self, name, value):
        """Store a system attribute; skipcondn accepts a number or flag list."""
        if name not in self.system:
            return "Error: Unknown keyword %s" % name
        if name == "skipcondn":
            try:  # is skipcondn a number?
                int(value)
            except:  # it is a "-" delimited sequence
                value = value.lower()
                parts = [self.shift_flags.get(x.strip(), None) for x in value.split("-")]
                self.system["skipcondn"] = " | ".join([_f for _f in parts if _f])
            else:
                # BUGFIX: a numeric skipcondn was parsed but never stored,
                # silently keeping the default value.
                self.system["skipcondn"] = value
        else:
            self.system[name] = value
        return ""

    def set(self, name, value):
        """Dispatch an ATTRIBUTE=VALUE pair to the section the parser is in."""
        # remove quotes if given
        # BUGFIX: guard on length -- an empty value ("attr=") crashed on
        # value[0], and a lone quote character was stripped to nothing.
        if len(value) > 1 and (value[0] == value[-1]) and (value[0] in ['"', "'"]):
            value = value[1:-1]
        if self.state == "system":
            err = self.set_system(name, value)
            if not err:
                return
        if self.state == "menu":
            err = self.set_menu(name, value)
            # change state to entry if menu returns error
            if err:
                err = None
                self.state = "item"
        if self.state == "item":
            err = self.set_item(name, value)
            if not err:
                return
        # all errors so return item's error message
        print(err)
        sys.exit(1)

    def print_entry(self, entry, fd):
        """Emit one entry through the item/login template (mutates entry['type'])."""
        entry["type"] = self.types[entry["type"]]
        if entry["type"] == "login":  # special type
            fd.write(self.templates["login"] % entry)
        else:
            fd.write(self.templates["item"] % entry)

    def print_menu(self, menu, fd):
        """Emit one menu header; remember whether the 'main' menu was seen."""
        if menu["name"] == "main":
            self.foundmain = 1
        fd.write(self.templates["menu"] % menu)
        if (menu["row"] != "0xFF") or (menu["col"] != "0xFF"):
            fd.write(' set_menu_pos(%(row)s,%(col)s);\n' % menu)

    def output(self, filename):
        """Read the template file, then write the generated C source to *filename*."""
        curr_template = None
        contents = []
        self.templates = {}
        regbeg = re.compile(r"^--(?P<name>[a-z]+) BEGINS?--\n$")
        regend = re.compile(r"^--[a-z]+ ENDS?--\n$")
        ifd = open(self.code_template_filename, "r")
        for line in ifd.readlines():
            b = regbeg.match(line)
            e = regend.match(line)
            if e:  # end of template
                if curr_template:
                    self.templates[curr_template] = "".join(contents)
                curr_template = None
                continue
            if b:
                curr_template = b.group("name")
                contents = []
                continue
            if not curr_template:
                continue  # lines between templates are ignored
            contents.append(line)
        ifd.close()
        missing = None
        for x in self.reqd_templates:
            if x not in self.templates:
                missing = x
        if missing:
            print("Template %s required but not defined in %s" % (missing, self.code_template_filename))
            # BUGFIX: previously fell through and crashed on a KeyError below.
            sys.exit(1)
        if filename == "-":
            fd = sys.stdout
        else:
            fd = open(filename, "w")
        self.foundmain = None
        fd.write(self.templates["header"])
        fd.write(self.templates["system"] % self.system)
        for (menu, items) in self.menus:
            self.print_menu(menu, fd)
            for entry in items:
                self.print_entry(entry, fd)
        fd.write(self.templates["footer"])
        fd.close()
        if not self.foundmain:
            print("main menu not found")
            print(self.menus)
            sys.exit(1)

    def input(self, filename):
        """Parse the .menu declaration file *filename* ('-' for stdin)."""
        if filename == "-":
            fd = sys.stdin
        else:
            fd = open(filename, "r")
        self.lineno = 0
        self.state = "system"
        for line in fd.readlines():
            self.lineno = self.lineno + 1
            # strip up to two trailing CR/LF characters
            if line and line[-1] in ["\r", "\n"]:
                line = line[:-1]
            if line and line[-1] in ["\r", "\n"]:
                line = line[:-1]
            line = line.strip()
            if line and line[0] in ["#", ";"]:
                continue  # comment line
            try:
                # blank line -> starting a new entry
                if not line:
                    if self.state == "item":
                        self.add_item()
                    continue
                # starting a new section?
                if line[0] == "[" and line[-1] == "]":
                    self.state = "menu"
                    self.add_menu(line[1:-1])
                    continue
                # add property of current entry
                pos = line.find("=")  # find the first = in string
                if pos < 0:
                    print("Syntax error in line %d" % self.lineno)
                    print("REASON: non-section lines must be of the form ATTRIBUTE=VALUE")
                    sys.exit(1)
                attr = line[:pos].strip().lower()
                value = line[pos+1:].strip()
                self.set(attr, value)
            except:
                print("Error while parsing line %d: %s" % (self.lineno, line))
                raise
        fd.close()
        self.add_item()

def usage():
    """Print command-line help and exit."""
    print(sys.argv[0], " [options]")
    print("--input=<file> is the name of the .menu file declaring the menu structure")
    print("--output=<file> is the name of generated C source")
    print("--template=<file> is the name of template to be used")
    print()
    print("input and output default to - (stdin and stdout respectively)")
    print("template defaults to adv_menu.tpl")
    sys.exit(1)

def main():
    tfile = "adv_menu.tpl"
    ifile = "-"
    ofile = "-"
    opts, args = getopt.getopt(sys.argv[1:], "hi:o:t:", ["input=", "output=", "template=", "help"])
    if args:
        print("Unknown options %s" % args)
        usage()
    for o, a in opts:
        if o in ["-i", "--input"]:
            ifile = a
        elif o in ["-o", "--output"]:
            ofile = a
        elif o in ["-t", "--template"]:
            tfile = a
        elif o in ["-h", "--help"]:
            usage()
    inst = Menusystem(tfile)
    inst.input(ifile)
    inst.output(ofile)

if __name__ == "__main__":
    main()
gpl-2.0
Mauricio3000/cloth_snapshot
tests/test_snapshot.py
1
4128
import tempfile
import unittest
from datetime import datetime
import os

import pymel.core as pm
from pymel.core.runtime import nClothCreate
try:
    from unittest.mock import patch  # py3
except ImportError:
    from mock import patch  # py2 fallback

import tool.snapshot as ss


class Test_snapshot(unittest.TestCase):
    """Unit tests for tool.snapshot against a fresh Maya nCloth setup."""

    def _read_attr_file(self, basename):
        """Return the lines of tests/../tool/<basename>, raising if absent."""
        path = os.path.dirname(__file__).replace('tests', 'tool')
        path = path + os.sep + basename
        if not os.path.exists(path):
            # BUGFIX: the message used to be built as a tuple
            # ('File not found: ', path) instead of a single string.
            raise Exception('File not found: %s' % path)
        with open(path, 'r') as f:  # BUGFIX: handle was never closed
            return path, f.readlines()

    def setUp(self):
        # New scene
        pm.newFile(f=True)

        # Get ncloth attributes from file
        self.ncloth_attrsFile, self.ncloth_attrs = \
            self._read_attr_file('ncloth_attrs.txt')

        # Get nucleus attributes from file
        self.nucleus_attrsFile, self.nucleus_attrs = \
            self._read_attr_file('nucleus_attrs.txt')

        # Create plane and ncloth
        self.plane = pm.polyPlane()[0]
        pm.select(self.plane, r=1)
        nClothCreate()
        self.ncloth = pm.PyNode('nCloth1')
        self.nucleus = pm.PyNode('nucleus1')

        # Create expected name: <node>_YYYY-M-D_H-M
        now = datetime.now()
        self.name = self.ncloth.name() + '_' + \
            str(now.year) + '-' + str(now.month) + '-' + \
            str(now.day) + '_' + \
            str(now.hour) + '-' + str(now.minute)

    def _assert_dict_matches_attrs(self, d, attrs):
        """Assert dict keys equal the expected attribute list (order-free)."""
        self.assertEqual(len(d), len(attrs))
        # BUGFIX: d.keys() is a view on py3 and has no .sort(); use sorted().
        for k, a in zip(sorted(d), sorted(attrs)):
            self.assertEqual(k, a.strip())

    def test_make_dict_ncloth(self):
        d = ss.make_dict(node=self.ncloth, typ='ncloth')
        self._assert_dict_matches_attrs(d, self.ncloth_attrs)

    def test_make_dict_nucleus(self):
        d = ss.make_dict(node=self.nucleus, typ='nucleus')
        self._assert_dict_matches_attrs(d, self.nucleus_attrs)

    def test_gen_filename(self):
        name = ss.gen_filename(node=self.ncloth)
        self.assertEqual(name, (self.name + '.txt'))

    def test_make_file(self):
        tempPath = tempfile.mkdtemp()
        d = {'a': 1, 'b': 'z'}
        name = str(tempPath + os.sep + self.name + '.txt')
        expected = ['a:1:int', 'b:z:str', '}']

        ss.make_file(name=name, data=d)

        self.assertTrue(os.path.exists(name))
        with open(name, 'r') as f:
            written = f.readlines()
        # BUGFIX: the original `assertTrue(written, expected)` was vacuous --
        # the second argument of assertTrue is only the failure message.
        # NOTE(review): the expected line format is inferred from the old
        # `expected` list; confirm against ss.make_file's actual output.
        self.assertEqual([line.strip() for line in written], expected)

    @patch('tool.snapshot.make_file')
    @patch('tool.snapshot.gen_filename')
    @patch('tool.snapshot.make_dict')
    def test_take_snapshot(self, dict_mock, gen_mock, file_mock):
        path = '/foo/bar'
        d = {'attr': 1}
        dict_mock.return_value = d
        gen_mock.return_value = 'biz'
        pm.select(self.ncloth, self.nucleus, r=1)

        results = ss.take_snapshot(path=path)

        expected = ['Created: /foo/bar/biz', 'Created: /foo/bar/biz']
        # BUGFIX: assertEquals is a deprecated alias of assertEqual.
        self.assertEqual(expected, results)
        dict_mock.assert_any_call(self.ncloth, 'ncloth')
        gen_mock.assert_any_call(self.ncloth)
        file_mock.assert_any_call('/foo/bar/biz', d)
        dict_mock.assert_any_call(self.nucleus, 'nucleus')
        gen_mock.assert_any_call(self.nucleus)
        file_mock.assert_any_call('/foo/bar/biz', d)


def suite():
    return unittest.TestLoader().loadTestsFromTestCase(Test_snapshot)


if __name__ == '__main__':
    # BUGFIX: unittest.__main__ is a module and is not callable;
    # the entry point is unittest.main().
    unittest.main()
gpl-3.0
IHTSDO/snomed-database-loader
NEO4J/snomed_g_graphdb_build_tools.py
2
21190
#!/usr/bin/python from __future__ import print_function import csv, optparse, datetime, json, sys, re, os, base64, errno, io, sqlite3, subprocess from subprocess import PIPE import snomed_g_lib_rf2, snomed_g_lib_neo4j, snomedct_constants ''' Module: snomed_g_graphdb_build_tools.py Author: Jay Pedersen, July 2016 Purpose: Driver program that either creates or updates a SNOMED_G graph database. Syntax and Semantics: python <pgm> db_build --release_type delta/snapshot/full --rf2 <location> --mode build/prep ==> creates cocncept_delta_FSN table in delta.db SQLITE file ==> accesses NEO4J graph at localhost Example: python snomed_g_graphdb_build_tools.py \ db_build --release_type full --mode build --action create \ --rf2 /cygdrive/c/sno/snomedct/SnomedCT_RF2Release_US1000124_20160301 \ --neopw abcdefgh ''' # TIMING functions def timing_start(timing_d, nm): timing_d[nm] = { 'start': datetime.datetime.now() } def timing_end(timing_d, nm): timing_d[nm]['end'] = datetime.datetime.now() def show_timings(timestamps): for key in sorted(timestamps.keys()): delta = timestamps[key]['end'] - timestamps[key]['start'] print('%-35s : %s' % (key, str(delta))) if sys.platform not in ['cygwin','win32']: # Ubuntu/Mac def get_path(relpath, pathsep): return os.path.abspath(os.path.expanduser(relpath)).rstrip(pathsep)+pathsep elif sys.platform == 'win32': # DOS, not cygwin, issue 'c:\\sno\\build\\us20160301_06/build.log' def get_path(relpath, pathsep): return os.path.abspath(os.path.expanduser(relpath)).replace('\\',pathsep).rstrip(pathsep)+pathsep else: # cygwin def get_path(relpath, pathsep): # Need t:/neo/data/sc_20140731/snomed_g_rconcept_... from '/cygdrive/t/neo/data/... 
s1 = os.path.realpath(os.path.expanduser(relpath)) # was abspath, realpath expands symlink m = re.match(r'/cygdrive/(.)/(.*)', s1) if m: # cygwin full path s = '%s:/%s' % (m.group(1),m.group(2)) if s[-1] != pathsep: s += pathsep else: print('*** unable to translate path <%s> ***', relpath); sys.exit(1) return s class StatusDb(): def __init__(self, filename="build_status.db"): self.dbfilename = filename db = sqlite3.connect(self.dbfilename) c = db.cursor() c.execute('CREATE TABLE IF NOT EXISTS seq (name text primary key, nextval integer)') try: c.execute('''insert into seq values ('BUILD', 0)''') except: pass else: print('sequence did not exist, primed') c.execute('CREATE TABLE IF NOT EXISTS build (\ seq INTEGER, \ step TEXT, \ command TEXT, \ result TEXT, \ status INTEGER, \ seconds INTEGER, \ output TEXT, \ error TEXT, \ start TEXT, \ end TEXT \ )' ) db.commit() c.close() db.close() # keep db closed most of the time def get_next_sequence_number(self): # obtain sequence number db = sqlite3.connect(self.dbfilename) c = db.cursor() c.execute("update seq set nextval=nextval+1 where name='BUILD'") db.commit() nextval_list = c.execute("select nextval from seq where name='BUILD'").fetchall() return nextval_list[0][0] # the next sequence number def add_record(self, seq, step, command, result, status, seconds, output, error, start, end): db = sqlite3.connect(self.dbfilename) c = db.cursor() # insert into build table c.execute('INSERT INTO build(seq, step, command, result, status, seconds, output, error, start, end) \ VALUES(?,?,?,?,?,?,?,?,?,?)', (seq, step, command, result, status, seconds, output, error, str(start), str(end))) db.commit() c.close() db.close() # keep db closed most of the time class save_and_report_results(): def __init__(self, DB, seqnum, stepnames, results_d, logfile): # conststructor def set_step_variables(stepname): self.result_s = self.results_d[stepname].get('result','<NA>') self.status = self.results_d[stepname].get('status',-100) 
self.expected_status = self.results_d[stepname].get('expected_status',0) self.elapsed_time = self.results_d[stepname].get('elapsed_time',-1) self.seconds = self.elapsed_time.seconds # could use .total_seconds(), eg: 7.193 versus 7 self.output = self.results_d[stepname].get('STDOUT','') if sys.version_info[0]==2: self.output.decode('utf-8') # py2.7 support, py3 all string processing is unicode self.error = self.results_d[stepname].get('STDERR','') if sys.version_info[0]==2: self.error.decode('utf-8') # py2.7 support self.cmd_start_s = str(self.results_d[stepname].get('cmd_start','<NI>')) self.cmd_end_s = str(self.results_d[stepname].get('cmd_end','<NI>')) self.command = self.results_d[stepname].get('command','<NI>') # end set_step_variables # constructor: self.DB = DB # StatusDb object (sqlite3 database) self.seqnum = seqnum # Backup sequence number, first backup ever is #1, second #2, etc self.results_d = results_d self.logfile = logfile self.stepnames = [x for x in stepnames if x in self.results_d] self.procedure_worked = all(self.results_d[stepname]['result'] == 'SUCCESS' for stepname in self.stepnames) self.failed_steps = [x for x in self.stepnames if self.results_d[x]['result'] != 'SUCCESS'] # Get to work -- Write result to DB, see if everything worked for stepname in self.stepnames: set_step_variables(stepname) # Write the status to the database self.DB.add_record(self.seqnum, stepname, self.command, self.result_s, self.status, self.seconds, self.output, self.error, self.cmd_start_s, self.cmd_end_s) # SUMMARY display print(file=self.logfile) for f in [self.logfile, sys.stdout]: print('RESULT: %s' % 'SUCCESS' if self.procedure_worked else 'FAILED (steps: %s)' % str(self.failed_steps), file=f) print(file=self.logfile) print('SUMMARY:', file=self.logfile) print(file=self.logfile) for stepname in self.stepnames: set_step_variables(stepname) print('%-25s : %-25s, duration:%s' % (stepname, self.result_s, str(self.elapsed_time)), file=self.logfile) # DETAIL display 
print(file=self.logfile) print('DETAILS:', file=self.logfile) print(file=self.logfile) print('Backup sequence number: %d' % self.seqnum, file=self.logfile) for stepname in self.stepnames: set_step_variables(stepname) print('step:[%s],result:[%s],command:[%s],status/expected:%d/%d,duration:%s,output:[%s],error:[%s],cmd_start:[%s],cmd_end:[%s]' % (stepname, self.result_s, self.command, self.status, self.expected_status, str(self.elapsed_time), self.output, self.error, self.cmd_start_s, self.cmd_end_s), file=self.logfile) return # DONE # END save_and_report_results class #-------------------------------------------------------------------------------| # db_build --action create --rf2 <dir> --release_type delta --neopw <pw> | #-------------------------------------------------------------------------------| def db_build(arglist): saved_pwd = os.getcwd() opt = optparse.OptionParser() opt.add_option('--rf2',action='store') opt.add_option('--release_type', action='store', dest='release_type', choices=['delta','snapshot','full']) opt.add_option('--action', action='store', default='create', choices=['create','update']) opt.add_option('--neopw64', action='store') opt.add_option('--neopw', action='store') opt.add_option('--mode', action='store', default='build', choices=['build','prep','make_csvs','run_cypher','validate']) # build is end-to-end, others are subsets opt.add_option('--logfile', action='store') opt.add_option('--output_dir', action='store', default='.') opt.add_option('--relationship_file', action='store', default='Relationship') opt.add_option('--language_code', action='store', default='en', choices=['en','en-us', 'en-US', 'en-GB']) opt.add_option('--language_name', action='store', default='Language') opt.add_option('--prep_only', action='store_true') opts, args = opt.parse_args(arglist) if not (len(args)==0 and opts.rf2 and opts.release_type and (opts.neopw or opts.neopw64)): print('Usage: db_build --rf2 <dir> --release_type snapshot/full --neopw <pw>') 
sys.exit(1) if opts.neopw and opts.neopw64: print('Usage db_build, only one of --neopw and --neopw64 may be specified') sys.exit(1) if opts.neopw64: # snomed_g v1.2, convert neopw64 to neopw opts.neopw = str(base64.b64decode(opts.neopw64),'utf-8') if sys.version_info[0]==3 else base64.decodestring(opts.neopw64) # py2 # file path separator pathsep = '/' # make sure output directory exists and is empty opts.output_dir = get_path(opts.output_dir, pathsep) if not (os.path.isdir(opts.output_dir) and len(os.listdir(opts.output_dir)) == 0): print('*** Output directory [%s] isn\'t empty or doesn\'t exist ***' % opts.output_dir) sys.exit(1) # make sure a Terminology folder exists in the opts.rf2 folder if not (os.path.isdir(opts.rf2) and 'Terminology' in os.listdir(opts.rf2)): print('*** The --rf2 option [%s] must specify a folder, which must contain a Terminology subfolder' % opts.rf2) sys.exit(1) # open logfile logfile = open(opts.output_dir+'build.log', 'w') if not opts.logfile else \ (sys.output if opts.logfile == '-' else open(opts.logfile, 'w')) #--------------------------------------------------------------------------- # Determine SNOMED_G bin directory, where snomed_g_rf2_tools.py exists, etal #--------------------------------------------------------------------------- # determine snomed_g_bin -- bin directory where snomed_g_rf2_tools.py exists in, etc -- try SNOMED_G_HOME, SNOMED_G_BIN env vbls # ... 
ask directly if these variables don't exist snomed_g_bin = os.environ.get('SNOMED_G_BIN',None) # unlikely to exist, but great if it does if not snomed_g_bin: snomed_g_home = os.environ.get('SNOMED_G_HOME',None) if snomed_g_home: snomed_g_bin = get_path(snomed_g_home, pathsep) + 'bin' else: snomed_g_bin = get_path(os.path.dirname(os.path.abspath(__file__)), pathsep) # default to python script dir validated = False while not validated: if len(snomed_g_bin)==0: snomed_g_bin = (input if sys.version_info[0]==3 else raw_input)\ ('Enter SNOMED_G bin directory path where snomed_g_rf2_tools.py exists: ').rstrip(pathsep) else: # try to validate, look for snomed_g_rf2_tools.py target_file = snomed_g_bin+pathsep+'snomed_g_rf2_tools.py' validated = os.path.isfile(target_file) if not validated: print('Cant find [%s]' % target_file); snomed_g_bin = '' snomed_g_bin = get_path(snomed_g_bin, pathsep) print('SNOMED_G bin directory [%s]' % snomed_g_bin) # db_build ==> connect to NEO4J, make sure information given is good if opts.mode=='build': neo4j = snomed_g_lib_neo4j.Neo4j_Access(opts.neopw) # Connect to RF2 files, make sure rf2 directory given is good rf2_folders = snomed_g_lib_rf2.Rf2_Folders(opts.rf2, opts.release_type, opts.relationship_file, opts.language_code) # Build # open SQLITE database DB = StatusDb(os.path.abspath(opts.output_dir.rstrip(pathsep)+pathsep+'build_status.db')) # create YYYYMMDD string d = datetime.datetime.now() # determine current date yyyymmdd = '%04d%02d%02d' % (d.year,d.month,d.day) job_start_datetime = datetime.datetime.now() # Commands needed to Create/Update a SNOMED_G Graph Database # NOTE: Default mode is all-operations, so JOB_START and JOB_END to not have a mode specified commands_d = { 'JOB_START' : {'stepname': 'JOB_START', 'log': 'JOB-START(action:[%s], mode:[%s], release_type:[%s], rf2:[%s], date:[%s])' \ % (opts.action, opts.mode, opts.release_type, opts.rf2, yyyymmdd)}, 'FIND_ROLENAMES': {'stepname': 'FIND_ROLENAMES', 'cmd': 'python 
%s/snomed_g_rf2_tools.py find_rolenames --release_type %s --rf2 %s --language_code %s --language_name %s' \ % (snomed_g_bin, opts.release_type, opts.rf2, opts.language_code, opts.language_name), 'mode': ['build','prep','make_csvs','validate']}, 'FIND_ROLEGROUPS': {'stepname': 'FIND_ROLEGROUPS', 'cmd': 'python %s/snomed_g_rf2_tools.py find_rolegroups --release_type %s --rf2 %s --language_code %s --language_name %s' \ % (snomed_g_bin,opts.release_type,opts.rf2,opts.language_code,opts.language_name), 'mode': ['build','prep','make_csvs']}, 'MAKE_CONCEPT_CSVS': {'stepname': 'MAKE_CONCEPT_CSVS', 'cmd': 'python %s/snomed_g_rf2_tools.py make_csv --element concept --release_type %s --rf2 %s --neopw %s --action %s --relationship_file %s --language_code %s --language_name %s' \ % (snomed_g_bin, opts.release_type, opts.rf2, opts.neopw, opts.action, opts.relationship_file, opts.language_code, opts.language_name), 'mode': ['build','prep','make_csvs','validate']}, 'MAKE_DESCRIPTION_CSVS': {'stepname': 'MAKE_DESCRIPTION_CSVS', 'cmd': 'python %s/snomed_g_rf2_tools.py make_csv --element description --release_type %s --rf2 %s --neopw %s --action %s --relationship_file %s --language_code %s --language_name %s' \ % (snomed_g_bin, opts.release_type, opts.rf2, opts.neopw, opts.action, opts.relationship_file, opts.language_code, opts.language_name), 'mode': ['build','prep','make_csvs','validate']}, 'MAKE_ISA_REL_CSVS': {'stepname': 'MAKE_ISA_REL_CSVS', 'cmd': 'python %s/snomed_g_rf2_tools.py make_csv --element isa_rel --release_type %s --rf2 %s --neopw %s --action %s --relationship_file %s --language_code %s --language_name %s' \ % (snomed_g_bin, opts.release_type, opts.rf2, opts.neopw, opts.action, opts.relationship_file, opts.language_code, opts.language_name), 'mode': ['build','prep','make_csvs','validate']}, 'MAKE_DEFINING_REL_CSVS': {'stepname': 'MAKE_DEFINING_REL_CSVS', 'cmd': 'python %s/snomed_g_rf2_tools.py make_csv --element defining_rel --release_type %s --rf2 %s --neopw %s 
--action %s --relationship_file %s --language_code %s --language_name %s' \ % (snomed_g_bin, opts.release_type, opts.rf2, opts.neopw, opts.action, opts.relationship_file, opts.language_code, opts.language_name), 'mode': ['build','prep','make_csvs','validate']}, 'TEMPLATE_PROCESSING': {'stepname': 'TEMPLATE_PROCESSING', 'cmd': 'python %s/snomed_g_template_tools.py instantiate %s/snomed_g_graphdb_cypher_%s.template build.cypher --rf2 %s --release_type %s' \ % (snomed_g_bin, snomed_g_bin, ('create' if opts.action=='create' else 'update'), opts.rf2, opts.release_type), 'mode': ['build','prep']}, 'CYPHER_EXECUTION': {'stepname': 'CYPHER_EXECUTION', 'cmd': 'python %s/snomed_g_neo4j_tools.py run_cypher build.cypher --verbose --neopw %s' \ % (snomed_g_bin, opts.neopw), 'mode': ['build','run_cypher']}, 'CHECK_RESULT': {'stepname': 'CHECK_RESULT', 'cmd': 'python %s/snomed_g_neo4j_tools.py run_cypher %s/snomed_g_graphdb_update_failure_check.cypher --verbose --neopw %s' \ % (snomed_g_bin, snomed_g_bin, opts.neopw), 'mode': ['build','run_cypher']}, 'JOB_END': {'stepname': 'JOB_END', 'log': 'JOB-END'} } command_list_db_build = [ commands_d[x] for x in ['JOB_START', 'FIND_ROLENAMES', 'FIND_ROLEGROUPS', 'MAKE_CONCEPT_CSVS', 'MAKE_DESCRIPTION_CSVS', 'MAKE_ISA_REL_CSVS', 'MAKE_DEFINING_REL_CSVS', 'TEMPLATE_PROCESSING', 'CYPHER_EXECUTION', 'CHECK_RESULT', 'JOB_END'] ] command_list_db_build_prep = [commands_d[x] for x in ['JOB_START', 'FIND_ROLENAMES', 'FIND_ROLEGROUPS', 'MAKE_CONCEPT_CSVS', 'MAKE_DESCRIPTION_CSVS', 'MAKE_ISA_REL_CSVS', 'MAKE_DEFINING_REL_CSVS', 'TEMPLATE_PROCESSING', 'JOB_END'] ] # OLD -- #{'stepname':'CYPHER_EXECUTION', 'cmd':'%s/neo4j-shell -localhost -file build.cypher' % neo4j_bin, 'mode':['build','run_cypher']}, command_list = command_list_db_build if not opts.prep_only else command_list_db_build_prep stepnames = [x['stepname'] for x in command_list] # list of dictionaries seqnum = DB.get_next_sequence_number() # Execute commands (BUILD) results_d = {} for 
command_d in command_list: # extract from tuple stepname, cmd, logmsg, expected_status, mode_requirement = \ command_d['stepname'], command_d.get('cmd',None), command_d.get('log',None), command_d.get('expected_status',0), command_d.get('mode', None) if mode_requirement and opts.mode not in mode_requirement: continue # eg: NEO4J execution only in build mode results_d[stepname] = {} cmd_start = datetime.datetime.now() if stepname!='JOB_END' else job_start_datetime # start timer status = -1 should_break = False results_d[stepname]['result'] = 'SUCCESS' # assumption of success until failure determined results_d[stepname]['expected_status'] = expected_status results_d[stepname]['command'] = cmd print(stepname) print(stepname, file=logfile) # indicate to user what step we are on if logmsg: # no command to execute in a separate process results_d[stepname]['status'] = 0 results_d[stepname]['STDOUT'] = logmsg # LOG everything after 'LOG:' output, err = '', '' else: # execute command (cmd) in subprocess print(cmd, file=logfile) try: #p = subprocess.Popen(cmd, shell=True,stdin=PIPE, stdout=PIPE, stderr=PIPE) #output, err = p.communicate(b"") #status = p.returncode cmd_as_list = cmd.split(' ') if opts.output_dir != '.': os.chdir(opts.output_dir) # move to output_dir, to start subprocess subprocess.check_call(cmd_as_list, stdout=logfile, stderr=logfile) if opts.output_dir != '.': os.chdir(saved_pwd) # get back (popd) status = 0 # if no exception -- status is zero except subprocess.CalledProcessError as e: status = e.returncode results_d[stepname]['status'] = status if status != expected_status: results_d[stepname]['result'] = 'FAILED (STATUS %d)' % status should_break = True pass # might be fine, should_break controls termination except: # NOTE: result defaulted to -1 above results_d[stepname]['result'] = 'EXCEPTION occured -- on step [%s], cmd [%s]' % (stepname,cmd) should_break = True pass else: # no exception results_d[stepname]['status'] = status if status != 
expected_status: results_d[stepname]['result'] = 'FAILED (STATUS %d)' % status should_break = True # no steps are optional, terminate now # Book-keeping cmd_end = datetime.datetime.now() # stop timer results_d[stepname]['elapsed_time'] = cmd_end-cmd_start if len(output) > 0: results_d[stepname]['STDOUT'] = output.replace('\n','<EOL>') if len(err) > 0: results_d[stepname]['STDERR'] = err.replace('\n','<EOL>') results_d[stepname]['cmd_start'] = cmd_start results_d[stepname]['cmd_end'] = cmd_end if should_break: break # Write results to the database save_and_report_results(DB, seqnum, stepnames, results_d, logfile) # Done sys.exit(0) # END db_build #----------------------------------------------------------------------------| # MAIN | #----------------------------------------------------------------------------| def parse_and_interpret(arglist): command_interpreters = [('db_build',db_build)] command_names = [x[0] for x in command_interpreters] if len(arglist) < 1: print('Usage: python <cmd> %s ...' % '[one of %s]' % ','.join(command_names)); sys.exit(1) # DEMAND that arglist[0] be one of the sub-commands command_name = arglist[0] try: command_index = command_names.index(command_name) except: print('Usage : python <cmd> %s ...' % '[one of %s]' % ','.join(command_names)); sys.exit(1) command_interpreters[command_index][1](arglist[1:]) # call appropriate interpreter # MAIN parse_and_interpret(sys.argv[1:]) # causes sub-command processing to occur as well sys.exit(0)
mit
andim/scipy
scipy/signal/_max_len_seq.py
47
3843
# Author: Eric Larson # 2014 """Tools for MLS generation""" import numpy as np from ._max_len_seq_inner import _max_len_seq_inner __all__ = ['max_len_seq'] # These are definitions of linear shift register taps for use in max_len_seq() _mls_taps = {2: [1], 3: [2], 4: [3], 5: [3], 6: [5], 7: [6], 8: [7, 6, 1], 9: [5], 10: [7], 11: [9], 12: [11, 10, 4], 13: [12, 11, 8], 14: [13, 12, 2], 15: [14], 16: [15, 13, 4], 17: [14], 18: [11], 19: [18, 17, 14], 20: [17], 21: [19], 22: [21], 23: [18], 24: [23, 22, 17], 25: [22], 26: [25, 24, 20], 27: [26, 25, 22], 28: [25], 29: [27], 30: [29, 28, 7], 31: [28], 32: [31, 30, 10]} def max_len_seq(nbits, state=None, length=None, taps=None): """ Maximum length sequence (MLS) generator. Parameters ---------- nbits : int Number of bits to use. Length of the resulting sequence will be ``(2**nbits) - 1``. Note that generating long sequences (e.g., greater than ``nbits == 16``) can take a long time. state : array_like, optional If array, must be of length ``nbits``, and will be cast to binary (bool) representation. If None, a seed of ones will be used, producing a repeatable representation. If ``state`` is all zeros, an error is raised as this is invalid. Default: None. length : int | None, optional Number of samples to compute. If None, the entire length ``(2**nbits) - 1`` is computed. taps : array_like, optional Polynomial taps to use (e.g., ``[7, 6, 1]`` for an 8-bit sequence). If None, taps will be automatically selected (for up to ``nbits == 32``). Returns ------- seq : array Resulting MLS sequence of 0's and 1's. state : array The final state of the shift register. Notes ----- The algorithm for MLS generation is generically described in: https://en.wikipedia.org/wiki/Maximum_length_sequence The default values for taps are specifically taken from the first option listed for each value of ``nbits`` in: http://www.newwaveinstruments.com/resources/articles/ m_sequence_linear_feedback_shift_register_lfsr.htm .. 
versionadded:: 0.15.0 """ if taps is None: if nbits not in _mls_taps: known_taps = np.array(list(_mls_taps.keys())) raise ValueError('nbits must be between %s and %s if taps is None' % (known_taps.min(), known_taps.max())) taps = np.array(_mls_taps[nbits], np.intp) else: taps = np.unique(np.array(taps, np.intp))[::-1] if np.any(taps < 0) or np.any(taps > nbits) or taps.size < 1: raise ValueError('taps must be non-empty with values between ' 'zero and nbits (inclusive)') taps = np.ascontiguousarray(taps) # needed for Cython n_max = (2**nbits) - 1 if length is None: length = n_max else: length = int(length) if length < 0: raise ValueError('length must be greater than or equal to 0') # We use int8 instead of bool here because numpy arrays of bools # don't seem to work nicely with Cython if state is None: state = np.ones(nbits, dtype=np.int8, order='c') else: # makes a copy if need be, ensuring it's 0's and 1's state = np.array(state, dtype=bool, order='c').astype(np.int8) if state.ndim != 1 or state.size != nbits: raise ValueError('state must be a 1-dimensional array of size nbits') if np.all(state == 0): raise ValueError('state must not be all zeros') seq = np.empty(length, dtype=np.int8, order='c') state = _max_len_seq_inner(taps, state, nbits, length, seq) return seq, state
bsd-3-clause
munnerz/CouchPotatoServer
libs/html5lib/treeadapters/sax.py
1835
1661
from __future__ import absolute_import, division, unicode_literals from xml.sax.xmlreader import AttributesNSImpl from ..constants import adjustForeignAttributes, unadjustForeignAttributes prefix_mapping = {} for prefix, localName, namespace in adjustForeignAttributes.values(): if prefix is not None: prefix_mapping[prefix] = namespace def to_sax(walker, handler): """Call SAX-like content handler based on treewalker walker""" handler.startDocument() for prefix, namespace in prefix_mapping.items(): handler.startPrefixMapping(prefix, namespace) for token in walker: type = token["type"] if type == "Doctype": continue elif type in ("StartTag", "EmptyTag"): attrs = AttributesNSImpl(token["data"], unadjustForeignAttributes) handler.startElementNS((token["namespace"], token["name"]), token["name"], attrs) if type == "EmptyTag": handler.endElementNS((token["namespace"], token["name"]), token["name"]) elif type == "EndTag": handler.endElementNS((token["namespace"], token["name"]), token["name"]) elif type in ("Characters", "SpaceCharacters"): handler.characters(token["data"]) elif type == "Comment": pass else: assert False, "Unknown token type" for prefix, namespace in prefix_mapping.items(): handler.endPrefixMapping(prefix) handler.endDocument()
gpl-3.0
pprett/scikit-learn
sklearn/preprocessing/label.py
28
28237
# Authors: Alexandre Gramfort <alexandre.gramfort@inria.fr> # Mathieu Blondel <mathieu@mblondel.org> # Olivier Grisel <olivier.grisel@ensta.org> # Andreas Mueller <amueller@ais.uni-bonn.de> # Joel Nothman <joel.nothman@gmail.com> # Hamzeh Alsalhi <ha258@cornell.edu> # License: BSD 3 clause from collections import defaultdict import itertools import array import numpy as np import scipy.sparse as sp from ..base import BaseEstimator, TransformerMixin from ..utils.fixes import np_version from ..utils.fixes import sparse_min_max from ..utils.fixes import astype from ..utils.fixes import in1d from ..utils import column_or_1d from ..utils.validation import check_array from ..utils.validation import check_is_fitted from ..utils.validation import _num_samples from ..utils.multiclass import unique_labels from ..utils.multiclass import type_of_target from ..externals import six zip = six.moves.zip map = six.moves.map __all__ = [ 'label_binarize', 'LabelBinarizer', 'LabelEncoder', 'MultiLabelBinarizer', ] def _check_numpy_unicode_bug(labels): """Check that user is not subject to an old numpy bug Fixed in master before 1.7.0: https://github.com/numpy/numpy/pull/243 """ if np_version[:3] < (1, 7, 0) and labels.dtype.kind == 'U': raise RuntimeError("NumPy < 1.7.0 does not implement searchsorted" " on unicode data correctly. Please upgrade" " NumPy to use LabelEncoder with unicode inputs.") class LabelEncoder(BaseEstimator, TransformerMixin): """Encode labels with value between 0 and n_classes-1. Read more in the :ref:`User Guide <preprocessing_targets>`. Attributes ---------- classes_ : array of shape (n_class,) Holds the label for each class. Examples -------- `LabelEncoder` can be used to normalize labels. >>> from sklearn import preprocessing >>> le = preprocessing.LabelEncoder() >>> le.fit([1, 2, 2, 6]) LabelEncoder() >>> le.classes_ array([1, 2, 6]) >>> le.transform([1, 1, 2, 6]) #doctest: +ELLIPSIS array([0, 0, 1, 2]...) 
>>> le.inverse_transform([0, 0, 1, 2]) array([1, 1, 2, 6]) It can also be used to transform non-numerical labels (as long as they are hashable and comparable) to numerical labels. >>> le = preprocessing.LabelEncoder() >>> le.fit(["paris", "paris", "tokyo", "amsterdam"]) LabelEncoder() >>> list(le.classes_) ['amsterdam', 'paris', 'tokyo'] >>> le.transform(["tokyo", "tokyo", "paris"]) #doctest: +ELLIPSIS array([2, 2, 1]...) >>> list(le.inverse_transform([2, 2, 1])) ['tokyo', 'tokyo', 'paris'] See also -------- sklearn.preprocessing.OneHotEncoder : encode categorical integer features using a one-hot aka one-of-K scheme. """ def fit(self, y): """Fit label encoder Parameters ---------- y : array-like of shape (n_samples,) Target values. Returns ------- self : returns an instance of self. """ y = column_or_1d(y, warn=True) _check_numpy_unicode_bug(y) self.classes_ = np.unique(y) return self def fit_transform(self, y): """Fit label encoder and return encoded labels Parameters ---------- y : array-like of shape [n_samples] Target values. Returns ------- y : array-like of shape [n_samples] """ y = column_or_1d(y, warn=True) _check_numpy_unicode_bug(y) self.classes_, y = np.unique(y, return_inverse=True) return y def transform(self, y): """Transform labels to normalized encoding. Parameters ---------- y : array-like of shape [n_samples] Target values. Returns ------- y : array-like of shape [n_samples] """ check_is_fitted(self, 'classes_') y = column_or_1d(y, warn=True) classes = np.unique(y) _check_numpy_unicode_bug(classes) if len(np.intersect1d(classes, self.classes_)) < len(classes): diff = np.setdiff1d(classes, self.classes_) raise ValueError("y contains new labels: %s" % str(diff)) return np.searchsorted(self.classes_, y) def inverse_transform(self, y): """Transform labels back to original encoding. Parameters ---------- y : numpy array of shape [n_samples] Target values. 
Returns ------- y : numpy array of shape [n_samples] """ check_is_fitted(self, 'classes_') diff = np.setdiff1d(y, np.arange(len(self.classes_))) if diff: raise ValueError("y contains new labels: %s" % str(diff)) y = np.asarray(y) return self.classes_[y] class LabelBinarizer(BaseEstimator, TransformerMixin): """Binarize labels in a one-vs-all fashion Several regression and binary classification algorithms are available in the scikit. A simple way to extend these algorithms to the multi-class classification case is to use the so-called one-vs-all scheme. At learning time, this simply consists in learning one regressor or binary classifier per class. In doing so, one needs to convert multi-class labels to binary labels (belong or does not belong to the class). LabelBinarizer makes this process easy with the transform method. At prediction time, one assigns the class for which the corresponding model gave the greatest confidence. LabelBinarizer makes this easy with the inverse_transform method. Read more in the :ref:`User Guide <preprocessing_targets>`. Parameters ---------- neg_label : int (default: 0) Value with which negative labels must be encoded. pos_label : int (default: 1) Value with which positive labels must be encoded. sparse_output : boolean (default: False) True if the returned array from transform is desired to be in sparse CSR format. Attributes ---------- classes_ : array of shape [n_class] Holds the label for each class. y_type_ : str, Represents the type of the target data as evaluated by utils.multiclass.type_of_target. Possible type are 'continuous', 'continuous-multioutput', 'binary', 'multiclass', 'multiclass-multioutput', 'multilabel-indicator', and 'unknown'. sparse_input_ : boolean, True if the input data to transform is given as a sparse matrix, False otherwise. 
Examples -------- >>> from sklearn import preprocessing >>> lb = preprocessing.LabelBinarizer() >>> lb.fit([1, 2, 6, 4, 2]) LabelBinarizer(neg_label=0, pos_label=1, sparse_output=False) >>> lb.classes_ array([1, 2, 4, 6]) >>> lb.transform([1, 6]) array([[1, 0, 0, 0], [0, 0, 0, 1]]) Binary targets transform to a column vector >>> lb = preprocessing.LabelBinarizer() >>> lb.fit_transform(['yes', 'no', 'no', 'yes']) array([[1], [0], [0], [1]]) Passing a 2D matrix for multilabel classification >>> import numpy as np >>> lb.fit(np.array([[0, 1, 1], [1, 0, 0]])) LabelBinarizer(neg_label=0, pos_label=1, sparse_output=False) >>> lb.classes_ array([0, 1, 2]) >>> lb.transform([0, 1, 2, 1]) array([[1, 0, 0], [0, 1, 0], [0, 0, 1], [0, 1, 0]]) See also -------- label_binarize : function to perform the transform operation of LabelBinarizer with fixed classes. sklearn.preprocessing.OneHotEncoder : encode categorical integer features using a one-hot aka one-of-K scheme. """ def __init__(self, neg_label=0, pos_label=1, sparse_output=False): if neg_label >= pos_label: raise ValueError("neg_label={0} must be strictly less than " "pos_label={1}.".format(neg_label, pos_label)) if sparse_output and (pos_label == 0 or neg_label != 0): raise ValueError("Sparse binarization is only supported with non " "zero pos_label and zero neg_label, got " "pos_label={0} and neg_label={1}" "".format(pos_label, neg_label)) self.neg_label = neg_label self.pos_label = pos_label self.sparse_output = sparse_output def fit(self, y): """Fit label binarizer Parameters ---------- y : array of shape [n_samples,] or [n_samples, n_classes] Target values. The 2-d matrix should only contain 0 and 1, represents multilabel classification. Returns ------- self : returns an instance of self. 
""" self.y_type_ = type_of_target(y) if 'multioutput' in self.y_type_: raise ValueError("Multioutput target data is not supported with " "label binarization") if _num_samples(y) == 0: raise ValueError('y has 0 samples: %r' % y) self.sparse_input_ = sp.issparse(y) self.classes_ = unique_labels(y) return self def fit_transform(self, y): """Fit label binarizer and transform multi-class labels to binary labels. The output of transform is sometimes referred to as the 1-of-K coding scheme. Parameters ---------- y : array or sparse matrix of shape [n_samples,] or \ [n_samples, n_classes] Target values. The 2-d matrix should only contain 0 and 1, represents multilabel classification. Sparse matrix can be CSR, CSC, COO, DOK, or LIL. Returns ------- Y : array or CSR matrix of shape [n_samples, n_classes] Shape will be [n_samples, 1] for binary problems. """ return self.fit(y).transform(y) def transform(self, y): """Transform multi-class labels to binary labels The output of transform is sometimes referred to by some authors as the 1-of-K coding scheme. Parameters ---------- y : array or sparse matrix of shape [n_samples,] or \ [n_samples, n_classes] Target values. The 2-d matrix should only contain 0 and 1, represents multilabel classification. Sparse matrix can be CSR, CSC, COO, DOK, or LIL. Returns ------- Y : numpy array or CSR matrix of shape [n_samples, n_classes] Shape will be [n_samples, 1] for binary problems. 
""" check_is_fitted(self, 'classes_') y_is_multilabel = type_of_target(y).startswith('multilabel') if y_is_multilabel and not self.y_type_.startswith('multilabel'): raise ValueError("The object was not fitted with multilabel" " input.") return label_binarize(y, self.classes_, pos_label=self.pos_label, neg_label=self.neg_label, sparse_output=self.sparse_output) def inverse_transform(self, Y, threshold=None): """Transform binary labels back to multi-class labels Parameters ---------- Y : numpy array or sparse matrix with shape [n_samples, n_classes] Target values. All sparse matrices are converted to CSR before inverse transformation. threshold : float or None Threshold used in the binary and multi-label cases. Use 0 when: - Y contains the output of decision_function (classifier) Use 0.5 when: - Y contains the output of predict_proba If None, the threshold is assumed to be half way between neg_label and pos_label. Returns ------- y : numpy array or CSR matrix of shape [n_samples] Target values. Notes ----- In the case when the binary labels are fractional (probabilistic), inverse_transform chooses the class with the greatest value. Typically, this allows to use the output of a linear model's decision_function method directly as the input of inverse_transform. """ check_is_fitted(self, 'classes_') if threshold is None: threshold = (self.pos_label + self.neg_label) / 2. if self.y_type_ == "multiclass": y_inv = _inverse_binarize_multiclass(Y, self.classes_) else: y_inv = _inverse_binarize_thresholding(Y, self.y_type_, self.classes_, threshold) if self.sparse_input_: y_inv = sp.csr_matrix(y_inv) elif sp.issparse(y_inv): y_inv = y_inv.toarray() return y_inv def label_binarize(y, classes, neg_label=0, pos_label=1, sparse_output=False): """Binarize labels in a one-vs-all fashion Several regression and binary classification algorithms are available in the scikit. 
A simple way to extend these algorithms to the multi-class classification case is to use the so-called one-vs-all scheme. This function makes it possible to compute this transformation for a fixed set of class labels known ahead of time. Parameters ---------- y : array-like Sequence of integer labels or multilabel data to encode. classes : array-like of shape [n_classes] Uniquely holds the label for each class. neg_label : int (default: 0) Value with which negative labels must be encoded. pos_label : int (default: 1) Value with which positive labels must be encoded. sparse_output : boolean (default: False), Set to true if output binary array is desired in CSR sparse format Returns ------- Y : numpy array or CSR matrix of shape [n_samples, n_classes] Shape will be [n_samples, 1] for binary problems. Examples -------- >>> from sklearn.preprocessing import label_binarize >>> label_binarize([1, 6], classes=[1, 2, 4, 6]) array([[1, 0, 0, 0], [0, 0, 0, 1]]) The class ordering is preserved: >>> label_binarize([1, 6], classes=[1, 6, 4, 2]) array([[1, 0, 0, 0], [0, 1, 0, 0]]) Binary targets transform to a column vector >>> label_binarize(['yes', 'no', 'no', 'yes'], classes=['no', 'yes']) array([[1], [0], [0], [1]]) See also -------- LabelBinarizer : class used to wrap the functionality of label_binarize and allow for fitting to classes independently of the transform operation """ if not isinstance(y, list): # XXX Workaround that will be removed when list of list format is # dropped y = check_array(y, accept_sparse='csr', ensure_2d=False, dtype=None) else: if _num_samples(y) == 0: raise ValueError('y has 0 samples: %r' % y) if neg_label >= pos_label: raise ValueError("neg_label={0} must be strictly less than " "pos_label={1}.".format(neg_label, pos_label)) if (sparse_output and (pos_label == 0 or neg_label != 0)): raise ValueError("Sparse binarization is only supported with non " "zero pos_label and zero neg_label, got " "pos_label={0} and neg_label={1}" "".format(pos_label, 
neg_label)) # To account for pos_label == 0 in the dense case pos_switch = pos_label == 0 if pos_switch: pos_label = -neg_label y_type = type_of_target(y) if 'multioutput' in y_type: raise ValueError("Multioutput target data is not supported with label " "binarization") if y_type == 'unknown': raise ValueError("The type of target data is not known") n_samples = y.shape[0] if sp.issparse(y) else len(y) n_classes = len(classes) classes = np.asarray(classes) if y_type == "binary": if n_classes == 1: if sparse_output: return sp.csr_matrix((n_samples, 1), dtype=int) else: Y = np.zeros((len(y), 1), dtype=np.int) Y += neg_label return Y elif len(classes) >= 3: y_type = "multiclass" sorted_class = np.sort(classes) if (y_type == "multilabel-indicator" and classes.size != y.shape[1]): raise ValueError("classes {0} missmatch with the labels {1}" "found in the data".format(classes, unique_labels(y))) if y_type in ("binary", "multiclass"): y = column_or_1d(y) # pick out the known labels from y y_in_classes = in1d(y, classes) y_seen = y[y_in_classes] indices = np.searchsorted(sorted_class, y_seen) indptr = np.hstack((0, np.cumsum(y_in_classes))) data = np.empty_like(indices) data.fill(pos_label) Y = sp.csr_matrix((data, indices, indptr), shape=(n_samples, n_classes)) elif y_type == "multilabel-indicator": Y = sp.csr_matrix(y) if pos_label != 1: data = np.empty_like(Y.data) data.fill(pos_label) Y.data = data else: raise ValueError("%s target data is not supported with label " "binarization" % y_type) if not sparse_output: Y = Y.toarray() Y = astype(Y, int, copy=False) if neg_label != 0: Y[Y == 0] = neg_label if pos_switch: Y[Y == pos_label] = 0 else: Y.data = astype(Y.data, int, copy=False) # preserve label ordering if np.any(classes != sorted_class): indices = np.searchsorted(sorted_class, classes) Y = Y[:, indices] if y_type == "binary": if sparse_output: Y = Y.getcol(-1) else: Y = Y[:, -1].reshape((-1, 1)) return Y def _inverse_binarize_multiclass(y, classes): """Inverse label 
binarization transformation for multiclass. Multiclass uses the maximal score instead of a threshold. """ classes = np.asarray(classes) if sp.issparse(y): # Find the argmax for each row in y where y is a CSR matrix y = y.tocsr() n_samples, n_outputs = y.shape outputs = np.arange(n_outputs) row_max = sparse_min_max(y, 1)[1] row_nnz = np.diff(y.indptr) y_data_repeated_max = np.repeat(row_max, row_nnz) # picks out all indices obtaining the maximum per row y_i_all_argmax = np.flatnonzero(y_data_repeated_max == y.data) # For corner case where last row has a max of 0 if row_max[-1] == 0: y_i_all_argmax = np.append(y_i_all_argmax, [len(y.data)]) # Gets the index of the first argmax in each row from y_i_all_argmax index_first_argmax = np.searchsorted(y_i_all_argmax, y.indptr[:-1]) # first argmax of each row y_ind_ext = np.append(y.indices, [0]) y_i_argmax = y_ind_ext[y_i_all_argmax[index_first_argmax]] # Handle rows of all 0 y_i_argmax[np.where(row_nnz == 0)[0]] = 0 # Handles rows with max of 0 that contain negative numbers samples = np.arange(n_samples)[(row_nnz > 0) & (row_max.ravel() == 0)] for i in samples: ind = y.indices[y.indptr[i]:y.indptr[i + 1]] y_i_argmax[i] = classes[np.setdiff1d(outputs, ind)][0] return classes[y_i_argmax] else: return classes.take(y.argmax(axis=1), mode="clip") def _inverse_binarize_thresholding(y, output_type, classes, threshold): """Inverse label binarization transformation using thresholding.""" if output_type == "binary" and y.ndim == 2 and y.shape[1] > 2: raise ValueError("output_type='binary', but y.shape = {0}". 
format(y.shape)) if output_type != "binary" and y.shape[1] != len(classes): raise ValueError("The number of class is not equal to the number of " "dimension of y.") classes = np.asarray(classes) # Perform thresholding if sp.issparse(y): if threshold > 0: if y.format not in ('csr', 'csc'): y = y.tocsr() y.data = np.array(y.data > threshold, dtype=np.int) y.eliminate_zeros() else: y = np.array(y.toarray() > threshold, dtype=np.int) else: y = np.array(y > threshold, dtype=np.int) # Inverse transform data if output_type == "binary": if sp.issparse(y): y = y.toarray() if y.ndim == 2 and y.shape[1] == 2: return classes[y[:, 1]] else: if len(classes) == 1: return np.repeat(classes[0], len(y)) else: return classes[y.ravel()] elif output_type == "multilabel-indicator": return y else: raise ValueError("{0} format is not supported".format(output_type)) class MultiLabelBinarizer(BaseEstimator, TransformerMixin): """Transform between iterable of iterables and a multilabel format Although a list of sets or tuples is a very intuitive format for multilabel data, it is unwieldy to process. This transformer converts between this intuitive format and the supported multilabel format: a (samples x classes) binary matrix indicating the presence of a class label. Parameters ---------- classes : array-like of shape [n_classes] (optional) Indicates an ordering for the class labels sparse_output : boolean (default: False), Set to true if output binary array is desired in CSR sparse format Attributes ---------- classes_ : array of labels A copy of the `classes` parameter where provided, or otherwise, the sorted set of classes found when fitting. 
Examples -------- >>> from sklearn.preprocessing import MultiLabelBinarizer >>> mlb = MultiLabelBinarizer() >>> mlb.fit_transform([(1, 2), (3,)]) array([[1, 1, 0], [0, 0, 1]]) >>> mlb.classes_ array([1, 2, 3]) >>> mlb.fit_transform([set(['sci-fi', 'thriller']), set(['comedy'])]) array([[0, 1, 1], [1, 0, 0]]) >>> list(mlb.classes_) ['comedy', 'sci-fi', 'thriller'] See also -------- sklearn.preprocessing.OneHotEncoder : encode categorical integer features using a one-hot aka one-of-K scheme. """ def __init__(self, classes=None, sparse_output=False): self.classes = classes self.sparse_output = sparse_output def fit(self, y): """Fit the label sets binarizer, storing `classes_` Parameters ---------- y : iterable of iterables A set of labels (any orderable and hashable object) for each sample. If the `classes` parameter is set, `y` will not be iterated. Returns ------- self : returns this MultiLabelBinarizer instance """ if self.classes is None: classes = sorted(set(itertools.chain.from_iterable(y))) else: classes = self.classes dtype = np.int if all(isinstance(c, int) for c in classes) else object self.classes_ = np.empty(len(classes), dtype=dtype) self.classes_[:] = classes return self def fit_transform(self, y): """Fit the label sets binarizer and transform the given label sets Parameters ---------- y : iterable of iterables A set of labels (any orderable and hashable object) for each sample. If the `classes` parameter is set, `y` will not be iterated. Returns ------- y_indicator : array or CSR matrix, shape (n_samples, n_classes) A matrix such that `y_indicator[i, j] = 1` iff `classes_[j]` is in `y[i]`, and 0 otherwise. 
""" if self.classes is not None: return self.fit(y).transform(y) # Automatically increment on new class class_mapping = defaultdict(int) class_mapping.default_factory = class_mapping.__len__ yt = self._transform(y, class_mapping) # sort classes and reorder columns tmp = sorted(class_mapping, key=class_mapping.get) # (make safe for tuples) dtype = np.int if all(isinstance(c, int) for c in tmp) else object class_mapping = np.empty(len(tmp), dtype=dtype) class_mapping[:] = tmp self.classes_, inverse = np.unique(class_mapping, return_inverse=True) # ensure yt.indices keeps its current dtype yt.indices = np.array(inverse[yt.indices], dtype=yt.indices.dtype, copy=False) if not self.sparse_output: yt = yt.toarray() return yt def transform(self, y): """Transform the given label sets Parameters ---------- y : iterable of iterables A set of labels (any orderable and hashable object) for each sample. If the `classes` parameter is set, `y` will not be iterated. Returns ------- y_indicator : array or CSR matrix, shape (n_samples, n_classes) A matrix such that `y_indicator[i, j] = 1` iff `classes_[j]` is in `y[i]`, and 0 otherwise. 
""" check_is_fitted(self, 'classes_') class_to_index = dict(zip(self.classes_, range(len(self.classes_)))) yt = self._transform(y, class_to_index) if not self.sparse_output: yt = yt.toarray() return yt def _transform(self, y, class_mapping): """Transforms the label sets with a given mapping Parameters ---------- y : iterable of iterables class_mapping : Mapping Maps from label to column index in label indicator matrix Returns ------- y_indicator : sparse CSR matrix, shape (n_samples, n_classes) Label indicator matrix """ indices = array.array('i') indptr = array.array('i', [0]) for labels in y: indices.extend(set(class_mapping[label] for label in labels)) indptr.append(len(indices)) data = np.ones(len(indices), dtype=int) return sp.csr_matrix((data, indices, indptr), shape=(len(indptr) - 1, len(class_mapping))) def inverse_transform(self, yt): """Transform the given indicator matrix into label sets Parameters ---------- yt : array or sparse matrix of shape (n_samples, n_classes) A matrix containing only 1s ands 0s. Returns ------- y : list of tuples The set of labels for each sample such that `y[i]` consists of `classes_[j]` for each `yt[i, j] == 1`. """ check_is_fitted(self, 'classes_') if yt.shape[1] != len(self.classes_): raise ValueError('Expected indicator for {0} classes, but got {1}' .format(len(self.classes_), yt.shape[1])) if sp.issparse(yt): yt = yt.tocsr() if len(yt.data) != 0 and len(np.setdiff1d(yt.data, [0, 1])) > 0: raise ValueError('Expected only 0s and 1s in label indicator.') return [tuple(self.classes_.take(yt.indices[start:end])) for start, end in zip(yt.indptr[:-1], yt.indptr[1:])] else: unexpected = np.setdiff1d(yt, [0, 1]) if len(unexpected) > 0: raise ValueError('Expected only 0s and 1s in label indicator. ' 'Also got {0}'.format(unexpected)) return [tuple(self.classes_.compress(indicators)) for indicators in yt]
bsd-3-clause
cmlasu/smm_gem5
configs/topologies/Mesh.py
47
5099
# Copyright (c) 2010 Advanced Micro Devices, Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer; # redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution; # neither the name of the copyright holders nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
# # Authors: Brad Beckmann from m5.params import * from m5.objects import * from BaseTopology import SimpleTopology class Mesh(SimpleTopology): description='Mesh' def __init__(self, controllers): self.nodes = controllers # Makes a generic mesh assuming an equal number of cache and directory cntrls def makeTopology(self, options, network, IntLink, ExtLink, Router): nodes = self.nodes num_routers = options.num_cpus num_rows = options.mesh_rows # There must be an evenly divisible number of cntrls to routers # Also, obviously the number or rows must be <= the number of routers cntrls_per_router, remainder = divmod(len(nodes), num_routers) assert(num_rows <= num_routers) num_columns = int(num_routers / num_rows) assert(num_columns * num_rows == num_routers) # Create the routers in the mesh routers = [Router(router_id=i) for i in range(num_routers)] network.routers = routers # link counter to set unique link ids link_count = 0 # Add all but the remainder nodes to the list of nodes to be uniformly # distributed across the network. network_nodes = [] remainder_nodes = [] for node_index in xrange(len(nodes)): if node_index < (len(nodes) - remainder): network_nodes.append(nodes[node_index]) else: remainder_nodes.append(nodes[node_index]) # Connect each node to the appropriate router ext_links = [] for (i, n) in enumerate(network_nodes): cntrl_level, router_id = divmod(i, num_routers) assert(cntrl_level < cntrls_per_router) ext_links.append(ExtLink(link_id=link_count, ext_node=n, int_node=routers[router_id])) link_count += 1 # Connect the remainding nodes to router 0. These should only be # DMA nodes. for (i, node) in enumerate(remainder_nodes): assert(node.type == 'DMA_Controller') assert(i < remainder) ext_links.append(ExtLink(link_id=link_count, ext_node=node, int_node=routers[0])) link_count += 1 network.ext_links = ext_links # Create the mesh links. 
First row (east-west) links then column # (north-south) links int_links = [] for row in xrange(num_rows): for col in xrange(num_columns): if (col + 1 < num_columns): east_id = col + (row * num_columns) west_id = (col + 1) + (row * num_columns) int_links.append(IntLink(link_id=link_count, node_a=routers[east_id], node_b=routers[west_id], weight=1)) link_count += 1 for col in xrange(num_columns): for row in xrange(num_rows): if (row + 1 < num_rows): north_id = col + (row * num_columns) south_id = col + ((row + 1) * num_columns) int_links.append(IntLink(link_id=link_count, node_a=routers[north_id], node_b=routers[south_id], weight=2)) link_count += 1 network.int_links = int_links
bsd-3-clause
chetan/ansible
lib/ansible/runner/action_plugins/normal.py
148
2205
# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. import os import pwd import random import traceback import tempfile import ansible.constants as C from ansible import utils from ansible import errors from ansible import module_common from ansible.runner.return_data import ReturnData from ansible.callbacks import vv, vvv class ActionModule(object): def __init__(self, runner): self.runner = runner def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs): ''' transfer & execute a module that is not 'copy' or 'template' ''' module_args = self.runner._complex_args_hack(complex_args, module_args) if self.runner.noop_on_check(inject): if module_name in [ 'shell', 'command' ]: return ReturnData(conn=conn, comm_ok=True, result=dict(skipped=True, msg='check mode not supported for %s' % module_name)) # else let the module parsing code decide, though this will only be allowed for AnsibleModuleCommon using # python modules for now module_args += " CHECKMODE=True" if self.runner.no_log: module_args += " NO_LOG=True" # shell and command are the same module if module_name == 'shell': module_name = 'command' module_args += " #USE_SHELL" vv("REMOTE_MODULE %s %s" % (module_name, module_args), host=conn.host) return self.runner._execute_module(conn, tmp, module_name, module_args, inject=inject, 
complex_args=complex_args)
gpl-3.0
azatoth/scons
test/option/d.py
5
3759
#!/usr/bin/env python # # __COPYRIGHT__ # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. # __revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" """ Verify that the -d option is ignored. 
""" import TestSCons test = TestSCons.TestSCons() test.write('SConstruct', "") test.run(arguments = '-d .', stderr = "Warning: ignoring -d option\n") test.pass_test() # test.subdir('subdir') test.write('SConstruct', """ env = Environment() env.Program(target = 'aaa', source = 'aaa.c') env.Program(target = 'bbb', source = 'bbb.c') SConscript('subdir/SConscript') """) test.write(['subdir', 'SConscript'], """ env = Environment() env.Program(target = 'ccc', source = 'ccc.c') env.Program(target = 'ddd', source = 'ddd.c') """) test.write('aaa.c', """ int main(int argc, char *argv) { argv[argc++] = "--"; printf("aaa.c\n"); exit (0); } """) test.write('bbb.c', """ int main(int argc, char *argv) { argv[argc++] = "--"; printf("bbb.c\n"); exit (0); } """) test.write(['subdir', 'ccc.c'], """ int main(int argc, char *argv) { argv[argc++] = "--"; printf("subdir/ccc.c\n"); exit (0); } """) test.write(['subdir', 'ddd.c'], """ int main(int argc, char *argv) { argv[argc++] = "--"; printf("subdir/ddd.c\n"); exit (0); } """) test.run(arguments = '-d .', stdout = """ Target aaa: aaa.o Checking aaa Checking aaa.o Checking aaa.c Rebuilding aaa.o: out of date. cc -c -o aaa.o aaa.c Rebuilding aaa: out of date. cc -o aaa aaa.o Target aaa.o: aaa.c Target bbb: bbb.o Checking bbb Checking bbb.o Checking bbb.c Rebuilding bbb.o: out of date. cc -c -o bbb.o bbb.c Rebuilding bbb: out of date. cc -o bbb bbb.o Target bbb.o: bbb.c Target subdir/ccc/g: subdir/ccc.o Checking subdir/ccc/g Checking subdir/ccc/g.o Checking subdir/ccc/g.c Rebuilding subdir/ccc/g.o: out of date. cc -c -o subdir/ccc/g.o subdir/ccc.c Rebuilding subdir/ccc/g: out of date. cc -o subdir/ccc/g subdir/ccc.o Target subdir/ccc/g.o: subdir/ccc.c Target subdir/ddd/g: subdir/ddd.o Checking subdir/ddd/g Checking subdir/ddd/g.o Checking subdir/ddd/g.c Rebuilding subdir/ddd/g.o: out of date. cc -c -o subdir/ddd/g.o subdir/ddd.c Rebuilding subdir/ddd/g: out of date. 
cc -o subdir/ddd/g subdir/ddd.o Target subdir/ddd/g.o: subdir/ddd.c """) test.run(program = test.workpath('aaa'), stdout = "aaa.c\n") test.run(program = test.workpath('bbb'), stdout = "bbb.c\n") test.run(program = test.workpath('subdir/ccc'), stdout = "subdir/ccc.c\n") test.run(program = test.workpath('subdir/ddd'), stdout = "subdir/ddd.c\n") test.pass_test() # Local Variables: # tab-width:4 # indent-tabs-mode:nil # End: # vim: set expandtab tabstop=4 shiftwidth=4:
mit
ekwoodrich/nirha
nirhalib/venv/lib/python2.7/site-packages/pip/vcs/git.py
473
7898
import tempfile import re import os.path from pip.util import call_subprocess from pip.util import display_path, rmtree from pip.vcs import vcs, VersionControl from pip.log import logger from pip.backwardcompat import url2pathname, urlparse urlsplit = urlparse.urlsplit urlunsplit = urlparse.urlunsplit class Git(VersionControl): name = 'git' dirname = '.git' repo_name = 'clone' schemes = ('git', 'git+http', 'git+https', 'git+ssh', 'git+git', 'git+file') bundle_file = 'git-clone.txt' guide = ('# This was a Git repo; to make it a repo again run:\n' 'git init\ngit remote add origin %(url)s -f\ngit checkout %(rev)s\n') def __init__(self, url=None, *args, **kwargs): # Works around an apparent Git bug # (see http://article.gmane.org/gmane.comp.version-control.git/146500) if url: scheme, netloc, path, query, fragment = urlsplit(url) if scheme.endswith('file'): initial_slashes = path[:-len(path.lstrip('/'))] newpath = initial_slashes + url2pathname(path).replace('\\', '/').lstrip('/') url = urlunsplit((scheme, netloc, newpath, query, fragment)) after_plus = scheme.find('+') + 1 url = scheme[:after_plus] + urlunsplit((scheme[after_plus:], netloc, newpath, query, fragment)) super(Git, self).__init__(url, *args, **kwargs) def parse_vcs_bundle_file(self, content): url = rev = None for line in content.splitlines(): if not line.strip() or line.strip().startswith('#'): continue url_match = re.search(r'git\s*remote\s*add\s*origin(.*)\s*-f', line) if url_match: url = url_match.group(1).strip() rev_match = re.search(r'^git\s*checkout\s*-q\s*(.*)\s*', line) if rev_match: rev = rev_match.group(1).strip() if url and rev: return url, rev return None, None def export(self, location): """Export the Git repository at the url to the destination location""" temp_dir = tempfile.mkdtemp('-export', 'pip-') self.unpack(temp_dir) try: if not location.endswith('/'): location = location + '/' call_subprocess( [self.cmd, 'checkout-index', '-a', '-f', '--prefix', location], filter_stdout=self._filter, 
show_stdout=False, cwd=temp_dir) finally: rmtree(temp_dir) def check_rev_options(self, rev, dest, rev_options): """Check the revision options before checkout to compensate that tags and branches may need origin/ as a prefix. Returns the SHA1 of the branch or tag if found. """ revisions = self.get_refs(dest) origin_rev = 'origin/%s' % rev if origin_rev in revisions: # remote branch return [revisions[origin_rev]] elif rev in revisions: # a local tag or branch name return [revisions[rev]] else: logger.warn("Could not find a tag or branch '%s', assuming commit." % rev) return rev_options def switch(self, dest, url, rev_options): call_subprocess( [self.cmd, 'config', 'remote.origin.url', url], cwd=dest) call_subprocess( [self.cmd, 'checkout', '-q'] + rev_options, cwd=dest) self.update_submodules(dest) def update(self, dest, rev_options): # First fetch changes from the default remote call_subprocess([self.cmd, 'fetch', '-q'], cwd=dest) # Then reset to wanted revision (maby even origin/master) if rev_options: rev_options = self.check_rev_options(rev_options[0], dest, rev_options) call_subprocess([self.cmd, 'reset', '--hard', '-q'] + rev_options, cwd=dest) #: update submodules self.update_submodules(dest) def obtain(self, dest): url, rev = self.get_url_rev() if rev: rev_options = [rev] rev_display = ' (to %s)' % rev else: rev_options = ['origin/master'] rev_display = '' if self.check_destination(dest, url, rev_options, rev_display): logger.notify('Cloning %s%s to %s' % (url, rev_display, display_path(dest))) call_subprocess([self.cmd, 'clone', '-q', url, dest]) #: repo may contain submodules self.update_submodules(dest) if rev: rev_options = self.check_rev_options(rev, dest, rev_options) # Only do a checkout if rev_options differs from HEAD if not self.get_revision(dest).startswith(rev_options[0]): call_subprocess([self.cmd, 'checkout', '-q'] + rev_options, cwd=dest) def get_url(self, location): url = call_subprocess( [self.cmd, 'config', 'remote.origin.url'], 
show_stdout=False, cwd=location) return url.strip() def get_revision(self, location): current_rev = call_subprocess( [self.cmd, 'rev-parse', 'HEAD'], show_stdout=False, cwd=location) return current_rev.strip() def get_refs(self, location): """Return map of named refs (branches or tags) to commit hashes.""" output = call_subprocess([self.cmd, 'show-ref'], show_stdout=False, cwd=location) rv = {} for line in output.strip().splitlines(): commit, ref = line.split(' ', 1) ref = ref.strip() ref_name = None if ref.startswith('refs/remotes/'): ref_name = ref[len('refs/remotes/'):] elif ref.startswith('refs/heads/'): ref_name = ref[len('refs/heads/'):] elif ref.startswith('refs/tags/'): ref_name = ref[len('refs/tags/'):] if ref_name is not None: rv[ref_name] = commit.strip() return rv def get_src_requirement(self, dist, location, find_tags): repo = self.get_url(location) if not repo.lower().startswith('git:'): repo = 'git+' + repo egg_project_name = dist.egg_name().split('-', 1)[0] if not repo: return None current_rev = self.get_revision(location) refs = self.get_refs(location) # refs maps names to commit hashes; we need the inverse # if multiple names map to a single commit, this arbitrarily picks one names_by_commit = dict((commit, ref) for ref, commit in refs.items()) if current_rev in names_by_commit: # It's a tag full_egg_name = '%s-%s' % (egg_project_name, names_by_commit[current_rev]) else: full_egg_name = '%s-dev' % egg_project_name return '%s@%s#egg=%s' % (repo, current_rev, full_egg_name) def get_url_rev(self): """ Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'. That's required because although they use SSH they sometimes doesn't work with a ssh:// scheme (e.g. Github). But we need a scheme for parsing. Hence we remove it again afterwards and return it as a stub. 
""" if not '://' in self.url: assert not 'file:' in self.url self.url = self.url.replace('git+', 'git+ssh://') url, rev = super(Git, self).get_url_rev() url = url.replace('ssh://', '') else: url, rev = super(Git, self).get_url_rev() return url, rev def update_submodules(self, location): if not os.path.exists(os.path.join(location, '.gitmodules')): return call_subprocess([self.cmd, 'submodule', 'update', '--init', '--recursive', '-q'], cwd=location) vcs.register(Git)
apache-2.0
jni/skan
skan/gui.py
1
11875
import os import json import asyncio from pathlib import Path import numpy as np import matplotlib matplotlib.use('TkAgg') from matplotlib.figure import Figure from matplotlib.backends.backend_tkagg import (FigureCanvasTkAgg, NavigationToolbar2Tk) import matplotlib.pyplot as plt import tkinter as tk import tkinter.filedialog from tkinter import ttk import click from . import pre, pipe, draw, io, __version__ @asyncio.coroutine def _async(coroutine, *args): loop = asyncio.get_event_loop() return (yield from loop.run_in_executor(None, coroutine, *args)) STANDARD_MARGIN = (3, 3, 12, 12) class Launch(tk.Tk): def __init__(self, params_dict=None): super().__init__() self.title('Skeleton analysis tool') self.crop_radius = tk.IntVar(value=0, name='Crop radius') self.smooth_method = tk.StringVar(value='Gaussian', name='Smoothing method') self.smooth_method._choices = pre.SMOOTH_METHODS self.smooth_radius = tk.DoubleVar(value=0.1, name='Smoothing radius') self.threshold_radius = tk.DoubleVar(value=50e-9, name='Threshold radius') self.brightness_offset = tk.DoubleVar(value=0.075, name='Brightness offset') self.image_format = tk.StringVar(value='auto', name='Image format') self.scale_metadata_path = tk.StringVar(value='Scan/PixelHeight', name='Scale metadata path') self.preview_skeleton_plots = tk.BooleanVar(value=True, name='Live ' 'preview skeleton plot?') self.save_skeleton_plots = tk.BooleanVar(value=True, name='Save skeleton plot?') self.skeleton_plot_prefix = tk.StringVar(value='skeleton-plot-', name='Prefix for skeleton plots') self.output_filename = tk.StringVar(value='skeleton.xlsx', name='Output filename') self.parameters = [ self.crop_radius, self.smooth_method, self.smooth_radius, self.threshold_radius, self.brightness_offset, self.image_format, self.scale_metadata_path, self.preview_skeleton_plots, self.save_skeleton_plots, self.skeleton_plot_prefix, self.output_filename, ] self.input_files = [] self.output_folder = None if params_dict is None: params_dict = {} 
self.params_dict = params_dict.copy() self.parameter_config(params_dict) # allow resizing self.rowconfigure(0, weight=1) self.columnconfigure(0, weight=1) self.create_main_frame() def parameter_config(self, params_dict): """Set parameter values from a config dictionary.""" if isinstance(params_dict, str): if params_dict.startswith('{'): # JSON string params_dict = json.loads(params_dict) else: # config file with open(params_dict) as params_fin: params_dict = json.load(params_fin) self.params_dict.update(params_dict) name2param = {p._name.lower(): p for p in self.parameters} for param, value in self.params_dict.items(): if param.lower() in name2param: name2param[param].set(value) params_dict.pop(param) for param, value in params_dict.copy().items(): if param.lower() == 'input files': self.input_files = value params_dict.pop(param) elif param.lower() == 'output folder': self.output_folder = Path(os.path.expanduser(value)) params_dict.pop(param) elif param.lower() == 'version': print(f'Parameter file version: {params_dict.pop(param)}') for param in params_dict: print(f'Parameter not recognised: {param}') def save_parameters(self, filename=None): out = {p._name.lower(): p.get() for p in self.parameters} out['input files'] = self.input_files out['output folder'] = str(self.output_folder) out['version'] = __version__ if filename is None: return json.dumps(out) attempt = 0 base, ext = os.path.splitext(filename) while os.path.exists(filename): filename = f'{base} ({attempt}){ext}' attempt += 1 with open(filename, mode='wt') as fout: json.dump(out, fout, indent=2) def create_main_frame(self): main = ttk.Frame(master=self, padding=STANDARD_MARGIN) main.grid(row=0, column=0, sticky='nsew') self.create_parameters_frame(main) self.create_buttons_frame(main) main.pack() def create_parameters_frame(self, parent): parameters = ttk.Frame(master=parent, padding=STANDARD_MARGIN) parameters.grid(sticky='nsew') heading = ttk.Label(parameters, text='Analysis parameters') 
heading.grid(column=0, row=0, sticky='n') for i, param in enumerate(self.parameters, start=1): param_label = ttk.Label(parameters, text=param._name) param_label.grid(row=i, column=0, sticky='nsew') if type(param) == tk.BooleanVar: param_entry = ttk.Checkbutton(parameters, variable=param) elif hasattr(param, '_choices'): param_entry = ttk.OptionMenu(parameters, param, param.get(), *param._choices.keys()) else: param_entry = ttk.Entry(parameters, textvariable=param) param_entry.grid(row=i, column=1, sticky='nsew') def create_buttons_frame(self, parent): buttons = ttk.Frame(master=parent, padding=STANDARD_MARGIN) buttons.grid(sticky='nsew') actions = [ ('Choose config', self.choose_config_file), ('Choose files', self.choose_input_files), ('Choose output folder', self.choose_output_folder), ('Run', lambda: asyncio.ensure_future(self.run())) ] for col, (action_name, action) in enumerate(actions): button = ttk.Button(buttons, text=action_name, command=action) button.grid(row=0, column=col) def choose_config_file(self): config_file = tk.filedialog.askopenfilename() self.parameter_config(config_file) def choose_input_files(self): self.input_files = tk.filedialog.askopenfilenames() if len(self.input_files) > 0 and self.output_folder is None: self.output_folder = Path(os.path.dirname(self.input_files[0])) def choose_output_folder(self): self.output_folder = Path( tk.filedialog.askdirectory(initialdir=self.output_folder)) def make_figure_window(self): self.figure_window = tk.Toplevel(self) self.figure_window.wm_title('Preview') screen_dpi = self.figure_window.winfo_fpixels('1i') screen_width = self.figure_window.winfo_screenwidth() # in pixels figure_width = screen_width / 2 / screen_dpi figure_height = 0.75 * figure_width self.figure = Figure(figsize=(figure_width, figure_height), dpi=screen_dpi) ax0 = self.figure.add_subplot(221) axes = [self.figure.add_subplot(220 + i, sharex=ax0, sharey=ax0) for i in range(2, 5)] self.axes = np.array([ax0] + axes) canvas = 
FigureCanvasTkAgg(self.figure, master=self.figure_window) canvas.show() canvas.get_tk_widget().pack(side=tk.TOP, fill=tk.BOTH, expand=1) toolbar = NavigationToolbar2Tk(canvas, self.figure_window) toolbar.update() canvas._tkcanvas.pack(side=tk.TOP, fill=tk.BOTH, expand=1) async def run(self): print('Input files:') for file in self.input_files: print(' ', file) print('Parameters:') for param in self.parameters: p = param.get() print(' ', param, type(p), p) print('Output:', self.output_folder) save_skeleton = ('' if not self.save_skeleton_plots.get() else self.skeleton_plot_prefix.get()) images_iterator = pipe.process_images( self.input_files, self.image_format.get(), self.threshold_radius.get(), self.smooth_radius.get(), self.brightness_offset.get(), self.scale_metadata_path.get(), crop_radius=self.crop_radius.get(), smooth_method=self.smooth_method.get()) if self.preview_skeleton_plots.get(): self.make_figure_window() elif self.save_skeleton_plots.get(): self.figure = plt.figure() ax0 = self.figure.add_subplot(221) axes = [self.figure.add_subplot(220 + i, sharex=ax0, sharey=ax0) for i in range(2, 5)] self.axes = np.array([ax0] + axes) self.save_parameters(self.output_folder / 'skan-config.json') for i, result in enumerate(images_iterator): if i < len(self.input_files): filename, image, thresholded, skeleton, framedata = result if save_skeleton: for ax in self.axes: ax.clear() w, h = draw.pixel_perfect_figsize(image) self.figure.set_size_inches(4*w, 4*h) draw.pipeline_plot(image, thresholded, skeleton, framedata, figure=self.figure, axes=self.axes) output_basename = (save_skeleton + os.path.basename( os.path.splitext(filename)[0]) + '.png') output_filename = str(self.output_folder / output_basename) self.figure.savefig(output_filename) if self.preview_skeleton_plots.get(): self.figure.canvas.draw_idle() else: result_full, result_image = result result_filtered = result_full[(result_full['mean shape index']>0.125) & (result_full['mean shape index']<0.625) & 
(result_full['branch-type'] == 2) & (result_full['euclidean-distance']>0)] ridgeydata = result_filtered.groupby('filename')[['filename','branch-distance','scale','euclidean-distance','squiggle','mean shape index']].mean() io.write_excel(str(self.output_folder / self.output_filename.get()), branches=result_full, images=result_image, filtered=ridgeydata, parameters=json.loads(self.save_parameters())) def tk_update(loop, app): try: app.update() except tkinter.TclError: loop.stop() return loop.call_later(.01, tk_update, loop, app) @click.command() @click.option('-c', '--config', default='', help='JSON configuration file.') def launch(config): params = json.load(open(config)) if config else None app = Launch(params) loop = asyncio.get_event_loop() tk_update(loop, app) loop.run_forever()
bsd-3-clause
jfrazelle/boulder
test/v2_integration.py
1
22964
#!/usr/bin/env python2.7 """ Integration test cases for ACMEv2 as implemented by boulder-wfe2. """ import random import subprocess import requests import datetime import time import os import json import OpenSSL from cryptography import x509 from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives.asymmetric import rsa import chisel2 from helpers import * from acme.messages import Status, CertificateRequest, Directory from acme import crypto_util as acme_crypto_util from acme import client as acme_client from acme import messages import josepy import tempfile import shutil import atexit import challtestsrv challSrv = challtestsrv.ChallTestServer() tempdir = tempfile.mkdtemp() @atexit.register def stop(): shutil.rmtree(tempdir) def random_domain(): """Generate a random domain for testing (to avoid rate limiting).""" return "rand.%x.xyz" % random.randrange(2**32) def test_multidomain(): chisel2.auth_and_issue([random_domain(), random_domain()]) def test_wildcardmultidomain(): """ Test issuance for a random domain and a random wildcard domain using DNS-01. 
""" chisel2.auth_and_issue([random_domain(), "*."+random_domain()], chall_type="dns-01") def test_http_challenge(): chisel2.auth_and_issue([random_domain(), random_domain()], chall_type="http-01") def rand_http_chall(client): d = random_domain() authz = client.request_domain_challenges(d) for c in authz.body.challenges: if isinstance(c.chall, challenges.HTTP01): return d, c.chall raise Exception("No HTTP-01 challenge found for random domain authz") def test_http_challenge_loop_redirect(): client = chisel2.make_client() # Create an authz for a random domain and get its HTTP-01 challenge token d, chall = rand_http_chall(client) token = chall.encode("token") # Create a HTTP redirect from the challenge's validation path to itself challengePath = "/.well-known/acme-challenge/{0}".format(token) challSrv.add_http_redirect( challengePath, "http://{0}{1}".format(d, challengePath)) # Issuing for the the name should fail because of the challenge domains's # redirect loop. chisel2.expect_problem("urn:acme:error:connection", lambda: auth_and_issue([d], client=client, chall_type="http-01")) challSrv.remove_http_redirect(challengePath) def test_http_challenge_badport_redirect(): client = chisel2.make_client() # Create an authz for a random domain and get its HTTP-01 challenge token d, chall = rand_http_chall(client) token = chall.encode("token") # Create a HTTP redirect from the challenge's validation path to a host with # an invalid port. challengePath = "/.well-known/acme-challenge/{0}".format(token) challSrv.add_http_redirect( challengePath, "http://{0}:1337{1}".format(d, challengePath)) # Issuing for the name should fail because of the challenge domain's # invalid port redirect. 
chisel2.expect_problem("urn:acme:error:connection", lambda: auth_and_issue([d], client=client, chall_type="http-01")) challSrv.remove_http_redirect(challengePath) def test_http_challenge_badhost_redirect(): client = chisel2.make_client() # Create an authz for a random domain and get its HTTP-01 challenge token d, chall = rand_http_chall(client) token = chall.encode("token") # Create a HTTP redirect from the challenge's validation path to a bare IP # hostname. challengePath = "/.well-known/acme-challenge/{0}".format(token) challSrv.add_http_redirect( challengePath, "https://127.0.0.1{0}".format(challengePath)) # Issuing for the name should cause a connection error because the redirect # domain name is an IP address. chisel2.expect_problem("urn:acme:error:connection", lambda: auth_and_issue([d], client=client, chall_type="http-01")) challSrv.remove_http_redirect(challengePath) def test_http_challenge_badproto_redirect(): client = chisel2.make_client() # Create an authz for a random domain and get its HTTP-01 challenge token d, chall = rand_http_chall(client) token = chall.encode("token") # Create a HTTP redirect from the challenge's validation path to whacky # non-http/https protocol URL. challengePath = "/.well-known/acme-challenge/{0}".format(token) challSrv.add_http_redirect( challengePath, "gopher://{0}{1}".format(d, challengePath)) # Issuing for the name should cause a connection error because the redirect # domain name is an IP address. 
chisel2.expect_problem("urn:acme:error:connection", lambda: auth_and_issue([d], client=client, chall_type="http-01")) challSrv.remove_http_redirect(challengePath) def test_http_challenge_http_redirect(): client = chisel2.make_client() # Create an authz for a random domain and get its HTTP-01 challenge token d, chall = rand_http_chall(client) token = chall.encode("token") # Calculate its keyauth so we can add it in a special non-standard location # for the redirect result resp = chall.response(client.net.key) keyauth = resp.key_authorization challSrv.add_http01_response("http-redirect", keyauth) # Create a HTTP redirect from the challenge's validation path to some other # token path where we have registered the key authorization. challengePath = "/.well-known/acme-challenge/{0}".format(token) challSrv.add_http_redirect( challengePath, "http://{0}/.well-known/acme-challenge/http-redirect".format(d)) auth_and_issue([d], client=client, chall_type="http-01") challSrv.remove_http_redirect(challengePath) challSrv.remove_http01_response("http-redirect") history = challSrv.http_request_history(d) challSrv.clear_http_request_history(d) # There should have been at least two GET requests made to the # challtestsrv. There may have been more if remote VAs were configured. 
if len(history) < 2: raise Exception("Expected at least 2 HTTP request events on challtestsrv, found {1}".format(len(history))) initialRequests = [] redirectedRequests = [] for request in history: # All requests should have been over HTTP if request['HTTPS'] is True: raise Exception("Expected all requests to be HTTP") # Initial requests should have the expected initial HTTP-01 URL for the challenge if request['URL'] == challengePath: initialRequests.append(request) # Redirected requests should have the expected redirect path URL with all # its parameters elif request['URL'] == redirectPath: redirectedRequests.append(request) else: raise Exception("Unexpected request URL {0} in challtestsrv history: {1}".format(request['URL'], request)) # There should have been at least 1 initial HTTP-01 validation request. if len(initialRequests) < 1: raise Exception("Expected {0} initial HTTP-01 request events on challtestsrv, found {1}".format(validation_attempts, len(initialRequests))) # There should have been at least 1 redirected HTTP request for each VA if len(redirectedRequests) < 1: raise Exception("Expected {0} redirected HTTP-01 request events on challtestsrv, found {1}".format(validation_attempts, len(redirectedRequests))) def test_http_challenge_https_redirect(): client = chisel2.make_client() # Create an authz for a random domain and get its HTTP-01 challenge token d, chall = rand_http_chall(client) token = chall.encode("token") # Create a HTTP redirect from the challenge's validation path to an HTTPS # address with the same path. challengePath = "/.well-known/acme-challenge/{0}".format(token) challSrv.add_http_redirect( challengePath, "https://{0}{1}".format(d, challengePath)) # Also add an A record for the domain pointing to the interface that the # HTTPS HTTP-01 challtestsrv is bound. 
challSrv.add_a_record(d, ["10.77.77.77"]) auth_and_issue([d], client=client, chall_type="http-01") challSrv.remove_http_redirect(challengePath) challSrv.remove_a_record(d) # There should have been at least two GET requests made to the challtestsrv by the VA if len(history) < 2: raise Exception("Expected 2 HTTP request events on challtestsrv, found {0}".format(len(history))) initialRequests = [] redirectedRequests = [] for request in history: # Initial requests should have the expected initial HTTP-01 URL for the challenge if request['URL'] == challengePath: initialRequests.append(request) # Redirected requests should have the expected redirect path URL with all # its parameters elif request['URL'] == redirectPath: redirectedRequests.append(request) else: raise Exception("Unexpected request URL {0} in challtestsrv history: {1}".format(request['URL'], request)) # There should have been at least 1 initial HTTP-01 validation request. if len(initialRequests) < 1: raise Exception("Expected {0} initial HTTP-01 request events on challtestsrv, found {1}".format(validation_attempts, len(initialRequests))) # All initial requests should have been over HTTP for r in initialRequests: if r['HTTPS'] is True: raise Exception("Expected all initial requests to be HTTP") # There should have been at least 1 redirected HTTP request for each VA if len(redirectedRequests) < 1: raise Exception("Expected {0} redirected HTTP-01 request events on challtestsrv, found {1}".format(validation_attempts, len(redirectedRequests))) # All the redirected requests should have been over HTTPS with the correct # SNI value for r in redirectedRequests: if r['HTTPS'] is False: raise Exception("Expected all redirected requests to be HTTPS") # TODO(@cpu): The following ServerName test will fail with config-next # until https://github.com/letsencrypt/boulder/issues/3969 is fixed. 
if default_config_dir.startswith("test/config-next"): return elif r['ServerName'] != d: raise Exception("Expected all redirected requests to have ServerName {0} got \"{1}\"".format(d, r['ServerName'])) def test_tls_alpn_challenge(): # Pick two random domains domains = [random_domain(),random_domain()] # Add A records for these domains to ensure the VA's requests are directed # to the interface that the challtestsrv has bound for TLS-ALPN-01 challenge # responses for host in domains: challSrv.add_a_record(host, ["10.88.88.88"]) chisel2.auth_and_issue(domains, chall_type="tls-alpn-01") for host in domains: challSrv.remove_a_record(host) def test_overlapping_wildcard(): """ Test issuance for a random domain and a wildcard version of the same domain using DNS-01. This should result in *two* distinct authorizations. """ domain = random_domain() domains = [ domain, "*."+domain ] client = chisel2.make_client(None) csr_pem = chisel2.make_csr(domains) order = client.new_order(csr_pem) authzs = order.authorizations if len(authzs) != 2: raise Exception("order for %s had %d authorizations, expected 2" % (domains, len(authzs))) cleanup = chisel2.do_dns_challenges(client, authzs) try: order = client.poll_and_finalize(order) finally: cleanup() def test_wildcard_exactblacklist(): """ Test issuance for a wildcard that would cover an exact blacklist entry. It should fail with a policy error. """ # We include "highrisk.le-test.hoffman-andrews.com" in `test/hostname-policy.json` # Issuing for "*.le-test.hoffman-andrews.com" should be blocked domain = "*.le-test.hoffman-andrews.com" # We expect this to produce a policy problem chisel2.expect_problem("urn:ietf:params:acme:error:rejectedIdentifier", lambda: chisel2.auth_and_issue([domain], chall_type="dns-01")) def test_wildcard_authz_reuse(): """ Test that an authorization for a base domain obtained via HTTP-01 isn't reused when issuing a wildcard for that base domain later on. 
""" # Create one client to reuse across multiple issuances client = chisel2.make_client(None) # Pick a random domain to issue for domains = [ random_domain() ] csr_pem = chisel2.make_csr(domains) # Submit an order for the name order = client.new_order(csr_pem) # Complete the order via an HTTP-01 challenge cleanup = chisel2.do_http_challenges(client, order.authorizations) try: order = client.poll_and_finalize(order) finally: cleanup() # Now try to issue a wildcard for the random domain domains[0] = "*." + domains[0] csr_pem = chisel2.make_csr(domains) order = client.new_order(csr_pem) # We expect all of the returned authorizations to be pending status for authz in order.authorizations: if authz.body.status != Status("pending"): raise Exception("order for %s included a non-pending authorization (status: %s) from a previous HTTP-01 order" % ((domains), str(authz.body.status))) def test_bad_overlap_wildcard(): if not os.environ.get('BOULDER_CONFIG_DIR', '').startswith("test/config-next"): return chisel2.expect_problem("urn:ietf:params:acme:error:malformed", lambda: chisel2.auth_and_issue(["*.example.com", "www.example.com"])) def test_duplicate_orders(): """ Test that the same client issuing for the same domain names twice in a row works without error. """ client = chisel2.make_client(None) domains = [ random_domain() ] chisel2.auth_and_issue(domains, client=client) chisel2.auth_and_issue(domains, client=client) def test_order_reuse_failed_authz(): """ Test that creating an order for a domain name, failing an authorization in that order, and submitting another new order request for the same name doesn't reuse a failed authorizaton in the new order. 
""" client = chisel2.make_client(None) domains = [ random_domain() ] csr_pem = chisel2.make_csr(domains) order = client.new_order(csr_pem) firstOrderURI = order.uri # Pick the first authz's first challenge, doesn't matter what type it is chall_body = order.authorizations[0].body.challenges[0] # Answer it, but with nothing set up to solve the challenge request client.answer_challenge(chall_body, chall_body.response(client.net.key)) # Poll for a fixed amount of time checking for the order to become invalid # from the authorization attempt initiated above failing deadline = datetime.datetime.now() + datetime.timedelta(seconds=60) while datetime.datetime.now() < deadline: time.sleep(1) updatedOrder = requests.get(firstOrderURI).json() if updatedOrder['status'] == "invalid": break # If the loop ended and the status isn't invalid then we reached the # deadline waiting for the order to become invalid, fail the test if updatedOrder['status'] != "invalid": raise Exception("timed out waiting for order %s to become invalid" % firstOrderURI) # Make another order with the same domains order = client.new_order(csr_pem) # It should not be the same order as before if order.uri == firstOrderURI: raise Exception("new-order for %s returned a , now-invalid, order" % domains) # We expect all of the returned authorizations to be pending status for authz in order.authorizations: if authz.body.status != Status("pending"): raise Exception("order for %s included a non-pending authorization (status: %s) from a previous order" % ((domains), str(authz.body.status))) # We expect the new order can be fulfilled cleanup = chisel2.do_http_challenges(client, order.authorizations) try: order = client.poll_and_finalize(order) finally: cleanup() def test_order_finalize_early(): """ Test that finalizing an order before its fully authorized results in the order having an error set and the status being invalid. 
""" # Create a client client = chisel2.make_client(None) # Create a random domain and a csr domains = [ random_domain() ] csr_pem = chisel2.make_csr(domains) # Create an order for the domain order = client.new_order(csr_pem) deadline = datetime.datetime.now() + datetime.timedelta(seconds=5) # Finalizing an order early should generate an unauthorized error and we # should check that the order is invalidated. chisel2.expect_problem("urn:ietf:params:acme:error:unauthorized", lambda: client.finalize_order(order, deadline)) # Poll for a fixed amount of time checking for the order to become invalid # from the early finalization attempt initiated above failing while datetime.datetime.now() < deadline: time.sleep(1) updatedOrder = requests.get(order.uri).json() if updatedOrder['status'] == "invalid": break # If the loop ended and the status isn't invalid then we reached the # deadline waiting for the order to become invalid, fail the test if updatedOrder['status'] != "invalid": raise Exception("timed out waiting for order %s to become invalid" % order.uri) # The order should have an error with the expected type if updatedOrder['error']['type'] != 'urn:ietf:params:acme:error:unauthorized': raise Exception("order %s has incorrect error field type: \"%s\"" % (order.uri, updatedOrder['error']['type'])) def test_revoke_by_issuer(): client = chisel2.make_client(None) order = chisel2.auth_and_issue([random_domain()], client=client) cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, order.fullchain_pem) reset_akamai_purges() client.revoke(josepy.ComparableX509(cert), 0) cert_file_pem = os.path.join(tempdir, "revokeme.pem") with open(cert_file_pem, "w") as f: f.write(OpenSSL.crypto.dump_certificate( OpenSSL.crypto.FILETYPE_PEM, cert).decode()) ee_ocsp_url = "http://localhost:4002" wait_for_ocsp_revoked(cert_file_pem, "test/test-ca2.pem", ee_ocsp_url) verify_akamai_purge() def test_revoke_by_authz(): domains = [random_domain()] order = 
chisel2.auth_and_issue(domains) # create a new client and re-authz client = chisel2.make_client(None) chisel2.auth_and_issue(domains, client=client) cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, order.fullchain_pem) reset_akamai_purges() client.revoke(josepy.ComparableX509(cert), 0) cert_file_pem = os.path.join(tempdir, "revokeme.pem") with open(cert_file_pem, "w") as f: f.write(OpenSSL.crypto.dump_certificate( OpenSSL.crypto.FILETYPE_PEM, cert).decode()) ee_ocsp_url = "http://localhost:4002" wait_for_ocsp_revoked(cert_file_pem, "test/test-ca2.pem", ee_ocsp_url) verify_akamai_purge() def test_revoke_by_privkey(): client = chisel2.make_client(None) domains = [random_domain()] key = OpenSSL.crypto.PKey() key.generate_key(OpenSSL.crypto.TYPE_RSA, 2048) key_pem = OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM, key) csr_pem = chisel2.make_csr(domains) order = client.new_order(csr_pem) cleanup = chisel2.do_http_challenges(client, order.authorizations) try: order = client.poll_and_finalize(order) finally: cleanup() # Create a new client with the JWK as the cert private key jwk = josepy.JWKRSA(key=key) net = acme_client.ClientNetwork(key, user_agent="Boulder integration tester") directory = Directory.from_json(net.get(chisel2.DIRECTORY_V2).json()) new_client = acme_client.ClientV2(directory, net) cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, order.fullchain_pem) reset_akamai_purges() client.revoke(josepy.ComparableX509(cert), 0) cert_file_pem = os.path.join(tempdir, "revokeme.pem") with open(cert_file_pem, "w") as f: f.write(OpenSSL.crypto.dump_certificate( OpenSSL.crypto.FILETYPE_PEM, cert).decode()) ee_ocsp_url = "http://localhost:4002" wait_for_ocsp_revoked(cert_file_pem, "test/test-ca2.pem", ee_ocsp_url) verify_akamai_purge() def test_sct_embedding(): if not os.environ.get('BOULDER_CONFIG_DIR', '').startswith("test/config-next"): return order = chisel2.auth_and_issue([random_domain()]) cert = 
x509.load_pem_x509_certificate(str(order.fullchain_pem), default_backend()) # make sure there is no poison extension try: cert.extensions.get_extension_for_oid(x509.ObjectIdentifier("1.3.6.1.4.1.11129.2.4.3")) raise Exception("certificate contains CT poison extension") except x509.ExtensionNotFound: # do nothing pass # make sure there is a SCT list extension try: sctList = cert.extensions.get_extension_for_oid(x509.ObjectIdentifier("1.3.6.1.4.1.11129.2.4.2")) except x509.ExtensionNotFound: raise Exception("certificate doesn't contain SCT list extension") if len(sctList.value) != 2: raise Exception("SCT list contains wrong number of SCTs") for sct in sctList.value: if sct.version != x509.certificate_transparency.Version.v1: raise Exception("SCT contains wrong version") if sct.entry_type != x509.certificate_transparency.LogEntryType.PRE_CERTIFICATE: raise Exception("SCT contains wrong entry type") def test_only_return_existing_reg(): client = chisel2.uninitialized_client() email = "test@not-example.com" client.new_account(messages.NewRegistration.from_data(email=email, terms_of_service_agreed=True)) client = chisel2.uninitialized_client(key=client.net.key) class extendedAcct(dict): def json_dumps(self, indent=None): return json.dumps(self) acct = extendedAcct({ "termsOfServiceAgreed": True, "contact": [email], "onlyReturnExisting": True }) resp = client.net.post(client.directory['newAccount'], acct, acme_version=2) if resp.status_code != 200: raise Exception("incorrect response returned for onlyReturnExisting") other_client = chisel2.uninitialized_client() newAcct = extendedAcct({ "termsOfServiceAgreed": True, "contact": [email], "onlyReturnExisting": True }) chisel2.expect_problem("urn:ietf:params:acme:error:accountDoesNotExist", lambda: other_client.net.post(other_client.directory['newAccount'], newAcct, acme_version=2)) def run(cmd, **kwargs): return subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT, **kwargs)
mpl-2.0
skirpichev/omg
diofant/polys/polytools.py
1
119397
"""User-friendly public interface to polynomial functions.""" import functools import operator import mpmath from ..core import (Add, Basic, Derivative, E, Expr, Integer, Mul, Tuple, oo, preorder_traversal, sympify) from ..core.compatibility import default_sort_key, iterable from ..core.decorators import _sympifyit from ..core.mul import _keep_coeff from ..core.relational import Relational from ..domains import FF, QQ, ZZ from ..logic.boolalg import BooleanAtom from ..utilities import group, sift from . import polyoptions as options from .constructor import construct_domain from .fglmtools import matrix_fglm from .groebnertools import groebner as _groebner from .monomials import Monomial from .orderings import monomial_key from .polyerrors import (CoercionFailed, ComputationFailed, DomainError, ExactQuotientFailed, GeneratorsError, GeneratorsNeeded, MultivariatePolynomialError, PolificationFailed, PolynomialError, UnificationFailed) from .polyutils import (_dict_from_expr, _dict_reorder, _parallel_dict_from_expr, _sort_gens) from .rationaltools import together from .rings import PolyElement __all__ = ('Poly', 'PurePoly', 'poly_from_expr', 'parallel_poly_from_expr', 'degree', 'degree_list', 'LC', 'LM', 'LT', 'prem', 'div', 'rem', 'quo', 'exquo', 'half_gcdex', 'gcdex', 'invert', 'subresultants', 'resultant', 'discriminant', 'cofactors', 'gcd_list', 'gcd', 'lcm_list', 'lcm', 'terms_gcd', 'trunc', 'monic', 'content', 'primitive', 'compose', 'decompose', 'sqf_norm', 'sqf_part', 'sqf_list', 'sqf', 'factor_list', 'factor', 'count_roots', 'real_roots', 'nroots', 'cancel', 'reduced', 'groebner', 'GroebnerBasis', 'poly') class Poly(Expr): """Generic class for representing polynomial expressions.""" is_commutative = True is_Poly = True _op_priority = 10.1 def __new__(cls, rep, *gens, **args): """Create a new polynomial instance out of something useful.""" opt = options.build_options(gens, args) if 'order' in opt: raise NotImplementedError("'order' keyword is not implemented 
yet") if iterable(rep, exclude=str): if isinstance(rep, dict): return cls._from_dict(rep, opt) else: return cls._from_list(list(rep), opt) else: rep = sympify(rep) if rep.is_Poly: return cls._from_poly(rep, opt) else: return cls._from_expr(rep, opt) @classmethod def new(cls, rep, *gens): """Construct :class:`Poly` instance from raw representation.""" if not isinstance(rep, PolyElement): raise PolynomialError( f'invalid polynomial representation: {rep}') elif rep.ring.ngens != len(gens): raise PolynomialError(f'invalid arguments: {rep}, {gens}') obj = Expr.__new__(cls) obj.rep = rep obj.gens = gens return obj @classmethod def from_dict(cls, rep, *gens, **args): """Construct a polynomial from a :class:`dict`.""" opt = options.build_options(gens, args) return cls._from_dict(rep, opt) @classmethod def from_list(cls, rep, *gens, **args): """Construct a polynomial from a :class:`list`.""" opt = options.build_options(gens, args) return cls._from_list(rep, opt) @classmethod def from_poly(cls, rep, *gens, **args): """Construct a polynomial from a polynomial.""" opt = options.build_options(gens, args) return cls._from_poly(rep, opt) @classmethod def from_expr(cls, rep, *gens, **args): """Construct a polynomial from an expression.""" opt = options.build_options(gens, args) return cls._from_expr(rep, opt) @classmethod def _from_dict(cls, rep, opt): """Construct a polynomial from a :class:`dict`.""" gens = opt.gens if not gens: raise GeneratorsNeeded( "can't initialize from 'dict' without generators") domain = opt.domain if domain is None: domain, rep = construct_domain(rep, opt=opt) else: for monom, coeff in rep.items(): rep[monom] = domain.convert(coeff) ring = domain.poly_ring(*gens) return cls.new(ring.from_dict(rep), *gens) @classmethod def _from_list(cls, rep, opt): """Construct a polynomial from a :class:`list`.""" gens = opt.gens if not gens: raise GeneratorsNeeded( "can't initialize from 'list' without generators") elif len(gens) != 1: raise 
MultivariatePolynomialError( "'list' representation not supported") domain = opt.domain if domain is None: domain, rep = construct_domain(rep, opt=opt) else: rep = list(map(domain.convert, rep)) ring = domain.poly_ring(*gens) return cls.new(ring.from_list(rep), *gens) @classmethod def _from_poly(cls, rep, opt): """Construct a polynomial from a polynomial.""" if cls != rep.__class__: rep = cls.new(rep.rep, *rep.gens) gens = opt.gens if opt.composite or (gens and set(rep.gens) != set(gens)): return cls._from_expr(rep.as_expr(), opt) if gens and rep.gens != gens: rep = rep.reorder(*gens) if opt.domain: rep = rep.set_domain(opt.domain) elif opt.field: rep = rep.to_field() return rep @classmethod def _from_expr(cls, rep, opt): """Construct a polynomial from an expression.""" rep, opt = _dict_from_expr(rep, opt) return cls._from_dict(rep, opt) def _hashable_content(self): """Allow Diofant to hash Poly instances.""" return self.rep, self.gens def __hash__(self): return super().__hash__() @property def free_symbols(self): """ Free symbols of a polynomial expression. Examples ======== >>> Poly(x**2 + 1).free_symbols {x} >>> Poly(x**2 + y).free_symbols {x, y} >>> Poly(x**2 + y, x).free_symbols {x, y} """ symbols = set() for gen in self.gens: symbols |= gen.free_symbols return symbols | self.free_symbols_in_domain @property def free_symbols_in_domain(self): """ Free symbols of the domain of ``self``. Examples ======== >>> Poly(x**2 + 1).free_symbols_in_domain set() >>> Poly(x**2 + y).free_symbols_in_domain set() >>> Poly(x**2 + y, x).free_symbols_in_domain {y} """ domain, symbols = self.domain, set() if domain.is_Composite: for gen in domain.symbols: symbols |= gen.free_symbols elif domain.is_ExpressionDomain: for coeff in self.coeffs(): symbols |= coeff.free_symbols return symbols @property def args(self): """ Don't mess up with the core. 
Examples ======== >>> Poly(x**2 + 1, x).args (x**2 + 1, x) """ return (self.as_expr(),) + self.gens @property def is_number(self): return self.as_expr().is_number @property def gen(self): """ Return the principal generator. Examples ======== >>> Poly(x**2 + 1, x).gen x """ return self.gens[0] @property def domain(self): """Get the ground domain of ``self``.""" return self.rep.ring.domain @property def zero(self): """Return zero polynomial with ``self``'s properties.""" return self.new(self.rep.ring.zero, *self.gens) @property def one(self): """Return one polynomial with ``self``'s properties.""" return self.new(self.rep.ring.one, *self.gens) @property def unit(self): """Return unit polynomial with ``self``'s properties.""" return self.new(self.rep.unit(self.rep.lev, self.domain), *self.gens) def unify(self, other): """ Make ``self`` and ``other`` belong to the same domain. Examples ======== >>> f, g = Poly(x/2 + 1), Poly(2*x + 1) >>> f Poly(1/2*x + 1, x, domain='QQ') >>> g Poly(2*x + 1, x, domain='ZZ') >>> F, G = f.unify(g) >>> F Poly(1/2*x + 1, x, domain='QQ') >>> G Poly(2*x + 1, x, domain='QQ') """ _, per, F, G = self._unify(other) return per(F), per(G) def _unify(self, other): other = sympify(other) if not other.is_Poly: try: return (self.domain, self.per, self.rep, self.rep.ring(self.domain.convert(other))) except CoercionFailed: raise UnificationFailed(f"can't unify {self} with {other}") newring = self.rep.ring.unify(other.rep.ring) gens = newring.symbols F, G = self.rep.set_ring(newring), other.rep.set_ring(newring) cls = self.__class__ dom = newring.domain def per(rep, dom=dom, gens=gens, remove=None): if remove is not None: gens = gens[:remove] + gens[remove + 1:] if not gens: return dom.to_expr(rep) return cls.new(rep, *gens) return dom, per, F, G def per(self, rep, gens=None, remove=None): """ Create a Poly out of the given representation. 
Examples ======== >>> a = Poly(x**2 + 1) >>> R = ZZ.inject(x) >>> a.per(R.from_dense([ZZ(1), ZZ(1)]), gens=[y]) Poly(y + 1, y, domain='ZZ') """ if gens is None: gens = self.gens if remove is not None: gens = gens[:remove] + gens[remove + 1:] if not gens: return self.domain.to_expr(rep) return self.__class__.new(rep, *gens) def set_domain(self, domain): """Set the ground domain of ``self``.""" opt = options.build_options(self.gens, {'domain': domain}) newrep = self.rep.set_domain(opt.domain) return self.per(newrep) def set_modulus(self, modulus): """ Set the modulus of ``self``. Examples ======== >>> Poly(5*x**2 + 2*x - 1, x).set_modulus(2) Poly(x**2 + 1, x, modulus=2) """ modulus = options.Modulus.preprocess(modulus) return self.set_domain(FF(modulus)) def get_modulus(self): """ Get the modulus of ``self``. Examples ======== >>> Poly(x**2 + 1, modulus=2).get_modulus() 2 """ domain = self.domain if domain.is_FiniteField: return Integer(domain.order) else: raise PolynomialError('not a polynomial over a Galois field') def _eval_subs(self, old, new): """Internal implementation of :func:`~diofant.core.basic.Basic.subs`.""" if old in self.gens: if new.is_number: return self.eval(old, new) else: try: return self.replace(old, new) except PolynomialError: pass return self.as_expr().subs({old: new}) def exclude(self): """ Remove unnecessary generators from ``self``. Examples ======== >>> Poly(a + x, a, b, c, d, x).exclude() Poly(a + x, a, x, domain='ZZ') """ rep = self.rep if rep.is_ground: return self for x in rep.ring.symbols: try: rep = rep.drop(x) except ValueError: pass return self.per(rep, gens=rep.ring.symbols) def replace(self, x, y=None): """ Replace ``x`` with ``y`` in generators list. 
Examples ======== >>> Poly(x**2 + 1, x).replace(x, y) Poly(y**2 + 1, y, domain='ZZ') """ if y is None: if self.is_univariate: x, y = self.gen, x else: raise PolynomialError( 'syntax supported only in univariate case') if x == y: return self if x in self.gens and y not in self.gens: dom = self.domain if not dom.is_Composite or y not in dom.symbols: gens = list(self.gens) gens[gens.index(x)] = y rep = dom.poly_ring(*gens).from_dict(dict(self.rep)) return self.per(rep, gens=gens) raise PolynomialError(f"can't replace {x} with {y} in {self}") def reorder(self, *gens, **args): """ Efficiently apply new order of generators. Examples ======== >>> Poly(x**2 + x*y**2, x, y).reorder(y, x) Poly(y**2*x + x**2, y, x, domain='ZZ') """ opt = options.Options((), args) if not gens: gens = _sort_gens(self.gens, opt=opt) elif set(self.gens) != set(gens): raise PolynomialError( 'generators list can differ only up to order of elements') rep = dict(zip(*_dict_reorder(dict(self.rep), self.gens, gens))) newring = self.domain.poly_ring(*gens) rep = newring.from_dict(rep) return self.per(rep, gens=gens) def has_only_gens(self, *gens): """ Return ``True`` if ``Poly(f, *gens)`` retains ground domain. Examples ======== >>> Poly(x*y + 1, x, y, z).has_only_gens(x, y) True >>> Poly(x*y + z, x, y, z).has_only_gens(x, y) False """ indices = set() for gen in gens: try: index = self.gens.index(gen) except ValueError: raise GeneratorsError( f"{self} doesn't have {gen} as generator") else: indices.add(index) for monom in self.monoms(): for i, elt in enumerate(monom): if i not in indices and elt: return False return True def to_ring(self): """ Make the ground domain a ring. Examples ======== >>> Poly(x**2 + 1, field=True).to_ring() Poly(x**2 + 1, x, domain='ZZ') """ return self.set_domain(self.domain.ring) def to_field(self): """ Make the ground domain a field. 
Examples ======== >>> Poly(x**2 + 1).to_field() Poly(x**2 + 1, x, domain='QQ') """ return self.set_domain(self.domain.field) def to_exact(self): """ Make the ground domain exact. Examples ======== >>> Poly(x**2 + 1.0).to_exact() Poly(x**2 + 1, x, domain='QQ') """ return self.set_domain(self.domain.get_exact()) def retract(self, field=None): """ Recalculate the ground domain of a polynomial. Examples ======== >>> f = Poly(x**2 + 1, domain=QQ.inject(y)) >>> f Poly(x**2 + 1, x, domain='QQ[y]') >>> f.retract() Poly(x**2 + 1, x, domain='ZZ') >>> f.retract(field=True) Poly(x**2 + 1, x, domain='QQ') """ dom, rep = construct_domain(self.as_dict(), field=field, composite=self.domain.is_Composite or None, extension=False if self.domain.is_ExpressionDomain else True) return self.from_dict(rep, self.gens, domain=dom) def slice(self, x, m, n=None): """Take a continuous subsequence of terms of ``self``.""" if n is None: j, m, n = 0, x, m else: j = self._gen_to_level(x) m, n = int(m), int(n) result = self.rep.slice(m, n, j) return self.per(result) def coeffs(self, order=None): """ Returns all non-zero coefficients from ``self`` in lex order. Examples ======== >>> Poly(x**3 + 2*x + 3, x).coeffs() [1, 2, 3] See Also ======== all_coeffs coeff_monomial """ return [self.domain.to_expr(c) for c in self.rep.coeffs(order=order)] def monoms(self, order=None): """ Returns all non-zero monomials from ``self`` in lex order. Examples ======== >>> Poly(x**2 + 2*x*y**2 + x*y + 3*y, x, y).monoms() [(2, 0), (1, 2), (1, 1), (0, 1)] """ return self.rep.monoms(order=order) def terms(self, order=None): """ Returns all non-zero terms from ``self`` in lex order. Examples ======== >>> Poly(x**2 + 2*x*y**2 + x*y + 3*y, x, y).terms() [((2, 0), 1), ((1, 2), 2), ((1, 1), 1), ((0, 1), 3)] """ return [(m, self.domain.to_expr(c)) for m, c in self.rep.terms(order=order)] def all_coeffs(self): """ Returns all coefficients from a univariate polynomial ``self``. 
Examples ======== >>> Poly(x**3 + 2*x - 1, x).all_coeffs() [1, 0, 2, -1] """ return [self.domain.to_expr(c) for c in self.rep.all_coeffs()] def termwise(self, func, *gens, **args): """ Apply a function to all terms of ``self``. Examples ======== >>> def func(k, coeff): ... k = k[0] ... return coeff//10**(2-k) >>> Poly(x**2 + 20*x + 400).termwise(func) Poly(x**2 + 2*x + 4, x, domain='ZZ') """ terms = {} for monom, coeff in self.terms(): result = func(monom, coeff) if isinstance(result, tuple): monom, coeff = result else: coeff = result if coeff: if monom not in terms: terms[monom] = coeff else: raise PolynomialError(f'{monom} monomial was generated twice') return self.from_dict(terms, *(gens or self.gens), **args) def length(self): """ Returns the number of non-zero terms in ``self``. Examples ======== >>> Poly(x**2 + 2*x - 1).length() 3 """ return len(self.as_dict()) def as_dict(self, native=False): """ Switch to a :class:`dict` representation. Examples ======== >>> Poly(x**2 + 2*x*y**2 - y, x, y).as_dict() {(0, 1): -1, (1, 2): 2, (2, 0): 1} """ if native: return dict(self.rep) else: return {k: self.domain.to_expr(v) for k, v in self.rep.items()} def as_expr(self, *gens): """ Convert a Poly instance to an Expr instance. Examples ======== >>> f = Poly(x**2 + 2*x*y**2 - y, x, y) >>> f.as_expr() x**2 + 2*x*y**2 - y >>> f.as_expr({x: 5}) 10*y**2 - y + 25 >>> f.as_expr(5, 6) 379 """ if not gens: gens = self.gens elif len(gens) == 1 and isinstance(gens[0], dict): mapping = gens[0] gens = list(self.gens) for gen, value in mapping.items(): try: index = gens.index(gen) except ValueError: raise GeneratorsError( f"{self} doesn't have {gen} as generator") else: gens[index] = value return self.rep.as_expr(*gens) def deflate(self): """ Reduce degree of ``self`` by mapping ``x_i**m`` to ``y_i``. 
Examples ======== >>> Poly(x**6*y**2 + x**3 + 1, x, y).deflate() ((3, 2), Poly(x**2*y + x + 1, x, y, domain='ZZ')) """ J, result = self.rep.deflate() return J, self.per(result[0]) def inject(self, front=False): """ Inject ground domain generators into ``self``. Examples ======== >>> f = Poly(x**2*y + x*y**3 + x*y + 1, x) >>> f.inject() Poly(x**2*y + x*y**3 + x*y + 1, x, y, domain='ZZ') >>> f.inject(front=True) Poly(y**3*x + y*x**2 + y*x + 1, y, x, domain='ZZ') """ result = self.rep.inject(front=front) return self.new(result, *result.ring.symbols) def eject(self, *gens): """ Eject selected generators into the ground domain. Examples ======== >>> f = Poly(x**2*y + x*y**3 + x*y + 1, x, y) >>> f.eject(x) Poly(x*y**3 + (x**2 + x)*y + 1, y, domain='ZZ[x]') >>> f.eject(y) Poly(y*x**2 + (y**3 + y)*x + 1, x, domain='ZZ[y]') """ dom = self.domain if not dom.is_Numerical: raise DomainError(f"can't eject generators over {dom}") result = self.rep.copy() result = result.eject(*gens) return self.new(result, *result.ring.symbols) def terms_gcd(self): """ Remove GCD of terms from the polynomial ``self``. Examples ======== >>> Poly(x**6*y**2 + x**3*y, x, y).terms_gcd() ((3, 1), Poly(x**3*y + 1, x, y, domain='ZZ')) """ J, result = self.rep.terms_gcd() return J, self.per(result) def quo_ground(self, coeff): """ Quotient of ``self`` by a an element of the ground domain. Examples ======== >>> Poly(2*x + 4).quo_ground(2) Poly(x + 2, x, domain='ZZ') >>> Poly(2*x + 3).quo_ground(2) Poly(x + 1, x, domain='ZZ') """ result = self.rep.quo_ground(coeff) return self.per(result) def exquo_ground(self, coeff): """ Exact quotient of ``self`` by a an element of the ground domain. Examples ======== >>> Poly(2*x + 4).exquo_ground(2) Poly(x + 2, x, domain='ZZ') >>> Poly(2*x + 3).exquo_ground(2) Traceback (most recent call last): ... 
ExactQuotientFailed: 2 does not divide 3 in ZZ """ result = self.rep.exquo_ground(coeff) return self.per(result) def prem(self, other): """ Polynomial pseudo-remainder of ``self`` by ``other``. Examples ======== >>> Poly(x**2 + 1, x).prem(Poly(2*x - 4, x)) Poly(20, x, domain='ZZ') """ _, per, F, G = self._unify(other) result = F.prem(G) return per(result) def div(self, other, auto=True): """ Polynomial division with remainder of ``self`` by ``other``. Examples ======== >>> Poly(x**2 + 1, x).div(Poly(2*x - 4, x)) (Poly(1/2*x + 1, x, domain='QQ'), Poly(5, x, domain='QQ')) >>> Poly(x**2 + 1, x).div(Poly(2*x - 4, x), auto=False) (Poly(0, x, domain='ZZ'), Poly(x**2 + 1, x, domain='ZZ')) """ dom, per, F, G = self._unify(other) retract = False if auto and dom.is_Ring and not dom.is_Field: F, G = F.set_domain(F.ring.domain.field), G.set_domain(G.ring.domain.field) retract = True q, r = divmod(F, G) if retract: try: Q, R = q.set_domain(q.ring.domain.ring), r.set_domain(r.ring.domain.ring) except CoercionFailed: pass else: q, r = Q, R return per(q), per(r) def rem(self, other, auto=True): """ Computes the polynomial remainder of ``self`` by ``other``. Examples ======== >>> Poly(x**2 + 1, x).rem(Poly(2*x - 4, x)) Poly(5, x, domain='ZZ') >>> Poly(x**2 + 1, x).rem(Poly(2*x - 4, x), auto=False) Poly(x**2 + 1, x, domain='ZZ') """ dom, per, F, G = self._unify(other) retract = False if auto and dom.is_Ring and not dom.is_Field: F, G = F.set_domain(F.ring.domain.field), G.set_domain(G.ring.domain.field) retract = True r = F % G if retract: try: r = r.set_domain(r.ring.domain.ring) except CoercionFailed: pass return per(r) def quo(self, other, auto=True): """ Computes polynomial quotient of ``self`` by ``other``. 
Examples ======== >>> Poly(x**2 + 1, x).quo(Poly(2*x - 4, x)) Poly(1/2*x + 1, x, domain='QQ') >>> Poly(x**2 - 1, x).quo(Poly(x - 1, x)) Poly(x + 1, x, domain='ZZ') """ dom, per, F, G = self._unify(other) retract = False if auto and dom.is_Ring and not dom.is_Field: F, G = F.set_domain(F.ring.domain.field), G.set_domain(G.ring.domain.field) retract = True q = F // G if retract: try: q = q.set_domain(q.ring.domain.ring) except CoercionFailed: pass return per(q) def exquo(self, other, auto=True): """ Computes polynomial exact quotient of ``self`` by ``other``. Examples ======== >>> Poly(x**2 - 1, x).exquo(Poly(x - 1, x)) Poly(x + 1, x, domain='ZZ') >>> Poly(x**2 + 1, x).exquo(Poly(2*x - 4, x)) Traceback (most recent call last): ... ExactQuotientFailed: 2*x - 4 does not divide x**2 + 1 """ dom, per, F, G = self._unify(other) retract = False if auto and dom.is_Ring and not dom.is_Field: F, G = F.set_domain(F.ring.domain.field), G.set_domain(G.ring.domain.field) retract = True try: q = F.exquo(G) except ExactQuotientFailed as exc: raise exc.new(self.as_expr(), other.as_expr()) if retract: try: q = q.set_domain(q.ring.domain.ring) except CoercionFailed: pass return per(q) def _gen_to_level(self, gen): """Returns level associated with the given generator.""" try: return self.rep.ring.index(gen) except ValueError: raise PolynomialError(f'a valid generator expected, got {gen}') def degree(self, gen=0): """ Returns degree of ``self`` in ``x_j``. The degree of 0 is negative infinity. Examples ======== >>> Poly(x**2 + y*x + 1, x, y).degree() 2 >>> Poly(x**2 + y*x + y, x, y).degree(y) 1 >>> Poly(0, x).degree() -oo """ j = self._gen_to_level(gen) return self.rep.degree(j) def degree_list(self): """ Returns a list of degrees of ``self``. Examples ======== >>> Poly(x**2 + y*x + 1, x, y).degree_list() (2, 1) """ return self.rep.degree_list() def total_degree(self): """ Returns the total degree of ``self``. 
Examples ======== >>> Poly(x**2 + y*x + 1, x, y).total_degree() 2 >>> Poly(x + y**5, x, y).total_degree() 5 """ return self.rep.total_degree() def LC(self, order=None): """ Returns the leading coefficient of ``self``. Examples ======== >>> Poly(4*x**3 + 2*x**2 + 3*x, x).LC() 4 """ if order is not None: return self.coeffs(order)[0] result = self.rep.LC return self.domain.to_expr(result) def TC(self): """ Returns the trailing coefficient of ``self``. Examples ======== >>> Poly(x**3 + 2*x**2 + 3*x, x).TC() 0 """ result = self.rep.ring.dmp_ground_TC(self.rep) return self.domain.to_expr(result) def EC(self, order=None): """ Returns the last non-zero coefficient of ``self``. Examples ======== >>> Poly(x**3 + 2*x**2 + 3*x, x).EC() 3 """ EM = self.EM(order) return self.coeff_monomial(tuple(EM)) def coeff_monomial(self, monom): """ Returns the coefficient of ``monom`` in ``self`` if there, else None. Examples ======== >>> p = Poly(24*x*y*exp(8) + 23*x, x, y) >>> p.coeff_monomial(x) 23 >>> p.coeff_monomial(y) 0 >>> p.coeff_monomial(x*y) 24*E**8 >>> p.coeff_monomial((1, 1)) 24*E**8 Note that ``Expr.coeff()`` behaves differently, collecting terms if possible; the Poly must be converted to an Expr to use that method, however: >>> p.as_expr().coeff(x) 24*E**8*y + 23 >>> p.as_expr().coeff(y) 24*E**8*x >>> p.as_expr().coeff(x*y) 24*E**8 """ N = Monomial(monom, self.gens) if len(N) != len(self.gens): raise ValueError('exponent of each generator must be specified') result = self.rep.coeff(N) return self.domain.to_expr(result) def coeff(self, x, n=1, right=False): # the semantics of coeff_monomial and Expr.coeff are different; # if someone is working with a Poly, they should be aware of the # differences and chose the method best suited for the query. # Alternatively, a pure-polys method could be written here but # at this time the ``right`` keyword would be ignored because Poly # doesn't work with non-commutatives. 
raise NotImplementedError( 'Either convert to Expr with `as_expr` method ' "to use Expr's coeff method or else use the " '`coeff_monomial` method of Polys.') def LM(self, order=None): """ Returns the leading monomial of ``self``. The leading monomial signifies the the monomial having the highest power of the principal generator in the polynomial expression. Examples ======== >>> Poly(4*x**2 + 2*x*y**2 + x*y + 3*y, x, y).LM() x**2*y**0 """ LM = (0,)*len(self.gens) if self.is_zero else self.monoms(order)[0] return Monomial(LM, self.gens) def EM(self, order=None): """ Returns the last non-zero monomial of ``self``. Examples ======== >>> Poly(4*x**2 + 2*x*y**2 + x*y + 3*y, x, y).EM() x**0*y**1 """ EM = (0,)*len(self.gens) if self.is_zero else self.monoms(order)[-1] return Monomial(EM, self.gens) def LT(self, order=None): """ Returns the leading term of ``self``. The leading term signifies the term having the highest power of the principal generator in the polynomial expression. Examples ======== >>> Poly(4*x**2 + 2*x*y**2 + x*y + 3*y, x, y).LT() (x**2*y**0, 4) """ LM = self.LM(order) return LM, self.coeff_monomial(tuple(LM)) def ET(self, order=None): """ Returns the last non-zero term of ``self``. Examples ======== >>> Poly(4*x**2 + 2*x*y**2 + x*y + 3*y, x, y).ET() (x**0*y**1, 3) """ EM = self.EM(order) return EM, self.coeff_monomial(tuple(EM)) def clear_denoms(self, convert=False): """ Clear denominators, but keep the ground domain. Examples ======== >>> f = Poly(x/2 + Rational(1, 3)) >>> f.clear_denoms() (6, Poly(3*x + 2, x, domain='QQ')) >>> f.clear_denoms(convert=True) (6, Poly(3*x + 2, x, domain='ZZ')) """ dom = self.domain if convert and dom.has_assoc_Ring: dom = self.domain.ring coeff, result = self.rep.clear_denoms(convert=convert) f = self.per(result) if convert: f = f.set_domain(dom) return dom.to_expr(coeff), f def rat_clear_denoms(self, other): """ Clear denominators in a rational function ``self/other``. 
Examples ======== >>> f = Poly(x**2/y + 1, x) >>> g = Poly(x**3 + y, x) >>> p, q = f.rat_clear_denoms(g) >>> p Poly(x**2 + y, x, domain='ZZ[y]') >>> q Poly(y*x**3 + y**2, x, domain='ZZ[y]') """ f, g = self, other dom, per, f, g = f._unify(g) f = per(f) g = per(g) if not (dom.is_Field and dom.has_assoc_Ring): return f, g a, f = f.clear_denoms(convert=True) b, g = g.clear_denoms(convert=True) f *= b g *= a return f, g def integrate(self, *specs, **args): """ Computes indefinite integral of ``self``. Examples ======== >>> Poly(x**2 + 2*x + 1, x).integrate() Poly(1/3*x**3 + x**2 + x, x, domain='QQ') >>> Poly(x*y**2 + x, x, y).integrate((0, 1), (1, 0)) Poly(1/2*x**2*y**2 + 1/2*x**2, x, y, domain='QQ') """ f = self if args.get('auto', True) and f.domain.is_Ring: f = f.to_field() if not specs: return f.per(f.rep.integrate(m=1)) rep = f.rep for spec in specs: if type(spec) is tuple: gen, m = spec else: gen, m = spec, 1 rep = rep.integrate(f._gen_to_level(gen), int(m)) return f.per(rep) def diff(self, *specs, **kwargs): """ Computes partial derivative of ``self``. Examples ======== >>> Poly(x**2 + 2*x + 1, x).diff() Poly(2*x + 2, x, domain='ZZ') >>> Poly(x*y**2 + x, x, y).diff((0, 0), (1, 1)) Poly(2*x*y, x, y, domain='ZZ') """ if not kwargs.get('evaluate', True): return Derivative(self, *specs, **kwargs) if not specs: return self.per(self.rep.diff()) rep = self.rep for spec in specs: if type(spec) is tuple: gen, m = spec else: gen, m = spec, 1 rep = rep.diff(self._gen_to_level(gen), int(m)) return self.per(rep) _eval_derivative = diff def eval(self, x, a=None, auto=True): """ Evaluate ``self`` at ``a`` in the given variable. 
Examples ======== >>> Poly(x**2 + 2*x + 3, x).eval(2) 11 >>> Poly(2*x*y + 3*x + y + 2, x, y).eval(x, 2) Poly(5*y + 8, y, domain='ZZ') >>> f = Poly(2*x*y + 3*x + y + 2*z, x, y, z) >>> f.eval({x: 2}) Poly(5*y + 2*z + 6, y, z, domain='ZZ') >>> f.eval({x: 2, y: 5}) Poly(2*z + 31, z, domain='ZZ') >>> f.eval({x: 2, y: 5, z: 7}) 45 >>> f.eval((2, 5)) Poly(2*z + 31, z, domain='ZZ') >>> f(2, 5) Poly(2*z + 31, z, domain='ZZ') """ f = self if a is None: if isinstance(x, dict): mapping = x for gen, value in mapping.items(): f = f.eval(gen, value) return f elif isinstance(x, (tuple, list)): values = x if len(values) > len(f.gens): raise ValueError('too many values provided') for gen, value in zip(f.gens, values): f = f.eval(gen, value) return f else: j, a = 0, x else: j = f._gen_to_level(x) try: result = f.rep.eval(j, a) except CoercionFailed: if not auto: raise DomainError(f"can't evaluate at {a} in {f.domain}") else: a_domain, [a] = construct_domain([a]) new_domain = f.domain.unify(a_domain, f.gens) f = f.set_domain(new_domain) a = new_domain.convert(a, a_domain) result = f.rep.eval(j, a) return f.per(result, remove=j) def __call__(self, *values): """ Evaluate ``self`` at the give values. Examples ======== >>> f = Poly(2*x*y + 3*x + y + 2*z, x, y, z) >>> f(2) Poly(5*y + 2*z + 6, y, z, domain='ZZ') >>> f(2, 5) Poly(2*z + 31, z, domain='ZZ') >>> f(2, 5, 7) 45 """ return self.eval(values) def half_gcdex(self, other, auto=True): """ Half extended Euclidean algorithm of ``self`` and ``other``. Returns ``(s, h)`` such that ``h = gcd(f, g)`` and ``s*f = h (mod g)``. 
Examples ======== >>> f = x**4 - 2*x**3 - 6*x**2 + 12*x + 15 >>> g = x**3 + x**2 - 4*x - 4 >>> Poly(f).half_gcdex(Poly(g)) (Poly(-1/5*x + 3/5, x, domain='QQ'), Poly(x + 1, x, domain='QQ')) """ dom, per, F, G = self._unify(other) if auto and dom.is_Ring: F, G = F.set_domain(F.ring.domain.field), G.set_domain(G.ring.domain.field) s, h = F.half_gcdex(G) return per(s), per(h) def gcdex(self, other, auto=True): """ Extended Euclidean algorithm of ``self`` and ``other``. Returns ``(s, t, h)`` such that ``h = gcd(f, g)`` and ``s*f + t*g = h``. Examples ======== >>> f = x**4 - 2*x**3 - 6*x**2 + 12*x + 15 >>> g = x**3 + x**2 - 4*x - 4 >>> Poly(f).gcdex(Poly(g)) (Poly(-1/5*x + 3/5, x, domain='QQ'), Poly(1/5*x**2 - 6/5*x + 2, x, domain='QQ'), Poly(x + 1, x, domain='QQ')) """ dom, per, F, G = self._unify(other) if auto and dom.is_Ring: F, G = F.set_domain(F.ring.domain.field), G.set_domain(G.ring.domain.field) s, t, h = F.gcdex(G) return per(s), per(t), per(h) def invert(self, other, auto=True): """ Invert ``self`` modulo ``other`` when possible. Examples ======== >>> Poly(x**2 - 1, x).invert(Poly(2*x - 1, x)) Poly(-4/3, x, domain='QQ') >>> Poly(x**2 - 1, x).invert(Poly(x - 1, x)) Traceback (most recent call last): ... NotInvertible: zero divisor """ dom, per, F, G = self._unify(other) if auto and dom.is_Ring: F, G = F.set_domain(F.ring.domain.field), G.set_domain(G.ring.domain.field) result = F.ring.invert(F, G) return per(result) def subresultants(self, other): """ Computes the subresultant PRS of ``self`` and ``other``. Examples ======== >>> Poly(x**2 + 1, x).subresultants(Poly(x**2 - 1, x)) [Poly(x**2 + 1, x, domain='ZZ'), Poly(x**2 - 1, x, domain='ZZ'), Poly(-2, x, domain='ZZ')] """ _, per, F, G = self._unify(other) result = F.subresultants(G) return list(map(per, result)) def resultant(self, other, includePRS=False): """ Computes the resultant of ``self`` and ``other`` via PRS. If includePRS=True, it includes the subresultant PRS in the result. 
Because the PRS is used to calculate the resultant, this is more efficient than calling :func:`subresultants` separately. Examples ======== >>> f = Poly(x**2 + 1, x) >>> f.resultant(Poly(x**2 - 1, x)) 4 >>> f.resultant(Poly(x**2 - 1, x), includePRS=True) (4, [Poly(x**2 + 1, x, domain='ZZ'), Poly(x**2 - 1, x, domain='ZZ'), Poly(-2, x, domain='ZZ')]) """ _, per, F, G = self._unify(other) if includePRS: result, R = F.resultant(G, includePRS=includePRS) return per(result, remove=0), list(map(per, R)) else: result = F.resultant(G) return per(result, remove=0) def discriminant(self): """ Computes the discriminant of ``self``. Examples ======== >>> Poly(x**2 + 2*x + 3, x).discriminant() -8 """ result = self.rep.discriminant() return self.per(result, remove=0) def dispersionset(self, other=None): r"""Compute the *dispersion set* of two polynomials. Examples ======== >>> sorted(Poly((x - 3)*(x + 3)).dispersionset()) [0, 6] See Also ======== dispersion diofant.polys.dispersion.dispersionset """ from .dispersion import dispersionset return dispersionset(self, other) def dispersion(self, other=None): r"""Compute the *dispersion* of polynomials. Examples ======== >>> Poly((x - 3)*(x + 3)).dispersion() 6 See Also ======== dispersionset diofant.polys.dispersion.dispersion References ========== * :cite:`Man1994disp` * :cite:`Koepf98` * :cite:`Abramov71rat` * :cite:`Man1993indefsum` """ from .dispersion import dispersion return dispersion(self, other) def cofactors(self, other): """ Returns the GCD of ``self`` and ``other`` and their cofactors. For two polynomials ``f`` and ``g`` it returns polynomials ``(h, cff, cfg)`` such that ``h = gcd(f, g)``, and ``cff = quo(f, h)`` and ``cfg = quo(g, h)`` are, so called, cofactors of ``f`` and ``g``. 
Examples ======== >>> Poly(x**2 - 1, x).cofactors(Poly(x**2 - 3*x + 2, x)) (Poly(x - 1, x, domain='ZZ'), Poly(x + 1, x, domain='ZZ'), Poly(x - 2, x, domain='ZZ')) """ _, per, F, G = self._unify(other) h, cff, cfg = F.cofactors(G) return per(h), per(cff), per(cfg) def gcd(self, other): """ Returns the polynomial GCD of ``self`` and ``other``. Examples ======== >>> Poly(x**2 - 1, x).gcd(Poly(x**2 - 3*x + 2, x)) Poly(x - 1, x, domain='ZZ') """ _, per, F, G = self._unify(other) result = F.gcd(G) return per(result) def lcm(self, other): """ Returns polynomial LCM of ``self`` and ``other``. Examples ======== >>> Poly(x**2 - 1, x).lcm(Poly(x**2 - 3*x + 2, x)) Poly(x**3 - 2*x**2 - x + 2, x, domain='ZZ') """ _, per, F, G = self._unify(other) result = F.lcm(G) return per(result) def trunc(self, p): """ Reduce ``self`` modulo a constant ``p``. Examples ======== >>> Poly(2*x**3 + 3*x**2 + 5*x + 7, x).trunc(3) Poly(-x**3 - x + 1, x, domain='ZZ') """ p = self.domain.convert(p) result = self.rep.trunc_ground(p) return self.per(result) def monic(self, auto=True): """ Divides all coefficients by ``LC(f)``. Examples ======== >>> Poly(3*x**2 + 6*x + 9).monic() Poly(x**2 + 2*x + 3, x, domain='QQ') >>> Poly(3*x**2 + 4*x + 2).monic() Poly(x**2 + 4/3*x + 2/3, x, domain='QQ') """ f = self if auto and f.domain.is_Ring: f = f.to_field() result = f.rep.monic() return f.per(result) def content(self): """ Returns the GCD of polynomial coefficients. Examples ======== >>> Poly(6*x**2 + 8*x + 12, x).content() 2 """ result = self.rep.content() return self.domain.to_expr(result) def primitive(self): """ Returns the content and a primitive form of ``self``. Examples ======== >>> Poly(2*x**2 + 8*x + 12, x).primitive() (2, Poly(x**2 + 4*x + 6, x, domain='ZZ')) """ cont, result = self.rep.primitive() return self.domain.to_expr(cont), self.per(result) def compose(self, other): """ Computes the functional composition of ``self`` and ``other``. 
Examples ======== >>> Poly(x**2 + x, x).compose(Poly(x - 1, x)) Poly(x**2 - x, x, domain='ZZ') """ _, per, F, G = self._unify(other) result = F.compose(G.ring.gens[0], G) return per(result) def decompose(self): """ Computes a functional decomposition of ``self``. Examples ======== >>> Poly(x**4 + 2*x**3 - x - 1).decompose() [Poly(x**2 - x - 1, x, domain='ZZ'), Poly(x**2 + x, x, domain='ZZ')] """ result = self.rep.decompose() return list(map(self.per, result)) def shift(self, a): """ Efficiently compute Taylor shift ``f(x + a)``. Examples ======== >>> Poly(x**2 - 2*x + 1, x).shift(2) Poly(x**2 + 2*x + 1, x, domain='ZZ') """ result = self.rep.shift(a) return self.per(result) def sqf_norm(self): """ Computes square-free norm of ``self``. Returns ``s``, ``f``, ``r``, such that ``g(x) = f(x-sa)`` and ``r(x) = Norm(g(x))`` is a square-free polynomial over ``K``, where ``a`` is the algebraic extension of the ground domain. Examples ======== >>> s, f, r = Poly(x**2 + 1, x, extension=[sqrt(3)]).sqf_norm() >>> s 1 >>> f Poly(x**2 - 2*sqrt(3)*x + 4, x, domain='QQ<sqrt(3)>') >>> r Poly(x**4 - 4*x**2 + 16, x, domain='QQ') """ s, g, r = self.rep.sqf_norm() return s, self.per(g), self.per(r) def sqf_part(self): """ Computes square-free part of ``self``. Examples ======== >>> Poly(x**3 - 3*x - 2, x).sqf_part() Poly(x**2 - x - 2, x, domain='ZZ') """ result = self.rep.sqf_part() return self.per(result) def sqf_list(self): """ Returns a list of square-free factors of ``self``. Examples ======== >>> f = 2*x**5 + 16*x**4 + 50*x**3 + 76*x**2 + 56*x + 16 >>> Poly(f).sqf_list() (2, [(Poly(x + 1, x, domain='ZZ'), 2), (Poly(x + 2, x, domain='ZZ'), 3)]) """ coeff, factors = self.rep.sqf_list() return (self.domain.to_expr(coeff), [(self.per(g), k) for g, k in factors]) def factor_list(self): """ Returns a list of irreducible factors of ``self``. 
Examples ======== >>> f = 2*x**5 + 2*x**4*y + 4*x**3 + 4*x**2*y + 2*x + 2*y >>> Poly(f).factor_list() (2, [(Poly(x + y, x, y, domain='ZZ'), 1), (Poly(x**2 + 1, x, y, domain='ZZ'), 2)]) """ try: coeff, factors = self.rep.factor_list() except DomainError: return Integer(1), [(self, 1)] return (self.domain.to_expr(coeff), [(self.per(g), k) for g, k in factors]) def count_roots(self, inf=None, sup=None): """ Return the number of roots of ``self`` in ``[inf, sup]`` interval. Examples ======== >>> Poly(x**4 - 4, x).count_roots(-3, 3) 2 >>> Poly(x**4 - 4, x).count_roots(0, 1 + 3*I) 1 """ inf_real, sup_real = True, True if inf is not None: inf = sympify(inf) if inf == -oo: inf = None else: re, im = inf.as_real_imag() if not im: inf = QQ.convert(inf) else: inf, inf_real = tuple(map(QQ.convert, (re, im))), False if sup is not None: sup = sympify(sup) if sup is oo: sup = None else: re, im = sup.as_real_imag() if not im: sup = QQ.convert(sup) else: sup, sup_real = tuple(map(QQ.convert, (re, im))), False if inf_real and sup_real: count = self.rep.ring.dup_count_real_roots(self.rep, inf=inf, sup=sup) else: if inf_real and inf is not None: inf = (inf, QQ.zero) if sup_real and sup is not None: sup = (sup, QQ.zero) count = self.rep.ring.dup_count_complex_roots(self.rep, inf=inf, sup=sup) return Integer(count) def root(self, index, radicals=True): """ Get an indexed root of a polynomial. Examples ======== >>> f = Poly(2*x**3 - 7*x**2 + 4*x + 4) >>> f.root(0) -1/2 >>> f.root(1) 2 >>> f.root(2) 2 >>> f.root(3) Traceback (most recent call last): ... IndexError: root index out of [-3, 2] range, got 3 >>> Poly(x**5 + x + 1).root(0) RootOf(x**3 - x**2 + 1, 0) """ from .rootoftools import RootOf return RootOf(self, index, radicals=radicals) def real_roots(self, multiple=True, radicals=True): """ Return a list of real roots with multiplicities. 
Examples ======== >>> Poly(2*x**3 - 7*x**2 + 4*x + 4).real_roots() [-1/2, 2, 2] >>> Poly(x**3 + x + 1).real_roots() [RootOf(x**3 + x + 1, 0)] """ from .rootoftools import RootOf reals = RootOf.real_roots(self, radicals=radicals) if multiple: return reals else: return group(reals, multiple=False) def all_roots(self, multiple=True, radicals=True): """ Return a list of real and complex roots with multiplicities. Examples ======== >>> Poly(2*x**3 - 7*x**2 + 4*x + 4).all_roots() [-1/2, 2, 2] >>> Poly(x**3 + x + 1).all_roots() [RootOf(x**3 + x + 1, 0), RootOf(x**3 + x + 1, 1), RootOf(x**3 + x + 1, 2)] """ from .rootoftools import RootOf roots = RootOf.all_roots(self, radicals=radicals) if multiple: return roots else: return group(roots, multiple=False) def nroots(self, n=15, maxsteps=50, cleanup=True): """ Compute numerical approximations of roots of ``self``. Parameters ========== n ... the number of digits to calculate maxsteps ... the maximum number of iterations to do If the accuracy `n` cannot be reached in `maxsteps`, it will raise an exception. You need to rerun with higher maxsteps. Examples ======== >>> Poly(x**2 - 3).nroots(n=15) [-1.73205080756888, 1.73205080756888] >>> Poly(x**2 - 3).nroots(n=30) [-1.73205080756887729352744634151, 1.73205080756887729352744634151] """ if self.is_multivariate: raise MultivariatePolynomialError( f"can't compute numerical roots of {self}") if self.degree() <= 0: return [] # For integer and rational coefficients, convert them to integers only # (for accuracy). Otherwise just try to convert the coefficients to # mpmath.mpc and raise an exception if the conversion fails. 
if self.domain is ZZ: coeffs = [int(coeff) for coeff in self.all_coeffs()] elif self.domain is QQ: denoms = [coeff.denominator for coeff in self.all_coeffs()] from ..core import ilcm fac = ilcm(*denoms) coeffs = [int(coeff*fac) for coeff in self.all_coeffs()] else: coeffs = [coeff.evalf(n, strict=False).as_real_imag() for coeff in self.all_coeffs()] try: coeffs = [mpmath.mpc(*coeff) for coeff in coeffs] except TypeError: raise DomainError(f'Numerical domain expected, got {self.domain}') dps = mpmath.mp.dps mpmath.mp.dps = n try: # We need to add extra precision to guard against losing accuracy. # 10 times the degree of the polynomial seems to work well. roots = mpmath.polyroots(coeffs, maxsteps=maxsteps, cleanup=cleanup, error=False, extraprec=self.degree()*10) # Mpmath puts real roots first, then complex ones (as does all_roots) # so we make sure this convention holds here, too. roots = list(map(sympify, sorted(roots, key=lambda r: (1 if r.imag else 0, r.real, r.imag)))) except mpmath.libmp.NoConvergence: raise mpmath.libmp.NoConvergence( f'convergence to root failed; try n < {n} or maxsteps > {maxsteps}') finally: mpmath.mp.dps = dps return roots def cancel(self, other, include=False): """ Cancel common factors in a rational function ``self/other``. Examples ======== >>> Poly(2*x**2 - 2, x).cancel(Poly(x**2 - 2*x + 1, x)) (1, Poly(2*x + 2, x, domain='ZZ'), Poly(x - 1, x, domain='ZZ')) >>> Poly(2*x**2 - 2, x).cancel(Poly(x**2 - 2*x + 1, x), include=True) (Poly(2*x + 2, x, domain='ZZ'), Poly(x - 1, x, domain='ZZ')) """ dom, per, F, G = self._unify(other) result = F.cancel(G, include=include) if not include: if dom.has_assoc_Ring: dom = dom.ring cp, cq, p, q = result cp = dom.to_expr(cp) cq = dom.to_expr(cq) return cp/cq, per(p), per(q) else: return tuple(map(per, result)) @property def is_zero(self): """ Returns ``True`` if ``self`` is a zero polynomial. 
Examples ======== >>> Poly(0, x).is_zero True >>> Poly(1, x).is_zero False """ return self.rep.is_zero @property def is_one(self): """ Returns ``True`` if ``self`` is a unit polynomial. Examples ======== >>> Poly(0, x).is_one False >>> Poly(1, x).is_one True """ return self.rep.is_one @property def is_squarefree(self): """ Returns ``True`` if ``self`` is a square-free polynomial. Examples ======== >>> Poly(x**2 - 2*x + 1, x).is_squarefree False >>> Poly(x**2 - 1, x).is_squarefree True """ return self.rep.is_squarefree @property def is_monic(self): """ Returns ``True`` if the leading coefficient of ``self`` is one. Examples ======== >>> Poly(x + 2, x).is_monic True >>> Poly(2*x + 2, x).is_monic False """ return self.rep.is_monic @property def is_primitive(self): """ Returns ``True`` if GCD of the coefficients of ``self`` is one. Examples ======== >>> Poly(2*x**2 + 6*x + 12, x).is_primitive False >>> Poly(x**2 + 3*x + 6, x).is_primitive True """ return self.rep.is_primitive @property def is_ground(self): """ Returns ``True`` if ``self`` is an element of the ground domain. Examples ======== >>> Poly(x, x).is_ground False >>> Poly(2, x).is_ground True >>> Poly(y, x).is_ground True """ return self.rep.is_ground @property def is_linear(self): """ Returns ``True`` if ``self`` is linear in all its variables. Examples ======== >>> Poly(x + y + 2, x, y).is_linear True >>> Poly(x*y + 2, x, y).is_linear False """ return self.rep.is_linear @property def is_quadratic(self): """ Returns ``True`` if ``self`` is quadratic in all its variables. Examples ======== >>> Poly(x*y + 2, x, y).is_quadratic True >>> Poly(x*y**2 + 2, x, y).is_quadratic False """ return self.rep.is_quadratic @property def is_term(self): """ Returns ``True`` if ``self`` is zero or has only one term. Examples ======== >>> Poly(3*x**2, x).is_term True >>> Poly(3*x**2 + 1, x).is_term False """ return self.rep.is_term @property def is_homogeneous(self): """ Returns ``True`` if ``self`` is a homogeneous polynomial. 
A homogeneous polynomial is a polynomial whose all monomials with non-zero coefficients have the same total degree. Examples ======== >>> Poly(x**2 + x*y, x, y).is_homogeneous True >>> Poly(x**3 + x*y, x, y).is_homogeneous False """ return self.rep.is_homogeneous @property def is_irreducible(self): """ Returns ``True`` if ``self`` has no factors over its domain. Examples ======== >>> Poly(x**2 + x + 1, x, modulus=2).is_irreducible True >>> Poly(x**2 + 1, x, modulus=2).is_irreducible False """ return self.rep.is_irreducible @property def is_univariate(self): """ Returns ``True`` if ``self`` is a univariate polynomial. Examples ======== >>> Poly(x**2 + x + 1, x).is_univariate True >>> Poly(x*y**2 + x*y + 1, x, y).is_univariate False >>> Poly(x*y**2 + x*y + 1, x).is_univariate True >>> Poly(x**2 + x + 1, x, y).is_univariate False """ return len(self.gens) == 1 @property def is_multivariate(self): """ Returns ``True`` if ``self`` is a multivariate polynomial. Examples ======== >>> Poly(x**2 + x + 1, x).is_multivariate False >>> Poly(x*y**2 + x*y + 1, x, y).is_multivariate True >>> Poly(x*y**2 + x*y + 1, x).is_multivariate False >>> Poly(x**2 + x + 1, x, y).is_multivariate True """ return len(self.gens) != 1 @property def is_cyclotomic(self): """ Returns ``True`` if ``self`` is a cyclotomic polynomial. Examples ======== >>> f = x**16 + x**14 - x**10 + x**8 - x**6 + x**2 + 1 >>> Poly(f).is_cyclotomic False >>> g = x**16 + x**14 - x**10 - x**8 - x**6 + x**2 + 1 >>> Poly(g).is_cyclotomic True """ return self.rep.is_cyclotomic def __abs__(self): """ Make all coefficients in ``self`` positive. Examples ======== >>> abs(Poly(x**2 - 1, x)) Poly(x**2 + 1, x, domain='ZZ') """ result = abs(self.rep) return self.per(result) def __neg__(self): """ Negate all coefficients in ``self``. 
Examples ======== >>> -Poly(x**2 - 1, x) Poly(-x**2 + 1, x, domain='ZZ') """ result = -self.rep return self.per(result) @_sympifyit('other', NotImplemented) def __add__(self, other): if not other.is_Poly: try: other = self.__class__(other, *self.gens) except PolynomialError: return self.as_expr() + other _, per, F, G = self._unify(other) result = F + G return per(result) @_sympifyit('other', NotImplemented) def __radd__(self, other): try: other = self.__class__(other, *self.gens) except PolynomialError: return other + self.as_expr() return other + self @_sympifyit('other', NotImplemented) def __sub__(self, other): if not other.is_Poly: try: other = self.__class__(other, *self.gens) except PolynomialError: return self.as_expr() - other _, per, F, G = self._unify(other) result = F - G return per(result) @_sympifyit('other', NotImplemented) def __rsub__(self, other): try: other = self.__class__(other, *self.gens) except PolynomialError: return other - self.as_expr() return other - self @_sympifyit('other', NotImplemented) def __mul__(self, other): if not other.is_Poly: try: other = self.__class__(other, *self.gens) except PolynomialError: return self.as_expr()*other _, per, F, G = self._unify(other) result = F * G return per(result) @_sympifyit('other', NotImplemented) def __rmul__(self, other): try: other = self.__class__(other, *self.gens) except PolynomialError: return other*self.as_expr() return other*self @_sympifyit('n', NotImplemented) def __pow__(self, n): if n.is_Integer and n >= 0: n = int(n) result = self.rep**n return self.per(result) else: return self.as_expr()**n @_sympifyit('other', NotImplemented) def __divmod__(self, other): if not other.is_Poly: other = self.__class__(other, *self.gens) return self.div(other) @_sympifyit('other', NotImplemented) def __rdivmod__(self, other): other = self.__class__(other, *self.gens) return other.div(self) @_sympifyit('other', NotImplemented) def __mod__(self, other): if not other.is_Poly: other = 
self.__class__(other, *self.gens) return self.rem(other) @_sympifyit('other', NotImplemented) def __rmod__(self, other): other = self.__class__(other, *self.gens) return other.rem(self) @_sympifyit('other', NotImplemented) def __floordiv__(self, other): if not other.is_Poly: other = self.__class__(other, *self.gens) return self.quo(other) @_sympifyit('other', NotImplemented) def __rfloordiv__(self, other): other = self.__class__(other, *self.gens) return other.quo(self) @_sympifyit('other', NotImplemented) def __truediv__(self, other): return self.as_expr()/other.as_expr() @_sympifyit('other', NotImplemented) def __eq__(self, other): f, g = self, other if not g.is_Poly: try: g = f.__class__(g, f.gens, domain=f.domain) except (PolynomialError, DomainError, CoercionFailed): return False if f.gens != g.gens: return False if f.domain != g.domain: try: dom = f.domain.unify(g.domain, f.gens) except UnificationFailed: # pragma: no cover return NotImplemented f = f.set_domain(dom) g = g.set_domain(dom) return f.rep == g.rep def __bool__(self): return not self.is_zero class PurePoly(Poly): """Class for representing pure polynomials.""" def _hashable_content(self): """Allow Diofant to hash Poly instances.""" return self.domain, frozenset(self.rep.items()) def __hash__(self): return super().__hash__() @property def free_symbols(self): """ Free symbols of a polynomial. 
Examples ======== >>> PurePoly(x**2 + 1).free_symbols set() >>> PurePoly(x**2 + y).free_symbols set() >>> PurePoly(x**2 + y, x).free_symbols {y} """ return self.free_symbols_in_domain @_sympifyit('other', NotImplemented) def __eq__(self, other): f, g = self, other if not g.is_Poly: try: g = f.__class__(g, f.gens, domain=f.domain) except (PolynomialError, DomainError, CoercionFailed): return False if len(f.gens) != len(g.gens): return False if f.domain != g.domain: try: dom = f.domain.unify(g.domain, f.gens) except UnificationFailed: # pragma: no cover return NotImplemented f = f.set_domain(dom) g = g.set_domain(dom) return f.rep.items() == g.rep.items() def _unify(self, other): other = sympify(other) if not other.is_Poly: try: return (self.domain, self.per, self.rep, self.rep.ring(self.domain.convert(other))) except CoercionFailed: raise UnificationFailed(f"can't unify {self} with {other}") if len(self.gens) != len(other.gens): raise UnificationFailed(f"can't unify {self} with {other}") newring = self.rep.ring.unify(other.rep.ring) gens = newring.symbols F, G = self.rep.set_ring(newring), other.rep.set_ring(newring) cls = self.__class__ dom = newring.domain def per(rep, dom=dom, gens=gens, remove=None): if remove is not None: gens = gens[:remove] + gens[remove + 1:] if not gens: return dom.to_expr(rep) return cls.new(rep, *gens) return dom, per, F, G def poly_from_expr(expr, *gens, **args): """Construct a polynomial from an expression.""" opt = options.build_options(gens, args) return _poly_from_expr(expr, opt) def _poly_from_expr(expr, opt): """Construct a polynomial from an expression.""" orig, expr = expr, sympify(expr) if not isinstance(expr, Basic): raise PolificationFailed(opt, orig, expr) elif expr.is_Poly: poly = expr.__class__._from_poly(expr, opt) opt.gens = poly.gens opt.domain = poly.domain if opt.polys is None: opt.polys = True return poly, opt elif opt.expand: expr = expr.expand() try: rep, opt = _dict_from_expr(expr, opt) except GeneratorsNeeded: 
raise PolificationFailed(opt, orig, expr) monoms, coeffs = zip(*rep.items()) domain = opt.domain if domain is None: opt.domain, coeffs = construct_domain(coeffs, opt=opt) else: coeffs = list(map(domain.convert, coeffs)) rep = dict(zip(monoms, coeffs)) poly = Poly._from_dict(rep, opt) if opt.polys is None: opt.polys = False return poly, opt def parallel_poly_from_expr(exprs, *gens, **args): """Construct polynomials from expressions.""" opt = options.build_options(gens, args) return _parallel_poly_from_expr(exprs, opt) def _parallel_poly_from_expr(exprs, opt): """Construct polynomials from expressions.""" from ..functions import Piecewise if len(exprs) == 2: f, g = exprs if isinstance(f, Poly) and isinstance(g, Poly): f = f.__class__._from_poly(f, opt) g = g.__class__._from_poly(g, opt) f, g = f.unify(g) opt.gens = f.gens opt.domain = f.domain if opt.polys is None: opt.polys = True return [f, g], opt origs, exprs = list(exprs), [] _exprs, _polys = [], [] failed = False for i, expr in enumerate(origs): expr = sympify(expr) if isinstance(expr, Basic): if expr.is_Poly: _polys.append(i) else: _exprs.append(i) if opt.expand: expr = expr.expand() else: failed = True exprs.append(expr) if failed: raise PolificationFailed(opt, origs, exprs, True) if _polys: # XXX: this is a temporary solution for i in _polys: exprs[i] = exprs[i].as_expr() try: reps, opt = _parallel_dict_from_expr(exprs, opt) except GeneratorsNeeded: raise PolificationFailed(opt, origs, exprs, True) for k in opt.gens: if isinstance(k, Piecewise): raise PolynomialError('Piecewise generators do not make sense') coeffs_list, lengths = [], [] all_monoms = [] all_coeffs = [] for rep in reps: monoms, coeffs = zip(*rep.items()) coeffs_list.extend(coeffs) all_monoms.append(monoms) lengths.append(len(coeffs)) domain = opt.domain if domain is None: opt.domain, coeffs_list = construct_domain(coeffs_list, opt=opt) else: coeffs_list = list(map(domain.convert, coeffs_list)) for k in lengths: 
def degree(f, *gens, **args):
    """
    Return the degree of ``f`` in the given variable.

    The degree of 0 is negative infinity.

    Examples
    ========

    >>> degree(x**2 + y*x + 1, gen=x)
    2
    >>> degree(x**2 + y*x + 1, gen=y)
    1
    >>> degree(0, x)
    -oo
    """
    options.allowed_flags(args, ['gen', 'polys'])

    try:
        poly, opt = poly_from_expr(f, *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('degree', 1, exc)

    # Wrap the raw degree (an int or -oo) into a Diofant object.
    return sympify(poly.degree(opt.gen))
    Examples
    ========

    >>> LT(4*x**2 + 2*x*y**2 + x*y + 3*y)
    4*x**2

    """
    options.allowed_flags(args, ['polys'])

    try:
        F, opt = poly_from_expr(f, *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('LT', 1, exc)

    monom, coeff = F.LT(order=opt.order)
    return coeff*monom.as_expr()


def prem(f, g, *gens, **args):
    """
    Compute polynomial pseudo-remainder of ``f`` and ``g``.

    Examples
    ========

    >>> prem(x**2 + 1, 2*x - 4)
    20

    """
    options.allowed_flags(args, ['polys'])

    try:
        (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('prem', 2, exc)

    r = F.prem(G)

    if not opt.polys:
        return r.as_expr()
    else:
        return r


def div(f, g, *gens, **args):
    """
    Compute polynomial division of ``f`` and ``g``.

    Examples
    ========

    >>> div(x**2 + 1, 2*x - 4, field=False)
    (0, x**2 + 1)
    >>> div(x**2 + 1, 2*x - 4)
    (x/2 + 1, 5)

    """
    options.allowed_flags(args, ['auto', 'polys'])

    try:
        (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('div', 2, exc)

    q, r = F.div(G, auto=opt.auto)

    if not opt.polys:
        return q.as_expr(), r.as_expr()
    else:
        return q, r


def rem(f, g, *gens, **args):
    """
    Compute polynomial remainder of ``f`` and ``g``.

    Examples
    ========

    >>> rem(x**2 + 1, 2*x - 4, field=False)
    x**2 + 1
    >>> rem(x**2 + 1, 2*x - 4)
    5

    """
    options.allowed_flags(args, ['auto', 'polys'])

    try:
        (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('rem', 2, exc)

    r = F.rem(G, auto=opt.auto)

    if not opt.polys:
        return r.as_expr()
    else:
        return r


def quo(f, g, *gens, **args):
    """
    Compute polynomial quotient of ``f`` and ``g``.

    Examples
    ========

    >>> quo(x**2 + 1, 2*x - 4)
    x/2 + 1
    >>> quo(x**2 - 1, x - 1)
    x + 1

    """
    options.allowed_flags(args, ['auto', 'polys'])

    try:
        (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('quo', 2, exc)

    q = F.quo(G, auto=opt.auto)

    if not opt.polys:
        return q.as_expr()
    else:
        return q


def exquo(f, g, *gens, **args):
    """
    Compute polynomial exact quotient of ``f`` and ``g``.

    Examples
    ========

    >>> exquo(x**2 - 1, x - 1)
    x + 1

    >>> exquo(x**2 + 1, 2*x - 4)
    Traceback (most recent call last):
    ...
    ExactQuotientFailed: 2*x - 4 does not divide x**2 + 1

    """
    options.allowed_flags(args, ['auto', 'polys'])

    try:
        (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('exquo', 2, exc)

    q = F.exquo(G, auto=opt.auto)

    if not opt.polys:
        return q.as_expr()
    else:
        return q


def half_gcdex(f, g, *gens, **args):
    """
    Half extended Euclidean algorithm of ``f`` and ``g``.

    Returns ``(s, h)`` such that ``h = gcd(f, g)`` and ``s*f = h (mod g)``.

    Examples
    ========

    >>> half_gcdex(x**4 - 2*x**3 - 6*x**2 + 12*x + 15, x**3 + x**2 - 4*x - 4)
    (-x/5 + 3/5, x + 1)

    """
    options.allowed_flags(args, ['auto', 'polys'])

    try:
        (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed as exc:
        # Inputs are not polynomials: fall back to the ground domain
        # (e.g. integer gcdex).
        domain, (a, b) = construct_domain(exc.exprs)

        s, h = domain.half_gcdex(a, b)
        return domain.to_expr(s), domain.to_expr(h)

    s, h = F.half_gcdex(G, auto=opt.auto)

    if not opt.polys:
        return s.as_expr(), h.as_expr()
    else:
        return s, h


def gcdex(f, g, *gens, **args):
    """
    Extended Euclidean algorithm of ``f`` and ``g``.

    Returns ``(s, t, h)`` such that ``h = gcd(f, g)`` and ``s*f + t*g = h``.
    Examples
    ========

    >>> gcdex(x**4 - 2*x**3 - 6*x**2 + 12*x + 15, x**3 + x**2 - 4*x - 4)
    (-x/5 + 3/5, x**2/5 - 6*x/5 + 2, x + 1)

    """
    options.allowed_flags(args, ['auto', 'polys'])

    try:
        (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed as exc:
        # Not polynomials: delegate to the ground domain.
        domain, (a, b) = construct_domain(exc.exprs)

        s, t, h = domain.gcdex(a, b)
        return domain.to_expr(s), domain.to_expr(t), domain.to_expr(h)

    s, t, h = F.gcdex(G, auto=opt.auto)

    if not opt.polys:
        return s.as_expr(), t.as_expr(), h.as_expr()
    else:
        return s, t, h


def invert(f, g, *gens, **args):
    """
    Invert ``f`` modulo ``g`` when possible.

    Examples
    ========

    >>> invert(x**2 - 1, 2*x - 1)
    -4/3

    >>> invert(x**2 - 1, x - 1)
    Traceback (most recent call last):
    ...
    NotInvertible: zero divisor

    For more efficient inversion of Rationals, use the ``mod_inverse``
    function:

    >>> mod_inverse(3, 5)
    2
    >>> (Integer(2)/5).invert(Integer(7)/3)
    5/2

    See Also
    ========

    diofant.core.numbers.mod_inverse

    """
    options.allowed_flags(args, ['auto', 'polys'])

    try:
        (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed as exc:
        domain, (a, b) = construct_domain(exc.exprs)
        return domain.to_expr(domain.invert(a, b))

    h = F.invert(G, auto=opt.auto)

    if not opt.polys:
        return h.as_expr()
    else:
        return h


def subresultants(f, g, *gens, **args):
    """
    Compute subresultant PRS of ``f`` and ``g``.

    Examples
    ========

    >>> subresultants(x**2 + 1, x**2 - 1)
    [x**2 + 1, x**2 - 1, -2]

    """
    options.allowed_flags(args, ['polys'])

    try:
        (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('subresultants', 2, exc)

    result = F.subresultants(G)

    if not opt.polys:
        return [r.as_expr() for r in result]
    else:
        return result


def resultant(f, g, *gens, **args):
    """
    Compute resultant of ``f`` and ``g``.

    Examples
    ========

    >>> resultant(x**2 + 1, x**2 - 1)
    4

    """
    # ``includePRS`` additionally returns the subresultant PRS used in
    # the computation; popped before flag validation since it is not a
    # standard option.
    includePRS = args.pop('includePRS', False)
    options.allowed_flags(args, ['polys'])

    try:
        (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('resultant', 2, exc)

    if includePRS:
        result, R = F.resultant(G, includePRS=includePRS)
    else:
        result = F.resultant(G)

    if not opt.polys:
        if includePRS:
            return result.as_expr(), [r.as_expr() for r in R]
        return result.as_expr()
    else:
        if includePRS:
            return result, R
        return result


def discriminant(f, *gens, **args):
    """
    Compute discriminant of ``f``.

    Examples
    ========

    >>> discriminant(x**2 + 2*x + 3)
    -8

    """
    options.allowed_flags(args, ['polys'])

    try:
        F, opt = poly_from_expr(f, *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('discriminant', 1, exc)

    result = F.discriminant()

    if not opt.polys:
        return result.as_expr()
    else:
        return result


def cofactors(f, g, *gens, **args):
    """
    Compute GCD and cofactors of ``f`` and ``g``.

    Returns polynomials ``(h, cff, cfg)`` such that ``h = gcd(f, g)``, and
    ``cff = quo(f, h)`` and ``cfg = quo(g, h)`` are, so called, cofactors
    of ``f`` and ``g``.

    Examples
    ========

    >>> cofactors(x**2 - 1, x**2 - 3*x + 2)
    (x - 1, x + 1, x - 2)

    """
    options.allowed_flags(args, ['polys'])

    try:
        (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed as exc:
        # Not polynomials: delegate to the ground domain.
        domain, (a, b) = construct_domain(exc.exprs)

        h, cff, cfg = domain.cofactors(a, b)
        return tuple(map(domain.to_expr, (h, cff, cfg)))

    h, cff, cfg = F.cofactors(G)

    if not opt.polys:
        return h.as_expr(), cff.as_expr(), cfg.as_expr()
    else:
        return h, cff, cfg


def gcd_list(seq, *gens, **args):
    """
    Compute GCD of a list of polynomials.
    Examples
    ========

    >>> gcd_list([x**3 - 1, x**2 - 1, x**2 - 3*x + 2])
    x - 1

    """
    seq = sympify(seq)

    def try_non_polynomial_gcd(seq):
        # Pure-number sequences are handled directly in the ground
        # domain; returns None when the sequence is genuinely symbolic.
        if not gens and not args:
            domain, numbers = construct_domain(seq)

            if not numbers:
                return domain.zero
            elif domain.is_Numerical:
                result, numbers = numbers[0], numbers[1:]

                for number in numbers:
                    result = domain.gcd(result, number)

                    # gcd can only shrink; 1 is an absorbing value.
                    if result == domain.one:
                        break

                return domain.to_expr(result)

    result = try_non_polynomial_gcd(seq)

    if result is not None:
        return result

    options.allowed_flags(args, ['polys'])

    try:
        polys, opt = parallel_poly_from_expr(seq, *gens, **args)
    except PolificationFailed as exc:
        result = try_non_polynomial_gcd(exc.exprs)

        if result is not None:
            return result
        else:
            raise ComputationFailed('gcd_list', len(seq), exc)

    if not polys:
        # Empty input: gcd of nothing is zero.
        if not opt.polys:
            return Integer(0)
        else:
            return Poly(0, opt=opt)

    result, polys = polys[0], polys[1:]

    for poly in polys:
        result = result.gcd(poly)

        if result.is_one:
            break

    if not opt.polys:
        return result.as_expr()
    else:
        return result


def gcd(f, g, *gens, **args):
    """
    Compute GCD of ``f`` and ``g``.

    Examples
    ========

    >>> gcd(x**2 - 1, x**2 - 3*x + 2)
    x - 1

    """
    options.allowed_flags(args, ['polys'])

    try:
        (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed as exc:
        domain, (a, b) = construct_domain(exc.exprs)
        return domain.to_expr(domain.gcd(a, b))

    result = F.gcd(G)

    if not opt.polys:
        return result.as_expr()
    else:
        return result


def lcm_list(seq, *gens, **args):
    """
    Compute LCM of a list of polynomials.

    Examples
    ========

    >>> lcm_list([x**3 - 1, x**2 - 1, x**2 - 3*x + 2])
    x**5 - x**4 - 2*x**3 - x**2 + x + 2

    """
    seq = sympify(seq)

    def try_non_polynomial_lcm(seq):
        # Numeric fast path, mirroring try_non_polynomial_gcd above.
        if not gens and not args:
            domain, numbers = construct_domain(seq)

            if not numbers:
                return domain.one
            elif domain.is_Numerical:
                result, numbers = numbers[0], numbers[1:]

                for number in numbers:
                    result = domain.lcm(result, number)

                return domain.to_expr(result)

    result = try_non_polynomial_lcm(seq)

    if result is not None:
        return result

    options.allowed_flags(args, ['polys'])

    try:
        polys, opt = parallel_poly_from_expr(seq, *gens, **args)
    except PolificationFailed as exc:
        result = try_non_polynomial_lcm(exc.exprs)

        if result is not None:
            return result
        else:
            raise ComputationFailed('lcm_list', len(seq), exc)

    if not polys:
        # Empty input: lcm of nothing is one.
        if not opt.polys:
            return Integer(1)
        else:
            return Poly(1, opt=opt)

    result, polys = polys[0], polys[1:]

    for poly in polys:
        result = result.lcm(poly)

    if not opt.polys:
        return result.as_expr()
    else:
        return result


def lcm(f, g, *gens, **args):
    """
    Compute LCM of ``f`` and ``g``.

    Examples
    ========

    >>> lcm(x**2 - 1, x**2 - 3*x + 2)
    x**3 - 2*x**2 - x + 2

    """
    options.allowed_flags(args, ['polys'])

    try:
        (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed as exc:
        domain, (a, b) = construct_domain(exc.exprs)
        return domain.to_expr(domain.lcm(a, b))

    result = F.lcm(G)

    if not opt.polys:
        return result.as_expr()
    else:
        return result


def terms_gcd(f, *gens, **args):
    """
    Remove GCD of terms from ``f``.

    If the ``deep`` flag is True, then the arguments of ``f`` will have
    terms_gcd applied to them.

    If a fraction is factored out of ``f`` and ``f`` is an Add, then
    an unevaluated Mul will be returned so that automatic simplification
    does not redistribute it.  The hint ``clear``, when set to False,
    can be used to prevent such factoring when all coefficients are not
    fractions.
    Examples
    ========

    >>> terms_gcd(x**6*y**2 + x**3*y, x, y)
    x**3*y*(x**3*y + 1)

    The default action of polys routines is to expand the expression
    given to them.  terms_gcd follows this behavior:

    >>> terms_gcd((3+3*x)*(x+x*y))
    3*x*(x*y + x + y + 1)

    If this is not desired then the hint ``expand`` can be set to False.
    In this case the expression will be treated as though it were comprised
    of one or more terms:

    >>> terms_gcd((3+3*x)*(x+x*y), expand=False)
    (3*x + 3)*(x*y + x)

    In order to traverse factors of a Mul or the arguments of other
    functions, the ``deep`` hint can be used:

    >>> terms_gcd((3 + 3*x)*(x + x*y), expand=False, deep=True)
    3*x*(x + 1)*(y + 1)
    >>> terms_gcd(cos(x + x*y), deep=True)
    cos(x*(y + 1))

    Rationals are factored out by default:

    >>> terms_gcd(x + y/2)
    (2*x + y)/2

    Only the y-term had a coefficient that was a fraction; if one
    does not want to factor out the 1/2 in cases like this, the flag
    ``clear`` can be set to False:

    >>> terms_gcd(x + y/2, clear=False)
    x + y/2
    >>> terms_gcd(x*y/2 + y**2, clear=False)
    y*(x/2 + y)

    The ``clear`` flag is ignored if all coefficients are fractions:

    >>> terms_gcd(x/3 + y/2, clear=False)
    (2*x + 3*y)/6

    See Also
    ========

    diofant.core.exprtools.gcd_terms, diofant.core.exprtools.factor_terms

    """
    from ..core import Equality

    orig = sympify(f)

    if not isinstance(f, Expr) or f.is_Atom:
        return orig

    if args.get('deep', False):
        # Recurse into the arguments first, then extract from the rebuilt
        # expression without re-expanding it.
        new = f.func(*[terms_gcd(a, *gens, **args) for a in f.args])
        args.pop('deep')
        args['expand'] = False
        return terms_gcd(new, *gens, **args)

    if isinstance(f, Equality):
        return f

    clear = args.pop('clear', True)
    options.allowed_flags(args, ['polys'])

    F, opt = poly_from_expr(f, *gens, **args)

    J, f = F.terms_gcd()

    if opt.domain.is_Field:
        denom, f = f.clear_denoms(convert=True)

    coeff, f = f.primitive()

    if opt.domain.is_Field:
        # Fold the cleared denominator back into the extracted content.
        coeff /= denom

    term = Mul(*[x**j for x, j in zip(f.gens, J)])

    if coeff == 1:
        coeff = Integer(1)
        if term == 1:
            return orig

    if clear:
        return _keep_coeff(coeff, term*f.as_expr())
    # base the clearing on the form of the original expression, not
    # the (perhaps) Mul that we have now
    coeff, f = _keep_coeff(coeff, f.as_expr(), clear=False).as_coeff_Mul()
    return _keep_coeff(coeff, term*f, clear=False)


def trunc(f, p, *gens, **args):
    """
    Reduce ``f`` modulo a constant ``p``.

    Examples
    ========

    >>> trunc(2*x**3 + 3*x**2 + 5*x + 7, 3)
    -x**3 - x + 1

    """
    options.allowed_flags(args, ['auto', 'polys'])

    try:
        F, opt = poly_from_expr(f, *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('trunc', 1, exc)

    result = F.trunc(sympify(p))

    if not opt.polys:
        return result.as_expr()
    else:
        return result


def monic(f, *gens, **args):
    """
    Divide all coefficients of ``f`` by ``LC(f)``.

    Examples
    ========

    >>> monic(3*x**2 + 4*x + 2)
    x**2 + 4*x/3 + 2/3

    """
    options.allowed_flags(args, ['auto', 'polys'])

    try:
        F, opt = poly_from_expr(f, *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('monic', 1, exc)

    result = F.monic(auto=opt.auto)

    if not opt.polys:
        return result.as_expr()
    else:
        return result


def content(f, *gens, **args):
    """
    Compute GCD of coefficients of ``f``.

    Examples
    ========

    >>> content(6*x**2 + 8*x + 12)
    2

    """
    options.allowed_flags(args, ['polys'])

    try:
        F, opt = poly_from_expr(f, *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('content', 1, exc)

    return F.content()


def primitive(f, *gens, **args):
    """
    Compute content and the primitive form of ``f``.

    Examples
    ========

    >>> primitive(6*x**2 + 8*x + 12)
    (2, 3*x**2 + 4*x + 6)

    >>> eq = (2 + 2*x)*x + 2

    Expansion is performed by default:

    >>> primitive(eq)
    (2, x**2 + x + 1)

    Set ``expand`` to False to shut this off.  Note that the
    extraction will not be recursive; use the as_content_primitive method
    for recursive, non-destructive Rational extraction.
    >>> primitive(eq, expand=False)
    (1, x*(2*x + 2) + 2)

    >>> eq.as_content_primitive()
    (2, x*(x + 1) + 1)

    """
    options.allowed_flags(args, ['polys'])

    try:
        F, opt = poly_from_expr(f, *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('primitive', 1, exc)

    cont, result = F.primitive()
    if not opt.polys:
        return cont, result.as_expr()
    else:
        return cont, result


def compose(f, g, *gens, **args):
    """
    Compute functional composition ``f(g)``.

    Examples
    ========

    >>> compose(x**2 + x, x - 1)
    x**2 - x

    """
    options.allowed_flags(args, ['polys'])

    try:
        (F, G), opt = parallel_poly_from_expr((f, g), *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('compose', 2, exc)

    result = F.compose(G)

    if not opt.polys:
        return result.as_expr()
    else:
        return result


def decompose(f, *gens, **args):
    """
    Compute functional decomposition of ``f``.

    Examples
    ========

    >>> decompose(x**4 + 2*x**3 - x - 1)
    [x**2 - x - 1, x**2 + x]

    """
    options.allowed_flags(args, ['polys'])

    try:
        F, opt = poly_from_expr(f, *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('decompose', 1, exc)

    result = F.decompose()

    if not opt.polys:
        return [r.as_expr() for r in result]
    else:
        return result


def sqf_norm(f, *gens, **args):
    """
    Compute square-free norm of ``f``.

    Returns ``s``, ``f``, ``r``, such that ``g(x) = f(x-sa)`` and
    ``r(x) = Norm(g(x))`` is a square-free polynomial over ``K``,
    where ``a`` is the algebraic extension of the ground domain.

    Examples
    ========

    >>> sqf_norm(x**2 + 1, extension=[sqrt(3)])
    (1, x**2 - 2*sqrt(3)*x + 4, x**4 - 4*x**2 + 16)

    """
    options.allowed_flags(args, ['polys'])

    try:
        F, opt = poly_from_expr(f, *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('sqf_norm', 1, exc)

    s, g, r = F.sqf_norm()

    if not opt.polys:
        return Integer(s), g.as_expr(), r.as_expr()
    else:
        return Integer(s), g, r


def sqf_part(f, *gens, **args):
    """
    Compute square-free part of ``f``.

    Examples
    ========

    >>> sqf_part(x**3 - 3*x - 2)
    x**2 - x - 2

    """
    options.allowed_flags(args, ['polys'])

    try:
        F, opt = poly_from_expr(f, *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('sqf_part', 1, exc)

    result = F.sqf_part()

    if not opt.polys:
        return result.as_expr()
    else:
        return result


def _sorted_factors(factors, method):
    """Sort a list of ``(expr, exp)`` pairs."""
    # 'sqf' orders primarily by multiplicity; factorization orders by size.
    if method == 'sqf':
        def key(obj):
            poly, exp = obj
            rep = poly.rep.to_dense()
            return exp, len(rep), len(poly.gens), default_sort_key(rep)
    else:
        def key(obj):
            poly, exp = obj
            rep = poly.rep.to_dense()
            return len(rep), len(poly.gens), exp, default_sort_key(rep)

    return sorted(factors, key=key)


def _factors_product(factors):
    """Multiply a list of ``(expr, exp)`` pairs."""
    return Mul(*[f.as_expr()**k for f, k in factors])


def _symbolic_factor_list(expr, opt, method):
    """Helper function for :func:`_symbolic_factor`."""
    coeff, factors = Integer(1), []

    args = [i._eval_factor() if hasattr(i, '_eval_factor') else i
            for i in Mul.make_args(expr)]
    for arg in args:
        if arg.is_Number:
            coeff *= arg
            continue
        elif arg.is_Pow and arg.base is not E:
            base, exp = arg.base, arg.exp
            if base.is_Number:
                factors.append((base, exp))
                continue
        else:
            base, exp = arg, Integer(1)

        try:
            poly, _ = _poly_from_expr(base, opt)
        except PolificationFailed as exc:
            # Leave non-polynomial factors untouched.
            factors.append((exc.expr, exp))
        else:
            func = getattr(poly, method + '_list')

            _coeff, _factors = func()
            if _coeff != 1:
                if exp.is_Integer:
                    coeff *= _coeff**exp
                elif _coeff.is_positive:
                    factors.append((_coeff, exp))
                else:
                    _factors.append((_coeff, Integer(1)))

            if exp == 1:
                factors.extend(_factors)
            elif exp.is_integer:
                factors.extend([(f, k*exp) for f, k in _factors])
            else:
                # Non-integer outer exponent: only factors known positive
                # may absorb it; the rest stay grouped under the exponent.
                other = []

                for f, k in _factors:
                    if f.as_expr().is_positive:
                        factors.append((f, k*exp))
                    else:
                        other.append((f, k))

                factors.append((_factors_product(other), exp))

    if method == 'sqf':
        # Merge factors sharing a multiplicity into a single factor.
        factors = [(functools.reduce(operator.mul,
                                     (f for f, _ in factors if _ == k)), k)
                   for k in set(dict(factors).values())]

    return coeff, factors


def _symbolic_factor(expr, opt, method):
    """Helper function for :func:`_factor`."""
    if isinstance(expr, Expr) and not expr.is_Relational:
        if hasattr(expr, '_eval_factor'):
            return expr._eval_factor()
        coeff, factors = _symbolic_factor_list(together(expr), opt, method)
        return _keep_coeff(coeff, _factors_product(factors))
    elif hasattr(expr, 'args'):
        return expr.func(*[_symbolic_factor(arg, opt, method)
                           for arg in expr.args])
    elif hasattr(expr, '__iter__'):
        return expr.__class__([_symbolic_factor(arg, opt, method)
                               for arg in expr])
    else:
        raise NotImplementedError


def _generic_factor_list(expr, gens, args, method):
    """Helper function for :func:`sqf_list` and :func:`factor_list`."""
    options.allowed_flags(args, ['frac', 'polys'])
    opt = options.build_options(gens, args)

    expr = sympify(expr)

    if isinstance(expr, Expr) and not expr.is_Relational:
        numer, denom = together(expr).as_numer_denom()

        cp, fp = _symbolic_factor_list(numer, opt, method)
        cq, fq = _symbolic_factor_list(denom, opt, method)

        if fq and not opt.frac:
            raise PolynomialError(f'a polynomial expected, got {expr}')

        _opt = opt.clone({'expand': True})

        if not _opt.get('gens'):
            _opt['gens'] = set().union(*[set(f.gens) for f, _ in fp + fq
                                         if f.is_Poly])

        # Normalize every factor to a Poly over the shared generators.
        for factors in (fp, fq):
            for i, (f, k) in enumerate(factors):
                if not f.is_Poly:
                    f, _ = _poly_from_expr(f, _opt)
                    factors[i] = (f, k)

        fp = _sorted_factors(fp, method)
        fq = _sorted_factors(fq, method)

        if not opt.polys:
            fp = [(f.as_expr(), k) for f, k in fp]
            fq = [(f.as_expr(), k) for f, k in fq]

        coeff = cp/cq

        if not opt.frac:
            return coeff, fp
        else:
            return coeff, fp, fq
    else:
        raise PolynomialError(f'a polynomial expected, got {expr}')


def _generic_factor(expr, gens, args, method):
    """Helper function for :func:`sqf` and :func:`factor`."""
    options.allowed_flags(args, [])
    opt = options.build_options(gens, args)
    return _symbolic_factor(sympify(expr), opt, method)


def to_rational_coeffs(f):
    """
    Try to transform a polynomial to have
rational coefficients. try to find a transformation ``x = alpha*y`` ``f(x) = lc*alpha**n * g(y)`` where ``g`` is a polynomial with rational coefficients, ``lc`` the leading coefficient. If this fails, try ``x = y + beta`` ``f(x) = g(y)`` Returns ``None`` if ``g`` not found; ``(lc, alpha, None, g)`` in case of rescaling ``(None, None, beta, g)`` in case of translation Notes ===== Currently it transforms only polynomials without roots larger than 2. Examples ======== >>> p = Poly(((x**2-1)*(x-2)).subs({x: x*(1 + sqrt(2))}), x, domain=EX) >>> lc, r, _, g = to_rational_coeffs(p) >>> lc, r (7 + 5*sqrt(2), -2*sqrt(2) + 2) >>> g Poly(x**3 + x**2 - 1/4*x - 1/4, x, domain='QQ') >>> r1 = simplify(1/r) >>> Poly(lc*r**3*(g.as_expr()).subs({x: x*r1}), x, domain=EX) == p True """ from ..simplify import simplify def _try_rescale(f, f1=None): """ Try rescaling ``x -> alpha*x`` to convert f to a polynomial with rational coefficients. Returns ``alpha, f``; if the rescaling is successful, ``alpha`` is the rescaling factor, and ``f`` is the rescaled polynomial; else ``alpha`` is ``None``. """ from ..core import Add if f.is_multivariate or not (f.gens[0]).is_Atom: return n = f.degree() lc = f.LC() f1 = f1 or f1.monic() coeffs = f1.all_coeffs()[1:] coeffs = [simplify(coeffx) for coeffx in coeffs] if coeffs[-2]: rescale1_x = simplify(coeffs[-2]/coeffs[-1]) coeffs1 = [] for i in range(len(coeffs)): coeffx = simplify(coeffs[i]*rescale1_x**(i + 1)) if not coeffx.is_rational: break coeffs1.append(coeffx) else: rescale_x = simplify(1/rescale1_x) x = f.gens[0] v = [x**n] for i in range(1, n + 1): v.append(coeffs1[i - 1]*x**(n - i)) f = Add(*v) f = Poly(f) return lc, rescale_x, f def _try_translate(f, f1=None): """ Try translating ``x -> x + alpha`` to convert f to a polynomial with rational coefficients. Returns ``alpha, f``; if the translating is successful, ``alpha`` is the translating factor, and ``f`` is the shifted polynomial; else ``alpha`` is ``None``. 
""" from ..core import Add if f.is_multivariate or not (f.gens[0]).is_Atom: return n = f.degree() f1 = f1 or f1.monic() coeffs = f1.all_coeffs()[1:] c = simplify(coeffs[0]) if c and not c.is_rational: func = Add if c.is_Add: args = c.args func = c.func else: args = [c] sifted = sift(args, lambda z: z.is_rational) c2 = sifted[False] alpha = -func(*c2)/n f2 = f1.shift(alpha) return alpha, f2 def _has_square_roots(p): """Return True if ``f`` is a sum with square roots but no other root.""" from ..core.exprtools import Factors coeffs = p.coeffs() has_sq = False for y in coeffs: for x in Add.make_args(y): f = Factors(x).factors r = [wx.denominator for b, wx in f.items() if b.is_number and wx.is_Rational and wx.denominator >= 2] if not r: continue if min(r) == 2: has_sq = True if max(r) > 2: return False return has_sq if f.domain.is_ExpressionDomain and _has_square_roots(f): f1 = f.monic() r = _try_rescale(f, f1) if r: return r[0], r[1], None, r[2] else: r = _try_translate(f, f1) if r: return None, None, r[0], r[1] def sqf_list(f, *gens, **args): """ Compute a list of square-free factors of ``f``. Examples ======== >>> sqf_list(2*x**5 + 16*x**4 + 50*x**3 + 76*x**2 + 56*x + 16) (2, [(x + 1, 2), (x + 2, 3)]) """ return _generic_factor_list(f, gens, args, method='sqf') def sqf(f, *gens, **args): """ Compute square-free factorization of ``f``. Examples ======== >>> sqf(2*x**5 + 16*x**4 + 50*x**3 + 76*x**2 + 56*x + 16) 2*(x + 1)**2*(x + 2)**3 """ return _generic_factor(f, gens, args, method='sqf') def factor_list(f, *gens, **args): """ Compute a list of irreducible factors of ``f``. Examples ======== >>> factor_list(2*x**5 + 2*x**4*y + 4*x**3 + 4*x**2*y + 2*x + 2*y) (2, [(x + y, 1), (x**2 + 1, 2)]) """ return _generic_factor_list(f, gens, args, method='factor') def factor(f, *gens, **args): """ Compute the factorization of expression, ``f``, into irreducibles. (To factor an integer into primes, use ``factorint``.) There two modes implemented: symbolic and formal. 
    If ``f`` is not an instance of :class:`Poly` and generators are not
    specified, then the former mode is used.  Otherwise, the formal mode
    is used.

    In symbolic mode, :func:`factor` will traverse the expression tree and
    factor its components without any prior expansion, unless an instance
    of :class:`~diofant.core.add.Add` is encountered (in this case formal
    factorization is used).  This way :func:`factor` can handle large or
    symbolic exponents.

    By default, the factorization is computed over the rationals.  To
    factor over other domain, e.g. an algebraic or finite field, use
    appropriate options: ``extension``, ``modulus`` or ``domain``.

    Examples
    ========

    >>> factor(2*x**5 + 2*x**4*y + 4*x**3 + 4*x**2*y + 2*x + 2*y)
    2*(x + y)*(x**2 + 1)**2

    >>> factor(x**2 + 1)
    x**2 + 1
    >>> factor(x**2 + 1, modulus=2)
    (x + 1)**2
    >>> factor(x**2 + 1, gaussian=True)
    (x - I)*(x + I)

    >>> factor(x**2 - 2, extension=sqrt(2))
    (x - sqrt(2))*(x + sqrt(2))

    >>> factor((x**2 - 1)/(x**2 + 4*x + 4))
    (x - 1)*(x + 1)/(x + 2)**2
    >>> factor((x**2 + 4*x + 4)**10000000*(x**2 + 1))
    (x + 2)**20000000*(x**2 + 1)

    By default, factor deals with an expression as a whole:

    >>> eq = 2**(x**2 + 2*x + 1)
    >>> factor(eq)
    2**(x**2 + 2*x + 1)

    If the ``deep`` flag is True then subexpressions will
    be factored:

    >>> factor(eq, deep=True)
    2**((x + 1)**2)

    See Also
    ========

    diofant.ntheory.factor_.factorint

    """
    f = sympify(f)
    if args.pop('deep', False):
        # Factor every Mul/Add subexpression and substitute the results
        # back in a single pass.
        partials = {}
        muladd = f.atoms(Mul, Add)
        for p in muladd:
            fac = factor(p, *gens, **args)
            if (fac.is_Mul or fac.is_Pow) and fac != p:
                partials[p] = fac
        return f.xreplace(partials)

    try:
        return _generic_factor(f, gens, args, method='factor')
    except PolynomialError as msg:
        # Commutative expressions genuinely failed; noncommutative ones
        # get a dedicated fallback.
        if not f.is_commutative:
            from ..core.exprtools import factor_nc
            return factor_nc(f)
        else:
            raise PolynomialError(msg)


def count_roots(f, inf=None, sup=None):
    """
    Return the number of roots of ``f`` in ``[inf, sup]`` interval.

    If one of ``inf`` or ``sup`` is complex, it will return the number of
    roots in the complex rectangle with corners at ``inf`` and ``sup``.

    Examples
    ========

    >>> count_roots(x**4 - 4, -3, 3)
    2
    >>> count_roots(x**4 - 4, 0, 1 + 3*I)
    1

    """
    try:
        F = Poly(f, greedy=False)
    except GeneratorsNeeded:
        raise PolynomialError(f"can't count roots of {f}, not a polynomial")

    return F.count_roots(inf=inf, sup=sup)


def real_roots(f, multiple=True):
    """
    Return a list of real roots with multiplicities of ``f``.

    Examples
    ========

    >>> real_roots(2*x**3 - 7*x**2 + 4*x + 4)
    [-1/2, 2, 2]

    """
    try:
        F = Poly(f, greedy=False)
    except GeneratorsNeeded:
        raise PolynomialError(f"can't compute real roots of {f}, "
                              'not a polynomial')

    return F.real_roots(multiple=multiple)


def nroots(f, n=15, maxsteps=50, cleanup=True):
    """
    Compute numerical approximations of roots of ``f``.

    Examples
    ========

    >>> nroots(x**2 - 3, n=15)
    [-1.73205080756888, 1.73205080756888]
    >>> nroots(x**2 - 3, n=30)
    [-1.73205080756887729352744634151, 1.73205080756887729352744634151]

    """
    try:
        F = Poly(f, greedy=False)
    except GeneratorsNeeded:
        raise PolynomialError(
            f"can't compute numerical roots of {f}, not a polynomial")

    return F.nroots(n=n, maxsteps=maxsteps, cleanup=cleanup)


def cancel(f, *gens, **args):
    """
    Cancel common factors in a rational function ``f``.
    Examples
    ========

    >>> A = Symbol('A', commutative=False)

    >>> cancel((2*x**2 - 2)/(x**2 - 2*x + 1))
    (2*x + 2)/(x - 1)
    >>> cancel((sqrt(3) + sqrt(15)*A)/(sqrt(2) + sqrt(10)*A))
    sqrt(6)/2

    """
    from ..core.exprtools import factor_terms
    from ..functions import Piecewise
    options.allowed_flags(args, ['polys'])

    f = sympify(f)

    if not isinstance(f, Tuple):
        if f.is_Atom or isinstance(f, Relational) or not isinstance(f, Expr):
            return f
        f = factor_terms(f, radical=True)
        p, q = f.as_numer_denom()
    elif len(f) == 2:
        # A (numerator, denominator) pair was given explicitly.
        p, q = f
    else:
        return factor_terms(f)

    try:
        (F, G), opt = parallel_poly_from_expr((p, q), *gens, **args)
    except PolificationFailed:
        if not isinstance(f, (tuple, Tuple)):
            return f
        else:
            return Integer(1), p, q
    except PolynomialError:
        assert not f.is_commutative or f.has(Piecewise)
        # Handling of noncommutative and/or piecewise expressions
        if f.is_Add or f.is_Mul:
            # Cancel the commutative, Piecewise-free part as a whole and
            # each remaining argument individually.
            sifted = sift(f.args, lambda x:
                          x.is_commutative is True and not x.has(Piecewise))
            c, nc = sifted[True], sifted[False]
            nc = [cancel(i) for i in nc]
            return f.func(cancel(f.func._from_args(c)), *nc)
        else:
            reps = []
            pot = preorder_traversal(f)
            next(pot)
            for e in pot:
                # XXX: This should really skip anything that's not Expr.
                if isinstance(e, (tuple, Tuple, BooleanAtom)):
                    continue
                reps.append((e, cancel(e)))
                pot.skip()  # this was handled successfully
            return f.xreplace(dict(reps))

    c, P, Q = F.cancel(G)

    if not isinstance(f, (tuple, Tuple)):
        return c*(P.as_expr()/Q.as_expr())
    else:
        if not opt.polys:
            return c, P.as_expr(), Q.as_expr()
        else:
            return c, P, Q


def reduced(f, G, *gens, **args):
    """
    Reduces a polynomial ``f`` modulo a set of polynomials ``G``.

    Given a polynomial ``f`` and a set of polynomials ``G = (g_1, ..., g_n)``,
    computes a set of quotients ``q = (q_1, ..., q_n)`` and the remainder ``r``
    such that ``f = q_1*g_1 + ... + q_n*g_n + r``, where ``r`` vanishes or ``r``
    is a completely reduced polynomial with respect to ``G``.

    Examples
    ========

    >>> reduced(2*x**4 + y**2 - x**2 + y**3, [x**3 - x, y**3 - y])
    ([2*x, 1], x**2 + y**2 + y)

    """
    options.allowed_flags(args, ['polys', 'auto'])

    try:
        polys, opt = parallel_poly_from_expr([f] + list(G), *gens, **args)
    except PolificationFailed as exc:
        raise ComputationFailed('reduced', 0, exc)

    domain = opt.domain
    retract = False

    if opt.auto and domain.is_Ring and not domain.is_Field:
        # Division needs a field; remember to retract to the original
        # ring afterwards if possible.
        opt = opt.clone({'domain': domain.field})
        retract = True

    _ring = opt.domain.poly_ring(*opt.gens, order=opt.order)

    for i, poly in enumerate(polys):
        poly = dict(poly.set_domain(opt.domain).rep)
        polys[i] = _ring.from_dict(poly)

    Q, r = polys[0].div(polys[1:])

    Q = [Poly._from_dict(dict(q), opt) for q in Q]
    r = Poly._from_dict(dict(r), opt)

    if retract:
        try:
            _Q, _r = [q.to_ring() for q in Q], r.to_ring()
        except CoercionFailed:
            # Results do not fit the original ring; keep field results.
            pass
        else:
            Q, r = _Q, _r

    if not opt.polys:
        return [q.as_expr() for q in Q], r.as_expr()
    else:
        return Q, r


def groebner(F, *gens, **args):
    r"""
    Computes the reduced Gröbner basis for a set of polynomials.

    Parameters
    ==========

    F : list
        a set of polynomials

    \*gens : tuple
        polynomial generators

    \**args : dict
        a dictionary of parameters, namely

        order : str, optional
            Monomial order, defaults to ``lex``.
        method : {'buchberger', 'f5b'}, optional
            Set algorithm to compute Gröbner basis.  By default, an
            improved implementation of the Buchberger algorithm is used.
        field : bool, optional
            Force coefficients domain to be a field.  Defaults to False.
    Examples
    ========

    >>> F = [x*y - 2*x, 2*x**2 - y**2]

    >>> groebner(F)
    GroebnerBasis([2*x**2 - y**2, x*y - 2*x, y**3 - 2*y**2], x, y,
                  domain='ZZ', order='lex')
    >>> groebner(F, order=grevlex)
    GroebnerBasis([y**3 - 2*y**2, 2*x**2 - y**2, x*y - 2*x], x, y,
                  domain='ZZ', order='grevlex')
    >>> groebner(F, field=True)
    GroebnerBasis([x**2 - y**2/2, x*y - 2*x, y**3 - 2*y**2], x, y,
                  domain='QQ', order='lex')

    References
    ==========

    * :cite:`Buchberger2001systems`
    * :cite:`Cox2015ideals`

    See Also
    ========

    diofant.solvers.polysys.solve_poly_system

    """
    return GroebnerBasis(F, *gens, **args)


class GroebnerBasis(Basic):
    """Represents a reduced Gröbner basis."""

    def __new__(cls, F, *gens, **args):
        """Compute a reduced Gröbner basis for a system of polynomials."""
        options.allowed_flags(args, ['polys', 'method'])

        try:
            polys, opt = parallel_poly_from_expr(F, *gens, **args)
        except PolificationFailed as exc:
            raise ComputationFailed('groebner', len(F), exc)

        ring = opt.domain.poly_ring(*opt.gens, order=opt.order)
        if not ring.domain.is_Exact:
            raise ValueError(f'Domain must be exact, got {ring.domain}')

        # Zero polynomials contribute nothing to the basis.
        polys = [ring.from_dict(dict(_.rep)) for _ in polys if not _.is_zero]

        G = _groebner(polys, ring, method=opt.method)
        G = [Poly._from_dict(g, opt) for g in G]

        return cls._new(G, opt)

    @classmethod
    def _new(cls, basis, options):
        obj = Basic.__new__(cls)

        obj._basis = tuple(basis)
        obj._options = options

        return obj

    @property
    def args(self):
        return Tuple(*self.exprs), Tuple(*self.gens)

    @property
    def exprs(self):
        # Basis elements as plain expressions.
        return [poly.as_expr() for poly in self._basis]

    @property
    def polys(self):
        # Basis elements as Poly instances.
        return list(self._basis)

    @property
    def gens(self):
        return self._options.gens

    @property
    def domain(self):
        return self._options.domain

    @property
    def order(self):
        return self._options.order

    def __len__(self):
        return len(self._basis)

    def __iter__(self):
        # The ``polys`` option chooses Poly vs. expression iteration.
        if self._options.polys:
            return iter(self.polys)
        else:
            return iter(self.exprs)

    def __getitem__(self, item):
        if self._options.polys:
            basis = self.polys
        else:
            basis = self.exprs

        return basis[item]

    def __hash__(self):
        return hash((self._basis, tuple(sorted(self._options.items()))))

    def __eq__(self, other):
        if isinstance(other, self.__class__):
            return self._basis == other._basis and self._options == other._options
        elif iterable(other):
            # Compare against a plain sequence in either representation.
            return self.polys == list(other) or self.exprs == list(other)
        else:
            return False

    @property
    def dimension(self):
        """Dimension of the ideal, generated by a Gröbner basis."""
        sets = self.independent_sets
        if sets is not None:
            return max(len(s) for s in sets)

    @property
    def independent_sets(self):
        """Compute independent sets for ideal, generated by a Gröbner basis.

        References
        ==========

        * :cite:`Kredel1988indep`

        """
        if self.contains(Integer(1)):
            # The whole ring: no independent sets.
            return
        HTG = [_.LM(order=self.order) for _ in self.polys]

        def dimrec(S, U, M):
            # Recursively extend the candidate set S with generators from
            # U, keeping only sets independent of the leading terms HTG.
            U1 = U.copy()
            while U1:
                x = U1.pop(0)
                S1 = S + [x]
                t = Monomial(Mul(*S1), self.gens)
                for ht in HTG:
                    if all(a and b or not a for a, b in zip(ht, t)):
                        break
                else:
                    M = dimrec(S1, U1, M)
            if any(all(_ in m for _ in S) for m in M):
                return M
            else:
                return [S] + M

        return dimrec([], list(self.gens), [])

    def set_order(self, order):
        """
        Convert a Gröbner basis from one ordering to another.

        Notes
        =====

        The FGLM algorithm :cite:`Faugere1993groebner` used to convert
        reduced Gröbner bases of zero-dimensional ideals from one
        ordering to another.  Sometimes it is infeasible to compute a
        Gröbner basis with respect to a particular ordering directly.

        Examples
        ========

        >>> F = [x**2 - 3*y - x + 1, y**2 - 2*x + y - 1]

        >>> G = groebner(F, x, y, order='grlex')
        >>> G.set_order('lex') == groebner(F, x, y, order='lex')
        True

        """
        src_order = self.order
        dst_order = monomial_key(order)

        if src_order == dst_order:
            return self

        if self.dimension != 0:
            # FGLM requires a zero-dimensional ideal.
            raise NotImplementedError("can't convert Gröbner bases of "
                                      'ideals with positive dimension')

        polys = self.polys
        domain = self.domain

        opt = self._options.clone({'domain': domain.field,
                                   'order': dst_order})

        _ring = opt.domain.poly_ring(*opt.gens, order=src_order)

        for i, poly in enumerate(polys):
            poly = dict(poly.set_domain(opt.domain).rep)
            polys[i] = _ring.from_dict(poly)

        G = matrix_fglm(polys, _ring, dst_order)
        G = [Poly._from_dict(dict(g), opt) for g in G]

        if not domain.is_Field:
            # Retract to the original (non-field) coefficient domain.
            G = [g.clear_denoms(convert=True)[1] for g in G]
            opt.domain = domain

        return self._new(G, opt)

    def reduce(self, expr, auto=True):
        """
        Reduces a polynomial modulo a Gröbner basis.

        Given a polynomial ``f`` and a set of polynomials ``G = (g_1, ...,
        g_n)``, computes a set of quotients ``q = (q_1, ..., q_n)`` and the
        remainder ``r`` such that ``f = q_1*f_1 + ... + q_n*f_n + r``,
        where ``r`` vanishes or ``r`` is a completely reduced polynomial
        with respect to ``G``.
Examples ======== >>> f = 2*x**4 - x**2 + y**3 + y**2 >>> G = groebner([x**3 - x, y**3 - y]) >>> G.reduce(f) ([2*x, 1], x**2 + y**2 + y) >>> Q, r = _ >>> expand(sum(q*g for q, g in zip(Q, G)) + r) 2*x**4 - x**2 + y**3 + y**2 >>> _ == f True """ poly = Poly._from_expr(expr, self._options) polys = [poly] + list(self._basis) opt = self._options domain = self.domain retract = False if auto and domain.is_Ring and not domain.is_Field: opt = self._options.clone({'domain': domain.field}) retract = True _ring = opt.domain.poly_ring(*opt.gens, order=opt.order) for i, poly in enumerate(polys): poly = dict(poly.set_domain(opt.domain).rep) polys[i] = _ring.from_dict(poly) Q, r = polys[0].div(polys[1:]) Q = [Poly._from_dict(dict(q), opt) for q in Q] r = Poly._from_dict(dict(r), opt) if retract: try: _Q, _r = [q.to_ring() for q in Q], r.to_ring() except CoercionFailed: pass else: Q, r = _Q, _r if not opt.polys: return [q.as_expr() for q in Q], r.as_expr() else: return Q, r def contains(self, poly): """ Check if ``poly`` belongs the ideal generated by ``self``. Examples ======== >>> f = 2*x**3 + y**3 + 3*y >>> G = groebner([x**2 + y**2 - 1, x*y - 2]) >>> G.contains(f) True >>> G.contains(f + 1) False """ return self.reduce(poly)[1] == 0 def poly(expr, *gens, **args): """ Efficiently transform an expression into a polynomial. 
Examples ======== >>> poly(x*(x**2 + x - 1)**2) Poly(x**5 + 2*x**4 - x**3 - 2*x**2 + x, x, domain='ZZ') """ options.allowed_flags(args, []) def _poly(expr, opt): terms, poly_terms = [], [] for term in Add.make_args(expr): factors, poly_factors = [], [] for factor in Mul.make_args(term): if factor.is_Add: poly_factors.append(_poly(factor, opt)) elif (factor.is_Pow and factor.base.is_Add and factor.exp.is_Integer and factor.exp >= 0): poly_factors.append(_poly(factor.base, opt)**factor.exp) else: factors.append(factor) if not poly_factors: terms.append(term) else: product = poly_factors[0] for factor in poly_factors[1:]: product *= factor if factors: factor = Mul(*factors) if factor.is_Number: product *= factor else: product *= Poly._from_expr(factor, opt) poly_terms.append(product) if not poly_terms: result = Poly._from_expr(expr, opt) else: result = poly_terms[0] for term in poly_terms[1:]: result += term if terms: term = Add(*terms) if term.is_Number: result += term else: result += Poly._from_expr(term, opt) return result.reorder(*opt.get('gens', ()), **args) expr = sympify(expr) if expr.is_Poly: return Poly(expr, *gens, **args) if 'expand' not in args: args['expand'] = False opt = options.build_options(gens, args) return _poly(expr, opt)
bsd-3-clause
ryfeus/lambda-packs
Tensorflow/source/tensorflow/python/ops/losses/util.py
57
3409
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities for manipulating the loss collections.

@@add_loss
@@get_losses
@@get_regularization_loss
@@get_regularization_losses
@@get_total_loss
"""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops
from tensorflow.python.ops import math_ops


def add_loss(loss, loss_collection=ops.GraphKeys.LOSSES):
  """Adds an externally defined loss to the collection of losses.

  Args:
    loss: A loss `Tensor`.
    loss_collection: Optional collection to add the loss to. A falsy value
      (e.g. `None`) means the loss is not tracked in any collection.
  """
  if loss_collection:
    ops.add_to_collection(loss_collection, loss)


def get_losses(scope=None, loss_collection=ops.GraphKeys.LOSSES):
  """Gets the list of losses from the loss_collection.

  Args:
    scope: An optional scope name for filtering the losses to return.
    loss_collection: Optional losses collection.

  Returns:
    A list of loss tensors.
  """
  return ops.get_collection(loss_collection, scope)


def get_regularization_losses(scope=None):
  """Gets the list of regularization losses.

  Args:
    scope: An optional scope name for filtering the losses to return.

  Returns:
    A list of regularization losses as Tensors.
  """
  return ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES, scope)


def get_regularization_loss(scope=None, name="total_regularization_loss"):
  """Gets the total regularization loss.

  Args:
    scope: An optional scope name for filtering the losses to return.
    name: The name of the returned tensor.

  Returns:
    A scalar regularization loss.
  """
  reg_losses = get_regularization_losses(scope)
  # No registered regularizers: return a constant zero instead of calling
  # add_n on an empty list (which would raise).
  if not reg_losses:
    return constant_op.constant(0.0)
  return math_ops.add_n(reg_losses, name=name)


def get_total_loss(add_regularization_losses=True, name="total_loss"):
  """Returns a tensor whose value represents the total loss.

  In particular, this adds any losses you have added with `tf.add_loss()`
  to any regularization losses that have been added by regularization
  parameters on layers constructors e.g. `tf.layers`. Be very sure to use
  this if you are constructing a loss_op manually. Otherwise regularization
  arguments on `tf.layers` methods will not function.

  Args:
    add_regularization_losses: A boolean indicating whether or not to use the
      regularization losses in the sum.
    name: The name of the returned tensor.

  Returns:
    A `Tensor` whose value represents the total loss.

  Raises:
    ValueError: if `losses` is not iterable.
  """
  # Copy so that extending does not mutate any caller-visible list.
  all_losses = list(get_losses())
  if add_regularization_losses:
    all_losses.extend(get_regularization_losses())
  return math_ops.add_n(all_losses, name=name)
mit
aifil/odoo
addons/hr_attendance/report/attendance_errors.py
47
2723
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.

import datetime
import time

from openerp.osv import osv
from openerp.report import report_sxw


class attendance_print(report_sxw.rml_parse):
    """RML report parser for the HR "attendance errors" report.

    Exposes three helpers to the report template via ``localcontext``:
    ``lst`` (per-sign-in/out delay rows), ``total`` (aggregated delays)
    and ``get_employees`` (browse records for the selected employees).
    """

    def __init__(self, cr, uid, name, context):
        super(attendance_print, self).__init__(cr, uid, name, context=context)
        # Names made callable from inside the RML template.
        self.localcontext.update({
            'time': time,
            'lst': self._lst,
            'total': self._lst_total,
            'get_employees':self._get_employees,
        })

    def _get_employees(self, emp_ids):
        # Return hr.employee browse records for the ids selected in the wizard.
        emp_obj_list = self.pool.get('hr.employee').browse(self.cr, self.uid, emp_ids)
        return emp_obj_list

    def _lst(self, employee_id, dt_from, dt_to, max, *args):
        """Return one dict per sign_in/sign_out attendance of ``employee_id``
        between ``dt_from`` and ``dt_to`` (inclusive, compared as
        'YYYY-mm-dd' strings in SQL).

        ``delay`` is the interval ``create_date - name`` (time between the
        attendance timestamp and when the row was recorded), negated for
        sign_out rows; ``delay2`` repeats it, or '/' when the delay exceeds
        ``max`` minutes. ``max`` is a threshold in minutes (note: it shadows
        the ``max`` builtin — kept as-is for template compatibility).
        """
        self.cr.execute("select name as date, create_date, action, create_date-name as delay from hr_attendance where employee_id=%s and to_char(name,'YYYY-mm-dd')<=%s and to_char(name,'YYYY-mm-dd')>=%s and action IN (%s,%s) order by name", (employee_id, dt_to, dt_from, 'sign_in', 'sign_out'))
        res = self.cr.dictfetchall()
        for r in res:
            if r['action'] == 'sign_out':
                r['delay'] = -r['delay']
            # NOTE(review): timedelta.seconds is always in [0, 86399], even for
            # negative timedeltas, so ``temp`` may not be the signed magnitude
            # after the negation above — confirm intended behavior.
            temp = r['delay'].seconds
            # Drop the fractional seconds from the interval's repr.
            r['delay'] = str(r['delay']).split('.')[0]
            if abs(temp) < max*60:
                r['delay2'] = r['delay']
            else:
                r['delay2'] = '/'
        return res

    def _lst_total(self, employee_id, dt_from, dt_to, max, *args):
        """Return ``[{'total': ..., 'total2': ...}]`` where ``total`` sums all
        delays in the period and ``total2`` only those under ``max`` minutes.

        Returns the tuple ``('/', '/')`` when the employee has no attendance
        rows in the period.
        """
        self.cr.execute("select name as date, create_date, action, create_date-name as delay from hr_attendance where employee_id=%s and to_char(name,'YYYY-mm-dd')<=%s and to_char(name,'YYYY-mm-dd')>=%s and action IN (%s,%s) order by name", (employee_id, dt_to, dt_from, 'sign_in', 'sign_out'))
        res = self.cr.dictfetchall()
        if not res:
            return ('/','/')
        total2 = datetime.timedelta(seconds = 0, minutes = 0, hours = 0)
        total = datetime.timedelta(seconds = 0, minutes = 0, hours = 0)
        for r in res:
            if r['action'] == 'sign_out':
                r['delay'] = -r['delay']
            total += r['delay']
            # Same caveat as in _lst about .seconds on negative timedeltas.
            if abs(r['delay'].seconds) < max*60:
                total2 += r['delay']
        # ``total and str(...)`` yields the (falsy) zero timedelta itself when
        # the sum is exactly zero, otherwise its string without microseconds.
        result_dict = {
            'total': total and str(total).split('.')[0],
            'total2': total2 and str(total2).split('.')[0]
        }
        return [result_dict]


class report_hr_attendanceerrors(osv.AbstractModel):
    """QWeb wrapper binding the template to the legacy parser above."""
    _name = 'report.hr_attendance.report_attendanceerrors'
    _inherit = 'report.abstract_report'
    _template = 'hr_attendance.report_attendanceerrors'
    _wrapped_report_class = attendance_print
gpl-3.0
tushar-rishav/coala
coalib/bears/requirements/GoRequirement.py
1
1781
from coalib.bears.requirements.PackageRequirement import PackageRequirement
from coalib.misc.Shell import call_without_output


class GoRequirement(PackageRequirement):
    """
    This class is a subclass of ``PackageRequirement``, and helps specifying
    requirements from ``go``, without using the manager name.
    """

    def __init__(self, package, version="", flag=""):
        """
        Constructs a new ``GoRequirement``, using the ``PackageRequirement``
        constructor.

        >>> pr = GoRequirement('github.com/golang/lint/golint', '19.2', '-u')
        >>> pr.manager
        'go'
        >>> pr.package
        'github.com/golang/lint/golint'
        >>> pr.version
        '19.2'
        >>> pr.flag
        '-u'

        :param package: A string with the name of the package to be installed.
        :param version: A version string. Leave empty to specify latest
                        version.
        :param flag:    A string that specifies any additional flags, that
                        are passed to the manager.
        """
        PackageRequirement.__init__(self, 'go', package, version)
        self.flag = flag

    def install_command(self):
        """
        Creates the installation command for the instance of the class.

        >>> GoRequirement(
        ...     'github.com/golang/lint/golint', '', '-u').install_command()
        'go get -u github.com/golang/lint/golint'

        :return: A string with the installation command.
        """
        # NOTE: ``go get`` has no flag for pinning a version, so
        # ``self.version`` is intentionally not part of the command; the
        # latest revision of the package is always fetched.
        return "go get {} {}".format(self.flag, self.package)

    def is_installed(self):
        """
        Checks if the dependency is installed.

        :return: True if the dependency is installed, false otherwise.
        """
        # ``go doc`` exits non-zero when the package cannot be resolved,
        # so a zero exit status means the package is available.
        return not call_without_output(('go', 'doc', self.package))
agpl-3.0
AccelAI/accel.ai
flask-aws/lib/python2.7/site-packages/botocore/model.py
4
23734
# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"). You # may not use this file except in compliance with the License. A copy of # the License is located at # # http://aws.amazon.com/apache2.0/ # # or in the "license" file accompanying this file. This file is # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. """Abstractions to interact with service models.""" from collections import defaultdict from botocore.utils import CachedProperty, instance_cache from botocore.compat import OrderedDict NOT_SET = object() class NoShapeFoundError(Exception): pass class InvalidShapeError(Exception): pass class OperationNotFoundError(Exception): pass class InvalidShapeReferenceError(Exception): pass class UndefinedModelAttributeError(Exception): pass class Shape(object): """Object representing a shape from the service model.""" # To simplify serialization logic, all shape params that are # related to serialization are moved from the top level hash into # a 'serialization' hash. This list below contains the names of all # the attributes that should be moved. SERIALIZED_ATTRS = ['locationName', 'queryName', 'flattened', 'location', 'payload', 'streaming', 'timestampFormat', 'xmlNamespace', 'resultWrapper', 'xmlAttribute'] METADATA_ATTRS = ['required', 'min', 'max', 'sensitive', 'enum', 'idempotencyToken'] MAP_TYPE = OrderedDict def __init__(self, shape_name, shape_model, shape_resolver=None): """ :type shape_name: string :param shape_name: The name of the shape. :type shape_model: dict :param shape_model: The shape model. 
This would be the value associated with the key in the "shapes" dict of the service model (i.e ``model['shapes'][shape_name]``) :type shape_resolver: botocore.model.ShapeResolver :param shape_resolver: A shape resolver object. This is used to resolve references to other shapes. For scalar shape types (string, integer, boolean, etc.), this argument is not required. If a shape_resolver is not provided for a complex type, then a ``ValueError`` will be raised when an attempt to resolve a shape is made. """ self.name = shape_name self.type_name = shape_model['type'] self.documentation = shape_model.get('documentation', '') self._shape_model = shape_model if shape_resolver is None: # If a shape_resolver is not provided, we create an object # that will throw errors if you attempt to resolve # a shape. This is actually ok for scalar shapes # because they don't need to resolve shapes and shouldn't # be required to provide an object they won't use. shape_resolver = UnresolvableShapeMap() self._shape_resolver = shape_resolver self._cache = {} @CachedProperty def serialization(self): """Serialization information about the shape. This contains information that may be needed for input serialization or response parsing. This can include: * name * queryName * flattened * location * payload * streaming * xmlNamespace * resultWrapper * xmlAttribute :rtype: dict :return: Serialization information about the shape. """ model = self._shape_model serialization = {} for attr in self.SERIALIZED_ATTRS: if attr in self._shape_model: serialization[attr] = model[attr] # For consistency, locationName is renamed to just 'name'. if 'locationName' in serialization: serialization['name'] = serialization.pop('locationName') return serialization @CachedProperty def metadata(self): """Metadata about the shape. This requires optional information about the shape, including: * min * max * enum * sensitive * required * idempotencyToken :rtype: dict :return: Metadata about the shape. 
""" model = self._shape_model metadata = {} for attr in self.METADATA_ATTRS: if attr in self._shape_model: metadata[attr] = model[attr] return metadata @CachedProperty def required_members(self): """A list of members that are required. A structure shape can define members that are required. This value will return a list of required members. If there are no required members an empty list is returned. """ return self.metadata.get('required', []) def _resolve_shape_ref(self, shape_ref): return self._shape_resolver.resolve_shape_ref(shape_ref) def __repr__(self): return "<%s(%s)>" % (self.__class__.__name__, self.name) class StructureShape(Shape): @CachedProperty def members(self): members = self._shape_model['members'] # The members dict looks like: # 'members': { # 'MemberName': {'shape': 'shapeName'}, # 'MemberName2': {'shape': 'shapeName'}, # } # We return a dict of member name to Shape object. shape_members = self.MAP_TYPE() for name, shape_ref in members.items(): shape_members[name] = self._resolve_shape_ref(shape_ref) return shape_members class ListShape(Shape): @CachedProperty def member(self): return self._resolve_shape_ref(self._shape_model['member']) class MapShape(Shape): @CachedProperty def key(self): return self._resolve_shape_ref(self._shape_model['key']) @CachedProperty def value(self): return self._resolve_shape_ref(self._shape_model['value']) class StringShape(Shape): @CachedProperty def enum(self): return self.metadata.get('enum', []) class ServiceModel(object): """ :ivar service_description: The parsed service description dictionary. """ def __init__(self, service_description, service_name=None): """ :type service_description: dict :param service_description: The service description model. 
This value is obtained from a botocore.loader.Loader, or from directly loading the file yourself:: service_description = json.load( open('/path/to/service-description-model.json')) model = ServiceModel(service_description) :type service_name: str :param service_name: The name of the service. Normally this is the endpoint prefix defined in the service_description. However, you can override this value to provide a more convenient name. This is done in a few places in botocore (ses instead of email, emr instead of elasticmapreduce). If this value is not provided, it will default to the endpointPrefix defined in the model. """ self._service_description = service_description # We want clients to be able to access metadata directly. self.metadata = service_description.get('metadata', {}) self._shape_resolver = ShapeResolver( service_description.get('shapes', {})) self._signature_version = NOT_SET self._service_name = service_name self._instance_cache = {} def shape_for(self, shape_name, member_traits=None): return self._shape_resolver.get_shape_by_name( shape_name, member_traits) def resolve_shape_ref(self, shape_ref): return self._shape_resolver.resolve_shape_ref(shape_ref) @instance_cache def operation_model(self, operation_name): try: model = self._service_description['operations'][operation_name] except KeyError: raise OperationNotFoundError(operation_name) return OperationModel(model, self, operation_name) @CachedProperty def documentation(self): return self._service_description.get('documentation', '') @CachedProperty def operation_names(self): return list(self._service_description.get('operations', [])) @CachedProperty def service_name(self): """The name of the service. This defaults to the endpointPrefix defined in the service model. However, this value can be overriden when a ``ServiceModel`` is created. 
If a service_name was not provided when the ``ServiceModel`` was created and if there is no endpointPrefix defined in the service model, then an ``UndefinedModelAttributeError`` exception will be raised. """ if self._service_name is not None: return self._service_name else: return self.endpoint_prefix @CachedProperty def signing_name(self): """The name to use when computing signatures. If the model does not define a signing name, this value will be the endpoint prefix defined in the model. """ signing_name = self.metadata.get('signingName') if signing_name is None: signing_name = self.endpoint_prefix return signing_name @CachedProperty def api_version(self): return self._get_metadata_property('apiVersion') @CachedProperty def protocol(self): return self._get_metadata_property('protocol') @CachedProperty def endpoint_prefix(self): return self._get_metadata_property('endpointPrefix') def _get_metadata_property(self, name): try: return self.metadata[name] except KeyError: raise UndefinedModelAttributeError( '"%s" not defined in the metadata of the the model: %s' % (name, self)) # Signature version is one of the rare properties # than can be modified so a CachedProperty is not used here. @property def signature_version(self): if self._signature_version is NOT_SET: signature_version = self.metadata.get('signatureVersion') self._signature_version = signature_version return self._signature_version @signature_version.setter def signature_version(self, value): self._signature_version = value class OperationModel(object): def __init__(self, operation_model, service_model, name=None): """ :type operation_model: dict :param operation_model: The operation model. This comes from the service model, and is the value associated with the operation name in the service model (i.e ``model['operations'][op_name]``). :type service_model: botocore.model.ServiceModel :param service_model: The service model associated with the operation. :type name: string :param name: The operation name. 
This is the operation name exposed to the users of this model. This can potentially be different from the "wire_name", which is the operation name that *must* by provided over the wire. For example, given:: "CreateCloudFrontOriginAccessIdentity":{ "name":"CreateCloudFrontOriginAccessIdentity2014_11_06", ... } The ``name`` would be ``CreateCloudFrontOriginAccessIdentity``, but the ``self.wire_name`` would be ``CreateCloudFrontOriginAccessIdentity2014_11_06``, which is the value we must send in the corresponding HTTP request. """ self._operation_model = operation_model self._service_model = service_model self._api_name = name # Clients can access '.name' to get the operation name # and '.metadata' to get the top level metdata of the service. self._wire_name = operation_model.get('name') self.metadata = service_model.metadata self.http = operation_model.get('http', {}) @CachedProperty def name(self): if self._api_name is not None: return self._api_name else: return self.wire_name @property def wire_name(self): """The wire name of the operation. In many situations this is the same value as the ``name``, value, but in some services, the operation name exposed to the user is different from the operaiton name we send across the wire (e.g cloudfront). Any serialization code should use ``wire_name``. """ return self._operation_model.get('name') @property def service_model(self): return self._service_model @CachedProperty def documentation(self): return self._operation_model.get('documentation', '') @CachedProperty def input_shape(self): if 'input' not in self._operation_model: # Some operations do not accept any input and do not define an # input shape. return None return self._service_model.resolve_shape_ref( self._operation_model['input']) @CachedProperty def output_shape(self): if 'output' not in self._operation_model: # Some operations do not define an output shape, # in which case we return None to indicate the # operation has no expected output. 
return None return self._service_model.resolve_shape_ref( self._operation_model['output']) @CachedProperty def idempotent_members(self): input_shape = self.input_shape if not input_shape: return [] return [name for (name, shape) in input_shape.members.items() if 'idempotencyToken' in shape.metadata and shape.metadata['idempotencyToken']] @CachedProperty def has_streaming_input(self): return self.get_streaming_input() is not None @CachedProperty def has_streaming_output(self): return self.get_streaming_output() is not None def get_streaming_input(self): return self._get_streaming_body(self.input_shape) def get_streaming_output(self): return self._get_streaming_body(self.output_shape) def _get_streaming_body(self, shape): """Returns the streaming member's shape if any; or None otherwise.""" if shape is None: return None payload = shape.serialization.get('payload') if payload is not None: payload_shape = shape.members[payload] if payload_shape.type_name == 'blob': return payload_shape return None def __repr__(self): return '%s(name=%s)' % (self.__class__.__name__, self.name) class ShapeResolver(object): """Resolves shape references.""" # Any type not in this mapping will default to the Shape class. 
SHAPE_CLASSES = { 'structure': StructureShape, 'list': ListShape, 'map': MapShape, 'string': StringShape } def __init__(self, shape_map): self._shape_map = shape_map self._shape_cache = {} def get_shape_by_name(self, shape_name, member_traits=None): try: shape_model = self._shape_map[shape_name] except KeyError: raise NoShapeFoundError(shape_name) try: shape_cls = self.SHAPE_CLASSES.get(shape_model['type'], Shape) except KeyError: raise InvalidShapeError("Shape is missing required key 'type': %s" % shape_model) if member_traits: shape_model = shape_model.copy() shape_model.update(member_traits) result = shape_cls(shape_name, shape_model, self) return result def resolve_shape_ref(self, shape_ref): # A shape_ref is a dict that has a 'shape' key that # refers to a shape name as well as any additional # member traits that are then merged over the shape # definition. For example: # {"shape": "StringType", "locationName": "Foobar"} if len(shape_ref) == 1 and 'shape' in shape_ref: # It's just a shape ref with no member traits, we can avoid # a .copy(). This is the common case so it's specifically # called out here. return self.get_shape_by_name(shape_ref['shape']) else: member_traits = shape_ref.copy() try: shape_name = member_traits.pop('shape') except KeyError: raise InvalidShapeReferenceError( "Invalid model, missing shape reference: %s" % shape_ref) return self.get_shape_by_name(shape_name, member_traits) class UnresolvableShapeMap(object): """A ShapeResolver that will throw ValueErrors when shapes are resolved. """ def get_shape_by_name(self, shape_name, member_traits=None): raise ValueError("Attempted to lookup shape '%s', but no shape " "map was provided.") def resolve_shape_ref(self, shape_ref): raise ValueError("Attempted to resolve shape '%s', but no shape " "map was provided.") class DenormalizedStructureBuilder(object): """Build a StructureShape from a denormalized model. 
This is a convenience builder class that makes it easy to construct ``StructureShape``s based on a denormalized model. It will handle the details of creating unique shape names and creating the appropriate shape map needed by the ``StructureShape`` class. Example usage:: builder = DenormalizedStructureBuilder() shape = builder.with_members({ 'A': { 'type': 'structure', 'members': { 'B': { 'type': 'structure', 'members': { 'C': { 'type': 'string', } } } } } }).build_model() # ``shape`` is now an instance of botocore.model.StructureShape :type dict_type: class :param dict_type: The dictionary type to use, allowing you to opt-in to using OrderedDict or another dict type. This can be particularly useful for testing when order matters, such as for documentation. """ def __init__(self, name=None): self.members = OrderedDict() self._name_generator = ShapeNameGenerator() if name is None: self.name = self._name_generator.new_shape_name('structure') def with_members(self, members): """ :type members: dict :param members: The denormalized members. :return: self """ self._members = members return self def build_model(self): """Build the model based on the provided members. :rtype: botocore.model.StructureShape :return: The built StructureShape object. 
""" shapes = OrderedDict() denormalized = { 'type': 'structure', 'members': self._members, } self._build_model(denormalized, shapes, self.name) resolver = ShapeResolver(shape_map=shapes) return StructureShape(shape_name=self.name, shape_model=shapes[self.name], shape_resolver=resolver) def _build_model(self, model, shapes, shape_name): if model['type'] == 'structure': shapes[shape_name] = self._build_structure(model, shapes) elif model['type'] == 'list': shapes[shape_name] = self._build_list(model, shapes) elif model['type'] == 'map': shapes[shape_name] = self._build_map(model, shapes) elif model['type'] in ['string', 'integer', 'boolean', 'blob', 'float', 'timestamp', 'long', 'double', 'char']: shapes[shape_name] = self._build_scalar(model) else: raise InvalidShapeError("Unknown shape type: %s" % model['type']) def _build_structure(self, model, shapes): members = OrderedDict() shape = self._build_initial_shape(model) shape['members'] = members for name, member_model in model['members'].items(): member_shape_name = self._get_shape_name(member_model) members[name] = {'shape': member_shape_name} self._build_model(member_model, shapes, member_shape_name) return shape def _build_list(self, model, shapes): member_shape_name = self._get_shape_name(model) shape = self._build_initial_shape(model) shape['member'] = {'shape': member_shape_name} self._build_model(model['member'], shapes, member_shape_name) return shape def _build_map(self, model, shapes): key_shape_name = self._get_shape_name(model['key']) value_shape_name = self._get_shape_name(model['value']) shape = self._build_initial_shape(model) shape['key'] = {'shape': key_shape_name} shape['value'] = {'shape': value_shape_name} self._build_model(model['key'], shapes, key_shape_name) self._build_model(model['value'], shapes, value_shape_name) return shape def _build_initial_shape(self, model): shape = { 'type': model['type'], } if 'documentation' in model: shape['documentation'] = model['documentation'] if 'enum' in 
model: shape['enum'] = model['enum'] return shape def _build_scalar(self, model): return self._build_initial_shape(model) def _get_shape_name(self, model): if 'shape_name' in model: return model['shape_name'] else: return self._name_generator.new_shape_name(model['type']) class ShapeNameGenerator(object): """Generate unique shape names for a type. This class can be used in conjunction with the DenormalizedStructureBuilder to generate unique shape names for a given type. """ def __init__(self): self._name_cache = defaultdict(int) def new_shape_name(self, type_name): """Generate a unique shape name. This method will guarantee a unique shape name each time it is called with the same type. :: >>> s = ShapeNameGenerator() >>> s.new_shape_name('structure') 'StructureType1' >>> s.new_shape_name('structure') 'StructureType2' >>> s.new_shape_name('list') 'ListType1' >>> s.new_shape_name('list') 'ListType2' :type type_name: string :param type_name: The type name (structure, list, map, string, etc.) :rtype: string :return: A unique shape name for the given type """ self._name_cache[type_name] += 1 current_index = self._name_cache[type_name] return '%sType%s' % (type_name.capitalize(), current_index)
mit
gavin-feng/odoo
addons/mrp/__init__.py
437
1165
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import mrp import stock import product import wizard import report import company import procurement import res_config # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
flipjack/tecnoservicio
suit/widgets.py
10
4988
from django.contrib.admin.widgets import AdminTimeWidget, AdminDateWidget
from django.forms import TextInput, Select, Textarea
from django.utils.safestring import mark_safe
from django import forms
from django.utils.translation import ugettext as _
from django.contrib.admin.templatetags.admin_static import static


class NumberInput(TextInput):
    """
    HTML5 Number input

    Left for backwards compatibility
    """
    input_type = 'number'


class HTML5Input(TextInput):
    """
    Supports any HTML5 input
    http://www.w3schools.com/html/html5_form_input_types.asp
    """
    def __init__(self, attrs=None, input_type=None):
        # The widget's HTML ``type`` attribute is chosen by the caller
        # (e.g. "email", "url", "color").
        self.input_type = input_type
        super(HTML5Input, self).__init__(attrs)


class LinkedSelect(Select):
    """
    Linked select - Adds link to foreign item, when used with foreign key field
    """
    def __init__(self, attrs=None, choices=()):
        # Tag the rendered <select> with the "linked-select" CSS class so the
        # suit JS/CSS can attach the "go to related object" link.
        attrs = _make_attrs(attrs, classes="linked-select")
        super(LinkedSelect, self).__init__(attrs, choices)


class EnclosedInput(TextInput):
    """
    Widget for bootstrap appended/prepended inputs
    """
    def __init__(self, attrs=None, prepend=None, append=None):
        """
        For prepend, append parameters use string like %, $ or html
        """
        self.prepend = prepend
        self.append = append
        super(EnclosedInput, self).__init__(attrs=attrs)

    def enclose_value(self, value):
        """
        If value doesn't starts with html open sign "<", enclose in add-on tag
        """
        # Raw HTML is passed through untouched; "icon-*" strings become
        # Bootstrap <i> icons; anything else is wrapped in an add-on span.
        if value.startswith("<"):
            return value
        if value.startswith("icon-"):
            value = '<i class="%s"></i>' % value
        return '<span class="add-on">%s</span>' % value

    def render(self, name, value, attrs=None):
        # NOTE(review): this method reassigns self.prepend/self.append with the
        # enclosed markup, so a second render() of the same widget instance
        # would double-wrap them — confirm whether widgets are re-rendered.
        output = super(EnclosedInput, self).render(name, value, attrs)
        div_classes = []
        if self.prepend:
            div_classes.append('input-prepend')
            self.prepend = self.enclose_value(self.prepend)
            output = ''.join((self.prepend, output))
        if self.append:
            div_classes.append('input-append')
            self.append = self.enclose_value(self.append)
            output = ''.join((output, self.append))
        return mark_safe(
            '<div class="%s">%s</div>' % (' '.join(div_classes), output))


class AutosizedTextarea(Textarea):
    """
    Autosized Textarea - textarea height dynamically grows based on user input
    """
    def __init__(self, attrs=None):
        # Default to a compact 2-row textarea; the JS plugin grows it.
        new_attrs = _make_attrs(attrs, {"rows": 2}, "autosize")
        super(AutosizedTextarea, self).__init__(new_attrs)

    @property
    def media(self):
        # Ship the autosize jQuery plugin with any form using this widget.
        return forms.Media(js=[static("suit/js/jquery.autosize-min.js")])

    def render(self, name, value, attrs=None):
        output = super(AutosizedTextarea, self).render(name, value, attrs)
        # Activate the plugin on this specific field after it is in the DOM.
        output += mark_safe(
            "<script type=\"text/javascript\">Suit.$('#id_%s').autosize();</script>"
            % name)
        return output


#
# Original date widgets with addition html
#
class SuitDateWidget(AdminDateWidget):
    def __init__(self, attrs=None, format=None):
        # _('Date:')[:-1] strips the trailing colon from the admin's
        # translated label to reuse it as a placeholder.
        defaults = {'placeholder': _('Date:')[:-1]}
        new_attrs = _make_attrs(attrs, defaults, "vDateField input-small")
        super(SuitDateWidget, self).__init__(attrs=new_attrs, format=format)

    def render(self, name, value, attrs=None):
        output = super(SuitDateWidget, self).render(name, value, attrs)
        return mark_safe(
            '<div class="input-append suit-date">%s<span '
            'class="add-on"><i class="icon-calendar"></i></span></div>' %
            output)


class SuitTimeWidget(AdminTimeWidget):
    def __init__(self, attrs=None, format=None):
        defaults = {'placeholder': _('Time:')[:-1]}
        new_attrs = _make_attrs(attrs, defaults, "vTimeField input-small")
        super(SuitTimeWidget, self).__init__(attrs=new_attrs, format=format)

    def render(self, name, value, attrs=None):
        output = super(SuitTimeWidget, self).render(name, value, attrs)
        return mark_safe(
            '<div class="input-append suit-date suit-time">%s<span '
            'class="add-on"><i class="icon-time"></i></span></div>' %
            output)


class SuitSplitDateTimeWidget(forms.SplitDateTimeWidget):
    """
    A SplitDateTime Widget that has some admin-specific styling.
    """
    def __init__(self, attrs=None):
        widgets = [SuitDateWidget, SuitTimeWidget]
        # Skip SplitDateTimeWidget.__init__ on purpose: it would build the
        # stock admin widgets; go straight to MultiWidget with ours.
        forms.MultiWidget.__init__(self, widgets, attrs)

    def format_output(self, rendered_widgets):
        out_tpl = '<div class="datetime">%s %s</div>'
        return mark_safe(out_tpl % (rendered_widgets[0], rendered_widgets[1]))


def _make_attrs(attrs, defaults=None, classes=None):
    # Merge widget attrs over defaults and prepend extra CSS classes to any
    # caller-provided "class" value.
    result = defaults.copy() if defaults else {}
    if attrs:
        result.update(attrs)
    if classes:
        result["class"] = " ".join((classes, result.get("class", "")))
    return result
bsd-3-clause
egafford/sahara
sahara/tests/unit/plugins/cdh/v5_5_0/test_edp_engine.py
3
8671
# Copyright (c) 2015 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import mock

from sahara import exceptions as ex
from sahara.plugins import base as pb
from sahara.plugins.cdh.v5_5_0 import edp_engine
from sahara.plugins import exceptions as pl_ex
from sahara.tests.unit import base as sahara_base
from sahara.tests.unit.plugins.cdh import utils as c_u
from sahara.utils import edp


def get_cluster(version='5.5.0'):
    """Build a fake CDH cluster fixture for the given plugin version."""
    cluster = c_u.get_fake_cluster(plugin_name='CDH', hadoop_version=version)
    return cluster


class EdpEngineTestV550(sahara_base.SaharaTestCase):
    """Unit tests for the CDH 5.5.0 EDP (Oozie/Spark) engines."""

    def setUp(self):
        super(EdpEngineTestV550, self).setUp()
        pb.setup_plugins()

    def test_get_hdfs_user(self):
        eng = edp_engine.EdpOozieEngine(get_cluster())
        self.assertEqual('hdfs', eng.get_hdfs_user())

    @mock.patch('sahara.service.edp.hdfs_helper.create_dir_hadoop2')
    def test_create_hdfs_dir(self, create_dir_hadoop2):
        eng = edp_engine.EdpOozieEngine(get_cluster())
        remote = mock.Mock()
        dir_name = mock.Mock()
        eng.create_hdfs_dir(remote, dir_name)
        # The engine must delegate to the hadoop2 helper with its own user.
        create_dir_hadoop2.assert_called_once_with(remote,
                                                   dir_name,
                                                   eng.get_hdfs_user())

    def test_get_oozie_server_uri(self):
        cluster = get_cluster()
        eng = edp_engine.EdpOozieEngine(cluster)
        uri = eng.get_oozie_server_uri(cluster)
        self.assertEqual("http://1.2.3.5:11000/oozie", uri)

    def test_get_name_node_uri(self):
        cluster = get_cluster()
        eng = edp_engine.EdpOozieEngine(cluster)
        uri = eng.get_name_node_uri(cluster)
        self.assertEqual("hdfs://master_inst.novalocal:8020", uri)

        # has HDFS_JOURNALNODE
        # NOTE(review): `eng` from the first scenario is deliberately reused
        # against the freshly built cluster below.
        cluster = get_cluster()
        jns_node_group = mock.MagicMock()
        jns_node_group.node_processes = ['HDFS_JOURNALNODE']
        jns_node_group.instances = [mock.Mock()]
        # list.append is used directly because node_groups is a mocked
        # attribute, not a plain list instance method call target.
        list.append(cluster.node_groups, jns_node_group)
        uri = eng.get_name_node_uri(cluster)
        self.assertEqual("hdfs://nameservice01", uri)

    def test_get_resource_manager_uri(self):
        cluster = get_cluster()
        eng = edp_engine.EdpOozieEngine(cluster)
        uri = eng.get_resource_manager_uri(cluster)
        self.assertEqual("master_inst.novalocal:8032", uri)

    def test_get_oozie_server(self):
        cluster = get_cluster()
        eng = edp_engine.EdpOozieEngine(cluster)
        actual = eng.get_oozie_server(cluster)
        expect = cluster.node_groups[1].instances[0]
        self.assertEqual(expect, actual)

    @mock.patch('sahara.service.edp.oozie.engine.'
                'OozieJobEngine.validate_job_execution')
    def test_validate_job_execution(self, c):
        cluster = get_cluster()
        eng = edp_engine.EdpOozieEngine(cluster)
        eng.validate_job_execution(cluster, mock.Mock(), mock.Mock())

        # more than one oozie server
        dict.__setitem__(cluster.node_groups[1], 'count', 2)
        self.assertRaises(pl_ex.InvalidComponentCountException,
                          eng.validate_job_execution,
                          cluster, mock.Mock(), mock.Mock())

    @mock.patch(
        'sahara.plugins.cdh.confighints_helper.get_possible_hive_config_from',
        return_value={})
    def test_get_possible_job_config_hive(self,
                                          get_possible_hive_config_from):
        expected_config = {'job_config': {}}
        actual_config = edp_engine.EdpOozieEngine.get_possible_job_config(
            edp.JOB_TYPE_HIVE)
        get_possible_hive_config_from.assert_called_once_with(
            'plugins/cdh/v5_5_0/resources/hive-site.xml')
        self.assertEqual(expected_config, actual_config)

    @mock.patch('sahara.plugins.cdh.v5_5_0.edp_engine.EdpOozieEngine')
    def test_get_possible_job_config_java(self, BaseCDHEdpOozieEngine):
        expected_config = {'job_config': {}}
        BaseCDHEdpOozieEngine.get_possible_job_config.return_value = (
            expected_config)
        actual_config = edp_engine.EdpOozieEngine.get_possible_job_config(
            edp.JOB_TYPE_JAVA)
        BaseCDHEdpOozieEngine.get_possible_job_config.assert_called_once_with(
            edp.JOB_TYPE_JAVA)
        self.assertEqual(expected_config, actual_config)

    @mock.patch(
        'sahara.plugins.cdh.confighints_helper.'
        'get_possible_mapreduce_config_from',
        return_value={})
    def test_get_possible_job_config_mapreduce(
            self, get_possible_mapreduce_config_from):
        expected_config = {'job_config': {}}
        actual_config = edp_engine.EdpOozieEngine.get_possible_job_config(
            edp.JOB_TYPE_MAPREDUCE)
        get_possible_mapreduce_config_from.assert_called_once_with(
            'plugins/cdh/v5_5_0/resources/mapred-site.xml')
        self.assertEqual(expected_config, actual_config)

    @mock.patch(
        'sahara.plugins.cdh.confighints_helper.'
        'get_possible_mapreduce_config_from',
        return_value={})
    def test_get_possible_job_config_mapreduce_streaming(
            self, get_possible_mapreduce_config_from):
        expected_config = {'job_config': {}}
        actual_config = edp_engine.EdpOozieEngine.get_possible_job_config(
            edp.JOB_TYPE_MAPREDUCE_STREAMING)
        get_possible_mapreduce_config_from.assert_called_once_with(
            'plugins/cdh/v5_5_0/resources/mapred-site.xml')
        self.assertEqual(expected_config, actual_config)

    @mock.patch(
        'sahara.plugins.cdh.confighints_helper.get_possible_pig_config_from',
        return_value={})
    def test_get_possible_job_config_pig(self, get_possible_pig_config_from):
        expected_config = {'job_config': {}}
        actual_config = edp_engine.EdpOozieEngine.get_possible_job_config(
            edp.JOB_TYPE_PIG)
        get_possible_pig_config_from.assert_called_once_with(
            'plugins/cdh/v5_5_0/resources/mapred-site.xml')
        self.assertEqual(expected_config, actual_config)

    @mock.patch('sahara.plugins.cdh.v5_5_0.edp_engine.EdpOozieEngine')
    def test_get_possible_job_config_shell(self, BaseCDHEdpOozieEngine):
        expected_config = {'job_config': {}}
        BaseCDHEdpOozieEngine.get_possible_job_config.return_value = (
            expected_config)
        actual_config = edp_engine.EdpOozieEngine.get_possible_job_config(
            edp.JOB_TYPE_SHELL)
        BaseCDHEdpOozieEngine.get_possible_job_config.assert_called_once_with(
            edp.JOB_TYPE_SHELL)
        self.assertEqual(expected_config, actual_config)

    @mock.patch('sahara.plugins.utils.get_config_value_or_default')
    @mock.patch('sahara.plugins.utils.get_instance')
    @mock.patch('sahara.service.edp.spark.engine.'
                'SparkJobEngine.validate_job_execution')
    def test_spark_engine_validate_job_execution(self,
                                                 validate_job_execution,
                                                 get_instance,
                                                 get_config_value_or_default):
        # version unsupported
        cluster = get_cluster(version='5.4.0')
        eng = edp_engine.EdpSparkEngine(cluster)
        self.assertRaises(ex.InvalidDataException,
                          eng.validate_job_execution,
                          cluster, mock.Mock(), mock.Mock())

        # none yarn history server
        cluster = get_cluster()
        eng = edp_engine.EdpSparkEngine(cluster)
        self.assertRaises(pl_ex.InvalidComponentCountException,
                          eng.validate_job_execution,
                          cluster, mock.Mock(), mock.Mock())

        # valid
        cluster = get_cluster()
        yarn_history_node_group = mock.Mock()
        yarn_history_node_group.node_processes = ['SPARK_YARN_HISTORY_SERVER']
        yarn_history_node_group.count = 1
        list.append(cluster.node_groups, yarn_history_node_group)
        eng = edp_engine.EdpSparkEngine(cluster)
        eng.validate_job_execution(cluster, mock.Mock(), mock.Mock())
apache-2.0
dudymas/python-openstacksdk
openstack/tests/unit/image/v2/test_tag.py
3
2140
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import mock
import testtools

from openstack.image.v2 import image
from openstack.image.v2 import tag


class TestTag(testtools.TestCase):
    """Tests for the image-v2 Tag resource."""

    def setUp(self):
        super(TestTag, self).setUp()
        # A fake transport: only put/delete are ever exercised by Tag.
        self.session = mock.Mock()
        self.session.put = mock.Mock()
        self.session.delete = mock.Mock()
        self.img = image.Image({"id": "123"})

    def test_basic(self):
        # Static resource metadata: path template, service, and which CRUD
        # operations the resource advertises.
        sot = tag.Tag()
        self.assertIsNone(sot.resource_key)
        self.assertEqual('/images/%(image)s/tags', sot.base_path)
        self.assertEqual('image', sot.service.service_type)
        self.assertEqual('image', sot.id_attribute)
        self.assertFalse(sot.allow_retrieve)
        self.assertFalse(sot.allow_update)
        self.assertFalse(sot.allow_list)
        self.assertTrue(sot.allow_create)
        self.assertTrue(sot.allow_delete)

    def test_make_it(self):
        sot = tag.Tag({"image": self.img})
        self.assertEqual(self.img, sot.image)

    def _test_action(self, sot_method, session_method):
        """Run one tag operation and verify the HTTP call it produces."""
        label = "testing"
        resource = tag.Tag({"image": self.img})
        outcome = getattr(resource, sot_method)(self.session, label)
        self.assertIsNone(outcome)
        expected_url = 'images/%(image)s/tags/%(tag)s' % {
            "image": self.img.get_id(self.img),
            "tag": label}
        session_method.assert_called_with(expected_url,
                                          service=resource.service,
                                          accept=None)

    def test_create(self):
        self._test_action("create", self.session.put)

    def test_delete(self):
        self._test_action("delete", self.session.delete)
apache-2.0
HiroIshikawa/21playground
learning/venv/lib/python3.5/site-packages/flask/testsuite/templating.py
562
11237
# -*- coding: utf-8 -*-
"""
    flask.testsuite.templating
    ~~~~~~~~~~~~~~~~~~~~~~~~~~

    Template functionality

    :copyright: (c) 2011 by Armin Ronacher.
    :license: BSD, see LICENSE for more details.
"""

import flask
import unittest
from flask.testsuite import FlaskTestCase


class TemplatingTestCase(FlaskTestCase):
    """Exercises Flask's Jinja integration: context processors, filters,
    tests, globals, custom loaders, and escaping behavior."""

    def test_context_processing(self):
        app = flask.Flask(__name__)
        @app.context_processor
        def context_processor():
            return {'injected_value': 42}
        @app.route('/')
        def index():
            return flask.render_template('context_template.html', value=23)
        rv = app.test_client().get('/')
        self.assert_equal(rv.data, b'<p>23|42')

    def test_original_win(self):
        # An explicit template argument beats the standard context value.
        app = flask.Flask(__name__)
        @app.route('/')
        def index():
            return flask.render_template_string('{{ config }}', config=42)
        rv = app.test_client().get('/')
        self.assert_equal(rv.data, b'42')

    def test_request_less_rendering(self):
        # Rendering must work inside an app context without a request.
        app = flask.Flask(__name__)
        app.config['WORLD_NAME'] = 'Special World'
        @app.context_processor
        def context_processor():
            return dict(foo=42)
        with app.app_context():
            rv = flask.render_template_string('Hello {{ config.WORLD_NAME }} '
                                              '{{ foo }}')
            self.assert_equal(rv, 'Hello Special World 42')

    def test_standard_context(self):
        # request, g, config, and session are always available in templates.
        app = flask.Flask(__name__)
        app.secret_key = 'development key'
        @app.route('/')
        def index():
            flask.g.foo = 23
            flask.session['test'] = 'aha'
            return flask.render_template_string('''
                {{ request.args.foo }}
                {{ g.foo }}
                {{ config.DEBUG }}
                {{ session.test }}
            ''')
        rv = app.test_client().get('/?foo=42')
        self.assert_equal(rv.data.split(), [b'42', b'23', b'False', b'aha'])

    def test_escaping(self):
        text = '<p>Hello World!'
        app = flask.Flask(__name__)
        @app.route('/')
        def index():
            return flask.render_template('escaping_template.html', text=text,
                                         html=flask.Markup(text))
        lines = app.test_client().get('/').data.splitlines()
        self.assert_equal(lines, [
            b'&lt;p&gt;Hello World!',
            b'<p>Hello World!',
            b'<p>Hello World!',
            b'<p>Hello World!',
            b'&lt;p&gt;Hello World!',
            b'<p>Hello World!'
        ])

    def test_no_escaping(self):
        # Plain-text templates (mail.txt) and render_template_string results
        # are not HTML-escaped.
        app = flask.Flask(__name__)
        with app.test_request_context():
            self.assert_equal(flask.render_template_string('{{ foo }}',
                              foo='<test>'), '<test>')
            self.assert_equal(flask.render_template('mail.txt', foo='<test>'),
                              '<test> Mail')

    def test_macros(self):
        app = flask.Flask(__name__)
        with app.test_request_context():
            macro = flask.get_template_attribute('_macro.html', 'hello')
            self.assert_equal(macro('World'), 'Hello World!')

    def test_template_filter(self):
        app = flask.Flask(__name__)
        @app.template_filter()
        def my_reverse(s):
            return s[::-1]
        self.assert_in('my_reverse', app.jinja_env.filters.keys())
        self.assert_equal(app.jinja_env.filters['my_reverse'], my_reverse)
        self.assert_equal(app.jinja_env.filters['my_reverse']('abcd'), 'dcba')

    def test_add_template_filter(self):
        app = flask.Flask(__name__)
        def my_reverse(s):
            return s[::-1]
        app.add_template_filter(my_reverse)
        self.assert_in('my_reverse', app.jinja_env.filters.keys())
        self.assert_equal(app.jinja_env.filters['my_reverse'], my_reverse)
        self.assert_equal(app.jinja_env.filters['my_reverse']('abcd'), 'dcba')

    def test_template_filter_with_name(self):
        app = flask.Flask(__name__)
        @app.template_filter('strrev')
        def my_reverse(s):
            return s[::-1]
        self.assert_in('strrev', app.jinja_env.filters.keys())
        self.assert_equal(app.jinja_env.filters['strrev'], my_reverse)
        self.assert_equal(app.jinja_env.filters['strrev']('abcd'), 'dcba')

    def test_add_template_filter_with_name(self):
        app = flask.Flask(__name__)
        def my_reverse(s):
            return s[::-1]
        app.add_template_filter(my_reverse, 'strrev')
        self.assert_in('strrev', app.jinja_env.filters.keys())
        self.assert_equal(app.jinja_env.filters['strrev'], my_reverse)
        self.assert_equal(app.jinja_env.filters['strrev']('abcd'), 'dcba')

    def test_template_filter_with_template(self):
        app = flask.Flask(__name__)
        @app.template_filter()
        def super_reverse(s):
            return s[::-1]
        @app.route('/')
        def index():
            return flask.render_template('template_filter.html', value='abcd')
        rv = app.test_client().get('/')
        self.assert_equal(rv.data, b'dcba')

    def test_add_template_filter_with_template(self):
        app = flask.Flask(__name__)
        def super_reverse(s):
            return s[::-1]
        app.add_template_filter(super_reverse)
        @app.route('/')
        def index():
            return flask.render_template('template_filter.html', value='abcd')
        rv = app.test_client().get('/')
        self.assert_equal(rv.data, b'dcba')

    def test_template_filter_with_name_and_template(self):
        app = flask.Flask(__name__)
        @app.template_filter('super_reverse')
        def my_reverse(s):
            return s[::-1]
        @app.route('/')
        def index():
            return flask.render_template('template_filter.html', value='abcd')
        rv = app.test_client().get('/')
        self.assert_equal(rv.data, b'dcba')

    def test_add_template_filter_with_name_and_template(self):
        app = flask.Flask(__name__)
        def my_reverse(s):
            return s[::-1]
        app.add_template_filter(my_reverse, 'super_reverse')
        @app.route('/')
        def index():
            return flask.render_template('template_filter.html', value='abcd')
        rv = app.test_client().get('/')
        self.assert_equal(rv.data, b'dcba')

    def test_template_test(self):
        app = flask.Flask(__name__)
        @app.template_test()
        def boolean(value):
            return isinstance(value, bool)
        self.assert_in('boolean', app.jinja_env.tests.keys())
        self.assert_equal(app.jinja_env.tests['boolean'], boolean)
        self.assert_true(app.jinja_env.tests['boolean'](False))

    def test_add_template_test(self):
        app = flask.Flask(__name__)
        def boolean(value):
            return isinstance(value, bool)
        app.add_template_test(boolean)
        self.assert_in('boolean', app.jinja_env.tests.keys())
        self.assert_equal(app.jinja_env.tests['boolean'], boolean)
        self.assert_true(app.jinja_env.tests['boolean'](False))

    def test_template_test_with_name(self):
        app = flask.Flask(__name__)
        @app.template_test('boolean')
        def is_boolean(value):
            return isinstance(value, bool)
        self.assert_in('boolean', app.jinja_env.tests.keys())
        self.assert_equal(app.jinja_env.tests['boolean'], is_boolean)
        self.assert_true(app.jinja_env.tests['boolean'](False))

    def test_add_template_test_with_name(self):
        app = flask.Flask(__name__)
        def is_boolean(value):
            return isinstance(value, bool)
        app.add_template_test(is_boolean, 'boolean')
        self.assert_in('boolean', app.jinja_env.tests.keys())
        self.assert_equal(app.jinja_env.tests['boolean'], is_boolean)
        self.assert_true(app.jinja_env.tests['boolean'](False))

    def test_template_test_with_template(self):
        app = flask.Flask(__name__)
        @app.template_test()
        def boolean(value):
            return isinstance(value, bool)
        @app.route('/')
        def index():
            return flask.render_template('template_test.html', value=False)
        rv = app.test_client().get('/')
        self.assert_in(b'Success!', rv.data)

    def test_add_template_test_with_template(self):
        app = flask.Flask(__name__)
        def boolean(value):
            return isinstance(value, bool)
        app.add_template_test(boolean)
        @app.route('/')
        def index():
            return flask.render_template('template_test.html', value=False)
        rv = app.test_client().get('/')
        self.assert_in(b'Success!', rv.data)

    def test_template_test_with_name_and_template(self):
        app = flask.Flask(__name__)
        @app.template_test('boolean')
        def is_boolean(value):
            return isinstance(value, bool)
        @app.route('/')
        def index():
            return flask.render_template('template_test.html', value=False)
        rv = app.test_client().get('/')
        self.assert_in(b'Success!', rv.data)

    def test_add_template_test_with_name_and_template(self):
        app = flask.Flask(__name__)
        def is_boolean(value):
            return isinstance(value, bool)
        app.add_template_test(is_boolean, 'boolean')
        @app.route('/')
        def index():
            return flask.render_template('template_test.html', value=False)
        rv = app.test_client().get('/')
        self.assert_in(b'Success!', rv.data)

    def test_add_template_global(self):
        app = flask.Flask(__name__)
        @app.template_global()
        def get_stuff():
            return 42
        self.assert_in('get_stuff', app.jinja_env.globals.keys())
        self.assert_equal(app.jinja_env.globals['get_stuff'], get_stuff)
        self.assert_true(app.jinja_env.globals['get_stuff'](), 42)
        with app.app_context():
            rv = flask.render_template_string('{{ get_stuff() }}')
            self.assert_equal(rv, '42')

    def test_custom_template_loader(self):
        # Subclasses may replace the global Jinja loader entirely.
        class MyFlask(flask.Flask):
            def create_global_jinja_loader(self):
                from jinja2 import DictLoader
                return DictLoader({'index.html': 'Hello Custom World!'})
        app = MyFlask(__name__)
        @app.route('/')
        def index():
            return flask.render_template('index.html')
        c = app.test_client()
        rv = c.get('/')
        self.assert_equal(rv.data, b'Hello Custom World!')

    def test_iterable_loader(self):
        app = flask.Flask(__name__)
        @app.context_processor
        def context_processor():
            return {'whiskey': 'Jameson'}
        @app.route('/')
        def index():
            return flask.render_template(
                ['no_template.xml',  # should skip this one
                 'simple_template.html',  # should render this
                 'context_template.html'],
                value=23)
        rv = app.test_client().get('/')
        self.assert_equal(rv.data, b'<h1>Jameson</h1>')


def suite():
    suite = unittest.TestSuite()
    suite.addTest(unittest.makeSuite(TemplatingTestCase))
    return suite
mit
uzgit/ardupilot
mk/VRBRAIN/Tools/genmsg/scripts/genmsg_check_deps.py
51
3068
#!/usr/bin/env python # Software License Agreement (BSD License) # # Copyright (c) 2014, Open Source Robotics Foundation, Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials provided # with the distribution. # * Neither the name of Open Source Robotics Foundation, Inc. nor # the names of its contributors may be used to endorse or promote # products derived from this software without specific prior # written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
from __future__ import print_function

import os
import sys

from genmsg import EXT_MSG, EXT_SRV, MsgContext
from genmsg.gentools import compute_full_type_name
from genmsg.msg_loader import load_msg_from_file, load_srv_from_file
from genmsg.msgs import bare_msg_type, is_builtin, resolve_type

# CLI: genmsg_check_deps.py <pkg_name> <msg_or_srv_file> [dep1:dep2:...]
# Exits non-zero (for the build system) when the file's message/service
# dependencies no longer match the colon-separated list passed by CMake.
pkg_name = sys.argv[1]
msg_file = sys.argv[2]
deps = sys.argv[3].split(':') if len(sys.argv) > 3 else []

msg_context = MsgContext.create_default()
full_type_name = compute_full_type_name(pkg_name, os.path.basename(msg_file))
# Load the spec and collect referenced field types; a .srv contributes both
# its request and response sides.
if msg_file.endswith(EXT_MSG):
    spec = load_msg_from_file(msg_context, msg_file, full_type_name)
    unresolved_types = spec.types
elif msg_file.endswith(EXT_SRV):
    spec = load_srv_from_file(msg_context, msg_file, full_type_name)
    unresolved_types = spec.request.types + spec.response.types
else:
    print("Processing file: '%s' - unknown file extension" % msg_file,
          file=sys.stderr)
    sys.exit(1)

package_context = spec.package
for unresolved_type in unresolved_types:
    # Strip array brackets etc., then qualify relative names with the
    # owning package before checking against the declared dependency list.
    bare_type = bare_msg_type(unresolved_type)
    resolved_type = resolve_type(bare_type, package_context)
    if not is_builtin(resolved_type) and resolved_type not in deps:
        print("The dependencies of the message/service '%s' have changed. "
              "Please rerun cmake." % spec.full_name, file=sys.stderr)
        sys.exit(1)
gpl-3.0
vencax/django-vxk-forum
vxkforum/fields.py
1
3734
""" Details about AutoOneToOneField: http://softwaremaniacs.org/blog/2007/03/07/auto-one-to-one-field/ """ try: from cStringIO import StringIO except ImportError: from StringIO import StringIO import random from django.db.models import OneToOneField from django.db.models.fields.related import SingleRelatedObjectDescriptor from django.db import models from django.core.files.uploadedfile import SimpleUploadedFile from django.core.serializers.json import DjangoJSONEncoder from django.utils import simplejson as json from django.utils.hashcompat import sha_constructor from django.conf import settings class AutoSingleRelatedObjectDescriptor(SingleRelatedObjectDescriptor): def __get__(self, instance, instance_type=None): try: return super(AutoSingleRelatedObjectDescriptor, self).__get__(instance, instance_type) except self.related.model.DoesNotExist: obj = self.related.model(**{self.related.field.name: instance}) obj.save() return obj class AutoOneToOneField(OneToOneField): """ OneToOneField creates dependent object on first request from parent object if dependent oject has not created yet. """ def contribute_to_related_class(self, cls, related): setattr(cls, related.get_accessor_name(), AutoSingleRelatedObjectDescriptor(related)) #if not cls._meta.one_to_one_field: # cls._meta.one_to_one_field = self class ExtendedImageField(models.ImageField): """ Extended ImageField that can resize image before saving it. 
""" def __init__(self, *args, **kwargs): self.width = kwargs.pop('width', None) self.height = kwargs.pop('height', None) super(ExtendedImageField, self).__init__(*args, **kwargs) def save_form_data(self, instance, data): if data and self.width and self.height: content = self.resize_image(data.read(), width=self.width, height=self.height) salt = sha_constructor(str(random.random())).hexdigest()[:5] fname = sha_constructor(salt + settings.SECRET_KEY).hexdigest() + '.png' data = SimpleUploadedFile(fname, content, data.content_type) super(ExtendedImageField, self).save_form_data(instance, data) def resize_image(self, rawdata, width, height): """ Resize image to fit it into (width, height) box. """ try: import Image except ImportError: from PIL import Image image = Image.open(StringIO(rawdata)) oldw, oldh = image.size if oldw >= oldh: x = int(round((oldw - oldh) / 2.0)) image = image.crop((x, 0, (x + oldh) - 1, oldh - 1)) else: y = int(round((oldh - oldw) / 2.0)) image = image.crop((0, y, oldw - 1, (y + oldw) - 1)) image = image.resize((width, height), resample=Image.ANTIALIAS) string = StringIO() image.save(string, format='PNG') return string.getvalue() class JSONField(models.TextField): """ JSONField is a generic textfield that neatly serializes/unserializes JSON objects seamlessly. Django snippet #1478 """ __metaclass__ = models.SubfieldBase def to_python(self, value): if value == "": return None try: if isinstance(value, basestring): return json.loads(value) except ValueError: pass return value def get_prep_value(self, value): if value == "": return None if isinstance(value, dict): value = json.dumps(value, cls=DjangoJSONEncoder) return super(JSONField, self).get_prep_value(value)
bsd-3-clause
lucafavatella/intellij-community
python/lib/Lib/site-packages/django/contrib/localflavor/jp/forms.py
333
1211
""" JP-specific Form helpers """ from django.forms import ValidationError from django.utils.translation import ugettext_lazy as _ from django.forms.fields import RegexField, Select class JPPostalCodeField(RegexField): """ A form field that validates its input is a Japanese postcode. Accepts 7 digits, with or without a hyphen. """ default_error_messages = { 'invalid': _('Enter a postal code in the format XXXXXXX or XXX-XXXX.'), } def __init__(self, *args, **kwargs): super(JPPostalCodeField, self).__init__(r'^\d{3}-\d{4}$|^\d{7}$', max_length=None, min_length=None, *args, **kwargs) def clean(self, value): """ Validates the input and returns a string that contains only numbers. Returns an empty string for empty values. """ v = super(JPPostalCodeField, self).clean(value) return v.replace('-', '') class JPPrefectureSelect(Select): """ A Select widget that uses a list of Japanese prefectures as its choices. """ def __init__(self, attrs=None): from jp_prefectures import JP_PREFECTURES super(JPPrefectureSelect, self).__init__(attrs, choices=JP_PREFECTURES)
apache-2.0
FAB4D/humanitas
prediction/esn/meboot.py
1
2655
""" MEBOOT.PY - Python package for the meboot (Maximum Entropy Bootstrap) algorithm for Time Series Author: Fabian Brix Method by H.D. Vinod, Fordham University - """ import sys import numpy as np import matplotlib.pyplot as plt def sort(series): ind_sorted = np.argsort(series) s_sorted = series[ind_sorted] return s_sorted, ind_sorted def get_trm_mean(series, percent): # FIXED dev = np.abs(series[1:]-series[:-1]) n = len(dev) k = n*(percent/100.0)/2.0 k = round(k,0) return np.mean(dev[k:n-k]) def get_intermed_pts(series, s_sorted, percent): zt = (s_sorted[:-1]+s_sorted[1:])/2.0 m_trm = get_trm_mean(series, percent) print m_trm z0 = s_sorted[0]-m_trm zT = s_sorted[-1]+m_trm z = np.hstack((z0,zt,zT)) return z def get_intervals(z): return np.vstack((z[:-1], z[1:])).T def get_me_density(intervals): return 1.0/(intervals[:,1]-intervals[:,0]) def get_cpf(me_density, intervals): cpf = np.array([sum(me_density[:i+1]) for i in xrange(me_density.shape[0]-1)]) return cpf/np.max(cpf) def get_quantiles(cpf, intervals, series): quantiles = [] T = float(len(series)) t = np.arange(T+1) Rt = np.vstack((t[:-1]/T,t[1:]/T)).T print Rt for d in xrange(series.shape[0]): u = np.random.uniform(0,1) for i in xrange(cpf.shape[0]): cp = cpf[i] if u <= cp: cpm = cpf[i-1] if i == 0: cpm = 0 m = (cp-cpm)/1.0*(intervals[i,1]-intervals[i,0]) xp = (u - cpm)*1.0/m+intervals[i,0] quantiles.append(xp) break return np.array(quantiles) def meboot(series, replicates): # ASC by default print series np.random.seed(0) s_sorted, ind_sorted = sort(series) z = get_intermed_pts(series, s_sorted, 10) #print 'z ', z intervals = get_intervals(z) #print 'intervals ', intervals me_density = get_me_density(intervals) #print 'uni dens ', me_density cpf = get_cpf(me_density, intervals) #print 'cpf ', cpf quantiles = get_quantiles(cpf, intervals, series) #print 'quantiles ', quantiles quantiles = np.sort(quantiles) replicate = quantiles[ind_sorted] print 'replicate ', replicate # TODO: Undertand and add repeat 
mechanism plt.plot(series, color='r') plt.plot(replicate, color='b') plt.ylim(0,30) plt.show() def main(args): series = np.array([4,12,36,20,8]) meboot(series, 1) if __name__ == "__main__": if sys.argv < 2: print 'hello' else: main(*sys.argv)
bsd-3-clause
Nikoala/CouchPotatoServer
couchpotato/core/media/movie/providers/trailer/youtube_dl/aes.py
23
15187
# Pure-Python AES implementation (ECB primitives plus CTR/CBC decryption
# helpers) operating on lists of ints (one byte per list element).
from __future__ import unicode_literals

__all__ = ['aes_encrypt', 'key_expansion', 'aes_ctr_decrypt', 'aes_cbc_decrypt', 'aes_decrypt_text']

import base64
from math import ceil

from .utils import bytes_to_intlist, intlist_to_bytes

# AES always works on 128-bit (16-byte) blocks regardless of key size.
BLOCK_SIZE_BYTES = 16


def aes_ctr_decrypt(data, key, counter):
    """
    Decrypt with aes in counter mode

    @param {int[]} data        cipher
    @param {int[]} key         16/24/32-Byte cipher key
    @param {instance} counter  Instance whose next_value function (@returns {int[]}  16-Byte block)
                               returns the next counter block
    @returns {int[]}           decrypted data
    """
    expanded_key = key_expansion(key)
    block_count = int(ceil(float(len(data)) / BLOCK_SIZE_BYTES))

    decrypted_data = []
    for i in range(block_count):
        counter_block = counter.next_value()
        block = data[i * BLOCK_SIZE_BYTES: (i + 1) * BLOCK_SIZE_BYTES]
        # Zero-pad the (possibly short) final block up to 16 bytes.
        block += [0] * (BLOCK_SIZE_BYTES - len(block))

        # CTR mode: keystream = E(counter); plaintext = cipher XOR keystream.
        cipher_counter_block = aes_encrypt(counter_block, expanded_key)
        decrypted_data += xor(block, cipher_counter_block)
    # Drop the padding bytes added for the final partial block.
    decrypted_data = decrypted_data[:len(data)]

    return decrypted_data


def aes_cbc_decrypt(data, key, iv):
    """
    Decrypt with aes in CBC mode

    @param {int[]} data        cipher
    @param {int[]} key         16/24/32-Byte cipher key
    @param {int[]} iv          16-Byte IV
    @returns {int[]}           decrypted data
    """
    expanded_key = key_expansion(key)
    block_count = int(ceil(float(len(data)) / BLOCK_SIZE_BYTES))

    decrypted_data = []
    previous_cipher_block = iv
    for i in range(block_count):
        block = data[i * BLOCK_SIZE_BYTES: (i + 1) * BLOCK_SIZE_BYTES]
        # Zero-pad the (possibly short) final block up to 16 bytes.
        block += [0] * (BLOCK_SIZE_BYTES - len(block))

        # CBC mode: plaintext = D(cipher) XOR previous cipher block.
        decrypted_block = aes_decrypt(block, expanded_key)
        decrypted_data += xor(decrypted_block, previous_cipher_block)
        previous_cipher_block = block
    decrypted_data = decrypted_data[:len(data)]

    return decrypted_data


def key_expansion(data):
    """
    Generate key schedule

    @param {int[]} data  16/24/32-Byte cipher key
    @returns {int[]}     176/208/240-Byte expanded key
    """
    data = data[:]  # copy
    rcon_iteration = 1
    key_size_bytes = len(data)
    # 11/13/15 round keys of 16 bytes each for 128/192/256-bit keys.
    expanded_key_size_bytes = (key_size_bytes // 4 + 7) * BLOCK_SIZE_BYTES

    while len(data) < expanded_key_size_bytes:
        # First word of each expansion group goes through the core
        # (rotate + S-box + round constant).
        temp = data[-4:]
        temp = key_schedule_core(temp, rcon_iteration)
        rcon_iteration += 1
        data += xor(temp, data[-key_size_bytes: 4 - key_size_bytes])

        for _ in range(3):
            temp = data[-4:]
            data += xor(temp, data[-key_size_bytes: 4 - key_size_bytes])

        # 256-bit keys get an extra S-box substitution mid-group.
        if key_size_bytes == 32:
            temp = data[-4:]
            temp = sub_bytes(temp)
            data += xor(temp, data[-key_size_bytes: 4 - key_size_bytes])

        for _ in range(3 if key_size_bytes == 32 else 2 if key_size_bytes == 24 else 0):
            temp = data[-4:]
            data += xor(temp, data[-key_size_bytes: 4 - key_size_bytes])
    data = data[:expanded_key_size_bytes]

    return data


def aes_encrypt(data, expanded_key):
    """
    Encrypt one block with aes

    @param {int[]} data          16-Byte state
    @param {int[]} expanded_key  176/208/240-Byte expanded key
    @returns {int[]}             16-Byte cipher
    """
    rounds = len(expanded_key) // BLOCK_SIZE_BYTES - 1

    # Initial whitening with the first round key.
    data = xor(data, expanded_key[:BLOCK_SIZE_BYTES])
    for i in range(1, rounds + 1):
        data = sub_bytes(data)
        data = shift_rows(data)
        # The final round omits MixColumns.
        if i != rounds:
            data = mix_columns(data)
        data = xor(data, expanded_key[i * BLOCK_SIZE_BYTES: (i + 1) * BLOCK_SIZE_BYTES])

    return data


def aes_decrypt(data, expanded_key):
    """
    Decrypt one block with aes

    @param {int[]} data          16-Byte cipher
    @param {int[]} expanded_key  176/208/240-Byte expanded key
    @returns {int[]}             16-Byte state
    """
    rounds = len(expanded_key) // BLOCK_SIZE_BYTES - 1

    # Inverse cipher: apply the round keys in reverse order with the
    # inverse transformations.
    for i in range(rounds, 0, -1):
        data = xor(data, expanded_key[i * BLOCK_SIZE_BYTES: (i + 1) * BLOCK_SIZE_BYTES])
        if i != rounds:
            data = mix_columns_inv(data)
        data = shift_rows_inv(data)
        data = sub_bytes_inv(data)
    data = xor(data, expanded_key[:BLOCK_SIZE_BYTES])

    return data


def aes_decrypt_text(data, password, key_size_bytes):
    """
    Decrypt text
    - The first 8 Bytes of decoded 'data' are the 8 high Bytes of the counter
    - The cipher key is retrieved by encrypting the first 16 Byte of 'password'
      with the first 'key_size_bytes' Bytes from 'password' (if necessary filled with 0's)
    - Mode of operation is 'counter'

    @param {str} data                    Base64 encoded string
    @param {str,unicode} password       Password (will be encoded with utf-8)
    @param {int} key_size_bytes         Possible values: 16 for 128-Bit, 24 for 192-Bit or 32 for 256-Bit
    @returns {str}                      Decrypted data
    """
    NONCE_LENGTH_BYTES = 8

    data = bytes_to_intlist(base64.b64decode(data))
    password = bytes_to_intlist(password.encode('utf-8'))

    # Derive the key by self-encrypting the zero-padded password.
    key = password[:key_size_bytes] + [0] * (key_size_bytes - len(password))
    key = aes_encrypt(key[:BLOCK_SIZE_BYTES], key_expansion(key)) * (key_size_bytes // BLOCK_SIZE_BYTES)

    nonce = data[:NONCE_LENGTH_BYTES]
    cipher = data[NONCE_LENGTH_BYTES:]

    # Minimal counter object: nonce in the high 8 bytes, an incrementing
    # big-endian count in the low 8 bytes.
    class Counter:
        __value = nonce + [0] * (BLOCK_SIZE_BYTES - NONCE_LENGTH_BYTES)

        def next_value(self):
            temp = self.__value
            self.__value = inc(self.__value)
            return temp

    decrypted_data = aes_ctr_decrypt(cipher, key, Counter())
    plaintext = intlist_to_bytes(decrypted_data)

    return plaintext


# Round constants for the key schedule (index 0 is unused).
RCON = (0x8d, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36)
# Forward S-box (SubBytes).
SBOX = (0x63, 0x7C, 0x77, 0x7B, 0xF2, 0x6B, 0x6F, 0xC5, 0x30, 0x01, 0x67, 0x2B, 0xFE, 0xD7, 0xAB, 0x76,
        0xCA, 0x82, 0xC9, 0x7D, 0xFA, 0x59, 0x47, 0xF0, 0xAD, 0xD4, 0xA2, 0xAF, 0x9C, 0xA4, 0x72, 0xC0,
        0xB7, 0xFD, 0x93, 0x26, 0x36, 0x3F, 0xF7, 0xCC, 0x34, 0xA5, 0xE5, 0xF1, 0x71, 0xD8, 0x31, 0x15,
        0x04, 0xC7, 0x23, 0xC3, 0x18, 0x96, 0x05, 0x9A, 0x07, 0x12, 0x80, 0xE2, 0xEB, 0x27, 0xB2, 0x75,
        0x09, 0x83, 0x2C, 0x1A, 0x1B, 0x6E, 0x5A, 0xA0, 0x52, 0x3B, 0xD6, 0xB3, 0x29, 0xE3, 0x2F, 0x84,
        0x53, 0xD1, 0x00, 0xED, 0x20, 0xFC, 0xB1, 0x5B, 0x6A, 0xCB, 0xBE, 0x39, 0x4A, 0x4C, 0x58, 0xCF,
        0xD0, 0xEF, 0xAA, 0xFB, 0x43, 0x4D, 0x33, 0x85, 0x45, 0xF9, 0x02, 0x7F, 0x50, 0x3C, 0x9F, 0xA8,
        0x51, 0xA3, 0x40, 0x8F, 0x92, 0x9D, 0x38, 0xF5, 0xBC, 0xB6, 0xDA, 0x21, 0x10, 0xFF, 0xF3, 0xD2,
        0xCD, 0x0C, 0x13, 0xEC, 0x5F, 0x97, 0x44, 0x17, 0xC4, 0xA7, 0x7E, 0x3D, 0x64, 0x5D, 0x19, 0x73,
        0x60, 0x81, 0x4F, 0xDC, 0x22, 0x2A, 0x90, 0x88, 0x46, 0xEE, 0xB8, 0x14, 0xDE, 0x5E, 0x0B, 0xDB,
        0xE0, 0x32, 0x3A, 0x0A, 0x49, 0x06, 0x24, 0x5C, 0xC2, 0xD3, 0xAC, 0x62, 0x91, 0x95, 0xE4, 0x79,
        0xE7, 0xC8, 0x37, 0x6D, 0x8D, 0xD5, 0x4E, 0xA9, 0x6C, 0x56, 0xF4, 0xEA, 0x65, 0x7A, 0xAE, 0x08,
        0xBA, 0x78, 0x25, 0x2E, 0x1C, 0xA6, 0xB4, 0xC6, 0xE8, 0xDD, 0x74, 0x1F, 0x4B, 0xBD, 0x8B, 0x8A,
        0x70, 0x3E, 0xB5, 0x66, 0x48, 0x03, 0xF6, 0x0E, 0x61, 0x35, 0x57, 0xB9, 0x86, 0xC1, 0x1D, 0x9E,
        0xE1, 0xF8, 0x98, 0x11, 0x69, 0xD9, 0x8E, 0x94, 0x9B, 0x1E, 0x87, 0xE9, 0xCE, 0x55, 0x28, 0xDF,
        0x8C, 0xA1, 0x89, 0x0D, 0xBF, 0xE6, 0x42, 0x68, 0x41, 0x99, 0x2D, 0x0F, 0xB0, 0x54, 0xBB, 0x16)
# Inverse S-box (InvSubBytes).
SBOX_INV = (0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 0xa5, 0x38, 0xbf, 0x40, 0xa3, 0x9e, 0x81, 0xf3, 0xd7, 0xfb,
            0x7c, 0xe3, 0x39, 0x82, 0x9b, 0x2f, 0xff, 0x87, 0x34, 0x8e, 0x43, 0x44, 0xc4, 0xde, 0xe9, 0xcb,
            0x54, 0x7b, 0x94, 0x32, 0xa6, 0xc2, 0x23, 0x3d, 0xee, 0x4c, 0x95, 0x0b, 0x42, 0xfa, 0xc3, 0x4e,
            0x08, 0x2e, 0xa1, 0x66, 0x28, 0xd9, 0x24, 0xb2, 0x76, 0x5b, 0xa2, 0x49, 0x6d, 0x8b, 0xd1, 0x25,
            0x72, 0xf8, 0xf6, 0x64, 0x86, 0x68, 0x98, 0x16, 0xd4, 0xa4, 0x5c, 0xcc, 0x5d, 0x65, 0xb6, 0x92,
            0x6c, 0x70, 0x48, 0x50, 0xfd, 0xed, 0xb9, 0xda, 0x5e, 0x15, 0x46, 0x57, 0xa7, 0x8d, 0x9d, 0x84,
            0x90, 0xd8, 0xab, 0x00, 0x8c, 0xbc, 0xd3, 0x0a, 0xf7, 0xe4, 0x58, 0x05, 0xb8, 0xb3, 0x45, 0x06,
            0xd0, 0x2c, 0x1e, 0x8f, 0xca, 0x3f, 0x0f, 0x02, 0xc1, 0xaf, 0xbd, 0x03, 0x01, 0x13, 0x8a, 0x6b,
            0x3a, 0x91, 0x11, 0x41, 0x4f, 0x67, 0xdc, 0xea, 0x97, 0xf2, 0xcf, 0xce, 0xf0, 0xb4, 0xe6, 0x73,
            0x96, 0xac, 0x74, 0x22, 0xe7, 0xad, 0x35, 0x85, 0xe2, 0xf9, 0x37, 0xe8, 0x1c, 0x75, 0xdf, 0x6e,
            0x47, 0xf1, 0x1a, 0x71, 0x1d, 0x29, 0xc5, 0x89, 0x6f, 0xb7, 0x62, 0x0e, 0xaa, 0x18, 0xbe, 0x1b,
            0xfc, 0x56, 0x3e, 0x4b, 0xc6, 0xd2, 0x79, 0x20, 0x9a, 0xdb, 0xc0, 0xfe, 0x78, 0xcd, 0x5a, 0xf4,
            0x1f, 0xdd, 0xa8, 0x33, 0x88, 0x07, 0xc7, 0x31, 0xb1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xec, 0x5f,
            0x60, 0x51, 0x7f, 0xa9, 0x19, 0xb5, 0x4a, 0x0d, 0x2d, 0xe5, 0x7a, 0x9f, 0x93, 0xc9, 0x9c, 0xef,
            0xa0, 0xe0, 0x3b, 0x4d, 0xae, 0x2a, 0xf5, 0xb0, 0xc8, 0xeb, 0xbb, 0x3c, 0x83, 0x53, 0x99, 0x61,
            0x17, 0x2b, 0x04, 0x7e, 0xba, 0x77, 0xd6, 0x26, 0xe1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0c, 0x7d)
# Column-mixing matrices over GF(2^8) for MixColumns and its inverse.
MIX_COLUMN_MATRIX = ((0x2, 0x3, 0x1, 0x1),
                     (0x1, 0x2, 0x3, 0x1),
                     (0x1, 0x1, 0x2, 0x3),
                     (0x3, 0x1, 0x1, 0x2))
MIX_COLUMN_MATRIX_INV = ((0xE, 0xB, 0xD, 0x9),
                         (0x9, 0xE, 0xB, 0xD),
                         (0xD, 0x9, 0xE, 0xB),
                         (0xB, 0xD, 0x9, 0xE))
# Exponentiation/log tables for multiplication in GF(2^8) (generator 3).
RIJNDAEL_EXP_TABLE = (0x01, 0x03, 0x05, 0x0F, 0x11, 0x33, 0x55, 0xFF, 0x1A, 0x2E, 0x72, 0x96, 0xA1, 0xF8, 0x13, 0x35,
                      0x5F, 0xE1, 0x38, 0x48, 0xD8, 0x73, 0x95, 0xA4, 0xF7, 0x02, 0x06, 0x0A, 0x1E, 0x22, 0x66, 0xAA,
                      0xE5, 0x34, 0x5C, 0xE4, 0x37, 0x59, 0xEB, 0x26, 0x6A, 0xBE, 0xD9, 0x70, 0x90, 0xAB, 0xE6, 0x31,
                      0x53, 0xF5, 0x04, 0x0C, 0x14, 0x3C, 0x44, 0xCC, 0x4F, 0xD1, 0x68, 0xB8, 0xD3, 0x6E, 0xB2, 0xCD,
                      0x4C, 0xD4, 0x67, 0xA9, 0xE0, 0x3B, 0x4D, 0xD7, 0x62, 0xA6, 0xF1, 0x08, 0x18, 0x28, 0x78, 0x88,
                      0x83, 0x9E, 0xB9, 0xD0, 0x6B, 0xBD, 0xDC, 0x7F, 0x81, 0x98, 0xB3, 0xCE, 0x49, 0xDB, 0x76, 0x9A,
                      0xB5, 0xC4, 0x57, 0xF9, 0x10, 0x30, 0x50, 0xF0, 0x0B, 0x1D, 0x27, 0x69, 0xBB, 0xD6, 0x61, 0xA3,
                      0xFE, 0x19, 0x2B, 0x7D, 0x87, 0x92, 0xAD, 0xEC, 0x2F, 0x71, 0x93, 0xAE, 0xE9, 0x20, 0x60, 0xA0,
                      0xFB, 0x16, 0x3A, 0x4E, 0xD2, 0x6D, 0xB7, 0xC2, 0x5D, 0xE7, 0x32, 0x56, 0xFA, 0x15, 0x3F, 0x41,
                      0xC3, 0x5E, 0xE2, 0x3D, 0x47, 0xC9, 0x40, 0xC0, 0x5B, 0xED, 0x2C, 0x74, 0x9C, 0xBF, 0xDA, 0x75,
                      0x9F, 0xBA, 0xD5, 0x64, 0xAC, 0xEF, 0x2A, 0x7E, 0x82, 0x9D, 0xBC, 0xDF, 0x7A, 0x8E, 0x89, 0x80,
                      0x9B, 0xB6, 0xC1, 0x58, 0xE8, 0x23, 0x65, 0xAF, 0xEA, 0x25, 0x6F, 0xB1, 0xC8, 0x43, 0xC5, 0x54,
                      0xFC, 0x1F, 0x21, 0x63, 0xA5, 0xF4, 0x07, 0x09, 0x1B, 0x2D, 0x77, 0x99, 0xB0, 0xCB, 0x46, 0xCA,
                      0x45, 0xCF, 0x4A, 0xDE, 0x79, 0x8B, 0x86, 0x91, 0xA8, 0xE3, 0x3E, 0x42, 0xC6, 0x51, 0xF3, 0x0E,
                      0x12, 0x36, 0x5A, 0xEE, 0x29, 0x7B, 0x8D, 0x8C, 0x8F, 0x8A, 0x85, 0x94, 0xA7, 0xF2, 0x0D, 0x17,
                      0x39, 0x4B, 0xDD, 0x7C, 0x84, 0x97, 0xA2, 0xFD, 0x1C, 0x24, 0x6C, 0xB4, 0xC7, 0x52, 0xF6, 0x01)
RIJNDAEL_LOG_TABLE = (0x00, 0x00, 0x19, 0x01, 0x32, 0x02, 0x1a, 0xc6, 0x4b, 0xc7, 0x1b, 0x68, 0x33, 0xee, 0xdf, 0x03,
                      0x64, 0x04, 0xe0, 0x0e, 0x34, 0x8d, 0x81, 0xef, 0x4c, 0x71, 0x08, 0xc8, 0xf8, 0x69, 0x1c, 0xc1,
                      0x7d, 0xc2, 0x1d, 0xb5, 0xf9, 0xb9, 0x27, 0x6a, 0x4d, 0xe4, 0xa6, 0x72, 0x9a, 0xc9, 0x09, 0x78,
                      0x65, 0x2f, 0x8a, 0x05, 0x21, 0x0f, 0xe1, 0x24, 0x12, 0xf0, 0x82, 0x45, 0x35, 0x93, 0xda, 0x8e,
                      0x96, 0x8f, 0xdb, 0xbd, 0x36, 0xd0, 0xce, 0x94, 0x13, 0x5c, 0xd2, 0xf1, 0x40, 0x46, 0x83, 0x38,
                      0x66, 0xdd, 0xfd, 0x30, 0xbf, 0x06, 0x8b, 0x62, 0xb3, 0x25, 0xe2, 0x98, 0x22, 0x88, 0x91, 0x10,
                      0x7e, 0x6e, 0x48, 0xc3, 0xa3, 0xb6, 0x1e, 0x42, 0x3a, 0x6b, 0x28, 0x54, 0xfa, 0x85, 0x3d, 0xba,
                      0x2b, 0x79, 0x0a, 0x15, 0x9b, 0x9f, 0x5e, 0xca, 0x4e, 0xd4, 0xac, 0xe5, 0xf3, 0x73, 0xa7, 0x57,
                      0xaf, 0x58, 0xa8, 0x50, 0xf4, 0xea, 0xd6, 0x74, 0x4f, 0xae, 0xe9, 0xd5, 0xe7, 0xe6, 0xad, 0xe8,
                      0x2c, 0xd7, 0x75, 0x7a, 0xeb, 0x16, 0x0b, 0xf5, 0x59, 0xcb, 0x5f, 0xb0, 0x9c, 0xa9, 0x51, 0xa0,
                      0x7f, 0x0c, 0xf6, 0x6f, 0x17, 0xc4, 0x49, 0xec, 0xd8, 0x43, 0x1f, 0x2d, 0xa4, 0x76, 0x7b, 0xb7,
                      0xcc, 0xbb, 0x3e, 0x5a, 0xfb, 0x60, 0xb1, 0x86, 0x3b, 0x52, 0xa1, 0x6c, 0xaa, 0x55, 0x29, 0x9d,
                      0x97, 0xb2, 0x87, 0x90, 0x61, 0xbe, 0xdc, 0xfc, 0xbc, 0x95, 0xcf, 0xcd, 0x37, 0x3f, 0x5b, 0xd1,
                      0x53, 0x39, 0x84, 0x3c, 0x41, 0xa2, 0x6d, 0x47, 0x14, 0x2a, 0x9e, 0x5d, 0x56, 0xf2, 0xd3, 0xab,
                      0x44, 0x11, 0x92, 0xd9, 0x23, 0x20, 0x2e, 0x89, 0xb4, 0x7c, 0xb8, 0x26, 0x77, 0x99, 0xe3, 0xa5,
                      0x67, 0x4a, 0xed, 0xde, 0xc5, 0x31, 0xfe, 0x18, 0x0d, 0x63, 0x8c, 0x80, 0xc0, 0xf7, 0x70, 0x07)


def sub_bytes(data):
    # SubBytes: S-box substitution of every state byte.
    return [SBOX[x] for x in data]


def sub_bytes_inv(data):
    # InvSubBytes: inverse S-box substitution.
    return [SBOX_INV[x] for x in data]


def rotate(data):
    # Rotate a 4-byte word one byte to the left (RotWord).
    return data[1:] + [data[0]]


def key_schedule_core(data, rcon_iteration):
    # RotWord + SubWord + round-constant XOR on the first byte.
    data = rotate(data)
    data = sub_bytes(data)
    data[0] = data[0] ^ RCON[rcon_iteration]

    return data


def xor(data1, data2):
    # Byte-wise XOR of two equal-length int lists.
    return [x ^ y for x, y in zip(data1, data2)]


def rijndael_mul(a, b):
    # Multiplication in GF(2^8) via exp/log tables; 0 has no logarithm.
    if(a == 0 or b == 0):
        return 0
    return RIJNDAEL_EXP_TABLE[(RIJNDAEL_LOG_TABLE[a] + RIJNDAEL_LOG_TABLE[b]) % 0xFF]


def mix_column(data, matrix):
    # Multiply one 4-byte column by the mixing matrix over GF(2^8).
    data_mixed = []
    for row in range(4):
        mixed = 0
        for column in range(4):
            # xor is (+) and (-)
            mixed ^= rijndael_mul(data[column], matrix[row][column])
        data_mixed.append(mixed)
    return data_mixed


def mix_columns(data, matrix=MIX_COLUMN_MATRIX):
    # MixColumns applied to all four columns of the 16-byte state.
    data_mixed = []
    for i in range(4):
        column = data[i * 4: (i + 1) * 4]
        data_mixed += mix_column(column, matrix)
    return data_mixed


def mix_columns_inv(data):
    # InvMixColumns: same operation with the inverse matrix.
    return mix_columns(data, MIX_COLUMN_MATRIX_INV)


def shift_rows(data):
    # ShiftRows on a column-major 16-byte state (row r rotates left by r).
    data_shifted = []
    for column in range(4):
        for row in range(4):
            data_shifted.append(data[((column + row) & 0b11) * 4 + row])
    return data_shifted


def shift_rows_inv(data):
    # InvShiftRows (row r rotates right by r).
    data_shifted = []
    for column in range(4):
        for row in range(4):
            data_shifted.append(data[((column - row) & 0b11) * 4 + row])
    return data_shifted


def inc(data):
    # Increment a big-endian multi-byte counter with wrap-around carry.
    data = data[:]  # copy
    for i in range(len(data) - 1, -1, -1):
        if data[i] == 255:
            data[i] = 0
        else:
            data[i] = data[i] + 1
            break
    return data
gpl-3.0
merrywhether/autoprotocol-python
autoprotocol/unit.py
1
4980
from __future__ import division, print_function
from pint import UnitRegistry
from pint.quantity import _Quantity
import sys

# Python 2/3 compatible base string type for isinstance() checks.
if sys.version_info[0] >= 3:
    string_type = str
else:
    string_type = basestring

'''
    :copyright: 2016 by The Autoprotocol Development Team, see AUTHORS
        for more details.
    :license: BSD, see LICENSE for more details

'''

# Preload UnitRegistry (Use default Pints definition file as a base)
_UnitRegistry = UnitRegistry()

'''Map string representation of Pint units over to Autoprotocol format'''
# Map Temperature Unit names
# NOTE(review): this pokes at pint's private _units mapping, so it is
# coupled to the installed pint version -- confirm when upgrading pint.
_UnitRegistry._units["degC"]._name = "celsius"
_UnitRegistry._units["celsius"]._name = "celsius"
_UnitRegistry._units["degF"]._name = "fahrenheit"
_UnitRegistry._units["fahrenheit"]._name = "fahrenheit"
_UnitRegistry._units["degR"]._name = "rankine"
_UnitRegistry._units["rankine"]._name = "rankine"

# Map Speed Unit names
_UnitRegistry._units["revolutions_per_minute"]._name = "rpm"

'''Add support for Molarity Unit'''
_UnitRegistry.define('molar = mole/liter = M')


class UnitError(Exception):
    """
    Exceptions from creating new Unit instances with bad inputs.
    """
    # %-format template; subclasses override it with a more specific text.
    message_text = "Unit error for %s"

    def __init__(self, value):
        # Render the template with the offending value and keep the value
        # around for callers that want to inspect it.
        super(UnitError, self).__init__(self.message_text % value)
        self.value = value


class UnitStringError(UnitError):
    """Raised when a string argument is not in 'value:unit' form."""
    message_text = "Invalid format for %s: when building a Unit from a " \
                   "string argument, string must be in \'1:meter\' format."


class UnitValueError(UnitError):
    """Raised when a value argument cannot be coerced to float."""
    message_text = "Invalid value for %s: when building a Unit from a " \
                   "value argument, value must be numeric."


class Unit(_Quantity):
    """
    A representation of a measure of physical quantities such as length,
    mass, time and volume.
    Uses Pint's Quantity as a base class for implementing units and
    inherits functionalities such as conversions and proper unit
    arithmetic.
    Note that the magnitude is stored as a double-precision float, so
    there are inherent issues when dealing with extremely large/small
    numbers as well as numerical rounding for non-base 2 numbers.

    Example
    -------
    .. code-block:: python

        vol_1 = Unit(10, 'microliter')
        vol_2 = Unit(10, 'liter')
        print(vol_1 + vol_2)

        time_1 = Unit(1, 'second')
        speed_1 = vol_1/time_1
        print (speed_1)
        print (speed_1.to('liter/hour'))

    Returns
    -------
    .. code-block:: json

        10000010.0:microliter
        10.0:microliter / second
        0.036:liter / hour
    """

    def __new__(cls, value, units=None):
        # Pint quantities resolve their units through a class-level
        # registry; bind the pre-configured Autoprotocol registry here.
        cls._REGISTRY = _UnitRegistry
        cls.force_ndarray = False

        # Automatically return Unit if Unit is provided
        if isinstance(value, Unit):
            return value

        # Automatically parse String if no units provided
        if not units and isinstance(value, string_type):
            try:
                value, units = value.split(":")
            except ValueError:
                raise UnitStringError(value)

        try:
            # Magnitude is always stored as a float (see class docstring).
            return super(Unit, cls).__new__(cls, float(value), units)
        except ValueError:
            raise UnitValueError(value)

    def __init__(self, value, units=None):
        super(Unit, self).__init__()
        # Cache the string form of the units; used by __str__ and the
        # dimensionality coercion in _mul_div/_imul_div below.
        self.unit = self.units.__str__()

    @staticmethod
    def fromstring(s):
        """
        Convert a string representation of a unit into a Unit object.

        Example
        -------
        .. code-block:: python

            Unit.fromstring("10:microliter")

        becomes

        .. code-block:: python

            Unit(10, "microliter")

        Parameters
        ----------
        s : str
            String in the format of "value:unit"
        """
        if isinstance(s, Unit):
            return s
        else:
            return Unit(s)

    def __str__(self):
        # Render as "value:unit", translating Pint's "**" exponent
        # notation to "^" and stripping spaces.
        return ":".join([str(self._magnitude),
                         "^".join(self.unit.split("**"))]).replace(" ", "")

    def __repr__(self):
        return "Unit({0}, '{1}')".format(self._magnitude, self._units)

    def _mul_div(self, other, magnitude_op, units_op=None):
        '''
        Extends Pint's base _Quantity multiplication/division
        implementation by checking for dimensionality
        '''
        # Convert a commensurable operand to our units first so the
        # result stays expressed in this quantity's units.
        if isinstance(other, Unit):
            if self.dimensionality == other.dimensionality:
                other = other.to(self.unit)
        return super(Unit, self)._mul_div(other, magnitude_op, units_op)

    def _imul_div(self, other, magnitude_op, units_op=None):
        '''
        Extends Pint's base _Quantity multiplication/division
        implementation by checking for dimensionality
        '''
        if isinstance(other, Unit):
            if self.dimensionality == other.dimensionality:
                other = other.to(self.unit)
        return super(Unit, self)._imul_div(other, magnitude_op, units_op)
bsd-3-clause
ErikSeguinte/Personal-Code-library
Symbol_table/frequencyCounter.py
1
1209
import __future__ import sys import string def main(ST): if len(sys.argv) > 1: input_file = sys.argv[1] else: input_file = "tale.txt" st = ST() with open(input_file,'r') as i_file: for line in i_file: if line == '\n': continue line = line.split(" ") for word in line: word = "".join([ch for ch in word if ch not in set(string.punctuation)]) # print(st.get(word)) if len(word) < 6: continue if not st.contains(word): st.put(word, 1) else: value = st.get(word) value +=1 st.put(word, value) max_word = "-1" st.put(max_word, 0) for word in st.keys(): if st.get(word) > st.get(max_word): max_word = word print(max_word + ": "+ str(st.get(max_word))) st.delete("little") print(max_word + ": "+ str(st.get(max_word))) print(st.min_root().key, st.min_root().value) st.del_min() print(st.min_root().key, st.min_root().value) print(list(st.range_root("livelier", "lively")))
mit
1119553797/sprd-kernel-common
tools/perf/scripts/python/netdev-times.py
11271
15048
# Display a process of packets and processed time. # It helps us to investigate networking or network device. # # options # tx: show only tx chart # rx: show only rx chart # dev=: show only thing related to specified device # debug: work with debug mode. It shows buffer status. import os import sys sys.path.append(os.environ['PERF_EXEC_PATH'] + \ '/scripts/python/Perf-Trace-Util/lib/Perf/Trace') from perf_trace_context import * from Core import * from Util import * all_event_list = []; # insert all tracepoint event related with this script irq_dic = {}; # key is cpu and value is a list which stacks irqs # which raise NET_RX softirq net_rx_dic = {}; # key is cpu and value include time of NET_RX softirq-entry # and a list which stacks receive receive_hunk_list = []; # a list which include a sequence of receive events rx_skb_list = []; # received packet list for matching # skb_copy_datagram_iovec buffer_budget = 65536; # the budget of rx_skb_list, tx_queue_list and # tx_xmit_list of_count_rx_skb_list = 0; # overflow count tx_queue_list = []; # list of packets which pass through dev_queue_xmit of_count_tx_queue_list = 0; # overflow count tx_xmit_list = []; # list of packets which pass through dev_hard_start_xmit of_count_tx_xmit_list = 0; # overflow count tx_free_list = []; # list of packets which is freed # options show_tx = 0; show_rx = 0; dev = 0; # store a name of device specified by option "dev=" debug = 0; # indices of event_info tuple EINFO_IDX_NAME= 0 EINFO_IDX_CONTEXT=1 EINFO_IDX_CPU= 2 EINFO_IDX_TIME= 3 EINFO_IDX_PID= 4 EINFO_IDX_COMM= 5 # Calculate a time interval(msec) from src(nsec) to dst(nsec) def diff_msec(src, dst): return (dst - src) / 1000000.0 # Display a process of transmitting a packet def print_transmit(hunk): if dev != 0 and hunk['dev'].find(dev) < 0: return print "%7s %5d %6d.%06dsec %12.3fmsec %12.3fmsec" % \ (hunk['dev'], hunk['len'], nsecs_secs(hunk['queue_t']), nsecs_nsecs(hunk['queue_t'])/1000, diff_msec(hunk['queue_t'], hunk['xmit_t']), 
diff_msec(hunk['xmit_t'], hunk['free_t'])) # Format for displaying rx packet processing PF_IRQ_ENTRY= " irq_entry(+%.3fmsec irq=%d:%s)" PF_SOFT_ENTRY=" softirq_entry(+%.3fmsec)" PF_NAPI_POLL= " napi_poll_exit(+%.3fmsec %s)" PF_JOINT= " |" PF_WJOINT= " | |" PF_NET_RECV= " |---netif_receive_skb(+%.3fmsec skb=%x len=%d)" PF_NET_RX= " |---netif_rx(+%.3fmsec skb=%x)" PF_CPY_DGRAM= " | skb_copy_datagram_iovec(+%.3fmsec %d:%s)" PF_KFREE_SKB= " | kfree_skb(+%.3fmsec location=%x)" PF_CONS_SKB= " | consume_skb(+%.3fmsec)" # Display a process of received packets and interrputs associated with # a NET_RX softirq def print_receive(hunk): show_hunk = 0 irq_list = hunk['irq_list'] cpu = irq_list[0]['cpu'] base_t = irq_list[0]['irq_ent_t'] # check if this hunk should be showed if dev != 0: for i in range(len(irq_list)): if irq_list[i]['name'].find(dev) >= 0: show_hunk = 1 break else: show_hunk = 1 if show_hunk == 0: return print "%d.%06dsec cpu=%d" % \ (nsecs_secs(base_t), nsecs_nsecs(base_t)/1000, cpu) for i in range(len(irq_list)): print PF_IRQ_ENTRY % \ (diff_msec(base_t, irq_list[i]['irq_ent_t']), irq_list[i]['irq'], irq_list[i]['name']) print PF_JOINT irq_event_list = irq_list[i]['event_list'] for j in range(len(irq_event_list)): irq_event = irq_event_list[j] if irq_event['event'] == 'netif_rx': print PF_NET_RX % \ (diff_msec(base_t, irq_event['time']), irq_event['skbaddr']) print PF_JOINT print PF_SOFT_ENTRY % \ diff_msec(base_t, hunk['sirq_ent_t']) print PF_JOINT event_list = hunk['event_list'] for i in range(len(event_list)): event = event_list[i] if event['event_name'] == 'napi_poll': print PF_NAPI_POLL % \ (diff_msec(base_t, event['event_t']), event['dev']) if i == len(event_list) - 1: print "" else: print PF_JOINT else: print PF_NET_RECV % \ (diff_msec(base_t, event['event_t']), event['skbaddr'], event['len']) if 'comm' in event.keys(): print PF_WJOINT print PF_CPY_DGRAM % \ (diff_msec(base_t, event['comm_t']), event['pid'], event['comm']) elif 'handle' in event.keys(): 
print PF_WJOINT if event['handle'] == "kfree_skb": print PF_KFREE_SKB % \ (diff_msec(base_t, event['comm_t']), event['location']) elif event['handle'] == "consume_skb": print PF_CONS_SKB % \ diff_msec(base_t, event['comm_t']) print PF_JOINT def trace_begin(): global show_tx global show_rx global dev global debug for i in range(len(sys.argv)): if i == 0: continue arg = sys.argv[i] if arg == 'tx': show_tx = 1 elif arg =='rx': show_rx = 1 elif arg.find('dev=',0, 4) >= 0: dev = arg[4:] elif arg == 'debug': debug = 1 if show_tx == 0 and show_rx == 0: show_tx = 1 show_rx = 1 def trace_end(): # order all events in time all_event_list.sort(lambda a,b :cmp(a[EINFO_IDX_TIME], b[EINFO_IDX_TIME])) # process all events for i in range(len(all_event_list)): event_info = all_event_list[i] name = event_info[EINFO_IDX_NAME] if name == 'irq__softirq_exit': handle_irq_softirq_exit(event_info) elif name == 'irq__softirq_entry': handle_irq_softirq_entry(event_info) elif name == 'irq__softirq_raise': handle_irq_softirq_raise(event_info) elif name == 'irq__irq_handler_entry': handle_irq_handler_entry(event_info) elif name == 'irq__irq_handler_exit': handle_irq_handler_exit(event_info) elif name == 'napi__napi_poll': handle_napi_poll(event_info) elif name == 'net__netif_receive_skb': handle_netif_receive_skb(event_info) elif name == 'net__netif_rx': handle_netif_rx(event_info) elif name == 'skb__skb_copy_datagram_iovec': handle_skb_copy_datagram_iovec(event_info) elif name == 'net__net_dev_queue': handle_net_dev_queue(event_info) elif name == 'net__net_dev_xmit': handle_net_dev_xmit(event_info) elif name == 'skb__kfree_skb': handle_kfree_skb(event_info) elif name == 'skb__consume_skb': handle_consume_skb(event_info) # display receive hunks if show_rx: for i in range(len(receive_hunk_list)): print_receive(receive_hunk_list[i]) # display transmit hunks if show_tx: print " dev len Qdisc " \ " netdevice free" for i in range(len(tx_free_list)): print_transmit(tx_free_list[i]) if debug: print 
"debug buffer status" print "----------------------------" print "xmit Qdisc:remain:%d overflow:%d" % \ (len(tx_queue_list), of_count_tx_queue_list) print "xmit netdevice:remain:%d overflow:%d" % \ (len(tx_xmit_list), of_count_tx_xmit_list) print "receive:remain:%d overflow:%d" % \ (len(rx_skb_list), of_count_rx_skb_list) # called from perf, when it finds a correspoinding event def irq__softirq_entry(name, context, cpu, sec, nsec, pid, comm, vec): if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX": return event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec) all_event_list.append(event_info) def irq__softirq_exit(name, context, cpu, sec, nsec, pid, comm, vec): if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX": return event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec) all_event_list.append(event_info) def irq__softirq_raise(name, context, cpu, sec, nsec, pid, comm, vec): if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX": return event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec) all_event_list.append(event_info) def irq__irq_handler_entry(name, context, cpu, sec, nsec, pid, comm, irq, irq_name): event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, irq, irq_name) all_event_list.append(event_info) def irq__irq_handler_exit(name, context, cpu, sec, nsec, pid, comm, irq, ret): event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, irq, ret) all_event_list.append(event_info) def napi__napi_poll(name, context, cpu, sec, nsec, pid, comm, napi, dev_name): event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, napi, dev_name) all_event_list.append(event_info) def net__netif_receive_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr, skblen, dev_name): event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, skbaddr, skblen, dev_name) all_event_list.append(event_info) def net__netif_rx(name, context, cpu, sec, nsec, pid, comm, skbaddr, skblen, dev_name): event_info = 
(name, context, cpu, nsecs(sec, nsec), pid, comm, skbaddr, skblen, dev_name) all_event_list.append(event_info) def net__net_dev_queue(name, context, cpu, sec, nsec, pid, comm, skbaddr, skblen, dev_name): event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, skbaddr, skblen, dev_name) all_event_list.append(event_info) def net__net_dev_xmit(name, context, cpu, sec, nsec, pid, comm, skbaddr, skblen, rc, dev_name): event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, skbaddr, skblen, rc ,dev_name) all_event_list.append(event_info) def skb__kfree_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr, protocol, location): event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, skbaddr, protocol, location) all_event_list.append(event_info) def skb__consume_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr): event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, skbaddr) all_event_list.append(event_info) def skb__skb_copy_datagram_iovec(name, context, cpu, sec, nsec, pid, comm, skbaddr, skblen): event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, skbaddr, skblen) all_event_list.append(event_info) def handle_irq_handler_entry(event_info): (name, context, cpu, time, pid, comm, irq, irq_name) = event_info if cpu not in irq_dic.keys(): irq_dic[cpu] = [] irq_record = {'irq':irq, 'name':irq_name, 'cpu':cpu, 'irq_ent_t':time} irq_dic[cpu].append(irq_record) def handle_irq_handler_exit(event_info): (name, context, cpu, time, pid, comm, irq, ret) = event_info if cpu not in irq_dic.keys(): return irq_record = irq_dic[cpu].pop() if irq != irq_record['irq']: return irq_record.update({'irq_ext_t':time}) # if an irq doesn't include NET_RX softirq, drop. 
if 'event_list' in irq_record.keys(): irq_dic[cpu].append(irq_record) def handle_irq_softirq_raise(event_info): (name, context, cpu, time, pid, comm, vec) = event_info if cpu not in irq_dic.keys() \ or len(irq_dic[cpu]) == 0: return irq_record = irq_dic[cpu].pop() if 'event_list' in irq_record.keys(): irq_event_list = irq_record['event_list'] else: irq_event_list = [] irq_event_list.append({'time':time, 'event':'sirq_raise'}) irq_record.update({'event_list':irq_event_list}) irq_dic[cpu].append(irq_record) def handle_irq_softirq_entry(event_info): (name, context, cpu, time, pid, comm, vec) = event_info net_rx_dic[cpu] = {'sirq_ent_t':time, 'event_list':[]} def handle_irq_softirq_exit(event_info): (name, context, cpu, time, pid, comm, vec) = event_info irq_list = [] event_list = 0 if cpu in irq_dic.keys(): irq_list = irq_dic[cpu] del irq_dic[cpu] if cpu in net_rx_dic.keys(): sirq_ent_t = net_rx_dic[cpu]['sirq_ent_t'] event_list = net_rx_dic[cpu]['event_list'] del net_rx_dic[cpu] if irq_list == [] or event_list == 0: return rec_data = {'sirq_ent_t':sirq_ent_t, 'sirq_ext_t':time, 'irq_list':irq_list, 'event_list':event_list} # merge information realted to a NET_RX softirq receive_hunk_list.append(rec_data) def handle_napi_poll(event_info): (name, context, cpu, time, pid, comm, napi, dev_name) = event_info if cpu in net_rx_dic.keys(): event_list = net_rx_dic[cpu]['event_list'] rec_data = {'event_name':'napi_poll', 'dev':dev_name, 'event_t':time} event_list.append(rec_data) def handle_netif_rx(event_info): (name, context, cpu, time, pid, comm, skbaddr, skblen, dev_name) = event_info if cpu not in irq_dic.keys() \ or len(irq_dic[cpu]) == 0: return irq_record = irq_dic[cpu].pop() if 'event_list' in irq_record.keys(): irq_event_list = irq_record['event_list'] else: irq_event_list = [] irq_event_list.append({'time':time, 'event':'netif_rx', 'skbaddr':skbaddr, 'skblen':skblen, 'dev_name':dev_name}) irq_record.update({'event_list':irq_event_list}) 
irq_dic[cpu].append(irq_record) def handle_netif_receive_skb(event_info): global of_count_rx_skb_list (name, context, cpu, time, pid, comm, skbaddr, skblen, dev_name) = event_info if cpu in net_rx_dic.keys(): rec_data = {'event_name':'netif_receive_skb', 'event_t':time, 'skbaddr':skbaddr, 'len':skblen} event_list = net_rx_dic[cpu]['event_list'] event_list.append(rec_data) rx_skb_list.insert(0, rec_data) if len(rx_skb_list) > buffer_budget: rx_skb_list.pop() of_count_rx_skb_list += 1 def handle_net_dev_queue(event_info): global of_count_tx_queue_list (name, context, cpu, time, pid, comm, skbaddr, skblen, dev_name) = event_info skb = {'dev':dev_name, 'skbaddr':skbaddr, 'len':skblen, 'queue_t':time} tx_queue_list.insert(0, skb) if len(tx_queue_list) > buffer_budget: tx_queue_list.pop() of_count_tx_queue_list += 1 def handle_net_dev_xmit(event_info): global of_count_tx_xmit_list (name, context, cpu, time, pid, comm, skbaddr, skblen, rc, dev_name) = event_info if rc == 0: # NETDEV_TX_OK for i in range(len(tx_queue_list)): skb = tx_queue_list[i] if skb['skbaddr'] == skbaddr: skb['xmit_t'] = time tx_xmit_list.insert(0, skb) del tx_queue_list[i] if len(tx_xmit_list) > buffer_budget: tx_xmit_list.pop() of_count_tx_xmit_list += 1 return def handle_kfree_skb(event_info): (name, context, cpu, time, pid, comm, skbaddr, protocol, location) = event_info for i in range(len(tx_queue_list)): skb = tx_queue_list[i] if skb['skbaddr'] == skbaddr: del tx_queue_list[i] return for i in range(len(tx_xmit_list)): skb = tx_xmit_list[i] if skb['skbaddr'] == skbaddr: skb['free_t'] = time tx_free_list.append(skb) del tx_xmit_list[i] return for i in range(len(rx_skb_list)): rec_data = rx_skb_list[i] if rec_data['skbaddr'] == skbaddr: rec_data.update({'handle':"kfree_skb", 'comm':comm, 'pid':pid, 'comm_t':time}) del rx_skb_list[i] return def handle_consume_skb(event_info): (name, context, cpu, time, pid, comm, skbaddr) = event_info for i in range(len(tx_xmit_list)): skb = tx_xmit_list[i] if 
skb['skbaddr'] == skbaddr: skb['free_t'] = time tx_free_list.append(skb) del tx_xmit_list[i] return def handle_skb_copy_datagram_iovec(event_info): (name, context, cpu, time, pid, comm, skbaddr, skblen) = event_info for i in range(len(rx_skb_list)): rec_data = rx_skb_list[i] if skbaddr == rec_data['skbaddr']: rec_data.update({'handle':"skb_copy_datagram_iovec", 'comm':comm, 'pid':pid, 'comm_t':time}) del rx_skb_list[i] return
gpl-2.0
Subito/ansible-modules-extras
cloud/amazon/route53_zone.py
37
5487
#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

DOCUMENTATION = '''
module: route53_zone
short_description: add or delete Route53 zones
description:
    - Creates and deletes Route53 private and public zones
version_added: "2.0"
options:
    zone:
        description:
            - "The DNS zone record (eg: foo.com.)"
        required: true
    state:
        description:
            - whether or not the zone should exist or not
        required: false
        default: "present"
        choices: [ "present", "absent" ]
    vpc_id:
        description:
            - The VPC ID the zone should be a part of (if this is going to be a private zone)
        required: false
        default: null
    vpc_region:
        description:
            - The VPC Region the zone should be a part of (if this is going to be a private zone)
        required: false
        default: null
    comment:
        description:
            - Comment associated with the zone
        required: false
        default: ''
extends_documentation_fragment: aws
author: "Christopher Troup (@minichate)"
'''

import time

try:
    import boto
    import boto.ec2
    from boto import route53
    from boto.route53 import Route53Connection
    from boto.route53.zone import Zone
    HAS_BOTO = True
except ImportError:
    HAS_BOTO = False


def main():
    """Create or delete a Route53 hosted zone (public or private).

    Exits through module.exit_json/fail_json; never returns normally.
    """
    module = AnsibleModule(
        argument_spec=dict(
            zone=dict(required=True),
            state=dict(default='present', choices=['present', 'absent']),
            vpc_id=dict(default=None),
            vpc_region=dict(default=None),
            comment=dict(default=''),
        )
    )

    if not HAS_BOTO:
        module.fail_json(msg='boto required for this module')

    zone_in = module.params.get('zone').lower()
    state = module.params.get('state').lower()
    vpc_id = module.params.get('vpc_id')
    vpc_region = module.params.get('vpc_region')
    comment = module.params.get('comment')

    # A zone is private only when both VPC parameters are supplied.
    private_zone = vpc_id is not None and vpc_region is not None

    _, _, aws_connect_kwargs = get_aws_connection_info(module)

    # Connect to the route53 endpoint.
    # NOTE: "except ... as e" (not the Python-2-only "except ..., e")
    # keeps the module parseable under Python 3.
    try:
        conn = Route53Connection(**aws_connect_kwargs)
    except boto.exception.BotoServerError as e:
        module.fail_json(msg=e.error_message)

    # Map zone names to their ids, restricted to the requested VPC when
    # a private zone was asked for.
    results = conn.get_all_hosted_zones()
    zones = {}

    for r53zone in results['ListHostedZonesResponse']['HostedZones']:
        zone_id = r53zone['Id'].replace('/hostedzone/', '')
        zone_details = conn.get_hosted_zone(zone_id)['GetHostedZoneResponse']
        if vpc_id and 'VPCs' in zone_details:
            # this is to deal with this boto bug:
            # https://github.com/boto/boto/pull/2882
            if isinstance(zone_details['VPCs'], dict):
                if zone_details['VPCs']['VPC']['VPCId'] == vpc_id:
                    zones[r53zone['Name']] = zone_id
            else:
                # Forward compatibility for when boto fixes that bug
                if vpc_id in [v['VPCId'] for v in zone_details['VPCs']]:
                    zones[r53zone['Name']] = zone_id
        else:
            zones[r53zone['Name']] = zone_id

    record = {
        'private_zone': private_zone,
        'vpc_id': vpc_id,
        'vpc_region': vpc_region,
        'comment': comment,
    }

    if state == 'present' and zone_in in zones:
        if private_zone:
            details = conn.get_hosted_zone(zones[zone_in])

            if 'VPCs' not in details['GetHostedZoneResponse']:
                module.fail_json(
                    msg="Can't change VPC from public to private"
                )

            vpc_details = details['GetHostedZoneResponse']['VPCs']['VPC']
            current_vpc_id = vpc_details['VPCId']
            current_vpc_region = vpc_details['VPCRegion']

            # The VPC association of an existing private zone is immutable.
            if current_vpc_id != vpc_id:
                module.fail_json(
                    msg="Can't change VPC ID once a zone has been created"
                )
            if current_vpc_region != vpc_region:
                module.fail_json(
                    msg="Can't change VPC Region once a zone has been created"
                )

        record['zone_id'] = zones[zone_in]
        record['name'] = zone_in
        module.exit_json(changed=False, set=record)

    elif state == 'present':
        result = conn.create_hosted_zone(zone_in, **record)
        hosted_zone = result['CreateHostedZoneResponse']['HostedZone']
        zone_id = hosted_zone['Id'].replace('/hostedzone/', '')

        record['zone_id'] = zone_id
        record['name'] = zone_in
        module.exit_json(changed=True, set=record)

    elif state == 'absent' and zone_in in zones:
        conn.delete_hosted_zone(zones[zone_in])
        module.exit_json(changed=True)

    elif state == 'absent':
        module.exit_json(changed=False)

from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *

main()
gpl-3.0
Nosferatul/coala
coalib/core/Core.py
1
17806
import asyncio import concurrent.futures import functools import logging from coalib.core.DependencyTracker import DependencyTracker from coalib.core.Graphs import traverse_graph from coalib.core.PersistentHash import persistent_hash def group(iterable, key=lambda x: x): """ Groups elements (out-of-order) together in the given iterable. Supports non-hashable keys by comparing keys with ``==``. Accessing the groups is supported using the iterator as follows: >>> for key, elements in group([1, 3, 7, 1, 2, 1, 2]): ... print(key, list(elements)) 1 [1, 1, 1] 3 [3] 7 [7] 2 [2, 2] You can control how elements are grouped by using the ``key`` parameter. It takes a function with a single parameter and maps to the group. >>> data = [(1, 2), (3, 4), (1, 9), (2, 10), (1, 11), (7, 2), (10, 2), ... (2, 1), (3, 7), (4, 5)] >>> for key, elements in group(data, key=sum): ... print(key, list(elements)) 3 [(1, 2), (2, 1)] 7 [(3, 4)] 10 [(1, 9), (3, 7)] 12 [(2, 10), (1, 11), (10, 2)] 9 [(7, 2), (4, 5)] :param iterable: The iterable to group elements in. :param key: The key-function mapping an element to its group. :return: An iterable yielding tuples with ``key, elements``, where ``elements`` is also an iterable yielding the elements grouped under ``key``. """ keys = [] elements = [] for element in iterable: k = key(element) try: position = keys.index(k) element_list = elements[position] except ValueError: keys.append(k) element_list = [] elements.append(element_list) element_list.append(element) return zip(keys, elements) def initialize_dependencies(bears): """ Initializes and returns a ``DependencyTracker`` instance together with a set of bears ready for scheduling. This function acquires, processes and registers bear dependencies accordingly using a consumer-based system, where each dependency bear has only a single instance per section and file-dictionary. The bears set returned accounts for bears that have dependencies and excludes them accordingly. 
Dependency bears that have themselves no further dependencies are included so the dependency chain can be processed correctly. :param bears: The set of instantiated bears to run that serve as an entry-point. :return: A tuple with ``(dependency_tracker, bears_to_schedule)``. """ # Pre-collect bears in a set as we use them more than once. Especially # remove duplicate instances. bears = set(bears) dependency_tracker = DependencyTracker() # For a consumer-based system, we have a situation which can be visualized # with a graph. Each dependency relation from one bear-type to another # bear-type is represented with an arrow, starting from the dependent # bear-type and ending at the dependency: # # (section1, file_dict1) (section1, file_dict2) (section2, file_dict2) # | | | | # V V V V # bear1 bear2 bear3 bear4 # | | | | # V V | | # BearType1 BearType2 -----------------------| # | | | # | | V # ---------------------------------------------> BearType3 # # We need to traverse this graph and instantiate dependency bears # accordingly, one per section. # Group bears by sections and file-dictionaries. These will serve as # entry-points for the dependency-instantiation-graph. grouping = group(bears, key=lambda bear: (bear.section, bear.file_dict)) for (section, file_dict), bears_per_section in grouping: # Pre-collect bears as the iterator only works once. bears_per_section = list(bears_per_section) # Now traverse each edge of the graph, and instantiate a new dependency # bear if not already instantiated. For the entry point bears, we hack # in identity-mappings because those are already instances. Also map # the types of the instantiated bears to those instances, as if the # user already supplied an instance of a dependency, we reuse it # accordingly. 
type_to_instance_map = {} for bear in bears_per_section: type_to_instance_map[bear] = bear type_to_instance_map[type(bear)] = bear def get_successive_nodes_and_track(bear): for dependency_bear_type in bear.BEAR_DEPS: if dependency_bear_type not in type_to_instance_map: dependency_bear = dependency_bear_type(section, file_dict) type_to_instance_map[dependency_bear_type] = dependency_bear dependency_tracker.add( type_to_instance_map[dependency_bear_type], bear) # Return the dependencies of the instances instead of the types, so # bears are capable to specify dependencies at runtime. return (type_to_instance_map[dependency_bear_type] for dependency_bear_type in bear.BEAR_DEPS) traverse_graph(bears_per_section, get_successive_nodes_and_track) # Get all bears that aren't resolved and exclude those from scheduler set. bears -= {bear for bear in bears if dependency_tracker.get_dependencies(bear)} # Get all bears that have no further dependencies and shall be # scheduled additionally. for dependency in dependency_tracker.dependencies: if not dependency_tracker.get_dependencies(dependency): bears.add(dependency) return dependency_tracker, bears class Session: """ Maintains a session for a coala execution. For each session, there are set of bears to run along with a callback function, which is called when results are available. Dependencies of bears (provided via ``bear.BEAR_DEPS``) are automatically handled. If BearA requires BearB as dependency, then on running BearA, first BearB will be executed, followed by BearA. """ def __init__(self, bears, result_callback, cache=None, executor=None): """ :param bears: The bear instances to run. :param result_callback: A callback function which is called when results are available. Must have following signature:: def result_callback(result): pass :param cache: A cache bears can use to speed up runs. If ``None``, no cache will be used. 
The cache stores the results that were returned last time from the parameters passed to ``execute_task`` in bears. If the section and parameters to ``execute_task`` are the same from a previous run, the cache will be queried instead of executing ``execute_task``. The cache has to be a dictionary-like object, that maps bear types to respective cache-tables. The cache-tables itself are dictionary-like objects that map hash-values (generated by ``PersistentHash.persistent_hash`` from the task objects) to actual bear results. When bears are about to be scheduled, the core performs a cache-lookup. If there's a hit, the results stored in the cache are returned and the task won't be scheduled. In case of a miss, ``execute_task`` is called normally in the executor. :param executor: Custom executor used to run the bears. If ``None``, a ``ProcessPoolExecutor`` is used using as many processes as cores available on the system. Note that a passed custom executor is closed after the core has finished. """ self.bears = bears self.result_callback = result_callback self.cache = cache # Set up event loop and executor. self.event_loop = asyncio.SelectorEventLoop() self.executor = (concurrent.futures.ProcessPoolExecutor() if executor is None else executor) self.running_futures = {} # Initialize dependency tracking. self.dependency_tracker, self.bears_to_schedule = ( initialize_dependencies(self.bears)) def run(self): """ Runs the coala session. """ try: if self.bears: self._schedule_bears(self.bears_to_schedule) try: self.event_loop.run_forever() finally: self.event_loop.close() finally: self.executor.shutdown() def _schedule_bears(self, bears): """ Schedules the tasks of bears. :param bears: A list of bear instances to be scheduled onto the process pool. """ bears_without_tasks = [] for bear in bears: if self.dependency_tracker.get_dependencies( bear): # pragma: no cover logging.warning( 'Dependencies for {!r} not yet resolved, holding back. 
' 'This should not happen, the dependency tracking system ' 'should be smarter. Please report this to the developers.' .format(bear)) else: futures = set() for task in bear.generate_tasks(): bear_args, bear_kwargs = task if self.cache is None: future = self.event_loop.run_in_executor( self.executor, bear.execute_task, bear_args, bear_kwargs) else: # Execute the cache lookup in the default # ThreadPoolExecutor, so cache updates reflect properly # in the main process. future = self.event_loop.run_in_executor( None, self._execute_task_with_cache, bear, task) futures.add(future) self.running_futures[bear] = futures # Cleanup bears without tasks after all bears had the chance to # schedule their tasks. Not doing so might stop the run too # early, as the cleanup is also responsible for stopping the # event-loop when no more tasks do exist. if not futures: logging.debug('{!r} scheduled no tasks.'.format(bear)) bears_without_tasks.append(bear) continue for future in futures: future.add_done_callback(functools.partial( self._finish_task, bear)) logging.debug('Scheduled {!r} (tasks: {})'.format( bear, len(futures))) for bear in bears_without_tasks: self._cleanup_bear(bear) def _cleanup_bear(self, bear): """ Cleans up state of an ongoing run for a bear. - If the given bear has no running tasks left: - Resolves its dependencies. - Schedules dependant bears. - Removes the bear from the ``running_tasks`` dict. - Checks whether there are any remaining tasks, and quits the event loop accordingly if none are left. :param bear: The bear to clean up state for. """ if not self.running_futures[bear]: resolved_bears = self.dependency_tracker.resolve(bear) if resolved_bears: self._schedule_bears(resolved_bears) del self.running_futures[bear] if not self.running_futures: # Check the DependencyTracker additionally for remaining # dependencies. 
resolved = self.dependency_tracker.are_dependencies_resolved if not resolved: # pragma: no cover logging.warning( 'Core finished with run, but it seems some dependencies ' 'were unresolved: {}. Ignoring them, but this is a bug, ' 'please report it to the developers.'.format(', '.join( repr(dependant) + ' depends on ' + repr(dependency) for dependency, dependant in self.dependency_tracker))) self.event_loop.stop() def _execute_task_with_cache(self, bear, task): if type(bear) not in self.cache: bear_cache = {} self.cache[type(bear)] = bear_cache else: bear_cache = self.cache[type(bear)] fingerprint = persistent_hash(task) if fingerprint in bear_cache: results = bear_cache[fingerprint] else: bear_args, bear_kwargs = task future = asyncio.run_coroutine_threadsafe( asyncio.wait_for( self.event_loop.run_in_executor( self.executor, bear.execute_task, bear_args, bear_kwargs), None, loop=self.event_loop), loop=self.event_loop) results = future.result() bear_cache[fingerprint] = results return results def _finish_task(self, bear, future): """ The callback for when a task of a bear completes. It is responsible for checking if the bear completed its execution and the handling of the result generated by the task. It also schedules new tasks if dependencies get resolved. :param bear: The bear that the task belongs to. :param future: The future that completed. """ try: results = future.result() for dependant in self.dependency_tracker.get_dependants(bear): dependant.dependency_results[type(bear)] += results except Exception as ex: # FIXME Try to display only the relevant traceback of the bear if # FIXME error occurred there, not the complete event-loop # FIXME traceback. logging.error('An exception was thrown during bear execution.', exc_info=ex) results = None # Unschedule/resolve dependent bears, as these can't run any more. 
dependants = self.dependency_tracker.get_all_dependants(bear) for dependant in dependants: self.dependency_tracker.resolve(dependant) logging.debug('Following dependent bears were unscheduled: ' + ', '.join(repr(dependant) for dependant in dependants)) finally: self.running_futures[bear].remove(future) self._cleanup_bear(bear) if results is not None: for result in results: try: # FIXME Long operations on the result-callback could block # FIXME the scheduler significantly. It should be # FIXME possible to schedule new Python Threads on the # FIXME given event_loop and process the callback there. self.result_callback(result) except Exception as ex: # FIXME Try to display only the relevant traceback of the # FIXME result handler if error occurred there, not the # FIXME complete event-loop traceback. logging.error( 'An exception was thrown during result-handling.', exc_info=ex) def run(bears, result_callback, cache=None, executor=None): """ Initiates a session with the given parameters and runs it. :param bears: The bear instances to run. :param result_callback: A callback function which is called when results are available. Must have following signature:: def result_callback(result): pass :param cache: A cache bears can use to speed up runs. If ``None``, no cache will be used. The cache stores the results that were returned last time from the parameters passed to ``execute_task`` in bears. If the section and parameters to ``execute_task`` are the same from a previous run, the cache will be queried instead of executing ``execute_task``. The cache has to be a dictionary-like object, that maps bear types to respective cache-tables. The cache-tables itself are dictionary-like objects that map hash-values (generated by ``PersistentHash.persistent_hash`` from the task objects) to actual bear results. When bears are about to be scheduled, the core performs a cache-lookup. If there's a hit, the results stored in the cache are returned and the task won't be scheduled. 
In case of a miss, ``execute_task`` is called normally in the executor. :param executor: Custom executor used to run the bears. If ``None``, a ``ProcessPoolExecutor`` is used using as many processes as cores available on the system. """ Session(bears, result_callback, cache, executor).run()
agpl-3.0
lebauce/artub
bike/transformer/WordRewriter.py
2
1818
from bike.parsing.load import getSourceNode
from bike.transformer.undo import getUndoStack
from bike.transformer.save import queueFileToSave

import re


# This class maintains a set of changed lines to the original source
# nodes. This is important because the act of changing a line messes
# up the coordinates on which renames are done.
# Commit writes the changes back to the source nodes
class WordRewriter:
    """Accumulates word-level renames and applies them in a single commit.

    Rewrites are keyed by (filename, lineno, column) so that several
    renames on the same line can be recorded without each edit shifting
    the coordinates of the next one.
    """

    def __init__(self):
        # filename -> {lineno -> {column -> token text}} of pending rewrites
        self.modifiedsrc = {}

    def rewriteString(self, srcnode, lineno, colno, newname):
        """Record that the word starting at (lineno, colno) becomes newname.

        The first touch of a file snapshots its source onto the undo stack;
        the first touch of a line tokenizes it into a column-keyed dict.
        """
        filename = srcnode.filename
        # "in" replaces the Python-2-only dict.has_key() so this also
        # runs on Python 3.
        if filename not in self.modifiedsrc:
            getUndoStack().addSource(filename, srcnode.getSource())
            self.modifiedsrc[filename] = {}
        if lineno not in self.modifiedsrc[filename]:
            line = srcnode.getLines()[lineno - 1]
            self.modifiedsrc[filename][lineno] = self._lineToDict(line)
        self.modifiedsrc[filename][lineno][colno] = newname

    # writes all the changes back to the src nodes
    def commit(self):
        for filename in self.modifiedsrc.keys():
            srcnode = getSourceNode(filename)
            for lineno in self.modifiedsrc[filename]:
                lines = srcnode.getLines()
                lines[lineno - 1] = self._dictToLine(
                    self.modifiedsrc[filename][lineno])
            queueFileToSave(filename, "".join(srcnode.getLines()))

    # this function creates a dictionary with each word referenced by
    # its column position in the original line
    def _lineToDict(self, line):
        words = re.split(r"(\w+)", line)
        h = {}
        i = 0
        for word in words:
            h[i] = word
            i += len(word)
        return h

    def _dictToLine(self, d):
        # sorted() over the keys replaces the Python-2-only
        # "cols = d.keys(); cols.sort()" idiom (dict views have no .sort()).
        return "".join(d[colno] for colno in sorted(d))
gpl-2.0
xzYue/odoo
addons/base_report_designer/__openerp__.py
260
1704
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { 'name': 'OpenOffice Report Designer', 'version': '0.1', 'category': 'Reporting', 'description': """ This module is used along with OpenERP OpenOffice Plugin. ========================================================= This module adds wizards to Import/Export .sxw report that you can modify in OpenOffice. Once you have modified it you can upload the report using the same wizard. """, 'author': 'OpenERP SA', 'website': 'https://www.odoo.com', 'depends': ['base'], 'data': ['wizard/base_report_design_view.xml' , 'base_report_designer_installer.xml'], 'demo': [], 'installable': True, 'auto_install': False, } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
hydrospanner/DForurm
DForurm/env/Lib/site-packages/django/contrib/gis/serializers/geojson.py
36
2813
from __future__ import unicode_literals from django.contrib.gis.gdal import CoordTransform, SpatialReference from django.core.serializers.base import SerializerDoesNotExist from django.core.serializers.json import Serializer as JSONSerializer class Serializer(JSONSerializer): """ Convert a queryset to GeoJSON, http://geojson.org/ """ def _init_options(self): super(Serializer, self)._init_options() self.geometry_field = self.json_kwargs.pop('geometry_field', None) self.srid = self.json_kwargs.pop('srid', 4326) if (self.selected_fields is not None and self.geometry_field is not None and self.geometry_field not in self.selected_fields): self.selected_fields = list(self.selected_fields) + [self.geometry_field] def start_serialization(self): self._init_options() self._cts = {} # cache of CoordTransform's self.stream.write( '{"type": "FeatureCollection", "crs": {"type": "name", "properties": {"name": "EPSG:%d"}},' ' "features": [' % self.srid) def end_serialization(self): self.stream.write(']}') def start_object(self, obj): super(Serializer, self).start_object(obj) self._geometry = None if self.geometry_field is None: # Find the first declared geometry field for field in obj._meta.fields: if hasattr(field, 'geom_type'): self.geometry_field = field.name break def get_dump_object(self, obj): data = { "type": "Feature", "properties": self._current, } if ((self.selected_fields is None or 'pk' in self.selected_fields) and 'pk' not in data["properties"]): data["properties"]["pk"] = obj._meta.pk.value_to_string(obj) if self._geometry: if self._geometry.srid != self.srid: # If needed, transform the geometry in the srid of the global geojson srid if self._geometry.srid not in self._cts: srs = SpatialReference(self.srid) self._cts[self._geometry.srid] = CoordTransform(self._geometry.srs, srs) self._geometry.transform(self._cts[self._geometry.srid]) data["geometry"] = eval(self._geometry.geojson) else: data["geometry"] = None return data def handle_field(self, obj, field): if 
field.name == self.geometry_field: self._geometry = field.value_from_object(obj) else: super(Serializer, self).handle_field(obj, field) class Deserializer(object): def __init__(self, *args, **kwargs): raise SerializerDoesNotExist("geojson is a serialization-only serializer")
mit
Belgabor/django
django/contrib/localflavor/kw/forms.py
310
1988
""" Kuwait-specific Form helpers """ import re from datetime import date from django.core.validators import EMPTY_VALUES from django.forms import ValidationError from django.forms.fields import Field, RegexField from django.utils.translation import gettext as _ id_re = re.compile(r'^(?P<initial>\d{1})(?P<yy>\d\d)(?P<mm>\d\d)(?P<dd>\d\d)(?P<mid>\d{4})(?P<checksum>\d{1})') class KWCivilIDNumberField(Field): """ Kuwaiti Civil ID numbers are 12 digits, second to seventh digits represents the person's birthdate. Checks the following rules to determine the validty of the number: * The number consist of 12 digits. * The birthdate of the person is a valid date. * The calculated checksum equals to the last digit of the Civil ID. """ default_error_messages = { 'invalid': _('Enter a valid Kuwaiti Civil ID number'), } def has_valid_checksum(self, value): weight = (2, 1, 6, 3, 7, 9, 10, 5, 8, 4, 2) calculated_checksum = 0 for i in range(11): calculated_checksum += int(value[i]) * weight[i] remainder = calculated_checksum % 11 checkdigit = 11 - remainder if checkdigit != int(value[11]): return False return True def clean(self, value): super(KWCivilIDNumberField, self).clean(value) if value in EMPTY_VALUES: return u'' if not re.match(r'^\d{12}$', value): raise ValidationError(self.error_messages['invalid']) match = re.match(id_re, value) if not match: raise ValidationError(self.error_messages['invalid']) gd = match.groupdict() try: d = date(int(gd['yy']), int(gd['mm']), int(gd['dd'])) except ValueError: raise ValidationError(self.error_messages['invalid']) if not self.has_valid_checksum(value): raise ValidationError(self.error_messages['invalid']) return value
bsd-3-clause
tst-ppenev/earthenterprise
earth_enterprise/src/server/wsgi/serve/snippets_db_manager.py
1
4640
#!/usr/bin/python
#
# Copyright 2017 Google Inc.
# Copyright 2018 Open GEE Contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""The snippets_db_manager module.

Classes for working with gesnippets database.
"""

import logging

from common import postgres_manager
from common import postgres_properties
from serve import constants
from serve import http_io

logger = logging.getLogger("ge_snippets")


class SnippetsDbManager(object):
  """Class for managing gesnippets database.

  Snippet set table name: "snippet_set_table", fields: "name", "content".
  """

  def __init__(self):
    """Inits snippets DB manager."""
    # Note: added as a good practice since we use SnippetsDbManager as a base
    # class.
    super(SnippetsDbManager, self).__init__()

    # Init database connection.
    # Connecting through the local Unix socket directory, not TCP.
    self._host = '/tmp'
    self._snippets_db_name = "geendsnippet"
    self._db_user = "geuser"
    postgres_prop = postgres_properties.PostgresProperties()
    self._port = postgres_prop.GetPortNumber()

    # Create DB connection to gesnippets database.
    self._snippets_db_connection = postgres_manager.PostgresConnection(
        self._snippets_db_name, self._db_user, self._host, self._port, logger)

  def _DbQuery(self, query_string, parameters=None):
    """Handles DB query request to gesnippets database.

    Args:
      query_string: SQL query statement.
      parameters: sequence of parameters to populate into placeholders.
    Returns:
      Results as list of tuples (rows of fields).
    Raises:
      psycopg2.Error/Warning in case of error.
    """
    return self._snippets_db_connection.Query(query_string, parameters)

  def _DbModify(self, command_string, parameters=None):
    """Handles DB modify request to gesnippets database.

    Args:
      command_string: SQL UPDATE/INSERT/DELETE command string.
      parameters: sequence of parameters to populate into placeholders.
    Returns:
      Number of rows that sql command affected.
    Raises:
      psycopg2.Error/Warning in case of error.
    """
    return self._snippets_db_connection.Modify(command_string, parameters)

  def HandlePingRequest(self, request, response):
    """Handles ping server request.

    Args:
      request: request object.
      response: response object
    Raises:
      psycopg2.Error/Warning.
    """
    cmd = request.GetParameter(constants.CMD)
    assert cmd == "Ping"

    # Fire off a ping query to make sure we have a valid db connection.
    query_string = "SELECT 'ping'"
    results = self._DbQuery(query_string)

    # NOTE(review): _DbQuery is documented to return rows as tuples, which
    # would make results[0] equal to ('ping',), not 'ping' — this success
    # branch then never triggers. _SnippetSetExists below instead assumes
    # single-column results come back flattened. Confirm the actual return
    # shape of postgres_manager.PostgresConnection.Query.
    if results and results[0] == "ping":
      http_io.ResponseWriter.AddBodyElement(
          response, constants.HDR_STATUS_CODE, constants.STATUS_SUCCESS)
    else:
      http_io.ResponseWriter.AddBodyElement(
          response, constants.HDR_STATUS_MESSAGE,
          "Cannot ping geendsnippet database.")
      http_io.ResponseWriter.AddBodyElement(
          response, constants.HDR_STATUS_CODE, constants.STATUS_FAILURE)

  def GetSnippetSetDetails(self, snippets_set_name):
    """Gets snippets set details (content) by name.

    Args:
      snippets_set_name: snippets set name.
    Raises:
      psycopg2.Error/Warning.
    Returns:
      Snippets set content from gesnippets database or None if snippets set
      is not found.
    """
    # Get snippet set content from snippet_set_table.
    query_string = "SELECT content FROM snippet_set_table WHERE name = %s"
    result = self._DbQuery(query_string, (snippets_set_name,))
    if not result:
      return None

    # NOTE(review): per the _DbQuery docstring this returns the first row
    # (a one-element tuple), not the bare content string — verify callers
    # expect that shape.
    assert isinstance(result, list) and len(result) == 1
    return result[0]

  def _SnippetSetExists(self, snippets_set_name):
    """Checks if specific snippets set exists.

    Args:
      snippets_set_name: snippets set name.
    Returns:
      Whether specific snippets set exists.
    """
    query_string = (
        "SELECT EXISTS (SELECT TRUE FROM snippet_set_table WHERE name = %s)")
    result = self._DbQuery(query_string, (snippets_set_name,))
    if not result:
      return False

    # NOTE(review): this assert expects a flattened bool at result[0],
    # while the _DbQuery docstring promises rows as tuples — one of the
    # two assumptions is wrong; confirm against PostgresConnection.Query.
    assert isinstance(result[0], bool)
    return result[0]


def main():
  pass


if __name__ == "__main__":
  main()
apache-2.0
nazo/ansible
lib/ansible/modules/web_infrastructure/ejabberd_user.py
48
7599
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2013, Peter Sprygada <sprygada@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
#

ANSIBLE_METADATA = {'metadata_version': '1.0',
                    'status': ['preview'],
                    'supported_by': 'community'}

DOCUMENTATION = '''
---
module: ejabberd_user
version_added: "1.5"
author: "Peter Sprygada (@privateip)"
short_description: Manages users for ejabberd servers
requirements:
    - ejabberd with mod_admin_extra
description:
    - This module provides user management for ejabberd servers
options:
    username:
        description:
            - the name of the user to manage
        required: true
    host:
        description:
            - the ejabberd host associated with this username
        required: true
    password:
        description:
            - the password to assign to the username
        required: false
    logging:
        description:
            - enables or disables the local syslog facility for this module
        required: false
        default: false
        choices: [ 'true', 'false', 'yes', 'no' ]
    state:
        description:
            - describe the desired state of the user to be managed
        required: false
        default: 'present'
        choices: [ 'present', 'absent' ]
notes:
    - Password parameter is required for state == present only
    - Passwords must be stored in clear text for this release
    - The ejabberd configuration file must include mod_admin_extra as a module.
'''

EXAMPLES = '''
# Example playbook entries using the ejabberd_user module to manage users state.

- name: create a user if it does not exists
  ejabberd_user:
    username: test
    host: server
    password: password

- name: delete a user if it exists
  ejabberd_user:
    username: test
    host: server
    state: absent
'''

import syslog

from ansible.module_utils.pycompat24 import get_exception
from ansible.module_utils.basic import *


class EjabberdUserException(Exception):
    """ Base exception for EjabberdUser class object """
    pass


class EjabberdUser(object):
    """ This object represents a user resource for an ejabberd server.   The
    object manages user creation and deletion using ejabberdctl.  The following
    commands are currently supported:
        * ejabberdctl register
        * ejabberdctl deregister
    """

    def __init__(self, module):
        # The AnsibleModule instance supplies parameters and run_command.
        self.module = module
        self.logging = module.params.get('logging')
        self.state = module.params.get('state')
        self.host = module.params.get('host')
        self.user = module.params.get('username')
        self.pwd = module.params.get('password')

    @property
    def changed(self):
        """ Runs `ejabberdctl check_password` and returns its return code:
        0 (falsy) when the stored password matches the supplied one, and a
        non-zero (truthy) value when it differs or an attribute is missing.
        Despite the name, this is the raw return code, not a bool.
        """
        try:
            options = [self.user, self.host, self.pwd]
            (rc, out, err) = self.run_command('check_password', options)
        except EjabberdUserException:
            # NOTE(review): the captured exception is retrieved but unused;
            # the error is flattened into a generic (1, None, msg) triple.
            e = get_exception()
            (rc, out, err) = (1, None, "required attribute(s) missing")
        return rc

    @property
    def exists(self):
        """ This method will check to see if the supplied username exists for
        host specified.  If the user exists True is returned, otherwise False
        is returned
        """
        try:
            options = [self.user, self.host]
            (rc, out, err) = self.run_command('check_account', options)
        except EjabberdUserException:
            e = get_exception()
            (rc, out, err) = (1, None, "required attribute(s) missing")
        # check_account returns 0 when the account exists.
        return not bool(int(rc))

    def log(self, entry):
        """ This method will log information to the local syslog facility """
        if self.logging:
            syslog.openlog('ansible-%s' % self.module._name)
            syslog.syslog(syslog.LOG_NOTICE, entry)

    def run_command(self, cmd, options):
        """ This method will run the any command specified and return the
        returns using the Ansible common module
        """
        if not all(options):
            raise EjabberdUserException

        # NOTE(review): the command line is re-split on whitespace before
        # execution, so option values containing spaces (e.g. passwords)
        # would be broken into separate arguments — confirm whether such
        # values are possible here.
        cmd = 'ejabberdctl %s ' % cmd
        cmd += " ".join(options)
        self.log('command: %s' % cmd)
        return self.module.run_command(cmd.split())

    def update(self):
        """ The update method will update the credentials for the user provided
        """
        try:
            options = [self.user, self.host, self.pwd]
            (rc, out, err) = self.run_command('change_password', options)
        except EjabberdUserException:
            e = get_exception()
            (rc, out, err) = (1, None, "required attribute(s) missing")
        return (rc, out, err)

    def create(self):
        """ The create method will create a new user on the host with the
        password provided
        """
        try:
            options = [self.user, self.host, self.pwd]
            (rc, out, err) = self.run_command('register', options)
        except EjabberdUserException:
            e = get_exception()
            (rc, out, err) = (1, None, "required attribute(s) missing")
        return (rc, out, err)

    def delete(self):
        """ The delete method will delete the user from the host """
        try:
            options = [self.user, self.host]
            (rc, out, err) = self.run_command('unregister', options)
        except EjabberdUserException:
            e = get_exception()
            (rc, out, err) = (1, None, "required attribute(s) missing")
        return (rc, out, err)


def main():
    # Entry point: reconcile the requested state with the ejabberd server.
    module = AnsibleModule(
        argument_spec = dict(
            host=dict(default=None, type='str'),
            username=dict(default=None, type='str'),
            password=dict(default=None, type='str', no_log=True),
            state=dict(default='present', choices=['present', 'absent']),
            logging=dict(default=False, type='bool')
        ),
        supports_check_mode = True
    )

    obj = EjabberdUser(module)

    rc = None
    result = dict()

    if obj.state == 'absent':
        if obj.exists:
            if module.check_mode:
                module.exit_json(changed=True)
            (rc, out, err) = obj.delete()
            if rc != 0:
                module.fail_json(msg=err, rc=rc)

    elif obj.state == 'present':
        if not obj.exists:
            if module.check_mode:
                module.exit_json(changed=True)
            (rc, out, err) = obj.create()
        elif obj.changed:
            if module.check_mode:
                module.exit_json(changed=True)
            (rc, out, err) = obj.update()
        if rc is not None and rc != 0:
            module.fail_json(msg=err, rc=rc)

    if rc is None:
        result['changed'] = False
    else:
        result['changed'] = True

    module.exit_json(**result)


if __name__ == '__main__':
    main()
gpl-3.0
LittlePeng/redis-monitor
src/api/controller/SettingsController.py
4
1733
from BaseController import BaseController
from api.util import settings
import os


class SettingsController(BaseController):
    """Read and update the monitor's redis server list and SMS alert flags."""

    def get(self):
        """Write the configured servers (one per line) and the two SMS flags."""
        server_list = ""
        for server in settings.get_redis_servers():
            server_list += "%(server)s:%(port)s %(group)s %(instance)s\r\n" % server
        sms_repl = 0
        sms_stats = 0
        try:
            sms = settings.get_master_slave_sms_type()
            sms = sms.split(',')
            sms_repl = int(sms[0])
            sms_stats = int(sms[1])
        except Exception:
            # Settings may be missing or malformed; fall back to 0/0
            # (best-effort, matches previous behaviour).
            pass
        servers = {"servers": server_list, "sms1": sms_repl, "sms2": sms_stats}
        self.write(servers)

    def post(self):
        """Parse the submitted server list and SMS flags, then persist them.

        Expected line format: ``ip:port [group [instance]]``.
        Responds with ``{"status": 200}`` on success or
        ``{"status": 500, "error": ...}`` on any parse/save failure.
        """
        try:
            server_list = self.get_argument("servers")
            sms1 = int(self.get_argument("sms1"))
            sms2 = int(self.get_argument("sms2"))
            sms = "%s,%s" % (sms1, sms2)
            servers = []
            for server in server_list.split('\n'):
                # Strip the '\r' left behind by '\r\n' line endings so it does
                # not end up inside the group/instance fields, and skip blank
                # (e.g. trailing) lines instead of rejecting the whole post.
                server = server.strip()
                if not server:
                    continue
                eps = server.split(':')
                if len(eps) != 2:
                    raise Exception('server Ip format error.')
                ip = eps[0]
                eps2 = eps[1].split(' ')
                port = int(eps2[0])
                group = ''
                instance = ''
                if len(eps2) > 1:
                    group = eps2[1]
                if len(eps2) > 2:
                    instance = eps2[2]
                servers.append({'server': ip, 'port': port,
                                'group': group, 'instance': instance})
            settings.save_settings(servers, sms)
            self.write({"status": 200})
        except Exception as ex:
            # str(ex) instead of ex.message: the .message attribute is
            # deprecated (and removed in Python 3).
            self.write({"status": 500, "error": str(ex)})
mit
rajadhva/servo
python/mach/mach/commands/settings.py
96
1799
# This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this file, # You can obtain one at http://mozilla.org/MPL/2.0/. from __future__ import absolute_import, print_function, unicode_literals from textwrap import TextWrapper from mach.decorators import ( CommandProvider, Command, ) #@CommandProvider class Settings(object): """Interact with settings for mach. Currently, we only provide functionality to view what settings are available. In the future, this module will be used to modify settings, help people create configs via a wizard, etc. """ def __init__(self, context): self.settings = context.settings @Command('settings-list', category='devenv', description='Show available config settings.') def list_settings(self): """List available settings in a concise list.""" for section in sorted(self.settings): for option in sorted(self.settings[section]): short, full = self.settings.option_help(section, option) print('%s.%s -- %s' % (section, option, short)) @Command('settings-create', category='devenv', description='Print a new settings file with usage info.') def create(self): """Create an empty settings file with full documentation.""" wrapper = TextWrapper(initial_indent='# ', subsequent_indent='# ') for section in sorted(self.settings): print('[%s]' % section) print('') for option in sorted(self.settings[section]): short, full = self.settings.option_help(section, option) print(wrapper.fill(full)) print(';%s =' % option) print('')
mpl-2.0
ant6/Monte-Carlo-Modulators
mcm/peak_reader.py
1
1163
from itertools import zip_longest

import numpy as np

__all__ = ["read_one_peak"]


def transposed(matrix):
    """Return transposed matrix (list of lists).

    Works for non-square matrices as well; shorter rows are padded
    with None.

    >>> transposed( [[1,2,3], [3,4]] )
    [[1, 3], [2, 4], [3, None]]
    """
    return [list(column) for column in zip_longest(*matrix)]


def read_peak_database(peak_file):
    """Read a whitespace-separated peak database file.

    The first line of the given file is treated as labels and is ignored.
    Columns come in (domain, values) pairs; each pair becomes one peak,
    an array of (x, y) rows.
    """
    with open(peak_file, 'r') as handle:
        columns = transposed([line.split() for line in handle.readlines()])

    peaks = []
    for col in range(0, len(columns), 2):
        # drop the label row and the None padding added by transposed()
        domain = np.array([float(cell) for cell in columns[col][1:] if cell])
        values = np.array([float(cell) for cell in columns[col + 1][1:] if cell])
        peaks.append(np.array([domain, values]).T)
    return np.array(peaks)


def read_one_peak(f):
    """Load one comma-separated peak; returns the file's columns as rows."""
    return np.loadtxt(f, delimiter=',', unpack=True)
mit
cmarguel/skybot
bot.py
8
1256
#!/usr/bin/env python import os import Queue import sys import traceback import time sys.path += ['plugins'] # so 'import hook' works without duplication sys.path += ['lib'] os.chdir(sys.path[0] or '.') # do stuff relative to the install directory class Bot(object): def __init__(self): self.conns = {} self.persist_dir = os.path.abspath('persist') if not os.path.exists(self.persist_dir): os.mkdir(self.persist_dir) bot = Bot() print 'Loading plugins' # bootstrap the reloader eval(compile(open(os.path.join('core', 'reload.py'), 'U').read(), os.path.join('core', 'reload.py'), 'exec')) reload(init=True) print 'Connecting to IRC' try: config() if not hasattr(bot, 'config'): exit() except Exception, e: print 'ERROR: malformed config file:', e traceback.print_exc() sys.exit() print 'Running main loop' while True: reload() # these functions only do things config() # if changes have occured for conn in bot.conns.itervalues(): try: out = conn.out.get_nowait() main(conn, out) except Queue.Empty: pass while all(conn.out.empty() for conn in bot.conns.itervalues()): time.sleep(.1)
unlicense
SNoiraud/gramps
gramps/plugins/importer/importxml.py
2
132877
# # Gramps - a GTK+/GNOME based genealogy program # # Copyright (C) 2000-2007 Donald N. Allingham # Copyright (C) 200?-2013 Benny Malengier # Copyright (C) 2009 Douglas S. Blank # Copyright (C) 2010-2011 Nick Hall # Copyright (C) 2011 Michiel D. Nauta # Copyright (C) 2011 Tim G L Lyons # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # #------------------------------------------------------------------------- # # Standard Python Modules # #------------------------------------------------------------------------- import os import sys import time from xml.parsers.expat import ExpatError, ParserCreate from xml.sax.saxutils import escape from gramps.gen.const import URL_WIKISTRING from gramps.gen.const import GRAMPS_LOCALE as glocale _ = glocale.translation.gettext import re import logging from collections import abc LOG = logging.getLogger(".ImportXML") #------------------------------------------------------------------------- # # Gramps Modules # #------------------------------------------------------------------------- from gramps.gen.mime import get_type from gramps.gen.lib import (Address, Attribute, AttributeType, ChildRef, ChildRefType, Citation, Date, DateError, Event, EventRef, EventRoleType, EventType, Family, LdsOrd, Location, Media, MediaRef, Name, NameOriginType, NameType, Note, NoteType, Person, PersonRef, 
Place, PlaceName, PlaceRef, PlaceType, RepoRef, Repository, Researcher, Source, SrcAttribute, SrcAttributeType, StyledText, StyledTextTag, StyledTextTagType, Surname, Tag, Url) from gramps.gen.db import DbTxn #from gramps.gen.db.write import CLASS_TO_KEY_MAP from gramps.gen.errors import GrampsImportError from gramps.gen.utils.id import create_id from gramps.gen.utils.db import family_name from gramps.gen.utils.unknown import make_unknown, create_explanation_note from gramps.gen.utils.file import create_checksum, media_path, expand_media_path from gramps.gen.datehandler import parser, set_date from gramps.gen.display.name import displayer as name_displayer from gramps.gen.db.dbconst import (PERSON_KEY, FAMILY_KEY, SOURCE_KEY, EVENT_KEY, MEDIA_KEY, PLACE_KEY, REPOSITORY_KEY, NOTE_KEY, TAG_KEY, CITATION_KEY, CLASS_TO_KEY_MAP) from gramps.gen.updatecallback import UpdateCallback from gramps.version import VERSION from gramps.gen.config import config #import gramps.plugins.lib.libgrampsxml from gramps.plugins.lib import libgrampsxml from gramps.gen.plug.utils import version_str_to_tup from gramps.plugins.lib.libplaceimport import PlaceImport #------------------------------------------------------------------------- # # Try to detect the presence of gzip # #------------------------------------------------------------------------- try: import gzip GZIP_OK = True except: GZIP_OK = False PERSON_RE = re.compile(r"\s*\<person\s(.*)$") CHILD_REL_MAP = { "Birth" : ChildRefType(ChildRefType.BIRTH), "Adopted" : ChildRefType(ChildRefType.ADOPTED), "Stepchild" : ChildRefType(ChildRefType.STEPCHILD), "Sponsored" : ChildRefType(ChildRefType.SPONSORED), "Foster" : ChildRefType(ChildRefType.FOSTER), "Unknown" : ChildRefType(ChildRefType.UNKNOWN), } # feature requests 2356, 1658: avoid genitive form EVENT_FAMILY_STR = _("%(event_name)s of %(family)s") # feature requests 2356, 1658: avoid genitive form EVENT_PERSON_STR = _("%(event_name)s of %(person)s") HANDLE = 0 INSTANTIATED = 1 
#------------------------------------------------------------------------- # # Importing data into the currently open database. # Must takes care of renaming media files according to their new IDs. # #------------------------------------------------------------------------- def importData(database, filename, user): filename = os.path.normpath(filename) basefile = os.path.dirname(filename) database.smap = {} database.pmap = {} database.fmap = {} line_cnt = 1 person_cnt = 0 with ImportOpenFileContextManager(filename, user) as xml_file: if xml_file is None: return if filename == '-': change = time.time() else: change = os.path.getmtime(filename) if database.get_feature("skip-import-additions"): # don't add source or tags parser = GrampsParser(database, user, change, None) else: parser = GrampsParser(database, user, change, (config.get('preferences.tag-on-import-format') if config.get('preferences.tag-on-import') else None)) if filename != '-': linecounter = LineParser(filename) line_cnt = linecounter.get_count() person_cnt = linecounter.get_person_count() read_only = database.readonly database.readonly = False try: info = parser.parse(xml_file, line_cnt, person_cnt) except GrampsImportError as err: # version error user.notify_error(*err.messages()) return except IOError as msg: user.notify_error(_("Error reading %s") % filename, str(msg)) import traceback traceback.print_exc() return except ExpatError as msg: user.notify_error(_("Error reading %s") % filename, str(msg) + "\n" + _("The file is probably either corrupt or not a " "valid Gramps database.")) return database.readonly = read_only return info #------------------------------------------------------------------------- # # Remove extraneous spaces # #------------------------------------------------------------------------- def rs(text): return ' '.join(text.split()) def fix_spaces(text_list): return '\n'.join(map(rs, text_list)) #------------------------------------------------------------------------- # # # 
#-------------------------------------------------------------------------
class ImportInfo:
    """
    Class object that can hold information about the import
    """
    keyorder = [PERSON_KEY, FAMILY_KEY, SOURCE_KEY, EVENT_KEY, MEDIA_KEY,
                PLACE_KEY, REPOSITORY_KEY, NOTE_KEY, TAG_KEY, CITATION_KEY]
    key2data = {
        PERSON_KEY : 0,
        FAMILY_KEY : 1,
        SOURCE_KEY: 2,
        EVENT_KEY: 3,
        MEDIA_KEY: 4,
        PLACE_KEY: 5,
        REPOSITORY_KEY: 6,
        NOTE_KEY: 7,
        TAG_KEY: 8,
        CITATION_KEY: 9
        }

    def __init__(self):
        """
        Init of the import class.

        This creates the datastructures to hold info
        """
        self.data_mergecandidate = [{}, {}, {}, {}, {}, {}, {}, {}, {}, {}]
        self.data_newobject = [0] * 10
        self.data_unknownobject = [0] * 10
        self.data_families = ''
        self.expl_note = ''
        self.data_relpath = False

    def add(self, category, key, obj, sec_obj=None):
        """
        Add info of a certain category. Key is one of the predefined keys,
        while obj is an object of which information will be extracted.

        :param category: one of 'merge-candidate', 'new-object',
            'unknown-object', 'relative-path' or 'unlinked-family'
        :param key: one of the predefined *_KEY constants (for
            'unlinked-family' it carries the message text instead)
        :param obj: the primary object the info is about
        :param sec_obj: second object for merge candidates
        """
        if category == 'merge-candidate':
            self.data_mergecandidate[self.key2data[key]][obj.handle] = \
                self._extract_mergeinfo(key, obj, sec_obj)
        elif category == 'new-object':
            self.data_newobject[self.key2data[key]] += 1
        elif category == 'unknown-object':
            self.data_unknownobject[self.key2data[key]] += 1
        elif category == 'relative-path':
            self.data_relpath = True
        elif category == 'unlinked-family':
            # This is a bit ugly because it isn't using key in the same way as
            # the rest of the categories, but it is only the calling routine
            # that really knows what the error message should be.
            self.data_families += key + "\n"

    def _extract_mergeinfo(self, key, obj, sec_obj):
        """
        Extract info from obj about 'merge-candidate', Key is one of the
        predefined keys.
        """
        if key == PERSON_KEY:
            return _("  %(id)s - %(text)s with %(id2)s\n") % {
                'id': obj.gramps_id,
                'text' : name_displayer.display(obj),
                'id2': sec_obj.gramps_id }
        elif key == FAMILY_KEY :
            return _("  Family %(id)s with %(id2)s\n") % {
                'id': obj.gramps_id, 'id2': sec_obj.gramps_id}
        elif key == SOURCE_KEY:
            return _("  Source %(id)s with %(id2)s\n") % {
                'id': obj.gramps_id, 'id2': sec_obj.gramps_id}
        elif key == EVENT_KEY:
            return _("  Event %(id)s with %(id2)s\n") % {
                'id': obj.gramps_id, 'id2': sec_obj.gramps_id}
        elif key == MEDIA_KEY:
            return _("  Media Object %(id)s with %(id2)s\n") % {
                'id': obj.gramps_id, 'id2': sec_obj.gramps_id}
        elif key == PLACE_KEY:
            return _("  Place %(id)s with %(id2)s\n") % {
                'id': obj.gramps_id, 'id2': sec_obj.gramps_id}
        elif key == REPOSITORY_KEY:
            return _("  Repository %(id)s with %(id2)s\n") % {
                'id': obj.gramps_id, 'id2': sec_obj.gramps_id}
        elif key == NOTE_KEY:
            return _("  Note %(id)s with %(id2)s\n") % {
                'id': obj.gramps_id, 'id2': sec_obj.gramps_id}
        elif key == TAG_KEY:
            pass # Tags can't be merged
        elif key == CITATION_KEY:
            return _("  Citation %(id)s with %(id2)s\n") % {
                'id': obj.gramps_id, 'id2': sec_obj.gramps_id}

    def info_text(self):
        """
        Construct an info message from the data in the class.
        """
        key2string = {
            PERSON_KEY : _('  People: %d\n'),
            FAMILY_KEY : _('  Families: %d\n'),
            SOURCE_KEY : _('  Sources: %d\n'),
            EVENT_KEY : _('  Events: %d\n'),
            MEDIA_KEY : _('  Media Objects: %d\n'),
            PLACE_KEY : _('  Places: %d\n'),
            REPOSITORY_KEY : _('  Repositories: %d\n'),
            NOTE_KEY : _('  Notes: %d\n'),
            TAG_KEY : _('  Tags: %d\n'),
            CITATION_KEY : _('  Citations: %d\n'),
            }
        txt = _("Number of new objects imported:\n")
        for key in self.keyorder:
            if any(self.data_unknownobject):
                # When any 'Unknown' placeholders were created, append the
                # per-category unknown count in parentheses to every line.
                strng = key2string[key][0:-1] + ' (%d)\n'
                txt += strng % (self.data_newobject[self.key2data[key]],
                                self.data_unknownobject[self.key2data[key]])
            else:
                txt += key2string[key] % self.data_newobject[self.key2data[key]]
        if any(self.data_unknownobject):
            txt += _("\nThe imported file was not self-contained.\n"
                     "To correct for that, %(new)d objects were created and\n"
                     "their typifying attribute was set to 'Unknown'.\n"
                     "The breakdown per category is depicted by the\n"
                     "number in parentheses. Where possible these\n"
                     "'Unknown' objects are referenced by note %(unknown)s.\n"
                     ) % {'new': sum(self.data_unknownobject),
                          'unknown': self.expl_note}
        if self.data_relpath:
            txt += _("\nMedia objects with relative paths have been\n"
                     "imported. These paths are considered relative to\n"
                     "the media directory you can set in the preferences,\n"
                     "or, if not set, relative to the user's directory.\n"
                     )
        merge = False
        for key in self.keyorder:
            if self.data_mergecandidate[self.key2data[key]]:
                merge = True
                break
        if merge:
            txt += _("\nObjects that are candidates to be merged:\n")
            for key in self.keyorder:
                datakey = self.key2data[key]
                for handle in list(self.data_mergecandidate[datakey].keys()):
                    txt += self.data_mergecandidate[datakey][handle]

        if self.data_families:
            txt += "\n\n"
            txt += self.data_families
        return txt


class LineParser:
    """
    Cheap pre-scan of the XML file: counts total lines and <person> records
    so the real parse can drive an accurate progress meter.
    On any read failure both counts are reported as 0.
    """
    def __init__(self, filename):
        self.count = 0
        self.person_count = 0

        if GZIP_OK:
            use_gzip = 1
            try:
                with gzip.open(filename, "r") as f:
                    f.read(1)
            except IOError as msg:
                use_gzip = 0
            except ValueError as msg:
                use_gzip = 1
        else:
            use_gzip = 0

        # Pre-bind ofile so the finally clause below cannot raise
        # NameError when the open itself fails (previously the bare
        # except swallowed the open error but finally then crashed on
        # the unbound name).
        ofile = None
        try:
            if use_gzip:
                import io
                # Bug 6255. TextIOWrapper is required for python3 to
                #           present file contents as text, otherwise they
                #           are read as binary. However due to a missing
                #           method (read1) in early python3 versions this
                #           try block will fail.
                #           Gramps will still import XML files using python
                #           versions < 3.3.0 but the file progress meter
                #           will not work properly, going immediately to
                #           100%.
                #           It should work correctly from version 3.3.
                ofile = io.TextIOWrapper(gzip.open(filename, "rb"),
                                         encoding='utf8', errors='replace')
            else:
                ofile = open(filename, "r", encoding='utf8',
                             errors='replace')
            for line in ofile:
                self.count += 1
                if PERSON_RE.match(line):
                    self.person_count += 1
        except:
            self.count = 0
            self.person_count = 0
        finally:
            # Ensure the file handle is always closed, if it was opened
            if ofile is not None:
                ofile.close()

    def get_count(self):
        """Return the number of lines in the file (0 on read failure)."""
        return self.count

    def get_person_count(self):
        """Return the number of <person> records seen (0 on read failure)."""
        return self.person_count

#-------------------------------------------------------------------------
#
# ImportOpenFileContextManager
#
#-------------------------------------------------------------------------
class ImportOpenFileContextManager:
    """
    Context manager to open a file or stdin for reading.
    """
    def __init__(self, filename, user):
        self.filename = filename
        self.filehandle = None
        self.user = user

    def __enter__(self):
        if self.filename == '-':
            try:
                self.filehandle = sys.stdin.buffer
            except:
                self.filehandle = sys.stdin
        else:
            self.filehandle = self.open_file(self.filename)
        return self.filehandle

    def __exit__(self, exc_type, exc_value, traceback):
        # Never close stdin; propagate any exception (return False).
        if self.filename != '-':
            if self.filehandle:
                self.filehandle.close()
        return False

    def open_file(self, filename):
        """
        Open the xml file.
        Return a valid file handle if the file opened sucessfully.
        Return None if the file was not able to be opened.
        """
        if GZIP_OK:
            use_gzip = True
            try:
                with gzip.open(filename, "r") as ofile:
                    ofile.read(1)
            except IOError as msg:
                use_gzip = False
            except ValueError as msg:
                use_gzip = True
        else:
            use_gzip = False

        try:
            if use_gzip:
                xml_file = gzip.open(filename, "rb")
            else:
                xml_file = open(filename, "rb")
        except IOError as msg:
            self.user.notify_error(_("%s could not be opened") % filename,
                                   str(msg))
            xml_file = None
        except:
            self.user.notify_error(_("%s could not be opened") % filename)
            xml_file = None

        return xml_file

#-------------------------------------------------------------------------
#
# Gramps database parsing class.
Derived from SAX XML parser # #------------------------------------------------------------------------- class GrampsParser(UpdateCallback): def __init__(self, database, user, change, default_tag_format=None): UpdateCallback.__init__(self, user.callback) self.user = user self.__gramps_version = 'unknown' self.__xml_version = (1, 0, 0) self.stext_list = [] self.scomments_list = [] self.note_list = [] self.tlist = [] self.conf = 2 self.gid2id = {} self.gid2fid = {} self.gid2eid = {} self.gid2pid = {} self.gid2oid = {} self.gid2sid = {} self.gid2rid = {} self.gid2nid = {} self.childref_map = {} self.change = change self.dp = parser self.info = ImportInfo() self.all_abs = True self.db = database # Data with handles already present in the db will overwrite existing # data, so all imported data gets a new handle. This behavior is not # needed and even unwanted if data is imported in an empty family tree # because NarWeb urls are based on handles. Also for debugging purposes # it can be advantageous to preserve the orginal handle. 
self.replace_import_handle = (self.db.get_number_of_people() > 0 and not LOG.isEnabledFor(logging.DEBUG)) # Similarly, if the data is imported into an empty family tree, we also # import the Researcher; if the tree was not empty, the existing # Researcher is retained self.import_researcher = self.db.get_total() == 0 self.ord = None self.objref = None self.object = None self.repo = None self.reporef = None self.pref = None self.use_p = 0 self.in_note = 0 self.in_stext = 0 self.in_scomments = 0 self.note = None self.note_text = None self.note_tags = [] self.in_witness = False self.photo = None self.person = None self.family = None self.address = None self.citation = None self.in_old_sourceref = False self.source = None self.attribute = None self.srcattribute = None self.placeobj = None self.placeref = None self.place_name = None self.locations = 0 self.place_names = 0 self.place_map = {} self.place_import = PlaceImport(self.db) self.resname = "" self.resaddr = "" self.reslocality = "" self.rescity = "" self.resstate = "" self.rescon = "" self.respos = "" self.resphone = "" self.resemail = "" self.mediapath = "" self.pmap = {} self.fmap = {} self.smap = {} self.lmap = {} self.media_file_map = {} # List of new name formats and a dict for remapping them self.name_formats = [] self.name_formats_map = {} self.taken_name_format_numbers = [num[0] for num in self.db.name_formats] self.event = None self.eventref = None self.childref = None self.personref = None self.name = None self.surname = None self.surnamepat = None self.home = None self.owner = Researcher() self.func_list = [None]*50 self.func_index = 0 self.func = None self.witness_comment = "" self.idswap = {} self.fidswap = {} self.eidswap = {} self.cidswap = {} self.sidswap = {} self.pidswap = {} self.oidswap = {} self.ridswap = {} self.nidswap = {} self.eidswap = {} self.import_handles = {} if default_tag_format: name = time.strftime(default_tag_format) tag = self.db.get_tag_from_name(name) if tag: self.default_tag 
= tag else: self.default_tag = Tag() self.default_tag.set_name(name) else: self.default_tag = None self.func_map = { #name part "name": (self.start_name, self.stop_name), "first": (None, self.stop_first), "call": (None, self.stop_call), "aka": (self.start_name, self.stop_aka), #deprecated < 1.3.0 "last": (self.start_last, self.stop_last), #deprecated in 1.4.0 "nick": (None, self.stop_nick), "title": (None, self.stop_title), "suffix": (None, self.stop_suffix), "patronymic": (self.start_patronymic, self.stop_patronymic), #deprecated in 1.4.0 "familynick": (None, self.stop_familynick), #new in 1.4.0 "group": (None, self.stop_group), #new in 1.4.0, replaces attribute #new in 1.4.0 "surname": (self.start_surname, self.stop_surname), # "namemaps": (None, None), "name-formats": (None, None), #other "address": (self.start_address, self.stop_address), "addresses": (None, None), "alt_name": (None, self.stop_alt_name), "childlist": (None, None), "attribute": (self.start_attribute, self.stop_attribute), "attr_type": (None, self.stop_attr_type), "attr_value": (None, self.stop_attr_value), "srcattribute": (self.start_srcattribute, self.stop_srcattribute), "bookmark": (self.start_bmark, None), "bookmarks": (None, None), "format": (self.start_format, None), "child": (self.start_child, None), "childof": (self.start_childof, None), "childref": (self.start_childref, self.stop_childref), "personref": (self.start_personref, self.stop_personref), "citation": (self.start_citation, self.stop_citation), "citationref": (self.start_citationref, None), "citations": (None, None), "city": (None, self.stop_city), "county": (None, self.stop_county), "country": (None, self.stop_country), "comment": (None, self.stop_comment), "confidence": (None, self.stop_confidence), "created": (self.start_created, None), "ref": (None, self.stop_ref), "database": (self.start_database, self.stop_database), "phone": (None, self.stop_phone), "date": (None, self.stop_date), "cause": (None, self.stop_cause), "code": 
(None, self.stop_code), "description": (None, self.stop_description), "event": (self.start_event, self.stop_event), "type": (None, self.stop_type), "witness": (self.start_witness, self.stop_witness), "eventref": (self.start_eventref, self.stop_eventref), "data_item": (self.start_data_item, None), #deprecated in 1.6.0 "families": (None, self.stop_families), "family": (self.start_family, self.stop_family), "rel": (self.start_rel, None), "region": (self.start_region, None), "father": (self.start_father, None), "gender": (None, self.stop_gender), "header": (None, self.stop_header), "map": (self.start_namemap, None), "mediapath": (None, self.stop_mediapath), "mother": (self.start_mother, None), "note": (self.start_note, self.stop_note), "noteref": (self.start_noteref, None), "p": (None, self.stop_ptag), "parentin": (self.start_parentin, None), "people": (self.start_people, self.stop_people), "person": (self.start_person, self.stop_person), "img": (self.start_photo, self.stop_photo), "objref": (self.start_objref, self.stop_objref), "object": (self.start_media, self.stop_media), "file": (self.start_file, None), "page": (None, self.stop_page), "place": (self.start_place, self.stop_place), "dateval": (self.start_dateval, None), "daterange": (self.start_daterange, None), "datespan": (self.start_datespan, None), "datestr": (self.start_datestr, None), "places": (None, self.stop_places), "placeobj": (self.start_placeobj, self.stop_placeobj), "placeref": (self.start_placeref, self.stop_placeref), "ptitle": (None, self.stop_ptitle), "pname": (self.start_place_name, self.stop_place_name), "locality": (None, self.stop_locality), "location": (self.start_location, None), "lds_ord": (self.start_lds_ord, self.stop_lds_ord), "temple": (self.start_temple, None), "status": (self.start_status, None), "sealed_to": (self.start_sealed_to, None), "coord": (self.start_coord, None), "pos": (self.start_pos, None), "postal": (None, self.stop_postal), "range": (self.start_range, None), 
"researcher": (None, self.stop_research), "resname": (None, self.stop_resname), "resaddr": (None, self.stop_resaddr), "reslocality": (None, self.stop_reslocality), "rescity": (None, self.stop_rescity), "resstate": (None, self.stop_resstate), "rescountry": (None, self.stop_rescountry), "respostal": (None, self.stop_respostal), "resphone": (None, self.stop_resphone), "resemail": (None, self.stop_resemail), "sauthor": (None, self.stop_sauthor), "sabbrev": (None, self.stop_sabbrev), "scomments": (None, self.stop_scomments), "source": (self.start_source, self.stop_source), "sourceref": (self.start_sourceref, self.stop_sourceref), "sources": (None, None), "spage": (None, self.stop_spage), "spubinfo": (None, self.stop_spubinfo), "state": (None, self.stop_state), "stext": (None, self.stop_stext), "stitle": (None, self.stop_stitle), "street": (None, self.stop_street), "style": (self.start_style, None), "tag": (self.start_tag, self.stop_tag), "tagref": (self.start_tagref, None), "tags": (None, None), "text": (None, self.stop_text), "url": (self.start_url, None), "repository": (self.start_repo, self.stop_repo), "reporef": (self.start_reporef, self.stop_reporef), "rname": (None, self.stop_rname), } self.grampsuri = re.compile(r"^gramps://(?P<object_class>[A-Z][a-z]+)/" r"handle/(?P<handle>\w+)$") def inaugurate(self, handle, target, prim_obj): """ Assign a handle (identity) to a primary object (and create it if it doesn't exist yet) and add it to the database. This method can be called with an object instance or with a class object. Be aware that in the first case the side effect of this function is to fill the object instance with the data read from the db. In the second case, an empty object with the correct handle will be created. :param handle: The handle of the primary object, typically as read directly from the XML attributes. :type handle: str :param target: Indicates the primary object type this handle relates to. 
:type target: str, identical to target attr of bookmarks. :param prim_obj: template of the primary object that is to be created. :type prim_obj: Either an empty instance of a primary object or the class object of a primary object. :returns: The handle of the primary object. :rtype: str """ handle = str(handle.replace('_', '')) orig_handle = handle if (orig_handle in self.import_handles and target in self.import_handles[orig_handle]): handle = self.import_handles[handle][target][HANDLE] if not isinstance(prim_obj, abc.Callable): # This method is called by a start_<primary_object> method. get_raw_obj_data = {"person": self.db.get_raw_person_data, "family": self.db.get_raw_family_data, "event": self.db.get_raw_event_data, "place": self.db.get_raw_place_data, "source": self.db.get_raw_source_data, "citation": self.db.get_raw_citation_data, "repository": self.db.get_raw_repository_data, "media": self.db.get_raw_media_data, "note": self.db.get_raw_note_data, "tag": self.db.get_raw_tag_data}[target] raw = get_raw_obj_data(handle) prim_obj.unserialize(raw) self.import_handles[orig_handle][target][INSTANTIATED] = True return handle elif handle in self.import_handles: LOG.warning("The file you import contains duplicate handles " "which is illegal and being fixed now.") handle = create_id() while handle in self.import_handles: handle = create_id() self.import_handles[orig_handle][target] = [handle, False] else: orig_handle = handle if self.replace_import_handle: handle = create_id() while handle in self.import_handles: handle = create_id() else: has_handle_func = {"person": self.db.has_person_handle, "family": self.db.has_family_handle, "event": self.db.has_event_handle, "place": self.db.has_place_handle, "source": self.db.has_source_handle, "citation": self.db.get_raw_citation_data, "repository": self.db.has_repository_handle, "media": self.db.has_media_handle, "note": self.db.has_note_handle, "tag": self.db.has_tag_handle}[target] while has_handle_func(handle): handle = 
create_id() self.import_handles[orig_handle] = {target: [handle, False]} # method is called by a reference if isinstance(prim_obj, abc.Callable): prim_obj = prim_obj() else: self.import_handles[orig_handle][target][INSTANTIATED] = True prim_obj.set_handle(handle) if target == "tag": self.db.add_tag(prim_obj, self.trans) else: add_func = {"person": self.db.add_person, "family": self.db.add_family, "event": self.db.add_event, "place": self.db.add_place, "source": self.db.add_source, "citation": self.db.add_citation, "repository": self.db.add_repository, "media": self.db.add_media, "note": self.db.add_note}[target] add_func(prim_obj, self.trans, set_gid=False) return handle def inaugurate_id(self, id_, key, prim_obj): """ Equivalent of inaugurate but for old style XML. """ if id_ is None: raise GrampsImportError(_("The Gramps Xml you are trying to " "import is malformed."), _("Attributes that link the data " "together are missing.")) id2handle_map = [self.gid2id, self.gid2fid, self.gid2sid, self.gid2eid, self.gid2oid, self.gid2pid, self.gid2rid, 'reference', self.gid2nid][key] has_handle_func = [self.db.has_person_handle, self.db.has_family_handle, self.db.has_source_handle, self.db.has_event_handle, self.db.has_media_handle, self.db.has_place_handle, self.db.has_repository_handle, 'reference', self.db.has_note_handle][key] add_func = [self.db.add_person, self.db.add_family, self.db.add_source, self.db.add_event, self.db.add_media, self.db.add_place, self.db.add_repository, 'reference', self.db.add_note][key] get_raw_obj_data = [self.db.get_raw_person_data, self.db.get_raw_family_data, self.db.get_raw_source_data, self.db.get_raw_event_data, self.db.get_raw_media_data, self.db.get_raw_place_data, self.db.get_raw_repository_data, 'reference', self.db.get_raw_note_data][key] id2id_map = [self.idswap, self.fidswap, self.sidswap, self.eidswap, self.oidswap, self.pidswap, self.ridswap, 'reference', self.nidswap][key] id2user_format = [self.db.id2user_format, 
self.db.fid2user_format, self.db.sid2user_format, self.db.eid2user_format, self.db.oid2user_format, self.db.pid2user_format, self.db.rid2user_format, 'reference', self.db.nid2user_format][key] find_next_gramps_id = [self.db.find_next_person_gramps_id, self.db.find_next_family_gramps_id, self.db.find_next_source_gramps_id, self.db.find_next_event_gramps_id, self.db.find_next_media_gramps_id, self.db.find_next_place_gramps_id, self.db.find_next_repository_gramps_id, 'reference', self.db.find_next_note_gramps_id][key] has_gramps_id = [self.db.has_person_gramps_id, self.db.has_family_gramps_id, self.db.has_source_gramps_id, self.db.has_event_gramps_id, self.db.has_media_gramps_id, self.db.has_place_gramps_id, self.db.has_repository_gramps_id, 'reference', self.db.has_note_gramps_id][key] gramps_id = self.legalize_id(id_, key, id2id_map, id2user_format, find_next_gramps_id, has_gramps_id) handle = id2handle_map.get(gramps_id) if handle: raw = get_raw_obj_data(handle) prim_obj.unserialize(raw) else: handle = create_id() while has_handle_func(handle): handle = create_id() if isinstance(prim_obj, abc.Callable): prim_obj = prim_obj() prim_obj.set_handle(handle) prim_obj.set_gramps_id(gramps_id) add_func(prim_obj, self.trans) id2handle_map[gramps_id] = handle return handle def legalize_id(self, id_, key, gramps_ids, id2user_format, find_next_gramps_id, has_gramps_id): """ Given an import id, adjust it so that it fits with the existing data. :param id_: The id as it is in the Xml import file, might be None. :type id_: str :param key: Indicates kind of primary object this id is for. :type key: int :param gramps_ids: Dictionary with id's that have already been imported. :type import_ids: dict :param id2user_format: Function to convert a raw id into the format as specified in the prefixes. :type id2user_format: func :param find_next_gramps_id: function to get the next available id. :type find_next_gramps_id: func :returns: The id. 
:rtype: str """ gramps_id = id2user_format(id_) if gramps_id is None or not gramps_ids.get(id_): if gramps_id is None or has_gramps_id(gramps_id): gramps_ids[id_] = find_next_gramps_id() else: gramps_ids[id_] = gramps_id return gramps_ids[id_] def parse(self, ifile, linecount=1, personcount=0): """ Parse the xml file :param ifile: must be a file handle that is already open, with position at the start of the file """ if personcount < 1000: no_magic = True else: no_magic = False with DbTxn(_("Gramps XML import"), self.db, batch=True, no_magic=no_magic) as self.trans: self.set_total(linecount) self.db.disable_signals() if self.default_tag and self.default_tag.handle is None: self.db.add_tag(self.default_tag, self.trans) self.p = ParserCreate() self.p.StartElementHandler = self.startElement self.p.EndElementHandler = self.endElement self.p.CharacterDataHandler = self.characters self.p.ParseFile(ifile) if len(self.name_formats) > 0: # add new name formats to the existing table self.db.name_formats += self.name_formats # Register new formats name_displayer.set_name_format(self.db.name_formats) # If the database was originally empty we update the researcher from # the XML (or initialised to no researcher) if self.import_researcher: self.db.set_researcher(self.owner) if self.home is not None: person = self.db.get_person_from_handle(self.home) self.db.set_default_person_handle(person.handle) # Set media path # The paths are normalized before being compared. if self.mediapath: if not self.db.get_mediapath(): self.db.set_mediapath(self.mediapath) elif not media_path(self.db) == expand_media_path(self.mediapath, self.db): self.user.notify_error(_("Could not change media path"), _("The opened file has media path %s, which conflicts with" " the media path of the Family Tree you import into. " "The original media path has been retained. Copy the " "files to a correct directory or change the media " "path in the Preferences." 
def start_database(self, attrs):
    """
    Get the xml version of the file.

    A Gramps namespace URI looks like
    http://gramps-project.org/xml/<version>/ and splits on '/' into
    ['http:', '', 'gramps-project.org', 'xml', <version>, ''].
    """
    if 'xmlns' in attrs:
        xmlns = attrs.get('xmlns').split('/')
        # BUG FIX: the guard was len(xmlns) >= 2, which let xmlns[2]
        # raise an uncaught IndexError for two-part URIs.
        if len(xmlns) >= 3 and not xmlns[2] == 'gramps-project.org':
            self.__xml_version = (0, 0, 0)
        else:
            try:
                self.__xml_version = version_str_to_tup(xmlns[4], 3)
            except Exception:
                # Best effort (was a bare except):
                # leave version at 1.0.0 although it could be 0.0.0 ??
                pass
    else:
        # 1.0 or before xml, no dtd schema yet on
        # http://www.gramps-project.org/xml/
        self.__xml_version = (0, 0, 0)

def start_created(self, attrs):
    """
    Get the Gramps version that produced the file, plus the source and
    place counts used for progress reporting.
    """
    self.num_srcs = int(attrs.get('sources', 0))
    self.num_places = int(attrs.get('places', 0))
    if 'version' in attrs:
        self.__gramps_version = attrs.get('version')

def stop_header(self, *dummy):
    """
    Check the version of Gramps and XML.

    Raises GrampsImportError for files that cannot be imported (unknown
    producer version, XML newer than this Gramps understands, or
    pre-1.0.0 XML); merely warns for old-but-supported 1.0.x XML.
    """
    xmlversion_str = '.'.join(str(i) for i in self.__xml_version)
    if self.__gramps_version == 'unknown':
        # NOTE(review): "with, which" looks like a typo, but this is a
        # translatable msgid — changing it would orphan translations.
        msg = _("The .gramps file you are importing does not contain information about "
                "the version of Gramps with, which it was produced.\n\n"
                "The file will not be imported.")
        raise GrampsImportError(_('Import file misses Gramps version'),
                                msg)
    if self.__xml_version > libgrampsxml.GRAMPS_XML_VERSION_TUPLE:
        msg = _("The .gramps file you are importing was made by "
                "version %(newer)s of "
                "Gramps, while you are running an older version %(older)s. "
                "The file will not be imported. Please upgrade to the "
                "latest version of Gramps and try again."
                ) % {'newer': self.__gramps_version, 'older': VERSION}
        raise GrampsImportError('', msg)
    if self.__xml_version < (1, 0, 0):
        msg = _("The .gramps file you are importing was made by version "
                "%(oldgramps)s of Gramps, while you are running a more "
                "recent version %(newgramps)s.\n\n"
                "The file will not be imported. Please use an older version"
                " of Gramps that supports version %(xmlversion)s of the "
                "xml.\nSee\n %(gramps_wiki_xml_url)s\n for more info."
                ) % {'oldgramps': self.__gramps_version,
                     'newgramps': VERSION,
                     'xmlversion': xmlversion_str,
                     'gramps_wiki_xml_url': URL_WIKISTRING + "Gramps_XML",
                     }
        raise GrampsImportError(_('The file will not be imported'), msg)
    elif self.__xml_version < (1, 1, 0):
        msg = _("The .gramps file you are importing was made by version "
                "%(oldgramps)s of Gramps, while you are running a much "
                "more recent version %(newgramps)s.\n\n"
                "Ensure after import everything is imported correctly. In "
                "the event of problems, please submit a bug and use an "
                "older version of Gramps in the meantime to import this "
                "file, which is version %(xmlversion)s of the xml.\nSee\n "
                "%(gramps_wiki_xml_url)s\nfor more info."
                ) % {'oldgramps': self.__gramps_version,
                     'newgramps': VERSION,
                     'xmlversion': xmlversion_str,
                     'gramps_wiki_xml_url': URL_WIKISTRING + "Gramps_XML",
                     }
        self.user.warn(_('Old xml file'), msg)
) % {'oldgramps': self.__gramps_version, 'newgramps': VERSION, 'xmlversion': xmlversion_str, 'gramps_wiki_xml_url': URL_WIKISTRING + "Gramps_XML" , } self.user.warn(_('Old xml file'), msg) def start_lds_ord(self, attrs): self.ord = LdsOrd() self.ord.set_type_from_xml(attrs['type']) self.ord.private = bool(attrs.get("priv")) if self.person: self.person.lds_ord_list.append(self.ord) elif self.family: self.family.lds_ord_list.append(self.ord) def start_temple(self, attrs): self.ord.set_temple(attrs['val']) def start_data_item(self, attrs): """ Deprecated in 1.6.0, replaced by srcattribute """ sat = SrcAttributeType(attrs['key']) sa = SrcAttribute() sa.set_type(sat) sa.set_value(attrs['value']) if self.source: self.source.add_attribute(sa) else: self.citation.add_attribute(sa) def start_status(self, attrs): try: # old xml with integer statuses self.ord.set_status(int(attrs['val'])) except ValueError: # string self.ord.set_status_from_xml(attrs['val']) def start_sealed_to(self, attrs): """ Add a family reference to the LDS ordinance currently processed. """ if 'hlink' in attrs: handle = self.inaugurate(attrs['hlink'], "family", Family) else: # old style XML handle = self.inaugurate_id(attrs.get('ref'), FAMILY_KEY, Family) self.ord.set_family_handle(handle) def start_place(self, attrs): """A reference to a place in an object: event or lds_ord """ if 'hlink' in attrs: handle = self.inaugurate(attrs['hlink'], "place", Place) else: # old style XML handle = self.inaugurate_id(attrs.get('ref'), PLACE_KEY, Place) if self.ord: self.ord.set_place_handle(handle) elif self.event: self.event.set_place_handle(handle) def start_placeobj(self, attrs): """ Add a place object to db if it doesn't exist yet and assign id, privacy and changetime. 
""" self.placeobj = Place() if 'handle' in attrs: orig_handle = attrs['handle'].replace('_', '') is_merge_candidate = (self.replace_import_handle and self.db.has_place_handle(orig_handle)) self.inaugurate(orig_handle, "place", self.placeobj) gramps_id = self.legalize_id(attrs.get('id'), PLACE_KEY, self.pidswap, self.db.pid2user_format, self.db.find_next_place_gramps_id, self.db.has_place_gramps_id) self.placeobj.set_gramps_id(gramps_id) if is_merge_candidate: orig_place = self.db.get_place_from_handle(orig_handle) self.info.add('merge-candidate', PLACE_KEY, orig_place, self.placeobj) else: self.inaugurate_id(attrs.get('id'), PLACE_KEY, self.placeobj) self.placeobj.private = bool(attrs.get("priv")) self.placeobj.change = int(attrs.get('change', self.change)) if self.__xml_version == (1, 6, 0): place_name = PlaceName() place_name.set_value(attrs.get('name', '')) self.placeobj.name = place_name if 'type' in attrs: self.placeobj.place_type.set_from_xml_str(attrs.get('type')) self.info.add('new-object', PLACE_KEY, self.placeobj) self.place_names = 0 # Gramps LEGACY: title in the placeobj tag self.placeobj.title = attrs.get('title', '') self.locations = 0 self.update(self.p.CurrentLineNumber) if self.default_tag: self.placeobj.add_tag(self.default_tag.handle) return self.placeobj def start_location(self, attrs): """Bypass the function calls for this one, since it appears to take up quite a bit of time""" loc = Location() loc.street = attrs.get('street', '') loc.locality = attrs.get('locality', '') loc.city = attrs.get('city', '') loc.parish = attrs.get('parish', '') loc.county = attrs.get('county', '') loc.state = attrs.get('state', '') loc.country = attrs.get('country', '') loc.postal = attrs.get('postal', '') loc.phone = attrs.get('phone', '') if self.__xml_version < (1, 6, 0): if self.locations > 0: self.placeobj.add_alternate_locations(loc) else: location = (attrs.get('street', ''), attrs.get('locality', ''), attrs.get('parish', ''), attrs.get('city', ''), 
attrs.get('county', ''), attrs.get('state', ''), attrs.get('country', '')) self.place_import.store_location(location, self.placeobj.handle) for level, name in enumerate(location): if name: break place_name = PlaceName() place_name.set_value(name) self.placeobj.set_name(place_name) type_num = 7 - level if name else PlaceType.UNKNOWN self.placeobj.set_type(PlaceType(type_num)) codes = [attrs.get('postal'), attrs.get('phone')] self.placeobj.set_code(' '.join(code for code in codes if code)) else: self.placeobj.add_alternate_locations(loc) self.locations = self.locations + 1 def start_witness(self, attrs): """ Add a note about a witness to the currently processed event or add an event reference connecting that event with a person assigning the role of witness. """ # Parse witnesses created by older gramps self.in_witness = True self.witness_comment = "" if 'name' in attrs: note = Note() note.handle = create_id() note.set(_("Witness name: %s") % attrs['name']) note.type.set(NoteType.EVENT) note.private = self.event.private self.db.add_note(note, self.trans) #set correct change time self.db.commit_note(note, self.trans, self.change) self.info.add('new-object', NOTE_KEY, note) self.event.add_note(note.handle) return person = Person() if 'hlink' in attrs: self.inaugurate(attrs['hlink'], "person", person) elif 'ref' in attrs: self.inaugurate_id(attrs['ref'], PERSON_KEY, person) else: person = None # Add an EventRef from that person # to this event using ROLE_WITNESS role if person: event_ref = EventRef() event_ref.ref = self.event.handle event_ref.role.set(EventRoleType.WITNESS) person.event_ref_list.append(event_ref) self.db.commit_person(person, self.trans, self.change) def start_coord(self, attrs): self.placeobj.lat = attrs.get('lat', '') self.placeobj.long = attrs.get('long', '') def start_event(self, attrs): """ Add an event object to db if it doesn't exist yet and assign id, privacy and changetime. 
""" if self.person or self.family: # Gramps LEGACY: old events that were written inside # person or family objects. self.event = Event() self.event.handle = create_id() self.event.type = EventType() self.event.type.set_from_xml_str(attrs['type']) self.db.add_event(self.event, self.trans) #set correct change time self.db.commit_event(self.event, self.trans, self.change) self.info.add('new-object', EVENT_KEY, self.event) else: # This is new event, with ID and handle already existing self.update(self.p.CurrentLineNumber) self.event = Event() if 'handle' in attrs: orig_handle = attrs['handle'].replace('_', '') is_merge_candidate = (self.replace_import_handle and self.db.has_event_handle(orig_handle)) self.inaugurate(orig_handle, "event", self.event) gramps_id = self.legalize_id(attrs.get('id'), EVENT_KEY, self.eidswap, self.db.eid2user_format, self.db.find_next_event_gramps_id, self.db.has_event_gramps_id) self.event.set_gramps_id(gramps_id) if is_merge_candidate: orig_event = self.db.get_event_from_handle(orig_handle) self.info.add('merge-candidate', EVENT_KEY, orig_event, self.event) else: #old style XML self.inaugurate_id(attrs.get('id'), EVENT_KEY, self.event) self.event.private = bool(attrs.get("priv")) self.event.change = int(attrs.get('change', self.change)) self.info.add('new-object', EVENT_KEY, self.event) if self.default_tag: self.event.add_tag(self.default_tag.handle) return self.event def start_eventref(self, attrs): """ Add an event reference to the object currently processed. 
""" self.eventref = EventRef() if 'hlink' in attrs: handle = self.inaugurate(attrs['hlink'], "event", Event) else: # there is no old style XML raise GrampsImportError(_("The Gramps Xml you are trying to " "import is malformed."), _("Any event reference must have a " "'hlink' attribute.")) self.eventref.ref = handle self.eventref.private = bool(attrs.get('priv')) if 'role' in attrs: self.eventref.role.set_from_xml_str(attrs['role']) # We count here on events being already parsed prior to parsing # people or families. This code will fail if this is not true. event = self.db.get_event_from_handle(self.eventref.ref) if not event: return if self.family: self.family.add_event_ref(self.eventref) elif self.person: if (event.type == EventType.BIRTH) \ and (self.eventref.role == EventRoleType.PRIMARY) \ and (self.person.get_birth_ref() is None): self.person.set_birth_ref(self.eventref) elif (event.type == EventType.DEATH) \ and (self.eventref.role == EventRoleType.PRIMARY) \ and (self.person.get_death_ref() is None): self.person.set_death_ref(self.eventref) else: self.person.add_event_ref(self.eventref) def start_placeref(self, attrs): """ Add a place reference to the place currently being processed. 
""" self.placeref = PlaceRef() handle = self.inaugurate(attrs['hlink'], "place", Place) self.placeref.ref = handle self.placeobj.add_placeref(self.placeref) def start_attribute(self, attrs): self.attribute = Attribute() self.attribute.private = bool(attrs.get("priv")) self.attribute.type = AttributeType() if 'type' in attrs: self.attribute.type.set_from_xml_str(attrs["type"]) self.attribute.value = attrs.get("value", '') if self.photo: self.photo.add_attribute(self.attribute) elif self.object: self.object.add_attribute(self.attribute) elif self.objref: self.objref.add_attribute(self.attribute) elif self.event: self.event.add_attribute(self.attribute) elif self.eventref: self.eventref.add_attribute(self.attribute) elif self.person: self.person.add_attribute(self.attribute) elif self.family: self.family.add_attribute(self.attribute) def start_srcattribute(self, attrs): self.srcattribute = SrcAttribute() self.srcattribute.private = bool(attrs.get("priv")) self.srcattribute.type = SrcAttributeType() if 'type' in attrs: self.srcattribute.type.set_from_xml_str(attrs["type"]) self.srcattribute.value = attrs.get("value", '') if self.source: self.source.add_attribute(self.srcattribute) elif self.citation: self.citation.add_attribute(self.srcattribute) def start_address(self, attrs): self.address = Address() self.address.private = bool(attrs.get("priv")) def start_bmark(self, attrs): """ Add a bookmark to db. """ target = attrs.get('target') if not target: # Old XML. 
Can be either handle or id reference # and this is guaranteed to be a person bookmark if 'hlink' in attrs: handle = self.inaugurate(attrs['hlink'], "person", Person) else: handle = self.inaugurate_id(attrs.get('ref'), PERSON_KEY, Person) self.db.bookmarks.append(handle) return # This is new XML, so we are guaranteed to have a handle ref handle = attrs['hlink'].replace('_', '') handle = self.import_handles[handle][target][HANDLE] # Due to pre 2.2.9 bug, bookmarks might be handle of other object # Make sure those are filtered out. # Bookmarks are at end, so all handle must exist before we do bookmrks if target == 'person': if (self.db.get_person_from_handle(handle) is not None and handle not in self.db.bookmarks.get() ): self.db.bookmarks.append(handle) elif target == 'family': if (self.db.get_family_from_handle(handle) is not None and handle not in self.db.family_bookmarks.get() ): self.db.family_bookmarks.append(handle) elif target == 'event': if (self.db.get_event_from_handle(handle) is not None and handle not in self.db.event_bookmarks.get() ): self.db.event_bookmarks.append(handle) elif target == 'source': if (self.db.get_source_from_handle(handle) is not None and handle not in self.db.source_bookmarks.get() ): self.db.source_bookmarks.append(handle) elif target == 'citation': if (self.db.get_citation_from_handle(handle) is not None and handle not in self.db.citation_bookmarks.get() ): self.db.citation_bookmarks.append(handle) elif target == 'place': if (self.db.get_place_from_handle(handle) is not None and handle not in self.db.place_bookmarks.get() ): self.db.place_bookmarks.append(handle) elif target == 'media': if (self.db.get_media_from_handle(handle) is not None and handle not in self.db.media_bookmarks.get() ): self.db.media_bookmarks.append(handle) elif target == 'repository': if (self.db.get_repository_from_handle(handle) is not None and handle not in self.db.repo_bookmarks.get()): self.db.repo_bookmarks.append(handle) elif target == 'note': if 
(self.db.get_note_from_handle(handle) is not None and handle not in self.db.note_bookmarks.get() ): self.db.note_bookmarks.append(handle) def start_format(self, attrs): number = int(attrs['number']) name = attrs['name'] fmt_str = attrs['fmt_str'] active = bool(attrs.get('active', True)) if number in self.taken_name_format_numbers: number = self.remap_name_format(number) self.name_formats.append((number, name, fmt_str, active)) def remap_name_format(self, old_number): if old_number in self.name_formats_map: # This should not happen return self.name_formats_map[old_number] # Find the lowest new number not taken yet: new_number = -1 while new_number in self.taken_name_format_numbers: new_number -= 1 # Add this to the taken list self.taken_name_format_numbers.append(new_number) # Set up the mapping entry self.name_formats_map[old_number] = new_number # Return new number return new_number def start_person(self, attrs): """ Add a person to db if it doesn't exist yet and assign id, privacy and changetime. 
""" self.update(self.p.CurrentLineNumber) self.person = Person() if 'handle' in attrs: orig_handle = attrs['handle'].replace('_', '') is_merge_candidate = (self.replace_import_handle and self.db.has_person_handle(orig_handle)) self.inaugurate(orig_handle, "person", self.person) gramps_id = self.legalize_id(attrs.get('id'), PERSON_KEY, self.idswap, self.db.id2user_format, self.db.find_next_person_gramps_id, self.db.has_person_gramps_id) self.person.set_gramps_id(gramps_id) if is_merge_candidate: orig_person = self.db.get_person_from_handle(orig_handle) self.info.add('merge-candidate', PERSON_KEY, orig_person, self.person) else: # old style XML self.inaugurate_id(attrs.get('id'), PERSON_KEY, self.person) self.person.private = bool(attrs.get("priv")) self.person.change = int(attrs.get('change', self.change)) self.info.add('new-object', PERSON_KEY, self.person) self.convert_marker(attrs, self.person) if self.default_tag: self.person.add_tag(self.default_tag.handle) return self.person def start_people(self, attrs): """ Store the home person of the database. """ if 'home' in attrs: handle = self.inaugurate(attrs['home'], "person", Person) self.home = handle def start_father(self, attrs): """ Add a father reference to the family currently processed. """ if 'hlink' in attrs: handle = self.inaugurate(attrs['hlink'], "person", Person) else: # old style XML handle = self.inaugurate_id(attrs.get('ref'), PERSON_KEY, Person) self.family.set_father_handle(handle) def start_mother(self, attrs): """ Add a mother reference to the family currently processed. """ if 'hlink' in attrs: handle = self.inaugurate(attrs['hlink'], "person", Person) else: # old style XML handle = self.inaugurate_id(attrs.get('ref'), PERSON_KEY, Person) self.family.set_mother_handle(handle) def start_child(self, attrs): """ Add a child reference to the family currently processed. 
Here we are handling the old XML, in which frel and mrel belonged to the "childof" tag """ if 'hlink' in attrs: handle = self.inaugurate(attrs['hlink'], "person", Person) else: # old style XML handle = self.inaugurate_id(attrs.get('ref'), PERSON_KEY, Person) # If that were the case then childref_map has the childref ready if (self.family.handle, handle) in self.childref_map: self.family.add_child_ref( self.childref_map[(self.family.handle, handle)]) def start_childref(self, attrs): """ Add a child reference to the family currently processed. Here we are handling the new XML, in which frel and mrel belong to the "childref" tag under family. """ self.childref = ChildRef() handle = self.inaugurate(attrs['hlink'], "person", Person) self.childref.ref = handle self.childref.private = bool(attrs.get('priv')) mrel = ChildRefType() if attrs.get('mrel'): mrel.set_from_xml_str(attrs['mrel']) frel = ChildRefType() if attrs.get('frel'): frel.set_from_xml_str(attrs['frel']) if not mrel.is_default(): self.childref.set_mother_relation(mrel) if not frel.is_default(): self.childref.set_father_relation(frel) self.family.add_child_ref(self.childref) def start_personref(self, attrs): """ Add a person reference to the person currently processed. 
""" self.personref = PersonRef() if 'hlink' in attrs: handle = self.inaugurate(attrs['hlink'], "person", Person) else: # there is no old style XML raise GrampsImportError(_("The Gramps Xml you are trying to " "import is malformed."), _("Any person reference must have a " "'hlink' attribute.")) self.personref.ref = handle self.personref.private = bool(attrs.get('priv')) self.personref.rel = attrs['rel'] self.person.add_person_ref(self.personref) def start_url(self, attrs): if "href" not in attrs: return url = Url() url.path = attrs["href"] url.set_description(attrs.get("description", '')) url.private = bool(attrs.get('priv')) url.type.set_from_xml_str(attrs.get('type', '')) if self.person: self.person.add_url(url) elif self.placeobj: self.placeobj.add_url(url) elif self.repo: self.repo.add_url(url) def start_family(self, attrs): """ Add a family object to db if it doesn't exist yet and assign id, privacy and changetime. """ self.update(self.p.CurrentLineNumber) self.family = Family() if 'handle' in attrs: orig_handle = attrs['handle'].replace('_', '') is_merge_candidate = (self.replace_import_handle and self.db.has_family_handle(orig_handle)) self.inaugurate(orig_handle, "family", self.family) gramps_id = self.legalize_id(attrs.get('id'), FAMILY_KEY, self.fidswap, self.db.fid2user_format, self.db.find_next_family_gramps_id, self.db.has_family_gramps_id) self.family.set_gramps_id(gramps_id) if is_merge_candidate: orig_family = self.db.get_family_from_handle(orig_handle) self.info.add('merge-candidate', FAMILY_KEY, orig_family, self.family) else: # old style XML self.inaugurate_id(attrs.get('id'), FAMILY_KEY, self.family) self.family.private = bool(attrs.get("priv")) self.family.change = int(attrs.get('change', self.change)) self.info.add('new-object', FAMILY_KEY, self.family) # Gramps LEGACY: the type now belongs to <rel> tag # Here we need to support old format of <family type="Married"> if 'type' in attrs: self.family.type.set_from_xml_str(attrs["type"]) 
self.convert_marker(attrs, self.family) if self.default_tag: self.family.add_tag(self.default_tag.handle) return self.family def start_rel(self, attrs): if 'type' in attrs: self.family.type.set_from_xml_str(attrs["type"]) def start_file(self, attrs): self.object.mime = attrs['mime'] if 'description' in attrs: self.object.desc = attrs['description'] else: self.object.desc = "" #keep value of path, no longer make absolute paths on import src = attrs["src"] if src: self.object.path = src if self.all_abs and not os.path.isabs(src): self.all_abs = False self.info.add('relative-path', None, None) if 'checksum' in attrs: self.object.checksum = attrs['checksum'] else: if os.path.isabs(src): full_path = src else: full_path = os.path.join(self.mediapath, src) self.object.checksum = create_checksum(full_path) def start_childof(self, attrs): """ Add a family reference to the person currently processed in which that person is a child. """ if 'hlink' in attrs: handle = self.inaugurate(attrs['hlink'], "family", Family) else: # old style XML handle = self.inaugurate_id(attrs.get('ref'), FAMILY_KEY, Family) # Here we are handling the old XML, in which # frel and mrel belonged to the "childof" tag mrel = ChildRefType() frel = ChildRefType() if 'mrel' in attrs: mrel.set_from_xml_str(attrs['mrel']) if 'frel' in attrs: frel.set_from_xml_str(attrs['frel']) childref = ChildRef() childref.ref = self.person.handle if not mrel.is_default(): childref.set_mother_relation(mrel) if not frel.is_default(): childref.set_father_relation(frel) self.childref_map[(handle, self.person.handle)] = childref self.person.add_parent_family_handle(handle) def start_parentin(self, attrs): """ Add a family reference to the person currently processed in which that person is a parent. 
""" if 'hlink' in attrs: handle = self.inaugurate(attrs['hlink'], "family", Family) else: # old style XML handle = self.inaugurate_id(attrs.get('ref'), FAMILY_KEY, Family) self.person.add_family_handle(handle) def start_name(self, attrs): if self.person: self.start_person_name(attrs) if self.placeobj: # XML 1.7.0 self.start_place_name(attrs) def start_place_name(self, attrs): self.place_name = PlaceName() self.place_name.set_value(attrs["value"]) if "lang" in attrs: self.place_name.set_language(attrs["lang"]) if self.place_names == 0: self.placeobj.set_name(self.place_name) else: self.placeobj.add_alternative_name(self.place_name) self.place_names += 1 def start_person_name(self, attrs): if not self.in_witness: self.name = Name() name_type = attrs.get('type', "Birth Name") # Mapping "Other Name" from gramps 2.0.x to Unknown if (self.__xml_version == (1, 0, 0)) and (name_type == 'Other Name'): self.name.set_type(NameType.UNKNOWN) else: self.name.type.set_from_xml_str(name_type) self.name.private = bool(attrs.get("priv", 0)) self.alt_name = bool(attrs.get("alt", 0)) try: sort_as = int(attrs["sort"]) # check if these pointers need to be remapped # and set the name attributes if sort_as in self.name_formats_map: self.name.sort_as = self.name_formats_map[sort_as] else: self.name.sort_as = sort_as except KeyError: pass try: display_as = int(attrs["display"]) # check if these pointers need to be remapped # and set the name attributes if display_as in self.name_formats_map: self.name.display_as = self.name_formats_map[display_as] else: self.name.display_as = display_as except KeyError: pass def start_surname(self, attrs): self.surname = Surname() self.surname.set_prefix(attrs.get("prefix", "")) self.surname.set_primary(attrs.get("prim", "1") == "1") self.surname.set_connector(attrs.get("connector", "")) origin_type = attrs.get("derivation", "") self.surname.origintype.set_from_xml_str(origin_type) def start_namemap(self, attrs): type = attrs.get('type') key = attrs['key'] 
value = attrs['value'] if type == 'group_as': if self.db.has_name_group_key(key): present = self.db.get_name_group_mapping(key) if not value == present: msg = _('Your Family Tree groups name "%(key)s" together' ' with "%(parent)s", did not change this grouping to "%(value)s".') % { 'key' : key, 'parent' : present, 'value' : value } self.user.warn(_("Gramps ignored a name grouping"), msg) elif value != 'None': # None test fixes file corrupted by 11011 self.db.set_name_group_mapping(key, value) def start_last(self, attrs): """ This is the element in version < 1.4.0 to do the surname""" self.surname = Surname() self.surname.prefix = attrs.get('prefix', '') self.name.group_as = attrs.get('group', '') def start_patronymic(self, attrs): """ This is the element in version < 1.4.0 to do the patronymic""" self.surnamepat = Surname() self.surnamepat.set_origintype(NameOriginType( NameOriginType.PATRONYMIC)) def start_style(self, attrs): """ Styled text tag in notes (v1.4.0 onwards). """ tagtype = StyledTextTagType() tagtype.set_from_xml_str(attrs['name'].lower()) try: val = attrs['value'] match = self.grampsuri.match(val) if match: target = {"Person" : "person", "Family" : "family", "Event" : "event", "Place" : "place", "Source" : "source", "Citation" : "citation", "Repository" : "repository", "Media" : "media", "Note" : "note"}[str(match.group('object_class'))] if match.group('handle') in self.import_handles: if target in self.import_handles[match.group('handle')]: val = "gramps://%s/handle/%s" % ( match.group('object_class'), self.import_handles[match.group('handle')] [target][HANDLE]) tagvalue = StyledTextTagType.STYLE_TYPE[int(tagtype)](val) except KeyError: tagvalue = None except ValueError: return self.note_tags.append(StyledTextTag(tagtype, tagvalue)) def start_tag(self, attrs): """ Tag definition. 
""" if self.note is not None: # Styled text tag in notes (prior to v1.4.0) self.start_style(attrs) return # Tag defintion self.tag = Tag() self.inaugurate(attrs['handle'], "tag", self.tag) self.tag.change = int(attrs.get('change', self.change)) self.info.add('new-object', TAG_KEY, self.tag) self.tag.set_name(attrs.get('name', _('Unknown when imported'))) self.tag.set_color(attrs.get('color', '#000000000000')) self.tag.set_priority(int(attrs.get('priority', 0))) return self.tag def stop_tag(self, *tag): if self.note is not None: # Styled text tag in notes (prior to v1.4.0) return self.db.commit_tag(self.tag, self.trans, self.tag.get_change_time()) self.tag = None def start_tagref(self, attrs): """ Tag reference in a primary object. """ handle = self.inaugurate(attrs['hlink'], "tag", Tag) if self.person: self.person.add_tag(handle) if self.family: self.family.add_tag(handle) if self.object: self.object.add_tag(handle) if self.note: self.note.add_tag(handle) if self.event: self.event.add_tag(handle) if self.placeobj: self.placeobj.add_tag(handle) if self.repo: self.repo.add_tag(handle) if self.source: self.source.add_tag(handle) if self.citation: self.citation.add_tag(handle) def start_range(self, attrs): self.note_tags[-1].ranges.append((int(attrs['start']), int(attrs['end']))) def start_note(self, attrs): """ Add a note to db if it doesn't exist yet and assign id, privacy, changetime, format and type. 
""" self.in_note = 0 if 'handle' in attrs: # This is new note, with ID and handle already existing self.update(self.p.CurrentLineNumber) self.note = Note() if 'handle' in attrs: orig_handle = attrs['handle'].replace('_', '') is_merge_candidate = (self.replace_import_handle and self.db.has_note_handle(orig_handle)) self.inaugurate(orig_handle, "note", self.note) gramps_id = self.legalize_id(attrs.get('id'), NOTE_KEY, self.nidswap, self.db.nid2user_format, self.db.find_next_note_gramps_id, self.db.has_note_gramps_id) self.note.set_gramps_id(gramps_id) if is_merge_candidate: orig_note = self.db.get_note_from_handle(orig_handle) self.info.add('merge-candicate', NOTE_KEY, orig_note, self.note) else: self.inaugurate_id(attrs.get('id'), NOTE_KEY, self.note) self.note.private = bool(attrs.get("priv")) self.note.change = int(attrs.get('change', self.change)) self.info.add('new-object', NOTE_KEY, self.note) self.note.format = int(attrs.get('format', Note.FLOWED)) self.note.type.set_from_xml_str(attrs.get('type', NoteType.UNKNOWN)) self.convert_marker(attrs, self.note) # Since StyledText was introduced (XML v1.3.0) the clear text # part of the note is moved between <text></text> tags. # To catch the different versions here we reset the note_text # variable. It will be checked in stop_note() then. self.note_text = None self.note_tags = [] else: # Gramps LEGACY: old notes that were written inside other objects # We need to create a top-level note, it's type depends on # the caller object, and inherits privacy from caller object # On stop_note the reference to this note will be added self.note = Note() self.note.handle = create_id() self.note.format = int(attrs.get('format', Note.FLOWED)) # The order in this long if-then statement should reflect the # DTD: most deeply nested elements come first. 
if self.citation: self.note.type.set(NoteType.CITATION) self.note.private = self.citation.private elif self.address: self.note.type.set(NoteType.ADDRESS) self.note.private = self.address.private elif self.ord: self.note.type.set(NoteType.LDS) self.note.private = self.ord.private elif self.attribute: self.note.type.set(NoteType.ATTRIBUTE) self.note.private = self.attribute.private elif self.object: self.note.type.set(NoteType.MEDIA) self.note.private = self.object.private elif self.objref: self.note.type.set(NoteType.MEDIAREF) self.note.private = self.objref.private elif self.photo: self.note.type.set(NoteType.MEDIA) self.note.private = self.photo.private elif self.name: self.note.type.set(NoteType.PERSONNAME) self.note.private = self.name.private elif self.eventref: self.note.type.set(NoteType.EVENTREF) self.note.private = self.eventref.private elif self.reporef: self.note.type.set(NoteType.REPOREF) self.note.private = self.reporef.private elif self.source: self.note.type.set(NoteType.SOURCE) self.note.private = self.source.private elif self.event: self.note.type.set(NoteType.EVENT) self.note.private = self.event.private elif self.personref: self.note.type.set(NoteType.ASSOCIATION) self.note.private = self.personref.private elif self.person: self.note.type.set(NoteType.PERSON) self.note.private = self.person.private elif self.childref: self.note.type.set(NoteType.CHILDREF) self.note.private = self.childref.private elif self.family: self.note.type.set(NoteType.FAMILY) self.note.private = self.family.private elif self.placeobj: self.note.type.set(NoteType.PLACE) self.note.private = self.placeobj.private elif self.repo: self.note.type.set(NoteType.REPO) self.note.private = self.repo.private self.db.add_note(self.note, self.trans) #set correct change time self.db.commit_note(self.note, self.trans, self.change) self.info.add('new-object', NOTE_KEY, self.note) if self.default_tag: self.note.add_tag(self.default_tag.handle) return self.note def start_noteref(self, attrs): 
        """
        Add a note reference to the object currently processed.
        """
        if 'hlink' in attrs:
            handle = self.inaugurate(attrs['hlink'], "note", Note)
        else:
            raise GrampsImportError(_("The Gramps Xml you are trying to "
                                      "import is malformed."),
                                    _("Any note reference must have a "
                                      "'hlink' attribute."))

        # The order in this long if-then statement should reflect the
        # DTD: most deeply nested elements come first.
        if self.citation:
            self.citation.add_note(handle)
        elif self.address:
            self.address.add_note(handle)
        elif self.ord:
            self.ord.add_note(handle)
        elif self.attribute:
            self.attribute.add_note(handle)
        elif self.object:
            self.object.add_note(handle)
        elif self.objref:
            self.objref.add_note(handle)
        elif self.photo:
            self.photo.add_note(handle)
        elif self.name:
            self.name.add_note(handle)
        elif self.eventref:
            self.eventref.add_note(handle)
        elif self.reporef:
            self.reporef.add_note(handle)
        elif self.source:
            self.source.add_note(handle)
        elif self.event:
            self.event.add_note(handle)
        elif self.personref:
            self.personref.add_note(handle)
        elif self.person:
            self.person.add_note(handle)
        elif self.childref:
            self.childref.add_note(handle)
        elif self.family:
            self.family.add_note(handle)
        elif self.placeobj:
            self.placeobj.add_note(handle)
        elif self.repo:
            self.repo.add_note(handle)

    def __add_citation(self, citation_handle):
        """
        Add a citation to the object currently processed.
        """
        # The order in this if-then chain mirrors DTD nesting depth,
        # like start_noteref above.
        if self.photo:
            self.photo.add_citation(citation_handle)
        elif self.ord:
            self.ord.add_citation(citation_handle)
        elif self.attribute:
            self.attribute.add_citation(citation_handle)
        elif self.object:
            self.object.add_citation(citation_handle)
        elif self.objref:
            self.objref.add_citation(citation_handle)
        elif self.event:
            self.event.add_citation(citation_handle)
        elif self.address:
            self.address.add_citation(citation_handle)
        elif self.name:
            self.name.add_citation(citation_handle)
        elif self.placeobj:
            self.placeobj.add_citation(citation_handle)
        elif self.childref:
            self.childref.add_citation(citation_handle)
        elif self.family:
            self.family.add_citation(citation_handle)
        elif self.personref:
            self.personref.add_citation(citation_handle)
        elif self.person:
            self.person.add_citation(citation_handle)

    def start_citationref(self, attrs):
        """
        Add a citation reference to the object currently processed.
        """
        handle = self.inaugurate(attrs['hlink'], "citation",
                                 Citation)
        self.__add_citation(handle)

    def start_citation(self, attrs):
        """
        Add a citation object to db if it doesn't exist yet and assign
        id, privacy and changetime.
        """
        self.update(self.p.CurrentLineNumber)
        self.citation = Citation()
        orig_handle = attrs['handle'].replace('_', '')
        is_merge_candidate = (self.replace_import_handle and
                              self.db.has_citation_handle(orig_handle))
        self.inaugurate(orig_handle, "citation", self.citation)
        gramps_id = self.legalize_id(attrs.get('id'), CITATION_KEY,
                                     self.cidswap, self.db.cid2user_format,
                                     self.db.find_next_citation_gramps_id,
                                     self.db.has_citation_gramps_id)
        self.citation.set_gramps_id(gramps_id)
        if is_merge_candidate:
            orig_citation = self.db.get_citation_from_handle(orig_handle)
            self.info.add('merge-candidate', CITATION_KEY, orig_citation,
                          self.citation)
        self.citation.private = bool(attrs.get("priv"))
        self.citation.change = int(attrs.get('change', self.change))
        self.citation.confidence = (
            self.conf if self.__xml_version >= (1, 5, 1)
            else 0 ) # See bug# 7125
        self.info.add('new-object', CITATION_KEY, self.citation)
        if self.default_tag:
            self.citation.add_tag(self.default_tag.handle)
        return self.citation

    def start_sourceref(self, attrs):
        """
        Add a source reference to the object currently processed.
        """
        if 'hlink' in attrs:
            handle = self.inaugurate(attrs['hlink'], "source", Source)
        else:
            handle = self.inaugurate_id(attrs.get('ref'), SOURCE_KEY,
                                        Source)
        if self.citation:
            self.citation.set_reference_handle(handle)
        else:
            # Gramps LEGACY: Prior to v1.5.0 there were no citation objects.
            # We need to copy the contents of the old SourceRef into a new
            # Citation object.
            self.in_old_sourceref = True
            self.citation = Citation()
            self.citation.set_reference_handle(handle)
            self.citation.confidence = int(attrs.get("conf", self.conf))
            self.citation.private = bool(attrs.get("priv"))
            citation_handle = self.db.add_citation(self.citation,
                                                   self.trans)
            self.__add_citation(citation_handle)

    def start_source(self, attrs):
        """
        Add a source object to db if it doesn't exist yet and assign
        id, privacy and changetime.
        """
        self.update(self.p.CurrentLineNumber)
        self.source = Source()
        if 'handle' in attrs:
            orig_handle = attrs['handle'].replace('_', '')
            is_merge_candidate = (self.replace_import_handle and
                                  self.db.has_source_handle(orig_handle))
            self.inaugurate(orig_handle, "source", self.source)
            gramps_id = self.legalize_id(attrs.get('id'), SOURCE_KEY,
                                         self.sidswap, self.db.sid2user_format,
                                         self.db.find_next_source_gramps_id,
                                         self.db.has_source_gramps_id)
            self.source.set_gramps_id(gramps_id)
            if is_merge_candidate:
                orig_source = self.db.get_source_from_handle(orig_handle)
                self.info.add('merge-candidate', SOURCE_KEY, orig_source,
                              self.source)
        else: # old style XML
            self.inaugurate_id(attrs.get('id'), SOURCE_KEY, self.source)
        self.source.private = bool(attrs.get("priv"))
        self.source.change = int(attrs.get('change', self.change))
        self.info.add('new-object', SOURCE_KEY, self.source)
        if self.default_tag:
            self.source.add_tag(self.default_tag.handle)
        return self.source

    def start_reporef(self, attrs):
        """
        Add a repository reference to the source currently processed.
        """
        self.reporef = RepoRef()
        if 'hlink' in attrs:
            handle = self.inaugurate(attrs['hlink'], "repository",
                                     Repository)
        else: # old style XML
            handle = self.inaugurate_id(attrs.get('ref'), REPOSITORY_KEY,
                                        Repository)
        self.reporef.ref = handle
        self.reporef.call_number = attrs.get('callno', '')
        if 'medium' in attrs:
            self.reporef.media_type.set_from_xml_str(attrs['medium'])
        self.reporef.private = bool(attrs.get("priv"))
        # we count here on self.source being available
        # reporefs can only be found within source
        self.source.add_repo_reference(self.reporef)

    def start_objref(self, attrs):
        """
        Add a media object reference to the object currently processed.
        """
        self.objref = MediaRef()
        if 'hlink' in attrs:
            handle = self.inaugurate(attrs['hlink'], "media",
                                     Media)
        else: # old style XML
            handle = self.inaugurate_id(attrs.get('ref'), MEDIA_KEY,
                                        Media)
        self.objref.ref = handle
        self.objref.private = bool(attrs.get('priv'))
        if self.event:
            self.event.add_media_reference(self.objref)
        elif self.family:
            self.family.add_media_reference(self.objref)
        elif self.source:
            self.source.add_media_reference(self.objref)
        elif self.person:
            self.person.add_media_reference(self.objref)
        elif self.placeobj:
            self.placeobj.add_media_reference(self.objref)
        elif self.citation:
            self.citation.add_media_reference(self.objref)

    def start_region(self, attrs):
        """Crop rectangle of the media reference currently being parsed."""
        rect = (int(attrs.get('corner1_x')),
                int(attrs.get('corner1_y')),
                int(attrs.get('corner2_x')),
                int(attrs.get('corner2_y')) )
        self.objref.set_rectangle(rect)

    def start_media(self, attrs):
        """
        Add a media object to db if it doesn't exist yet and assign
        id, privacy and changetime.
        """
        self.object = Media()
        if 'handle' in attrs:
            orig_handle = attrs['handle'].replace('_', '')
            is_merge_candidate = (self.replace_import_handle and
                                  self.db.has_media_handle(orig_handle))
            self.inaugurate(orig_handle, "media", self.object)
            gramps_id = self.legalize_id(attrs.get('id'), MEDIA_KEY,
                                         self.oidswap, self.db.oid2user_format,
                                         self.db.find_next_media_gramps_id,
                                         self.db.has_media_gramps_id)
            self.object.set_gramps_id(gramps_id)
            if is_merge_candidate:
                orig_media = self.db.get_media_from_handle(orig_handle)
                self.info.add('merge-candidate', MEDIA_KEY, orig_media,
                              self.object)
        else:
            self.inaugurate_id(attrs.get('id'), MEDIA_KEY, self.object)
        self.object.private = bool(attrs.get("priv"))
        self.object.change = int(attrs.get('change', self.change))
        self.info.add('new-object', MEDIA_KEY, self.object)

        # Gramps LEGACY: src, mime, and description attributes
        # now belong to the <file> tag. Here we are supporting
        # the old format of <object src="blah"...>
        self.object.mime = attrs.get('mime', '')
        self.object.desc = attrs.get('description', '')
        src = attrs.get("src", '')
        if src:
            self.object.path = src
        if self.default_tag:
            self.object.add_tag(self.default_tag.handle)
        return self.object

    def start_repo(self, attrs):
        """
        Add a repository to db if it doesn't exist yet and assign
        id, privacy and changetime.
        """
        self.repo = Repository()
        if 'handle' in attrs:
            orig_handle = attrs['handle'].replace('_', '')
            is_merge_candidate = (self.replace_import_handle and
                                  self.db.has_repository_handle(orig_handle))
            self.inaugurate(orig_handle, "repository", self.repo)
            gramps_id = self.legalize_id(attrs.get('id'), REPOSITORY_KEY,
                                         self.ridswap, self.db.rid2user_format,
                                         self.db.find_next_repository_gramps_id,
                                         self.db.has_repository_gramps_id)
            self.repo.set_gramps_id(gramps_id)
            if is_merge_candidate:
                orig_repo = self.db.get_repository_from_handle(orig_handle)
                self.info.add('merge-candidate', REPOSITORY_KEY, orig_repo,
                              self.repo)
        else: # old style XML
            self.inaugurate_id(attrs.get('id'), REPOSITORY_KEY, self.repo)
        self.repo.private = bool(attrs.get("priv"))
        self.repo.change = int(attrs.get('change', self.change))
        self.info.add('new-object', REPOSITORY_KEY, self.repo)
        if self.default_tag:
            self.repo.add_tag(self.default_tag.handle)
        return self.repo

    def stop_people(self, *tag):
        """End of the <people> section: nothing to finalize."""
        pass

    def stop_database(self, *tag):
        """End of the document: report final progress."""
        self.update(self.p.CurrentLineNumber)

    def stop_media(self, *tag):
        """Commit the media object being parsed."""
        self.db.commit_media(self.object, self.trans,
                             self.object.get_change_time())
        self.object = None

    def stop_objref(self, *tag):
        """End of a media reference."""
        self.objref = None

    def stop_repo(self, *tag):
        """Commit the repository being parsed."""
        self.db.commit_repository(self.repo, self.trans,
                                  self.repo.get_change_time())
        self.repo = None

    def stop_reporef(self, *tag):
        """End of a repository reference."""
        self.reporef = None

    def start_photo(self, attrs):
        """
        Gramps LEGACY <img>: build a Media object plus a reference to it
        from the attributes; unknown attributes become Media attributes.
        """
        self.photo = Media()
        self.pref = MediaRef()
        self.pref.set_reference_handle(self.photo.get_handle())
        for key in list(attrs.keys()):
            if key == "descrip" or key == \
"description": self.photo.set_description(attrs[key]) elif key == "priv": self.pref.set_privacy(int(attrs[key])) self.photo.set_privacy(int(attrs[key])) elif key == "src": src = attrs["src"] self.photo.set_path(src) else: attr = Attribute() attr.set_type(key) attr.set_value(attrs[key]) self.photo.add_attribute(attr) self.photo.set_mime_type(get_type(self.photo.get_path())) self.db.add_media(self.photo, self.trans) #set correct change time self.db.commit_media(self.photo, self.trans, self.change) self.info.add('new-object', MEDIA_KEY, self.photo) if self.family: self.family.add_media_reference(self.pref) elif self.source: self.source.add_media_reference(self.pref) elif self.person: self.person.add_media_reference(self.pref) elif self.placeobj: self.placeobj.add_media_reference(self.pref) def start_daterange(self, attrs): self.start_compound_date(attrs, Date.MOD_RANGE) def start_datespan(self, attrs): self.start_compound_date(attrs, Date.MOD_SPAN) def start_compound_date(self, attrs, mode): if self.citation: date_value = self.citation.get_date_object() elif self.ord: date_value = self.ord.get_date_object() elif self.object: date_value = self.object.get_date_object() elif self.address: date_value = self.address.get_date_object() elif self.name: date_value = self.name.get_date_object() elif self.event: date_value = self.event.get_date_object() elif self.placeref: date_value = self.placeref.get_date_object() elif self.place_name: date_value = self.place_name.get_date_object() start = attrs['start'].split('-') stop = attrs['stop'].split('-') try: year = int(start[0]) except ValueError: year = 0 try: month = int(start[1]) except: month = 0 try: day = int(start[2]) except: day = 0 try: rng_year = int(stop[0]) except: rng_year = 0 try: rng_month = int(stop[1]) except: rng_month = 0 try: rng_day = int(stop[2]) except: rng_day = 0 if "cformat" in attrs: cal = Date.calendar_names.index(attrs['cformat']) else: cal = Date.CAL_GREGORIAN if 'quality' in attrs: val = 
attrs['quality'] if val == 'estimated': qual = Date.QUAL_ESTIMATED elif val == 'calculated': qual = Date.QUAL_CALCULATED else: qual = Date.QUAL_NONE else: qual = Date.QUAL_NONE dualdated = False if 'dualdated' in attrs: val = attrs['dualdated'] if val == "1": dualdated = True newyear = Date.NEWYEAR_JAN1 if 'newyear' in attrs: newyear = attrs['newyear'] if newyear.isdigit(): newyear = int(newyear) else: newyear = Date.newyear_to_code(newyear) try: date_value.set(qual, mode, cal, (day, month, year, dualdated, rng_day, rng_month, rng_year, dualdated), newyear=newyear) except DateError as e: self._set_date_to_xml_text(date_value, e, xml_element_name = ("datespan" if mode == Date.MOD_SPAN else "daterange"), xml_attrs = attrs) def start_dateval(self, attrs): if self.citation: date_value = self.citation.get_date_object() elif self.ord: date_value = self.ord.get_date_object() elif self.object: date_value = self.object.get_date_object() elif self.address: date_value = self.address.get_date_object() elif self.name: date_value = self.name.get_date_object() elif self.event: date_value = self.event.get_date_object() elif self.placeref: date_value = self.placeref.get_date_object() elif self.place_name: date_value = self.place_name.get_date_object() bce = 1 val = attrs['val'] if val[0] == '-': bce = -1 val = val[1:] start = val.split('-') try: year = int(start[0])*bce except: year = 0 try: month = int(start[1]) except: month = 0 try: day = int(start[2]) except: day = 0 if "cformat" in attrs: cal = Date.calendar_names.index(attrs['cformat']) else: cal = Date.CAL_GREGORIAN if 'type' in attrs: val = attrs['type'] if val == "about": mod = Date.MOD_ABOUT elif val == "after": mod = Date.MOD_AFTER else: mod = Date.MOD_BEFORE else: mod = Date.MOD_NONE if 'quality' in attrs: val = attrs['quality'] if val == 'estimated': qual = Date.QUAL_ESTIMATED elif val == 'calculated': qual = Date.QUAL_CALCULATED else: qual = Date.QUAL_NONE else: qual = Date.QUAL_NONE dualdated = False if 'dualdated' 
in attrs: val = attrs['dualdated'] if val == "1": dualdated = True newyear = Date.NEWYEAR_JAN1 if 'newyear' in attrs: newyear = attrs['newyear'] if newyear.isdigit(): newyear = int(newyear) else: newyear = Date.newyear_to_code(newyear) try: date_value.set(qual, mod, cal, (day, month, year, dualdated), newyear=newyear) except DateError as e: self._set_date_to_xml_text(date_value, e, 'dateval', attrs) def _set_date_to_xml_text(self, date_value, date_error, xml_element_name, xml_attrs): """ Common handling of invalid dates for the date... element handlers. Prints warning on console and sets date_value to a text-only date with the problematic XML inside. """ xml = "<{element_name} {attrs}/>".format( element_name = xml_element_name, attrs = " ".join( ['{}="{}"'.format(k,escape(v, entities={'"' : "&quot;"})) for k,v in xml_attrs.items()])) # Translators: leave the {date} and {xml} untranslated in the format string, # but you may re-order them if needed. LOG.warning(_("Invalid date {date} in XML {xml}, preserving XML as text" ).format(date=date_error.date.__dict__, xml=xml)) date_value.set(modifier=Date.MOD_TEXTONLY, text=xml) def start_datestr(self, attrs): if self.citation: date_value = self.citation.get_date_object() elif self.ord: date_value = self.ord.get_date_object() elif self.object: date_value = self.object.get_date_object() elif self.address: date_value = self.address.get_date_object() elif self.name: date_value = self.name.get_date_object() elif self.event: date_value = self.event.get_date_object() elif self.placeref: date_value = self.placeref.get_date_object() else: date_value = self.place_name.get_date_object() date_value.set_as_text(attrs['val']) def start_pos(self, attrs): self.person.position = (int(attrs["x"]), int(attrs["y"])) def stop_attribute(self, *tag): self.attribute = None def stop_srcattribute(self, *tag): self.srcattribute = None def stop_comment(self, tag): # Parse witnesses created by older gramps if tag.strip(): self.witness_comment = tag 
        else:
            self.witness_comment = ""

    def stop_witness(self, tag):
        """
        Parse witnesses created by older gramps: turn the witness comment
        (or the element text) into an EVENT note on the current event.
        """
        # Parse witnesses created by older gramps
        if self.witness_comment:
            text = self.witness_comment
        elif tag.strip():
            text = tag
        else:
            text = None

        if text is not None:
            note = Note()
            note.handle = create_id()
            note.set(_("Witness comment: %s") % text)
            note.type.set(NoteType.EVENT)
            note.private = self.event.private
            self.db.add_note(note, self.trans)
            #set correct change time
            self.db.commit_note(note, self.trans, self.change)
            self.info.add('new-object', NOTE_KEY, note)
            self.event.add_note(note.handle)
        self.in_witness = False

    def stop_attr_type(self, tag):
        self.attribute.set_type(tag)

    def stop_attr_value(self, tag):
        self.attribute.set_value(tag)

    def stop_address(self, *tag):
        """Attach the finished address to its owner (person or repo)."""
        if self.person:
            self.person.add_address(self.address)
        elif self.repo:
            self.repo.add_address(self.address)
        self.address = None

    def stop_places(self, *tag):
        self.placeobj = None
        if self.__xml_version < (1, 6, 0):
            # Older XML has no place hierarchy; build one now.
            self.place_import.generate_hierarchy(self.trans)

    def stop_photo(self, *tag):
        self.photo = None

    def stop_ptitle(self, tag):
        self.placeobj.title = tag

    def stop_code(self, tag):
        self.placeobj.code = tag

    def stop_alt_name(self, tag):
        place_name = PlaceName()
        place_name.set_value(tag)
        self.placeobj.add_alternative_name(place_name)

    def stop_placeobj(self, *tag):
        """Commit the place; fall back to the title as its name."""
        if self.placeobj.name.get_value() == '':
            self.placeobj.name.set_value(self.placeobj.title)
        self.db.commit_place(self.placeobj, self.trans,
                             self.placeobj.get_change_time())
        self.placeobj = None

    def stop_family(self, *tag):
        """Commit the family being parsed."""
        self.db.commit_family(self.family, self.trans,
                              self.family.get_change_time())
        self.family = None

    def stop_type(self, tag):
        if self.event:
            # Event type
            self.event.type.set_from_xml_str(tag)
        elif self.repo:
            # Repository type
            self.repo.type.set_from_xml_str(tag)

    def stop_childref(self, tag):
        self.childref = None

    def stop_personref(self, tag):
        self.personref = None

    def stop_eventref(self, tag):
        self.eventref = None

    def stop_placeref(self, tag):
        self.placeref = None

    def stop_event(self, *tag):
        """
        Commit the event; when inside a family or person, also create the
        corresponding EventRef (birth/death refs fill the dedicated slots).
        """
        if self.family:
            ref = EventRef()
            ref.ref = self.event.handle
            ref.private = self.event.private
            ref.role.set(EventRoleType.FAMILY)
            self.family.add_event_ref(ref)
        elif self.person:
            ref = EventRef()
            ref.ref = self.event.handle
            ref.private = self.event.private
            ref.role.set(EventRoleType.PRIMARY)
            if (self.event.type == EventType.BIRTH) \
                   and (self.person.get_birth_ref() is None):
                self.person.set_birth_ref(ref)
            elif (self.event.type == EventType.DEATH) \
                    and (self.person.get_death_ref() is None):
                self.person.set_death_ref(ref)
            else:
                self.person.add_event_ref(ref)

        if self.event.get_description() == "" and \
               self.event.get_type() != EventType.CUSTOM:
            if self.family:
                text = EVENT_FAMILY_STR % {
                    'event_name' : str(self.event.get_type()),
                    'family' : family_name(self.family, self.db),
                    }
            elif self.person:
                text = EVENT_PERSON_STR % {
                    'event_name' : str(self.event.get_type()),
                    'person' : name_displayer.display(self.person),
                    }
            else:
                text = ''
            self.event.set_description(text)

        self.db.commit_event(self.event, self.trans,
                             self.event.get_change_time())
        self.event = None

    def stop_name(self, attrs):
        if self.person:
            self.stop_person_name(attrs)
        if self.placeobj: # XML 1.7.0
            self.stop_place_name(attrs)

    def stop_place_name(self, tag):
        self.place_name = None

    def stop_person_name(self, tag):
        """
        Finish a personal name: attach surname/patronymic and store it as
        the primary or an alternate name of the person.
        """
        if self.in_witness:
            # Parse witnesses created by older gramps
            note = Note()
            note.handle = create_id()
            note.set(_("Witness name: %s") % tag)
            note.type.set(NoteType.EVENT)
            note.private = self.event.private
            self.db.add_note(note, self.trans)
            #set correct change time
            self.db.commit_note(note, self.trans, self.change)
            self.info.add('new-object', NOTE_KEY, note)
            self.event.add_note(note.handle)
        else:
            #first correct old xml that has no nametype set
            if self.alt_name:
                # alternate name or former aka tag
                if self.name.get_type() == "":
                    self.name.set_type(NameType.AKA)
            else:
                if self.name.get_type() == "":
                    self.name.set_type(NameType.BIRTH)

            #same logic as bsddb upgrade for xml < 1.4.0 which will
            #have a surnamepat and/or surname. From 1.4.0 surname has been
            #added to name in self.stop_surname
            if not self.surnamepat:
                #no patronymic, only add surname if present
                if self.surname:
                    self.name.add_surname(self.surname)
                    self.name.set_primary_surname(0)
            else:
                #a patronymic, if no surname, a single surname
                if not self.surname:
                    self.name.add_surname(self.surnamepat)
                    self.name.set_primary_surname(0)
                else:
                    #two surnames, first patronymic, then surname which is primary
                    self.name.add_surname(self.surnamepat)
                    self.name.add_surname(self.surname)
                    self.name.set_primary_surname(1)
            if self.alt_name:
                self.person.add_alternate_name(self.name)
            else:
                self.person.set_primary_name(self.name)
        self.name = None
        self.surname = None
        self.surnamepat = None

    def stop_aka(self, tag):
        """
        Gramps LEGACY <aka>: store as an alternate name, reusing the
        surname/patronymic combination logic of stop_person_name.
        """
        if self.name.get_type() == "":
            self.name.set_type(NameType.AKA)
        if not self.surnamepat:
            #no patronymic, only add surname if present
            if self.surname:
                self.name.add_surname(self.surname)
                self.name.set_primary_surname(0)
        else:
            #a patronymic, if no surname, a single surname
            if not self.surname:
                self.name.add_surname(self.surnamepat)
                self.name.set_primary_surname(0)
            else:
                #two surnames, first patronymic, then surname which is primary
                self.name.add_surname(self.surnamepat)
                self.name.add_surname(self.surname)
                self.name.set_primary_surname(1)
        self.person.add_alternate_name(self.name)
        self.name = None

    def stop_rname(self, tag):
        # Repository name
        self.repo.name = tag

    def stop_ref(self, tag):
        """
        Parse witnesses created by older gramps
        """
        person = Person()
        self.inaugurate_id(tag, PERSON_KEY, person)
        # Add an EventRef from that person
        # to this event using ROLE_WITNESS role
        event_ref = EventRef()
        event_ref.ref = self.event.handle
        event_ref.role.set(EventRoleType.WITNESS)
        person.event_ref_list.append(event_ref)
        self.db.commit_person(person, self.trans, self.change)

    def stop_place(self, tag):
        """end of a reference to place, should do nothing ...
           Note, if we encounter <place>blabla</place> this method is called
                with tag='blabla
        """
        ##place = None
        ##handle = None
        ##if self.place_ref is None:  #todo, add place_ref in start and init
        ##    #legacy cody? I see no reason for this, but it was present
        ##    if tag in self.place_map:
        ##        place = self.place_map[tag]
        ##        handle = place.get_handle()
        ##        place = None
        ##    else:
        ##        place = RelLib.Place()
        ##        place.set_title(tag)
        ##        handle = place.get_handle()
        ##    if self.ord:
        ##        self.ord.set_place_handle(handle)
        ##    elif self.object:
        ##        self.object.set_place_handle(handle)
        ##    else:
        ##        self.event.set_place_handle(handle)
        ##    if place :
        ##        self.db.commit_place(self.placeobj,self.trans,self.change)
        ##self.place_ref = None
        pass

    def stop_date(self, tag):
        if tag:
            if self.address:
                set_date(self.address, tag)
            else:
                set_date(self.event, tag)

    def stop_first(self, tag):
        # bug 9242
        if len(tag.splitlines()) != 1:
            tag = "".join(tag.splitlines())
        self.name.set_first_name(tag)

    def stop_call(self, tag):
        self.name.set_call_name(tag)

    def stop_families(self, *tag):
        self.family = None

    def stop_person(self, *tag):
        """Commit the person being parsed."""
        self.db.commit_person(self.person, self.trans,
                              self.person.get_change_time())
        self.person = None

    def stop_description(self, tag):
        self.event.set_description(tag)

    def stop_cause(self, tag):
        # The old event's cause is now an attribute
        attr = Attribute()
        attr.set_type(AttributeType.CAUSE)
        attr.set_value(tag)
        self.event.add_attribute(attr)

    def stop_gender(self, tag):
        t = tag
        if t == "M":
            self.person.set_gender (Person.MALE)
        elif t == "F":
            self.person.set_gender (Person.FEMALE)
        else:
            self.person.set_gender (Person.UNKNOWN)

    def stop_stitle(self, tag):
        self.source.title = tag

    def stop_sourceref(self, *tag):
        # if we are in an old style sourceref we need to commit the citation
        if self.in_old_sourceref:
            self.db.commit_citation(self.citation, self.trans,
                                    self.citation.get_change_time())
            self.citation = None
            self.in_old_sourceref = False

    def stop_source(self, *tag):
        self.db.commit_source(self.source,
self.trans, self.source.get_change_time()) self.source = None def stop_citation(self, *tag): self.db.commit_citation(self.citation, self.trans, self.citation.get_change_time()) self.citation = None def stop_sauthor(self, tag): self.source.author = tag def stop_phone(self, tag): self.address.phone = tag def stop_street(self, tag): self.address.street = tag def stop_locality(self, tag): self.address.locality = tag def stop_city(self, tag): self.address.city = tag def stop_county(self, tag): self.address.county = tag def stop_state(self, tag): self.address.state = tag def stop_country(self, tag): self.address.country = tag def stop_postal(self, tag): self.address.set_postal_code(tag) def stop_spage(self, tag): # Valid for version <= 1.4.0 self.citation.set_page(tag) def stop_page(self, tag): # Valid for version >= 1.5.0 self.citation.set_page(tag) def stop_confidence(self, tag): # Valid for version >= 1.5.0 self.citation.set_confidence_level(int(tag)) def stop_lds_ord(self, *tag): self.ord = None def stop_spubinfo(self, tag): self.source.set_publication_info(tag) def stop_sabbrev(self, tag): self.source.set_abbreviation(tag) def stop_stext(self, tag): if self.use_p: self.use_p = 0 text = fix_spaces(self.stext_list) else: text = tag # This is old XML. We no longer have "text" attribute in soure_ref. # So we create a new note, commit, and add the handle to note list. 
note = Note() note.handle = create_id() note.private = self.citation.private note.set(text) note.type.set(NoteType.SOURCE_TEXT) self.db.add_note(note, self.trans) #set correct change time self.db.commit_note(note, self.trans, self.change) self.info.add('new-object', NOTE_KEY, note) self.citation.add_note(note.handle) def stop_scomments(self, tag): if self.use_p: self.use_p = 0 text = fix_spaces(self.scomments_list) else: text = tag note = Note() note.handle = create_id() note.private = self.citation.private note.set(text) note.type.set(NoteType.CITATION) self.db.add_note(note, self.trans) #set correct change time self.db.commit_note(note, self.trans, self.change) self.info.add('new-object', NOTE_KEY, note) self.citation.add_note(note.handle) def stop_last(self, tag): if self.surname: self.surname.set_surname(tag) if not tag.strip() and not self.surname.get_prefix().strip(): #consider empty surname as no surname self.surname = None def stop_surname(self, tag): """Add surname to name, validating only one primary.""" if self.name: self.surname.set_surname(tag) if any(sname.get_primary() for sname in self.name.get_surname_list()): self.surname.set_primary(False) self.name.add_surname(self.surname) self.surname = None def stop_group(self, tag): """ group name of a name""" if self.name: self.name.set_group_as(tag) def stop_suffix(self, tag): if self.name: self.name.set_suffix(tag) def stop_patronymic(self, tag): if self.surnamepat: self.surnamepat.set_surname(tag) if not tag.strip(): self.surnamepat = None def stop_title(self, tag): if self.name: self.name.set_title(tag) def stop_nick(self, tag): """in < 1.3.0 nick is on person and mapped to attribute from 1.4.0 it is a name element """ if self.name: self.name.set_nick_name(tag) elif self.person: attr = Attribute() attr.set_type(AttributeType.NICKNAME) attr.set_value(tag) self.person.add_attribute(attr) def stop_familynick(self, tag): if self.name: self.name.set_family_nick_name(tag) def stop_text(self, tag): 
self.note_text = tag def stop_note(self, tag): self.in_note = 0 if self.use_p: self.use_p = 0 text = fix_spaces(self.note_list) elif self.note_text is not None: text = self.note_text else: text = tag self.note.set_styledtext(StyledText(text, self.note_tags)) # The order in this long if-then statement should reflect the # DTD: most deeply nested elements come first. if self.address: self.address.add_note(self.note.handle) elif self.ord: self.ord.add_note(self.note.handle) elif self.attribute: self.attribute.add_note(self.note.handle) elif self.object: self.object.add_note(self.note.handle) elif self.objref: self.objref.add_note(self.note.handle) elif self.photo: self.photo.add_note(self.note.handle) elif self.name: self.name.add_note(self.note.handle) elif self.eventref: self.eventref.add_note(self.note.handle) elif self.reporef: self.reporef.add_note(self.note.handle) elif self.source: self.source.add_note(self.note.handle) elif self.event: self.event.add_note(self.note.handle) elif self.personref: self.personref.add_note(self.note.handle) elif self.person: self.person.add_note(self.note.handle) elif self.childref: self.childref.add_note(self.note.handle) elif self.family: self.family.add_note(self.note.handle) elif self.placeobj: self.placeobj.add_note(self.note.handle) elif self.repo: self.repo.add_note(self.note.handle) self.db.commit_note(self.note, self.trans, self.note.get_change_time()) self.note = None def stop_note_asothers(self, *tag): self.db.commit_note(self.note, self.trans, self.note.get_change_time()) self.note = None def stop_research(self, tag): self.owner.set_name(self.resname) self.owner.set_address(self.resaddr) self.owner.set_locality(self.reslocality) self.owner.set_city(self.rescity) self.owner.set_state(self.resstate) self.owner.set_country(self.rescon) self.owner.set_postal_code(self.respos) self.owner.set_phone(self.resphone) self.owner.set_email(self.resemail) def stop_resname(self, tag): self.resname = tag def stop_resaddr(self, tag): 
self.resaddr = tag def stop_reslocality(self, tag): self.reslocality = tag def stop_rescity(self, tag): self.rescity = tag def stop_resstate(self, tag): self.resstate = tag def stop_rescountry(self, tag): self.rescon = tag def stop_respostal(self, tag): self.respos = tag def stop_resphone(self, tag): self.resphone = tag def stop_resemail(self, tag): self.resemail = tag def stop_mediapath(self, tag): self.mediapath = tag def stop_ptag(self, tag): self.use_p = 1 if self.in_note: self.note_list.append(tag) elif self.in_stext: self.stext_list.append(tag) elif self.in_scomments: self.scomments_list.append(tag) def startElement(self, tag, attrs): self.func_list[self.func_index] = (self.func, self.tlist) self.func_index += 1 self.tlist = [] try: f, self.func = self.func_map[tag] if f: f(attrs) except KeyError: self.func_map[tag] = (None, None) self.func = None def endElement(self, tag): if self.func: self.func(''.join(self.tlist)) self.func_index -= 1 self.func, self.tlist = self.func_list[self.func_index] def characters(self, data): if self.func: self.tlist.append(data) def convert_marker(self, attrs, obj): """ Convert markers into tags. 
Old and new markers: complete=1 and marker=word """ if attrs.get('complete'): # this is only true for complete=1 tag_name = 'Complete' else: tag_name = attrs.get('marker') if tag_name is not None: tag_name = _(tag_name) tag = self.db.get_tag_from_name(tag_name) if tag is None: tag = Tag() tag.set_name(tag_name) tag.set_priority(self.db.get_number_of_tags()) tag_handle = self.db.add_tag(tag, self.trans) else: tag_handle = tag.get_handle() obj.add_tag(tag_handle) def fix_not_instantiated(self): uninstantiated = [] for orig_handle in self.import_handles.keys(): tglist = [target for target in self.import_handles[orig_handle].keys() if not self.import_handles[orig_handle][target][INSTANTIATED]] for target in tglist: uninstantiated += [(orig_handle, target)] if uninstantiated: expl_note = create_explanation_note(self.db) self.db.commit_note(expl_note, self.trans, time.time()) self.info.expl_note = expl_note.get_gramps_id() for orig_handle, target in uninstantiated: class_arg = {'handle': orig_handle, 'id': None, 'priv': False} if target == 'family': objs = make_unknown(class_arg, expl_note.handle, self.func_map[target][0], self.func_map[target][1], self.trans, db=self.db) elif target == 'citation': objs = make_unknown(class_arg, expl_note.handle, self.func_map[target][0], self.func_map[target][1], self.trans, source_class_func=self.func_map['source'][0], source_commit_func=self.func_map['source'][1], source_class_arg={'handle':create_id(), 'id':None, 'priv':False}) elif target == 'note': objs = make_unknown(class_arg, expl_note.handle, self.func_map[target][0], self.stop_note_asothers, self.trans) else: if target == 'place': target = 'placeobj' elif target == 'media': target = 'object' objs = make_unknown(class_arg, expl_note.handle, self.func_map[target][0], self.func_map[target][1], self.trans) for obj in objs: key = CLASS_TO_KEY_MAP[obj.__class__.__name__] self.info.add('unknown-object', key, obj) def fix_families(self): # Fix any imported families where there is a 
link from the family to an # individual, but no corresponding link from the individual to the # family. for orig_handle in list(self.import_handles.keys()): for target in list(self.import_handles[orig_handle].keys()): if target == 'family': family_handle = self.import_handles[orig_handle][target][HANDLE] family = self.db.get_family_from_handle(family_handle) father_handle = family.get_father_handle() mother_handle = family.get_mother_handle() if father_handle: father = self.db.get_person_from_handle(father_handle) if father and \ family_handle not in father.get_family_handle_list(): father.add_family_handle(family_handle) self.db.commit_person(father, self.trans) txt = _("Error: family '%(family)s'" " father '%(father)s'" " does not refer" " back to the family." " Reference added." % {'family' : family.gramps_id, 'father' : father.gramps_id}) self.info.add('unlinked-family', txt, None) LOG.warning(txt) if mother_handle: mother = self.db.get_person_from_handle(mother_handle) if mother and \ family_handle not in mother.get_family_handle_list(): mother.add_family_handle(family_handle) self.db.commit_person(mother, self.trans) txt = _("Error: family '%(family)s'" " mother '%(mother)s'" " does not refer" " back to the family." " Reference added." % {'family' : family.gramps_id, 'mother' : mother.gramps_id}) self.info.add('unlinked-family', txt, None) LOG.warning(txt) for child_ref in family.get_child_ref_list(): child_handle = child_ref.ref child = self.db.get_person_from_handle(child_handle) if child: if family_handle not in \ child.get_parent_family_handle_list(): # The referenced child has no reference to the # family. There was a link from the FAM record # to the child, but no FAMC link from the child # to the FAM. child.add_parent_family_handle(family_handle) self.db.commit_person(child, self.trans) txt = _("Error: family '%(family)s'" " child '%(child)s'" " does not " "refer back to the family. " "Reference added." 
% {'family' : family.gramps_id, 'child' : child.gramps_id}) self.info.add('unlinked-family', txt, None) LOG.warning(txt) def append_value(orig, val): if orig: return "%s, %s" % (orig, val) else: return val def build_place_title(loc): "Builds a title from a location" value = "" if loc.parish: value = loc.parish if loc.city: value = append_value(value, loc.city) if loc.county: value = append_value(value, loc.county) if loc.state: value = append_value(value, loc.state) if loc.country: value = append_value(value, loc.country) return value
gpl-2.0
EnEff-BIM/EnEffBIM-Framework
SimModel_Python_API/simmodel_swig/Release/SimGroup_HvacDemandGroup_WaterSystem.py
1
8891
# This file was automatically generated by SWIG (http://www.swig.org).
# Version 3.0.7
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.

# NOTE(review): SWIG-generated binding for the compiled extension
# '_SimGroup_HvacDemandGroup_WaterSystem'; all real behavior lives in that
# extension.  Only comments were added here; regenerate from the .i file for
# any functional change.


from sys import version_info
if version_info >= (2, 6, 0):
    # Locate the compiled extension next to this file and load it; fall back
    # to a plain import when imp.find_module cannot find it (e.g. zipimport).
    def swig_import_helper():
        from os.path import dirname
        import imp
        fp = None
        try:
            fp, pathname, description = imp.find_module('_SimGroup_HvacDemandGroup_WaterSystem', [dirname(__file__)])
        except ImportError:
            import _SimGroup_HvacDemandGroup_WaterSystem
            return _SimGroup_HvacDemandGroup_WaterSystem
        if fp is not None:
            try:
                _mod = imp.load_module('_SimGroup_HvacDemandGroup_WaterSystem', fp, pathname, description)
            finally:
                fp.close()
            return _mod
    _SimGroup_HvacDemandGroup_WaterSystem = swig_import_helper()
    del swig_import_helper
else:
    import _SimGroup_HvacDemandGroup_WaterSystem
del version_info
try:
    _swig_property = property
except NameError:
    pass  # Python < 2.2 doesn't have 'property'.


def _swig_setattr_nondynamic(self, class_type, name, value, static=1):
    # Route attribute writes through the SWIG setter table.  'thisown' and
    # 'this' (the underlying C++ pointer) get special handling; unknown
    # attributes are rejected when 'static' is true.
    if (name == "thisown"):
        return self.this.own(value)
    if (name == "this"):
        if type(value).__name__ == 'SwigPyObject':
            self.__dict__[name] = value
            return
    method = class_type.__swig_setmethods__.get(name, None)
    if method:
        return method(self, value)
    if (not static):
        if _newclass:
            object.__setattr__(self, name, value)
        else:
            self.__dict__[name] = value
    else:
        raise AttributeError("You cannot add attributes to %s" % self)


def _swig_setattr(self, class_type, name, value):
    # Non-static variant: unknown attributes are allowed on the instance.
    return _swig_setattr_nondynamic(self, class_type, name, value, 0)


def _swig_getattr_nondynamic(self, class_type, name, static=1):
    # Mirror of _swig_setattr_nondynamic for attribute reads.
    if (name == "thisown"):
        return self.this.own()
    method = class_type.__swig_getmethods__.get(name, None)
    if method:
        return method(self)
    if (not static):
        return object.__getattr__(self, name)
    else:
        raise AttributeError(name)

def _swig_getattr(self, class_type, name):
    return _swig_getattr_nondynamic(self, class_type, name, 0)


def _swig_repr(self):
    # repr that tolerates a missing/uninitialized 'this' pointer.
    try:
        strthis = "proxy of " + self.this.__repr__()
    except:
        strthis = ""
    return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)

try:
    _object = object
    _newclass = 1
except AttributeError:
    class _object:
        pass
    _newclass = 0

try:
    import weakref
    weakref_proxy = weakref.proxy
except:
    weakref_proxy = lambda x: x


import base
import SimGroup_BldgCompGroup_ZoneEquipment

class SimGroup_HvacDemandGroup(base.SimGroup):
    # Proxy of the C++ SimGroup_HvacDemandGroup class.  The setter/getter
    # tables are inherited from the bases and extended per class.
    __swig_setmethods__ = {}
    for _s in [base.SimGroup]:
        __swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, SimGroup_HvacDemandGroup, name, value)
    __swig_getmethods__ = {}
    for _s in [base.SimGroup]:
        __swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
    __getattr__ = lambda self, name: _swig_getattr(self, SimGroup_HvacDemandGroup, name)
    __repr__ = _swig_repr

    def __init__(self, *args):
        this = _SimGroup_HvacDemandGroup_WaterSystem.new_SimGroup_HvacDemandGroup(*args)
        try:
            self.this.append(this)
        except:
            self.this = this

    def _clone(self, f=0, c=None):
        return _SimGroup_HvacDemandGroup_WaterSystem.SimGroup_HvacDemandGroup__clone(self, f, c)
    __swig_destroy__ = _SimGroup_HvacDemandGroup_WaterSystem.delete_SimGroup_HvacDemandGroup
    __del__ = lambda self: None
SimGroup_HvacDemandGroup_swigregister = _SimGroup_HvacDemandGroup_WaterSystem.SimGroup_HvacDemandGroup_swigregister
SimGroup_HvacDemandGroup_swigregister(SimGroup_HvacDemandGroup)

class SimGroup_HvacDemandGroup_WaterSystem(SimGroup_HvacDemandGroup):
    # Proxy of the C++ SimGroup_HvacDemandGroup_WaterSystem class.
    __swig_setmethods__ = {}
    for _s in [SimGroup_HvacDemandGroup]:
        __swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, SimGroup_HvacDemandGroup_WaterSystem, name, value)
    __swig_getmethods__ = {}
    for _s in [SimGroup_HvacDemandGroup]:
        __swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
    __getattr__ = lambda self, name: _swig_getattr(self, SimGroup_HvacDemandGroup_WaterSystem, name)
    __repr__ = _swig_repr

    def __init__(self, *args):
        this = _SimGroup_HvacDemandGroup_WaterSystem.new_SimGroup_HvacDemandGroup_WaterSystem(*args)
        try:
            self.this.append(this)
        except:
            self.this = this

    def _clone(self, f=0, c=None):
        return _SimGroup_HvacDemandGroup_WaterSystem.SimGroup_HvacDemandGroup_WaterSystem__clone(self, f, c)
    __swig_destroy__ = _SimGroup_HvacDemandGroup_WaterSystem.delete_SimGroup_HvacDemandGroup_WaterSystem
    __del__ = lambda self: None
SimGroup_HvacDemandGroup_WaterSystem_swigregister = _SimGroup_HvacDemandGroup_WaterSystem.SimGroup_HvacDemandGroup_WaterSystem_swigregister
SimGroup_HvacDemandGroup_WaterSystem_swigregister(SimGroup_HvacDemandGroup_WaterSystem)

class SimGroup_HvacDemandGroup_WaterSystem_sequence(base.sequence_common):
    # Proxy of the C++ sequence (std::vector-like) container of
    # SimGroup_HvacDemandGroup_WaterSystem objects; every method delegates
    # to the compiled extension.
    __swig_setmethods__ = {}
    for _s in [base.sequence_common]:
        __swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, SimGroup_HvacDemandGroup_WaterSystem_sequence, name, value)
    __swig_getmethods__ = {}
    for _s in [base.sequence_common]:
        __swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
    __getattr__ = lambda self, name: _swig_getattr(self, SimGroup_HvacDemandGroup_WaterSystem_sequence, name)
    __repr__ = _swig_repr

    def __init__(self, *args):
        this = _SimGroup_HvacDemandGroup_WaterSystem.new_SimGroup_HvacDemandGroup_WaterSystem_sequence(*args)
        try:
            self.this.append(this)
        except:
            self.this = this

    def assign(self, n, x):
        return _SimGroup_HvacDemandGroup_WaterSystem.SimGroup_HvacDemandGroup_WaterSystem_sequence_assign(self, n, x)

    def begin(self, *args):
        return _SimGroup_HvacDemandGroup_WaterSystem.SimGroup_HvacDemandGroup_WaterSystem_sequence_begin(self, *args)

    def end(self, *args):
        return _SimGroup_HvacDemandGroup_WaterSystem.SimGroup_HvacDemandGroup_WaterSystem_sequence_end(self, *args)

    def rbegin(self, *args):
        return _SimGroup_HvacDemandGroup_WaterSystem.SimGroup_HvacDemandGroup_WaterSystem_sequence_rbegin(self, *args)

    def rend(self, *args):
        return _SimGroup_HvacDemandGroup_WaterSystem.SimGroup_HvacDemandGroup_WaterSystem_sequence_rend(self, *args)

    def at(self, *args):
        return _SimGroup_HvacDemandGroup_WaterSystem.SimGroup_HvacDemandGroup_WaterSystem_sequence_at(self, *args)

    def front(self, *args):
        return _SimGroup_HvacDemandGroup_WaterSystem.SimGroup_HvacDemandGroup_WaterSystem_sequence_front(self, *args)

    def back(self, *args):
        return _SimGroup_HvacDemandGroup_WaterSystem.SimGroup_HvacDemandGroup_WaterSystem_sequence_back(self, *args)

    def push_back(self, *args):
        return _SimGroup_HvacDemandGroup_WaterSystem.SimGroup_HvacDemandGroup_WaterSystem_sequence_push_back(self, *args)

    def pop_back(self):
        return _SimGroup_HvacDemandGroup_WaterSystem.SimGroup_HvacDemandGroup_WaterSystem_sequence_pop_back(self)

    def detach_back(self, pop=True):
        return _SimGroup_HvacDemandGroup_WaterSystem.SimGroup_HvacDemandGroup_WaterSystem_sequence_detach_back(self, pop)

    def insert(self, *args):
        return _SimGroup_HvacDemandGroup_WaterSystem.SimGroup_HvacDemandGroup_WaterSystem_sequence_insert(self, *args)

    def erase(self, *args):
        return _SimGroup_HvacDemandGroup_WaterSystem.SimGroup_HvacDemandGroup_WaterSystem_sequence_erase(self, *args)

    def detach(self, position, r, erase=True):
        return _SimGroup_HvacDemandGroup_WaterSystem.SimGroup_HvacDemandGroup_WaterSystem_sequence_detach(self, position, r, erase)

    def swap(self, x):
        return _SimGroup_HvacDemandGroup_WaterSystem.SimGroup_HvacDemandGroup_WaterSystem_sequence_swap(self, x)
    __swig_destroy__ = _SimGroup_HvacDemandGroup_WaterSystem.delete_SimGroup_HvacDemandGroup_WaterSystem_sequence
    __del__ = lambda self: None
SimGroup_HvacDemandGroup_WaterSystem_sequence_swigregister = _SimGroup_HvacDemandGroup_WaterSystem.SimGroup_HvacDemandGroup_WaterSystem_sequence_swigregister
SimGroup_HvacDemandGroup_WaterSystem_sequence_swigregister(SimGroup_HvacDemandGroup_WaterSystem_sequence)

# This file is compatible with both classic and new-style classes.
mit
Prash88/my-wedding
node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py
1824
3474
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""gypd output module

This module produces gyp input as its output.  Output files are given the
.gypd extension to avoid overwriting the .gyp files that they are generated
from.  Internal references to .gyp files (such as those found in
"dependencies" sections) are not adjusted to point to .gypd files instead;
unlike other paths, which are relative to the .gyp or .gypd file, such paths
are relative to the directory from which gyp was run to create the .gypd
file.

This generator module is intended to be a sample and a debugging aid, hence
the "d" for "debug" in .gypd.  It is useful to inspect the results of the
various merges, expansions, and conditional evaluations performed by gyp
and to see a representation of what would be fed to a generator module.

It's not advisable to rename .gypd files produced by this module to .gyp,
because they will have all merges, expansions, and evaluations already
performed and the relevant constructs not present in the output; paths to
dependencies may be wrong; and various sections that do not belong in .gyp
files such as such as "included_files" and "*_excluded" will be present.
Output will also be stripped of comments.  This is not intended to be a
general-purpose gyp pretty-printer; for that, you probably just want to run
"pprint.pprint(eval(open('source.gyp').read()))", which will still strip
comments but won't do all of the other things done to this module's output.

The specific formatting of the output generated by this module is subject
to change.
"""

import gyp.common
import errno  # NOTE(review): not used in this module; kept for compatibility.
import os     # NOTE(review): not used in this module; kept for compatibility.
import pprint


# These variables should just be spit back out as variable references.
_generator_identity_variables = [
  'CONFIGURATION_NAME',
  'EXECUTABLE_PREFIX',
  'EXECUTABLE_SUFFIX',
  'INTERMEDIATE_DIR',
  'LIB_DIR',
  'PRODUCT_DIR',
  'RULE_INPUT_ROOT',
  'RULE_INPUT_DIRNAME',
  'RULE_INPUT_EXT',
  'RULE_INPUT_NAME',
  'RULE_INPUT_PATH',
  'SHARED_INTERMEDIATE_DIR',
  'SHARED_LIB_DIR',
  'SHARED_LIB_PREFIX',
  'SHARED_LIB_SUFFIX',
  'STATIC_LIB_PREFIX',
  'STATIC_LIB_SUFFIX',
]

# gypd doesn't define a default value for OS like many other generator
# modules.  Specify "-D OS=whatever" on the command line to provide a value.
generator_default_variables = {
}

# gypd supports multiple toolsets
generator_supports_multiple_toolsets = True

# TODO(mark): This always uses <, which isn't right.  The input module should
# notify the generator to tell it which phase it is operating in, and this
# module should use < for the early phase and then switch to > for the late
# phase.  Bonus points for carrying @ back into the output too.
for v in _generator_identity_variables:
  generator_default_variables[v] = '<(%s)' % v


def GenerateOutput(target_list, target_dicts, data, params):
  """Write one pretty-printed .gypd file per input .gyp file.

  Args:
    target_list: list of qualified target names ('path/to/file.gyp:target').
    target_dicts: unused here; part of the common generator interface.
    data: dict mapping each input .gyp path to its fully processed content.
    params: generator parameters; params['options'].suffix is inserted
        before the '.gypd' extension of each output file.
  """
  # Map each output file to the single input .gyp it is derived from.
  # Several qualified targets typically share one input file; the dict
  # keeps only the first mapping seen for each output file.
  output_files = {}
  for qualified_target in target_list:
    [input_file, target] = \
        gyp.common.ParseQualifiedTarget(qualified_target)[0:2]

    # Only .gyp files have a meaningful .gypd counterpart.
    if not input_file.endswith('.gyp'):
      continue
    input_file_stem = input_file[:-4]
    output_file = input_file_stem + params['options'].suffix + '.gypd'

    output_files.setdefault(output_file, input_file)

  # items() (rather than Python 2's iteritems()) keeps this loop working on
  # both Python 2 and 3; 'with' guarantees the file is closed even if
  # pprint raises.
  for output_file, input_file in output_files.items():
    with open(output_file, 'w') as output:
      pprint.pprint(data[input_file], output)
mit
ioam/svn-history
contrib/basic.py
1
38948
""" Simple two-dimensional mathematical or geometrical pattern generators. $Id$ """ from __future__ import with_statement __version__='$Revision: 10131 $' from math import pi, sin, cos, sqrt import numpy from numpy.oldnumeric import around,bitwise_and,sin,add,Float,bitwise_or from numpy import alltrue from .. import param from ..param.parameterized import ParamOverrides import topo # Imported here so that all PatternGenerators will be in the same package from topo.base.patterngenerator import Constant from topo.base.patterngenerator import PatternGenerator from topo.base.arrayutil import wrap from topo.misc.patternfn import gaussian,exponential,gabor,line,disk,ring from topo.misc.patternfn import arc_by_radian,arc_by_center,smooth_rectangle,float_error_ignore from topo.misc.patternfn import spiral,hyperbola,radial,concentricrings from topo import numbergen # Could add a Gradient class, where the brightness varies as a # function of an equation for a plane. This could be useful as a # background, or to see how sharp a gradient is needed to get a # response. # CEBALERT: do we need this? If so, please remove this question. class Null(Constant): """ A constant pattern of zero activity. """ scale = param.Number(default=0,constant=True,precedence=-1) class HalfPlane(PatternGenerator): """ Constant pattern on in half of the plane, and off in the rest, with optional Gaussian smoothing. """ smoothing = param.Number(default=0.02,bounds=(0.0,None),softbounds=(0.0,0.5), precedence=0.61,doc="Width of the Gaussian fall-off.") def function(self,p): if p.smoothing==0.0: falloff=self.pattern_y*0.0 else: with float_error_ignore(): falloff=numpy.exp(numpy.divide(-self.pattern_y*self.pattern_y, 2*p.smoothing*p.smoothing)) return numpy.where(self.pattern_y>0.0,1.0,falloff) class Gaussian(PatternGenerator): """ 2D Gaussian pattern generator. 
The sigmas of the Gaussian are calculated from the size and aspect_ratio parameters: ysigma=size/2 xsigma=ysigma*aspect_ratio The Gaussian is then computed for the given (x,y) values as:: exp(-x^2/(2*xsigma^2) - y^2/(2*ysigma^2) """ aspect_ratio = param.Number(default=1/0.31,bounds=(0.0,None),softbounds=(0.0,6.0), precedence=0.31,doc=""" Ratio of the width to the height. Specifically, xsigma=ysigma*aspect_ratio (see size).""") size = param.Number(default=0.155,doc=""" Overall size of the Gaussian, defined by: exp(-x^2/(2*xsigma^2) - y^2/(2*ysigma^2) where ysigma=size/2 and xsigma=size/2*aspect_ratio.""") def function(self,p): ysigma = p.size/2.0 xsigma = p.aspect_ratio*ysigma return gaussian(self.pattern_x,self.pattern_y,xsigma,ysigma) class ExponentialDecay(PatternGenerator): """ 2D Exponential pattern generator. Exponential decay based on distance from a central peak, i.e. exp(-d), where d is the distance from the center (assuming size=1.0 and aspect_ratio==1.0). More generally, the size and aspect ratio determine the scaling of x and y dimensions: yscale=size/2 xscale=yscale*aspect_ratio The exponential is then computed for the given (x,y) values as:: exp(-sqrt((x/xscale)^2 - (y/yscale)^2)) """ aspect_ratio = param.Number(default=1/0.31,bounds=(0.0,None),softbounds=(0.0,2.0), precedence=0.31,doc="""Ratio of the width to the height.""") size = param.Number(default=0.155,doc=""" Overall scaling of the x and y dimensions.""") def function(self,p): yscale = p.size/2.0 xscale = p.aspect_ratio*yscale return exponential(self.pattern_x,self.pattern_y,xscale,yscale) class SineGrating(PatternGenerator): """2D sine grating pattern generator.""" frequency = param.Number(default=2.4,bounds=(0.0,None),softbounds=(0.0,10.0), precedence=0.50, doc="Frequency of the sine grating.") phase = param.Number(default=0.0,bounds=(0.0,None),softbounds=(0.0,2*pi), precedence=0.51,doc="Phase of the sine grating.") def function(self,p): """Return a sine grating pattern (two-dimensional sine 
wave).""" return 0.5 + 0.5*sin(p.frequency*2*pi*self.pattern_y + p.phase) class Gabor(PatternGenerator): """2D Gabor pattern generator.""" frequency = param.Number(default=2.4,bounds=(0.0,None),softbounds=(0.0,10.0), precedence=0.50,doc="Frequency of the sine grating component.") phase = param.Number(default=0.0,bounds=(0.0,None),softbounds=(0.0,2*pi), precedence=0.51,doc="Phase of the sine grating component.") aspect_ratio = param.Number(default=1.0,bounds=(0.0,None),softbounds=(0.0,2.0), precedence=0.31,doc= """ Ratio of pattern width to height. The width of the Gaussian component is size*aspect_ratio (see Gaussian). """) size = param.Number(default=0.25,doc=""" Determines the height of the Gaussian component (see Gaussian).""") def function(self,p): height = p.size/2.0 width = p.aspect_ratio*height return gabor(self.pattern_x,self.pattern_y,width,height, p.frequency,p.phase) class Line(PatternGenerator): """2D line pattern generator.""" thickness = param.Number(default=0.006,bounds=(0.0,None),softbounds=(0.0,1.0), precedence=0.60, doc="Thickness (width) of the solid central part of the line.") smoothing = param.Number(default=0.05,bounds=(0.0,None),softbounds=(0.0,0.5), precedence=0.61, doc="Width of the Gaussian fall-off.") def function(self,p): return line(self.pattern_y,p.thickness,p.smoothing) class Disk(PatternGenerator): """ 2D disk pattern generator. An elliptical disk can be obtained by adjusting the aspect_ratio of a circular disk; this transforms a circle into an ellipse by stretching the circle in the y (vertical) direction. The Gaussian fall-off at a point P is an approximation for non-circular disks, since the point on the ellipse closest to P is taken to be the same point as the point on the circle before stretching that was closest to P. 
""" aspect_ratio = param.Number(default=1.0,bounds=(0.0,None),softbounds=(0.0,2.0), precedence=0.31,doc= "Ratio of width to height; size*aspect_ratio gives the width of the disk.") size = param.Number(default=0.5,doc="Top to bottom height of the disk") smoothing = param.Number(default=0.1,bounds=(0.0,None),softbounds=(0.0,0.5), precedence=0.61,doc="Width of the Gaussian fall-off") def function(self,p): height = p.size if p.aspect_ratio==0.0: return self.pattern_x*0.0 return disk(self.pattern_x/p.aspect_ratio,self.pattern_y,height, p.smoothing) class Ring(PatternGenerator): """ 2D ring pattern generator. See the Disk class for a note about the Gaussian fall-off. """ thickness = param.Number(default=0.015,bounds=(0.0,None),softbounds=(0.0,0.5), precedence=0.60,doc="Thickness (line width) of the ring.") smoothing = param.Number(default=0.1,bounds=(0.0,None),softbounds=(0.0,0.5), precedence=0.61,doc="Width of the Gaussian fall-off inside and outside the ring.") aspect_ratio = param.Number(default=1.0,bounds=(0.0,None),softbounds=(0.0,2.0), precedence=0.31,doc= "Ratio of width to height; size*aspect_ratio gives the overall width.") size = param.Number(default=0.5) def function(self,p): height = p.size if p.aspect_ratio==0.0: return self.pattern_x*0.0 return ring(self.pattern_x/p.aspect_ratio,self.pattern_y,height, p.thickness,p.smoothing) class OrientationContrast(SineGrating): """ Circular pattern for testing responses to differences in contrast. The pattern contains a sine grating ring surrounding a sine grating disk, each with parameters (orientation, size, scale and offset) that can be changed independently. 
""" orientationcenter = param.Number(default=0.0,bounds=(0.0,2*pi), doc="Orientation of the center grating.") orientationsurround = param.Number(default=0.0,bounds=(0.0,2*pi), doc="Orientation of the surround grating.") sizecenter = param.Number(default=0.5,bounds=(0.0,None),softbounds=(0.0,10.0), doc="Size of the center grating.") sizesurround = param.Number(default=1.0,bounds=(0.0,None),softbounds=(0.0,10.0), doc="Size of the surround grating.") scalecenter = param.Number(default=1.0,bounds=(0.0,None),softbounds=(0.0,10.0), doc="Scale of the center grating.") scalesurround = param.Number(default=1.0,bounds=(0.0,None),softbounds=(0.0,10.0), doc="Scale of the surround grating.") offsetcenter = param.Number(default=0.0,bounds=(0.0,None),softbounds=(0.0,10.0), doc="Offset of the center grating.") offsetsurround = param.Number(default=0.0,bounds=(0.0,None),softbounds=(0.0,10.0), doc="Offset of the surround grating.") smoothing = param.Number(default=0.0,bounds=(0.0,None),softbounds=(0.0,0.5), doc="Width of the Gaussian fall-off inside and outside the ring.") thickness = param.Number(default=0.015,bounds=(0.0,None),softbounds=(0.0,0.5),doc="Thickness (line width) of the ring.") aspect_ratio = param.Number(default=1.0,bounds=(0.0,None),softbounds=(0.0,2.0), doc="Ratio of width to height; size*aspect_ratio gives the overall width.") size = param.Number(default=0.5) def __call__(self,**params_to_override): p = ParamOverrides(self,params_to_override) input_1=SineGrating(mask_shape=Disk(smoothing=0,size=1.0),phase=p.phase, frequency=p.frequency, orientation=p.orientationcenter, scale=p.scalecenter, offset=p.offsetcenter, aspect_ratio=p.aspect_ratio,smoothing=0.0,x=p.x, y=p.y,size=p.sizecenter) input_2=SineGrating(mask_shape=Ring(smoothing=0,size=1.0),phase=p.phase, frequency=p.frequency, orientation=p.orientationsurround, scale=p.scalesurround, offset=p.offsetsurround, thickness=p.thickness,aspect_ratio=p.aspect_ratio,smoothing=0.0,x=p.x, y=p.y, size=p.sizesurround) 
patterns = [input_1(xdensity=p.xdensity,ydensity=p.ydensity,bounds=p.bounds), input_2(xdensity=p.xdensity,ydensity=p.ydensity,bounds=p.bounds)] image_array = numpy.add.reduce(patterns) return image_array class RawRectangle(PatternGenerator): """ 2D rectangle pattern generator with no smoothing, for use when drawing patterns pixel by pixel. """ aspect_ratio = param.Number(default=1.0,bounds=(0.0,None),softbounds=(0.0,2.0), precedence=0.31,doc= "Ratio of width to height; size*aspect_ratio gives the width of the rectangle.") size = param.Number(default=0.5,doc="Height of the rectangle.") def function(self,p): height = p.size width = p.aspect_ratio*height return bitwise_and(abs(self.pattern_x)<=width/2.0, abs(self.pattern_y)<=height/2.0) class Rectangle(PatternGenerator): """2D rectangle pattern, with Gaussian smoothing around the edges.""" aspect_ratio = param.Number(default=1.0,bounds=(0.0,None),softbounds=(0.0,6.0), precedence=0.31,doc= "Ratio of width to height; size*aspect_ratio gives the width of the rectangle.") size = param.Number(default=0.5,doc="Height of the rectangle.") smoothing = param.Number(default=0.05,bounds=(0.0,None),softbounds=(0.0,0.5), precedence=0.61,doc="Width of the Gaussian fall-off outside the rectangle.") def function(self,p): height=p.size width=p.aspect_ratio*height return smooth_rectangle(self.pattern_x, self.pattern_y, width, height, p.smoothing, p.smoothing) class Arc(PatternGenerator): """ 2D arc pattern generator. Draws an arc (partial ring) of the specified size (radius*2), starting at radian 0.0 and ending at arc_length. The orientation can be changed to choose other start locations. The pattern is centered at the center of the ring. See the Disk class for a note about the Gaussian fall-off. 
""" aspect_ratio = param.Number(default=1.0,bounds=(0.0,None),softbounds=(0.0,6.0), precedence=0.31,doc=""" Ratio of width to height; size*aspect_ratio gives the overall width.""") thickness = param.Number(default=0.015,bounds=(0.0,None),softbounds=(0.0,0.5), precedence=0.60,doc="Thickness (line width) of the ring.") smoothing = param.Number(default=0.05,bounds=(0.0,None),softbounds=(0.0,0.5), precedence=0.61,doc="Width of the Gaussian fall-off inside and outside the ring.") arc_length = param.Number(default=pi,bounds=(0.0,None),softbounds=(0.0,2.0*pi), inclusive_bounds=(True,False),precedence=0.62, doc=""" Length of the arc, in radians, starting from orientation 0.0.""") size = param.Number(default=0.5) def function(self,p): if p.aspect_ratio==0.0: return self.pattern_x*0.0 return arc_by_radian(self.pattern_x/p.aspect_ratio, self.pattern_y, p.size, (2*pi-p.arc_length, 0.0), p.thickness, p.smoothing) class Curve(Arc): """ 2D curve pattern generator. Based on Arc, but centered on a tangent point midway through the arc, rather than at the center of a ring, and with curvature controlled directly rather than through the overall size of the pattern. Depending on the size_type, the size parameter can control either the width of the pattern, keeping this constant regardless of curvature, or the length of the curve, keeping that constant instead (as for a long thin object being bent). Specifically, for size_type=='constant_length', the curvature parameter determines the ratio of height to width of the arc, with positive curvature for concave shape and negative for convex. The size parameter determines the width of the curve. For size_type=='constant_width', the curvature parameter determines the portion of curve radian to 2pi, and the curve radius is changed accordingly following the formula:: size=2pi*radius*curvature Thus, the size parameter determines the total length of the curve. Positive curvature stands for concave shape, and negative for convex. 
See the Disk class for a note about the Gaussian fall-off. """ # Hide unused parameters arc_length = param.Number(precedence=-1.0) aspect_ratio = param.Number(default=1.0, precedence=-1.0) size_type = param.ObjectSelector(default='constant_length', objects=['constant_length','constant_width'],precedence=0.61, doc=""" For a given size, whether to draw a curve with that total length, or with that width, keeping it constant as curvature is varied.""") curvature = param.Number(default=0.5, bounds=(-0.5, 0.5), precedence=0.62, doc=""" Ratio of height to width of the arc, with positive value giving a concave shape and negative value giving convex.""") def function(self,p): return arc_by_center(self.pattern_x/p.aspect_ratio,self.pattern_y, (p.size,p.size*p.curvature), (p.size_type=='constant_length'), p.thickness, p.smoothing) #JABALERT: Can't this be replaced with a Composite? class TwoRectangles(Rectangle): """Two 2D rectangle pattern generator.""" x1 = param.Number(default=-0.15,bounds=(-1.0,1.0),softbounds=(-0.5,0.5), doc="X center of rectangle 1.") y1 = param.Number(default=-0.15,bounds=(-1.0,1.0),softbounds=(-0.5,0.5), doc="Y center of rectangle 1.") x2 = param.Number(default=0.15,bounds=(-1.0,1.0),softbounds=(-0.5,0.5), doc="X center of rectangle 2.") y2 = param.Number(default=0.15,bounds=(-1.0,1.0),softbounds=(-0.5,0.5), doc="Y center of rectangle 2.") # YC: Maybe this can be implemented much more cleanly by calling # the parent's function() twice, but it's hard to see how to # set the (x,y) offset for the parent. 
    def function(self,p):
        """
        Draw two axis-aligned rectangles, one centered near (x1,y1) and
        one near (x2,y2), by OR-ing together two boolean masks.

        # NOTE(review): `height` is computed but only `width` is used for
        # both the x and y extents, and each bound compares
        # (pattern_x - x1) against x1 +/- width/4, so x1/y1/x2/y2 enter
        # each inequality twice.  Looks suspicious -- confirm intended
        # rectangle placement against the aspect_ratio/size docs.
        """
        height = p.size
        width = p.aspect_ratio*height

        return bitwise_or(
               bitwise_and(bitwise_and(
                            (self.pattern_x-p.x1)<=p.x1+width/4.0,
                            (self.pattern_x-p.x1)>=p.x1-width/4.0),
                           bitwise_and(
                            (self.pattern_y-p.y1)<=p.y1+width/4.0,
                            (self.pattern_y-p.y1)>=p.y1-width/4.0)),
               bitwise_and(bitwise_and(
                            (self.pattern_x-p.x2)<=p.x2+width/4.0,
                            (self.pattern_x-p.x2)>=p.x2-width/4.0),
                           bitwise_and(
                            (self.pattern_y-p.y2)<=p.y2+width/4.0,
                            (self.pattern_y-p.y2)>=p.y2-width/4.0)))


class SquareGrating(PatternGenerator):
    """2D squarewave grating pattern generator."""

    frequency = param.Number(default=2.4,bounds=(0.0,None),softbounds=(0.0,10.0),
        precedence=0.50,doc="Frequency of the square grating.")

    phase = param.Number(default=0.0,bounds=(0.0,None),softbounds=(0.0,2*pi),
        precedence=0.51,doc="Phase of the square grating.")

    # We will probably want to add anti-aliasing to this,
    # and there might be an easier way to do it than by
    # cropping a sine grating.
    def function(self,p):
        """
        Return a square-wave grating (alternating black and white bars).

        Implemented by thresholding a sine grating with around(), i.e.
        rounding 0.5+0.5*sin(...) to 0 or 1.
        """
        return around(0.5 + 0.5*sin(p.frequency*2*pi*self.pattern_y + p.phase))


# CB: I removed motion_sign from this class because I think it is
# unnecessary. But maybe I misunderstood the original author's
# intention?
#
# In any case, the original implementation was incorrect - it was not
# possible to get some motion directions (directions in one whole
# quadrant were missed out).
#
# Note that to get a 2pi range of directions, one must use a 2pi range
# of orientations (there are two directions for any given
# orientation). Alternatively, we could generate a random sign, and
# use an orientation restricted to a pi range.
class Sweeper(PatternGenerator):
    """
    PatternGenerator that sweeps a supplied PatternGenerator in a direction
    perpendicular to its orientation.
    """

    generator = param.Parameter(default=Gaussian(),precedence=0.97,
        doc="Pattern to sweep.")

    speed = param.Number(default=0.25,bounds=(0.0,None),doc="""
        Sweep speed: number of sheet coordinate units per unit time.""")

    step = param.Number(default=1,doc="""
        Number of steps at the given speed to move in the sweep direction.
        The distance moved is speed*step.""")

    # Provide access to value needed for measuring maps.
    # Exposes the underlying generator's phase as if it were this
    # object's own parameter.
    def __get_phase(self): return self.generator.phase
    def __set_phase(self,new_val): self.generator.phase = new_val
    phase = property(__get_phase,__set_phase)

    def function(self,p):
        """
        Draw the underlying generator, displaced along the direction
        perpendicular to p.orientation by speed*step (plus the
        generator's own x,y offset scaled by p.size).
        """
        pg = p.generator
        # Motion is perpendicular to the pattern's orientation.
        motion_orientation=p.orientation+pi/2.0

        new_x = p.x+p.size*pg.x
        new_y = p.y+p.size*pg.y

        image_array = pg(xdensity=p.xdensity,ydensity=p.ydensity,bounds=p.bounds,
                         x=new_x + p.speed*p.step*cos(motion_orientation),
                         y=new_y + p.speed*p.step*sin(motion_orientation),
                         orientation=p.orientation,
                         scale=pg.scale*p.scale,offset=pg.offset+p.offset)

        return image_array


class Composite(PatternGenerator):
    """
    PatternGenerator that accepts a list of other PatternGenerators.
    To create a new pattern, asks each of the PatternGenerators in the
    list to create a pattern, then it combines the patterns to create a
    single pattern that it returns.
    """

    # The Accum_Replace operator from LISSOM is not yet supported,
    # but it should be added once PatternGenerator bounding boxes
    # are respected and/or GenericImage patterns support transparency.
    operator = param.Parameter(numpy.maximum,precedence=0.98,doc="""
        Binary Numpy function used to combine the individual patterns.

        Any binary Numpy array "ufunc" returning the same type of
        array as the operands and supporting the reduce operator is
        allowed here.  Supported ufuncs include::

          add
          subtract
          multiply
          divide
          maximum
          minimum
          remainder
          power
          logical_and
          logical_or
          logical_xor

        The most useful ones are probably add and maximum, but there
        are uses for at least some of the others as well (e.g. to
        remove pieces of other patterns).

        You can also write your own operators, by making a class that
        has a static method named "reduce" that returns an array of the
        same size and type as the arrays in the list.  For example::

          class return_first(object):
              @staticmethod
              def reduce(x):
                  return x[0]
        """)

    generators = param.List(default=[Constant(scale=0.0)],precedence=0.97,
        class_=PatternGenerator,doc="""
        List of patterns to use in the composite pattern.  The default is
        a blank pattern, and should thus be overridden for any useful work.""")

    size = param.Number(default=1.0,doc="Scaling factor applied to all sub-patterns.")

    def _advance_pattern_generators(self,p):
        """
        Subclasses can override this method to provide constraints on
        the values of generators' parameters and/or eliminate
        generators from this list if necessary.
        """
        return p.generators

    # JABALERT: To support large numbers of patterns on a large input region,
    # should be changed to evaluate each pattern in a small box, and then
    # combine them at the full Composite Bounding box size.
    def function(self,p):
        """Constructs combined pattern out of the individual ones."""
        generators = self._advance_pattern_generators(p)

        assert hasattr(p.operator,'reduce'),repr(p.operator)+" does not support 'reduce'."

        # Each sub-pattern is drawn at its own offset, rotated into this
        # Composite's orientation, then all are folded together with
        # operator.reduce.
        #
        # CEBALERT: mask gets applied by all PGs including the Composite itself
        # (leads to redundant calculations in current lissom_oo_or usage, but
        # will lead to problems/limitations in the future).
        patterns = [pg(xdensity=p.xdensity,ydensity=p.ydensity,
                       bounds=p.bounds,mask=p.mask,
                       x=p.x+p.size*(pg.x*cos(p.orientation)- pg.y*sin(p.orientation)),
                       y=p.y+p.size*(pg.x*sin(p.orientation)+ pg.y*cos(p.orientation)),
                       orientation=pg.orientation+p.orientation,
                       size=pg.size*p.size)
                    for pg in generators]
        image_array = p.operator.reduce(patterns)
        return image_array


class SeparatedComposite(Composite):
    """
    Generalized version of the Composite PatternGenerator that enforces
    spacing constraints between pattern centers.

    Currently supports minimum spacing, but can be generalized to
    support maximum spacing also (and both at once).
    """

    min_separation = param.Number(default=0.0, bounds = (0,None),
                                  softbounds = (0.0,1.0), doc="""
        Minimum distance to enforce between all pairs of pattern centers.

        Useful for ensuring that multiple randomly generated patterns
        do not overlap spatially.  Note that as this value is
        increased relative to the area in which locations are chosen,
        the likelihood of a pattern appearing near the center of the
        area will decrease.  As this value approaches the available
        area, the corners become far more likely to be chosen, due to
        the distances being greater along the diagonals.
        """)

    ### JABNOTE: Should provide a mechanism for collecting and
    ### plotting the training pattern center distribution, so that
    ### such issues can be checked.

    max_trials = param.Integer(default = 50, bounds = (0,None),
                               softbounds = (0,100), precedence=-1, doc="""
        Number of times to try for a new pattern location that meets the criteria.

        This is an essentially arbitrary timeout value that helps
        prevent an endless loop in case the requirements cannot be
        met.""")

    def __distance_valid(self, g0, g1, p):
        """
        Returns true if the distance between the (x,y) locations of two
        generators g0 and g1 is greater than a minimum separation.

        Can be extended easily to support other criteria.
        """
        dist = sqrt((g1.x - g0.x) ** 2 + (g1.y - g0.y) ** 2)
        return dist >= p.min_separation

    def _advance_pattern_generators(self,p):
        """
        Advance the parameters for each generator for this presentation.

        Picks a position for each generator that is accepted by
        __distance_valid for all combinations.  Returns a new list of
        the generators, with some potentially omitted due to failure
        to meet the constraints.
        """
        valid_generators = []
        for g in p.generators:

            # for/else: the else clause runs only when max_trials is
            # exhausted without a break, i.e. placement failed.
            # NOTE(review): `vals` is assigned but never read -- the call
            # is made purely for its side effect of drawing new dynamic
            # x,y values for g.  Also, `xrange` pins this to Python 2.
            for trial in xrange(self.max_trials):
                # Generate a new position and add generator if it's ok
                if alltrue([self.__distance_valid(g,v,p) for v in valid_generators]):
                    valid_generators.append(g)
                    break

                vals = (g.force_new_dynamic_value('x'), g.force_new_dynamic_value('y'))

            else:
                self.warning("Unable to place pattern %s subject to given constraints" %
                             g.name)

        return valid_generators


class Selector(PatternGenerator):
    """
    PatternGenerator that selects from a list of other PatternGenerators.
    """

    generators = param.List(precedence=0.97,class_=PatternGenerator,bounds=(1,None),
        default=[Disk(x=-0.3,aspect_ratio=0.5), Rectangle(x=0.3,aspect_ratio=0.5)],
        doc="List of patterns from which to select.")

    size = param.Number(default=1.0,doc="Scaling factor applied to all sub-patterns.")

    # CB: needs to have time_fn=None
    index = param.Number(default=numbergen.UniformRandom(lbound=0,ubound=1.0,seed=76),
        bounds=(-1.0,1.0),precedence=0.20,doc="""
        Index into the list of pattern generators, on a scale from 0
        (start of the list) to 1.0 (end of the list).

        Typically a random value or other number generator, to allow a different item
        to be selected each time.""")

    def function(self,p):
        """Selects and returns one of the patterns in the list."""
        # Map the continuous index in [0,1) onto a list position.
        int_index=int(len(p.generators)*wrap(0,1.0,p.index))
        pg=p.generators[int_index]

        image_array = pg(xdensity=p.xdensity,ydensity=p.ydensity,bounds=p.bounds,
                         x=p.x+p.size*(pg.x*cos(p.orientation)-pg.y*sin(p.orientation)),
                         y=p.y+p.size*(pg.x*sin(p.orientation)+pg.y*cos(p.orientation)),
                         orientation=pg.orientation+p.orientation,size=pg.size*p.size,
                         scale=pg.scale*p.scale,offset=pg.offset+p.offset)

        return image_array

    def get_current_generator(self):
        """Return the current generator (as specified by self.index)."""
        # inspect_value reads the dynamic parameter without advancing it.
        int_index=int(len(self.generators)*wrap(0,1.0,self.inspect_value('index')))
        return self.generators[int_index]


### JABALERT: This class should be eliminated if at all possible; it
### is just a specialized version of Composite, and should be
### implementable directly using what is already in Composite.
class GaussiansCorner(PatternGenerator):
    """
    Two Gaussian pattern generators arranged into a corner shape.

    One Gaussian is drawn at orientation p.orientation and a second at
    p.orientation+pi/2, each displaced from (x,y) by 0.9*size along its
    own orientation; the result is their pointwise maximum.
    """

    x = param.Number(default=-0.15,bounds=(-1.0,1.0),softbounds=(-0.5,0.5),
        doc="X center of the corner")

    y = param.Number(default=-0.15,bounds=(-1.0,1.0),softbounds=(-0.5,0.5),
        doc="Y center of the corner")

    size = param.Number(default=0.5,doc="The size of the corner")

    def __call__(self,**params_to_override):
        p = ParamOverrides(self,params_to_override)

        input_1=Gaussian()
        input_2=Gaussian()

        patterns = [input_1(orientation = p.orientation, bounds = p.bounds,
                            xdensity = p.xdensity, ydensity = p.ydensity,
                            offset = p.offset, size = p.size,
                            x = p.x + cos(p.orientation) * p.size*0.9,
                            y = p.y + sin(p.orientation) * p.size*0.9),
                    input_2(orientation = p.orientation+pi/2, bounds = p.bounds,
                            xdensity = p.xdensity, ydensity = p.ydensity,
                            offset = p.offset, size = p.size,
                            x = p.x + cos(p.orientation+pi/2) * p.size*0.9,
                            y = p.y + sin(p.orientation+pi/2) * p.size*0.9)]

        return numpy.maximum(patterns[0],patterns[1])


class Translator(PatternGenerator):
    """
    PatternGenerator that translates another PatternGenerator over time.

    This PatternGenerator will create a series of episodes, where in
    each episode the underlying generator is moved in a fixed direction
    at a fixed speed.  To begin an episode, the Translator's x, y, and
    direction are evaluated (e.g. from random distributions), and the
    underlying generator is then drawn at those values plus changes over
    time that are determined by the speed.  The orientation of the
    underlying generator should be set to 0 to get motion perpendicular
    to the generator's orientation (which is typical).

    Note that at present the parameter values for x, y, and direction
    cannot be passed in when the instance is called; only the values
    set on the instance are used.
    """

    generator = param.ClassSelector(default=Gaussian(),
        class_=PatternGenerator,doc="""Pattern to be translated.""")

    direction = param.Number(default=0.0,softbounds=(-pi,pi),doc="""
        The direction in which the pattern should move, in radians.""")

    speed = param.Number(default=0.01,bounds=(0.0,None),doc="""
        The speed with which the pattern should move,
        in sheet coordinates per simulation time unit.""")

    reset_period = param.Number(default=1,bounds=(0.0,None),doc="""
        Period between generating each new translation episode.""")

    episode_interval = param.Number(default=0,doc="""
        Interval between successive translation episodes.

        If nonzero, the episode_separator pattern is presented for
        this amount of simulation time after each episode, e.g. to
        allow processing of the previous episode to complete.""")

    episode_separator = param.ClassSelector(default=Constant(scale=0.0),
        class_=PatternGenerator,doc="""
        Pattern to display during the episode_interval, if any.
        The default is a blank pattern.""")

    def _advance_params(self):
        """
        Explicitly generate new values for these parameters only
        when appropriate.

        Draws fresh dynamic values for x, y, and direction, and records
        the current simulation time as the start of the new episode.
        """
        for param in ['x','y','direction']:
            self.force_new_dynamic_value(param)
        self.last_time = topo.sim.time()

    def __init__(self,**params):
        super(Translator,self).__init__(**params)
        # Start the first episode immediately on construction.
        self._advance_params()

    def __call__(self,**params_to_override):
        p=ParamOverrides(self,params_to_override)

        if topo.sim.time() >= self.last_time + p.reset_period:
            ## Returns early if within episode interval
            if topo.sim.time()<self.last_time+p.reset_period+p.episode_interval:
                return p.episode_separator(xdensity=p.xdensity,
                                           ydensity=p.ydensity,
                                           bounds=p.bounds)
            else:
                self._advance_params()

        # JABALERT: Does not allow x, y, or direction to be passed in
        # to the call; fixing this would require implementing
        # inspect_value and force_new_dynamic_value (for
        # use in _advance_params) for ParamOverrides.
        #
        # Access parameter values without giving them new values
        assert ('x' not in params_to_override and
                'y' not in params_to_override and
                'direction' not in params_to_override)
        x = self.inspect_value('x')
        y = self.inspect_value('y')
        direction = self.inspect_value('direction')

        # compute how much time elapsed from the last reset
        # float(t) required because time could be e.g. gmpy.mpq
        t = float(topo.sim.time()-self.last_time)

        ## CEBALERT: mask gets applied twice, both for the underlying
        ## generator and for this one. (leads to redundant
        ## calculations in current lissom_oo_or usage, but will lead
        ## to problems/limitations in the future).
        return p.generator(
            xdensity=p.xdensity,ydensity=p.ydensity,bounds=p.bounds,
            x=x+t*cos(direction)*p.speed+p.generator.x,
            y=y+t*sin(direction)*p.speed+p.generator.y,
            orientation=(direction-pi/2)+p.generator.orientation)


class SpiralGrating(PatternGenerator):
    """
    Archimedean spiral grating.  Successive turnings of the spiral have
    a constant separation distance.

    Spiral is defined by polar equation r=density*angle plotted in the
    Gaussian plane.  Spiral starts at radian 0.0; this can be changed
    by orientation.
    """

    aspect_ratio = param.Number(default=1.0,bounds=(0.0,None),softbounds=(0.0,2.0),
        precedence=0.31,doc="Ratio of width to height.")

    thickness = param.Number(default=0.02,bounds=(0.0,None),softbounds=(0.0,0.5),
        precedence=0.60,doc="Thickness (line width) of the spiral.")

    smoothing = param.Number(default=0.05,bounds=(0.0,None),softbounds=(0.0,0.5),
        precedence=0.61,doc="Width of the Gaussian fall-off inside and outside the spiral.")

    density = param.Number(default=0.05,bounds=(0.01,None),softbounds=(0.01,2.0),
        precedence=0.62,doc="Density of turnings - density*angle gives the actual radius.")

    def function(self,params):
        """Archimedean spiral function."""
        # NOTE: unlike most classes in this file, function() here takes a
        # plain dict of params rather than a ParamOverrides attribute object.
        aspect_ratio = params['aspect_ratio']

        return spiral(self.pattern_x/aspect_ratio,self.pattern_y,params['thickness'],
                      params['smoothing'],params['density'])


class HyperbolicGrating(PatternGenerator):
    """
    Hyperbolic grating consists of two conjugate rectangular hyperbolas
    which share the same asymptotes:

    abs(x^2/a^2 - y^2/b^2) = 1

    As a = b = axis these hyperbolas are rectangular.
    """

    aspect_ratio = param.Number(default=1.0,bounds=(0.0,None),softbounds=(0.0,2.0),
        precedence=0.31,doc="Ratio of width to height.")

    thickness = param.Number(default=0.05,bounds=(0.0,None),softbounds=(0.0,0.5),
        precedence=0.60,doc="Thickness of the hyperbolas.")

    smoothing = param.Number(default=0.1,bounds=(0.0,None),softbounds=(0.0,0.5),
        precedence=0.61,doc="Width of the Gaussian fall-off inside and outside the hyperbolas.")

    axis = param.Number(default=0.5,bounds=(0.0,None),softbounds=(0.0,2.0),
        precedence=0.62,doc="Ratio of width to height.")

    def function(self,params):
        """Hyperbolic function."""
        aspect_ratio = params['aspect_ratio']

        return hyperbola(self.pattern_x/aspect_ratio,self.pattern_y,params['thickness'],
                         params['smoothing'],params['axis'])


class RadialGrating(PatternGenerator):
    """
    Radial grating - one sector of a circle centered along radian 0.0
    with size defined in radians.

    The orientation can be changed to choose other locations.
    """

    aspect_ratio = param.Number(default=1.0,bounds=(0.0,None),softbounds=(0.0,2.0),
        precedence=0.31,doc="Ratio of width to height.")

    wide = param.Number(default=pi/4,bounds=(0.0,None),softbounds=(0.0,2.0*pi),
        precedence=0.60,doc="Wide of the sector in radians.")

    smoothing = param.Number(default=0.4,bounds=(0.0,None),softbounds=(0.0,0.5),
        precedence=0.61,doc="Width of the Gaussian fall-off outside the sector.")

    def function(self,params):
        """Radial function."""
        aspect_ratio = params['aspect_ratio']

        return radial(self.pattern_x/aspect_ratio,self.pattern_y,params['wide'],
                      params['smoothing'])


class ConcentricRings(PatternGenerator):
    """
    Concentric rings centered at (0.0,0.0) with linearly increasing radius.
    """

    aspect_ratio = param.Number(default=1.0,bounds=(0.0,None),softbounds=(0.0,2.0),
        precedence=0.31,doc="Ratio of width to height.")

    thickness = param.Number(default=0.04,bounds=(0.0,None),softbounds=(0.0,0.5),
        precedence=0.60,doc="Thickness (line width) of the ring.")

    smoothing = param.Number(default=0.05,bounds=(0.0,None),softbounds=(0.0,0.5),
        precedence=0.61,doc="Width of the Gaussian fall-off inside and outside the rings.")

    spacing = param.Number(default=0.4,bounds=(0.01,None),softbounds=(0.1,2.0),
        precedence=0.62,doc="Radius difference of neighbouring rings.")

    def function(self,params):
        """Concentric rings."""
        aspect_ratio = params['aspect_ratio']

        return concentricrings(self.pattern_x/aspect_ratio,self.pattern_y,params['thickness'],
                               params['smoothing'],params['spacing'])


# Export every PatternGenerator subclass defined in this module.
__all__ = list(set([k for k,v in locals().items()
                    if isinstance(v,type) and issubclass(v,PatternGenerator)]))
bsd-3-clause
llonchj/sentry
src/sentry/web/frontend/projects/plugins.py
13
5812
""" sentry.web.frontend.projects.plugins ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2012 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from __future__ import absolute_import from django.contrib import messages from django.core.context_processors import csrf from django.core.urlresolvers import reverse from django.http import HttpResponseRedirect from django.views.decorators.csrf import csrf_protect from django.utils.translation import ugettext_lazy as _ from sentry.constants import MEMBER_ADMIN from sentry.plugins import plugins from sentry.web.decorators import has_access from sentry.web.helpers import render_to_response, plugin_config @has_access(MEMBER_ADMIN) @csrf_protect def manage_plugins(request, organization, project): result = plugins.first('has_perm', request.user, 'configure_project_plugin', project) if result is False and not request.user.is_superuser: return HttpResponseRedirect(reverse('sentry')) if request.POST: enabled = set(request.POST.getlist('plugin')) for plugin in plugins.all(version=None): if plugin.can_enable_for_projects(): plugin.set_option('enabled', plugin.slug in enabled, project) messages.add_message( request, messages.SUCCESS, _('Your settings were saved successfully.')) return HttpResponseRedirect(request.path) context = csrf(request) context.update({ 'organization': organization, 'team': project.team, 'page': 'plugins', 'project': project, }) return render_to_response('sentry/projects/plugins/list.html', context, request) @has_access(MEMBER_ADMIN) @csrf_protect def configure_project_plugin(request, organization, project, slug): try: plugin = plugins.get(slug) except KeyError: return HttpResponseRedirect(reverse('sentry-manage-project', args=[project.organization.slug, project.slug])) if not plugin.can_enable_for_projects(): return HttpResponseRedirect(reverse('sentry-manage-project', args=[project.organization.slug, project.slug])) result = plugins.first('has_perm', 
request.user, 'configure_project_plugin', project, plugin) if result is False and not request.user.is_superuser: return HttpResponseRedirect(reverse('sentry')) form = plugin.project_conf_form if form is None: return HttpResponseRedirect(reverse('sentry-manage-project', args=[project.organization.slug, project.slug])) action, view = plugin_config(plugin, project, request) if action == 'redirect': messages.add_message( request, messages.SUCCESS, _('Your settings were saved successfully.')) return HttpResponseRedirect(request.path) context = csrf(request) context.update({ 'organization': organization, 'team': project.team, 'page': 'plugin', 'title': plugin.get_title(), 'view': view, 'project': project, 'plugin': plugin, 'plugin_is_enabled': plugin.is_enabled(project), }) return render_to_response('sentry/projects/plugins/configure.html', context, request) @has_access(MEMBER_ADMIN) @csrf_protect def reset_project_plugin(request, organization, project, slug): try: plugin = plugins.get(slug) except KeyError: return HttpResponseRedirect(reverse('sentry-configure-project-plugin', args=[project.organization.slug, project.slug, slug])) if not plugin.is_enabled(project): return HttpResponseRedirect(reverse('sentry-configure-project-plugin', args=[project.organization.slug, project.slug, slug])) result = plugins.first('has_perm', request.user, 'configure_project_plugin', project, plugin) if result is False and not request.user.is_superuser: return HttpResponseRedirect(reverse('sentry')) plugin.reset_options(project=project) return HttpResponseRedirect(reverse('sentry-configure-project-plugin', args=[project.organization.slug, project.slug, slug])) @has_access(MEMBER_ADMIN) @csrf_protect def enable_project_plugin(request, organization, project, slug): try: plugin = plugins.get(slug) except KeyError: return HttpResponseRedirect(reverse('sentry-manage-project-plugins', args=[project.organization.slug, project.slug])) redirect_to = reverse('sentry-configure-project-plugin', 
args=[project.organization.slug, project.slug, slug]) if plugin.is_enabled(project) or not plugin.can_enable_for_projects(): return HttpResponseRedirect(redirect_to) result = plugins.first('has_perm', request.user, 'configure_project_plugin', project, plugin) if result is False and not request.user.is_superuser: return HttpResponseRedirect(reverse('sentry')) plugin.set_option('enabled', True, project) return HttpResponseRedirect(redirect_to) @has_access(MEMBER_ADMIN) @csrf_protect def disable_project_plugin(request, organization, project, slug): try: plugin = plugins.get(slug) except KeyError: return HttpResponseRedirect(reverse('sentry-manage-project-plugins', args=[project.organization.slug, project.slug])) redirect_to = reverse('sentry-configure-project-plugin', args=[project.organization.slug, project.slug, slug]) if not (plugin.can_disable and plugin.is_enabled(project) and plugin.can_enable_for_projects()): return HttpResponseRedirect(redirect_to) result = plugins.first('has_perm', request.user, 'configure_project_plugin', project, plugin) if result is False and not request.user.is_superuser: return HttpResponseRedirect(reverse('sentry')) plugin.set_option('enabled', False, project) return HttpResponseRedirect(redirect_to)
bsd-3-clause
Reflexe/doc_to_pdf
Windows/program/python-core-3.5.0/lib/encodings/tis_620.py
272
12300
""" Python Character Mapping Codec tis_620 generated from 'python-mappings/TIS-620.TXT' with gencodec.py. """#" import codecs ### Codec APIs class Codec(codecs.Codec): def encode(self,input,errors='strict'): return codecs.charmap_encode(input,errors,encoding_table) def decode(self,input,errors='strict'): return codecs.charmap_decode(input,errors,decoding_table) class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input, final=False): return codecs.charmap_encode(input,self.errors,encoding_table)[0] class IncrementalDecoder(codecs.IncrementalDecoder): def decode(self, input, final=False): return codecs.charmap_decode(input,self.errors,decoding_table)[0] class StreamWriter(Codec,codecs.StreamWriter): pass class StreamReader(Codec,codecs.StreamReader): pass ### encodings module API def getregentry(): return codecs.CodecInfo( name='tis-620', encode=Codec().encode, decode=Codec().decode, incrementalencoder=IncrementalEncoder, incrementaldecoder=IncrementalDecoder, streamreader=StreamReader, streamwriter=StreamWriter, ) ### Decoding Table decoding_table = ( '\x00' # 0x00 -> NULL '\x01' # 0x01 -> START OF HEADING '\x02' # 0x02 -> START OF TEXT '\x03' # 0x03 -> END OF TEXT '\x04' # 0x04 -> END OF TRANSMISSION '\x05' # 0x05 -> ENQUIRY '\x06' # 0x06 -> ACKNOWLEDGE '\x07' # 0x07 -> BELL '\x08' # 0x08 -> BACKSPACE '\t' # 0x09 -> HORIZONTAL TABULATION '\n' # 0x0A -> LINE FEED '\x0b' # 0x0B -> VERTICAL TABULATION '\x0c' # 0x0C -> FORM FEED '\r' # 0x0D -> CARRIAGE RETURN '\x0e' # 0x0E -> SHIFT OUT '\x0f' # 0x0F -> SHIFT IN '\x10' # 0x10 -> DATA LINK ESCAPE '\x11' # 0x11 -> DEVICE CONTROL ONE '\x12' # 0x12 -> DEVICE CONTROL TWO '\x13' # 0x13 -> DEVICE CONTROL THREE '\x14' # 0x14 -> DEVICE CONTROL FOUR '\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE '\x16' # 0x16 -> SYNCHRONOUS IDLE '\x17' # 0x17 -> END OF TRANSMISSION BLOCK '\x18' # 0x18 -> CANCEL '\x19' # 0x19 -> END OF MEDIUM '\x1a' # 0x1A -> SUBSTITUTE '\x1b' # 0x1B -> ESCAPE '\x1c' # 0x1C -> FILE SEPARATOR '\x1d' # 0x1D 
-> GROUP SEPARATOR '\x1e' # 0x1E -> RECORD SEPARATOR '\x1f' # 0x1F -> UNIT SEPARATOR ' ' # 0x20 -> SPACE '!' # 0x21 -> EXCLAMATION MARK '"' # 0x22 -> QUOTATION MARK '#' # 0x23 -> NUMBER SIGN '$' # 0x24 -> DOLLAR SIGN '%' # 0x25 -> PERCENT SIGN '&' # 0x26 -> AMPERSAND "'" # 0x27 -> APOSTROPHE '(' # 0x28 -> LEFT PARENTHESIS ')' # 0x29 -> RIGHT PARENTHESIS '*' # 0x2A -> ASTERISK '+' # 0x2B -> PLUS SIGN ',' # 0x2C -> COMMA '-' # 0x2D -> HYPHEN-MINUS '.' # 0x2E -> FULL STOP '/' # 0x2F -> SOLIDUS '0' # 0x30 -> DIGIT ZERO '1' # 0x31 -> DIGIT ONE '2' # 0x32 -> DIGIT TWO '3' # 0x33 -> DIGIT THREE '4' # 0x34 -> DIGIT FOUR '5' # 0x35 -> DIGIT FIVE '6' # 0x36 -> DIGIT SIX '7' # 0x37 -> DIGIT SEVEN '8' # 0x38 -> DIGIT EIGHT '9' # 0x39 -> DIGIT NINE ':' # 0x3A -> COLON ';' # 0x3B -> SEMICOLON '<' # 0x3C -> LESS-THAN SIGN '=' # 0x3D -> EQUALS SIGN '>' # 0x3E -> GREATER-THAN SIGN '?' # 0x3F -> QUESTION MARK '@' # 0x40 -> COMMERCIAL AT 'A' # 0x41 -> LATIN CAPITAL LETTER A 'B' # 0x42 -> LATIN CAPITAL LETTER B 'C' # 0x43 -> LATIN CAPITAL LETTER C 'D' # 0x44 -> LATIN CAPITAL LETTER D 'E' # 0x45 -> LATIN CAPITAL LETTER E 'F' # 0x46 -> LATIN CAPITAL LETTER F 'G' # 0x47 -> LATIN CAPITAL LETTER G 'H' # 0x48 -> LATIN CAPITAL LETTER H 'I' # 0x49 -> LATIN CAPITAL LETTER I 'J' # 0x4A -> LATIN CAPITAL LETTER J 'K' # 0x4B -> LATIN CAPITAL LETTER K 'L' # 0x4C -> LATIN CAPITAL LETTER L 'M' # 0x4D -> LATIN CAPITAL LETTER M 'N' # 0x4E -> LATIN CAPITAL LETTER N 'O' # 0x4F -> LATIN CAPITAL LETTER O 'P' # 0x50 -> LATIN CAPITAL LETTER P 'Q' # 0x51 -> LATIN CAPITAL LETTER Q 'R' # 0x52 -> LATIN CAPITAL LETTER R 'S' # 0x53 -> LATIN CAPITAL LETTER S 'T' # 0x54 -> LATIN CAPITAL LETTER T 'U' # 0x55 -> LATIN CAPITAL LETTER U 'V' # 0x56 -> LATIN CAPITAL LETTER V 'W' # 0x57 -> LATIN CAPITAL LETTER W 'X' # 0x58 -> LATIN CAPITAL LETTER X 'Y' # 0x59 -> LATIN CAPITAL LETTER Y 'Z' # 0x5A -> LATIN CAPITAL LETTER Z '[' # 0x5B -> LEFT SQUARE BRACKET '\\' # 0x5C -> REVERSE SOLIDUS ']' # 0x5D -> RIGHT SQUARE BRACKET '^' 
# 0x5E -> CIRCUMFLEX ACCENT '_' # 0x5F -> LOW LINE '`' # 0x60 -> GRAVE ACCENT 'a' # 0x61 -> LATIN SMALL LETTER A 'b' # 0x62 -> LATIN SMALL LETTER B 'c' # 0x63 -> LATIN SMALL LETTER C 'd' # 0x64 -> LATIN SMALL LETTER D 'e' # 0x65 -> LATIN SMALL LETTER E 'f' # 0x66 -> LATIN SMALL LETTER F 'g' # 0x67 -> LATIN SMALL LETTER G 'h' # 0x68 -> LATIN SMALL LETTER H 'i' # 0x69 -> LATIN SMALL LETTER I 'j' # 0x6A -> LATIN SMALL LETTER J 'k' # 0x6B -> LATIN SMALL LETTER K 'l' # 0x6C -> LATIN SMALL LETTER L 'm' # 0x6D -> LATIN SMALL LETTER M 'n' # 0x6E -> LATIN SMALL LETTER N 'o' # 0x6F -> LATIN SMALL LETTER O 'p' # 0x70 -> LATIN SMALL LETTER P 'q' # 0x71 -> LATIN SMALL LETTER Q 'r' # 0x72 -> LATIN SMALL LETTER R 's' # 0x73 -> LATIN SMALL LETTER S 't' # 0x74 -> LATIN SMALL LETTER T 'u' # 0x75 -> LATIN SMALL LETTER U 'v' # 0x76 -> LATIN SMALL LETTER V 'w' # 0x77 -> LATIN SMALL LETTER W 'x' # 0x78 -> LATIN SMALL LETTER X 'y' # 0x79 -> LATIN SMALL LETTER Y 'z' # 0x7A -> LATIN SMALL LETTER Z '{' # 0x7B -> LEFT CURLY BRACKET '|' # 0x7C -> VERTICAL LINE '}' # 0x7D -> RIGHT CURLY BRACKET '~' # 0x7E -> TILDE '\x7f' # 0x7F -> DELETE '\x80' # 0x80 -> <control> '\x81' # 0x81 -> <control> '\x82' # 0x82 -> <control> '\x83' # 0x83 -> <control> '\x84' # 0x84 -> <control> '\x85' # 0x85 -> <control> '\x86' # 0x86 -> <control> '\x87' # 0x87 -> <control> '\x88' # 0x88 -> <control> '\x89' # 0x89 -> <control> '\x8a' # 0x8A -> <control> '\x8b' # 0x8B -> <control> '\x8c' # 0x8C -> <control> '\x8d' # 0x8D -> <control> '\x8e' # 0x8E -> <control> '\x8f' # 0x8F -> <control> '\x90' # 0x90 -> <control> '\x91' # 0x91 -> <control> '\x92' # 0x92 -> <control> '\x93' # 0x93 -> <control> '\x94' # 0x94 -> <control> '\x95' # 0x95 -> <control> '\x96' # 0x96 -> <control> '\x97' # 0x97 -> <control> '\x98' # 0x98 -> <control> '\x99' # 0x99 -> <control> '\x9a' # 0x9A -> <control> '\x9b' # 0x9B -> <control> '\x9c' # 0x9C -> <control> '\x9d' # 0x9D -> <control> '\x9e' # 0x9E -> <control> '\x9f' # 0x9F -> <control> '\ufffe' 
'\u0e01' # 0xA1 -> THAI CHARACTER KO KAI '\u0e02' # 0xA2 -> THAI CHARACTER KHO KHAI '\u0e03' # 0xA3 -> THAI CHARACTER KHO KHUAT '\u0e04' # 0xA4 -> THAI CHARACTER KHO KHWAI '\u0e05' # 0xA5 -> THAI CHARACTER KHO KHON '\u0e06' # 0xA6 -> THAI CHARACTER KHO RAKHANG '\u0e07' # 0xA7 -> THAI CHARACTER NGO NGU '\u0e08' # 0xA8 -> THAI CHARACTER CHO CHAN '\u0e09' # 0xA9 -> THAI CHARACTER CHO CHING '\u0e0a' # 0xAA -> THAI CHARACTER CHO CHANG '\u0e0b' # 0xAB -> THAI CHARACTER SO SO '\u0e0c' # 0xAC -> THAI CHARACTER CHO CHOE '\u0e0d' # 0xAD -> THAI CHARACTER YO YING '\u0e0e' # 0xAE -> THAI CHARACTER DO CHADA '\u0e0f' # 0xAF -> THAI CHARACTER TO PATAK '\u0e10' # 0xB0 -> THAI CHARACTER THO THAN '\u0e11' # 0xB1 -> THAI CHARACTER THO NANGMONTHO '\u0e12' # 0xB2 -> THAI CHARACTER THO PHUTHAO '\u0e13' # 0xB3 -> THAI CHARACTER NO NEN '\u0e14' # 0xB4 -> THAI CHARACTER DO DEK '\u0e15' # 0xB5 -> THAI CHARACTER TO TAO '\u0e16' # 0xB6 -> THAI CHARACTER THO THUNG '\u0e17' # 0xB7 -> THAI CHARACTER THO THAHAN '\u0e18' # 0xB8 -> THAI CHARACTER THO THONG '\u0e19' # 0xB9 -> THAI CHARACTER NO NU '\u0e1a' # 0xBA -> THAI CHARACTER BO BAIMAI '\u0e1b' # 0xBB -> THAI CHARACTER PO PLA '\u0e1c' # 0xBC -> THAI CHARACTER PHO PHUNG '\u0e1d' # 0xBD -> THAI CHARACTER FO FA '\u0e1e' # 0xBE -> THAI CHARACTER PHO PHAN '\u0e1f' # 0xBF -> THAI CHARACTER FO FAN '\u0e20' # 0xC0 -> THAI CHARACTER PHO SAMPHAO '\u0e21' # 0xC1 -> THAI CHARACTER MO MA '\u0e22' # 0xC2 -> THAI CHARACTER YO YAK '\u0e23' # 0xC3 -> THAI CHARACTER RO RUA '\u0e24' # 0xC4 -> THAI CHARACTER RU '\u0e25' # 0xC5 -> THAI CHARACTER LO LING '\u0e26' # 0xC6 -> THAI CHARACTER LU '\u0e27' # 0xC7 -> THAI CHARACTER WO WAEN '\u0e28' # 0xC8 -> THAI CHARACTER SO SALA '\u0e29' # 0xC9 -> THAI CHARACTER SO RUSI '\u0e2a' # 0xCA -> THAI CHARACTER SO SUA '\u0e2b' # 0xCB -> THAI CHARACTER HO HIP '\u0e2c' # 0xCC -> THAI CHARACTER LO CHULA '\u0e2d' # 0xCD -> THAI CHARACTER O ANG '\u0e2e' # 0xCE -> THAI CHARACTER HO NOKHUK '\u0e2f' # 0xCF -> THAI CHARACTER PAIYANNOI 
'\u0e30' # 0xD0 -> THAI CHARACTER SARA A '\u0e31' # 0xD1 -> THAI CHARACTER MAI HAN-AKAT '\u0e32' # 0xD2 -> THAI CHARACTER SARA AA '\u0e33' # 0xD3 -> THAI CHARACTER SARA AM '\u0e34' # 0xD4 -> THAI CHARACTER SARA I '\u0e35' # 0xD5 -> THAI CHARACTER SARA II '\u0e36' # 0xD6 -> THAI CHARACTER SARA UE '\u0e37' # 0xD7 -> THAI CHARACTER SARA UEE '\u0e38' # 0xD8 -> THAI CHARACTER SARA U '\u0e39' # 0xD9 -> THAI CHARACTER SARA UU '\u0e3a' # 0xDA -> THAI CHARACTER PHINTHU '\ufffe' '\ufffe' '\ufffe' '\ufffe' '\u0e3f' # 0xDF -> THAI CURRENCY SYMBOL BAHT '\u0e40' # 0xE0 -> THAI CHARACTER SARA E '\u0e41' # 0xE1 -> THAI CHARACTER SARA AE '\u0e42' # 0xE2 -> THAI CHARACTER SARA O '\u0e43' # 0xE3 -> THAI CHARACTER SARA AI MAIMUAN '\u0e44' # 0xE4 -> THAI CHARACTER SARA AI MAIMALAI '\u0e45' # 0xE5 -> THAI CHARACTER LAKKHANGYAO '\u0e46' # 0xE6 -> THAI CHARACTER MAIYAMOK '\u0e47' # 0xE7 -> THAI CHARACTER MAITAIKHU '\u0e48' # 0xE8 -> THAI CHARACTER MAI EK '\u0e49' # 0xE9 -> THAI CHARACTER MAI THO '\u0e4a' # 0xEA -> THAI CHARACTER MAI TRI '\u0e4b' # 0xEB -> THAI CHARACTER MAI CHATTAWA '\u0e4c' # 0xEC -> THAI CHARACTER THANTHAKHAT '\u0e4d' # 0xED -> THAI CHARACTER NIKHAHIT '\u0e4e' # 0xEE -> THAI CHARACTER YAMAKKAN '\u0e4f' # 0xEF -> THAI CHARACTER FONGMAN '\u0e50' # 0xF0 -> THAI DIGIT ZERO '\u0e51' # 0xF1 -> THAI DIGIT ONE '\u0e52' # 0xF2 -> THAI DIGIT TWO '\u0e53' # 0xF3 -> THAI DIGIT THREE '\u0e54' # 0xF4 -> THAI DIGIT FOUR '\u0e55' # 0xF5 -> THAI DIGIT FIVE '\u0e56' # 0xF6 -> THAI DIGIT SIX '\u0e57' # 0xF7 -> THAI DIGIT SEVEN '\u0e58' # 0xF8 -> THAI DIGIT EIGHT '\u0e59' # 0xF9 -> THAI DIGIT NINE '\u0e5a' # 0xFA -> THAI CHARACTER ANGKHANKHU '\u0e5b' # 0xFB -> THAI CHARACTER KHOMUT '\ufffe' '\ufffe' '\ufffe' '\ufffe' ) ### Encoding table encoding_table=codecs.charmap_build(decoding_table)
mpl-2.0
tameemsafi/learning-python-3
projects/tic-tac-toe.py
1
2422
# Tic Tack Toe Game in Python 3
# -> Simple tic-tac-toe game in the console

# Number of moves played so far; the board is full at 9.
current_board_number = 0
# Cells hold their digit "1".."9" while free, then "X" or "O" once claimed.
board = ["{0}".format(i + 1) for i in range(9)]


def print_board():
    """Render the 3x3 board to the console."""
    row1 = "| {0} | {1} | {2} |".format(board[0], board[1], board[2])
    row2 = "| {0} | {1} | {2} |".format(board[3], board[4], board[5])
    row3 = "| {0} | {1} | {2} |".format(board[6], board[7], board[8])
    print()
    print(row1)
    print(row2)
    print(row3)
    print()


def player_move(icon):
    """Prompt the player using *icon* ("X" or "O") and apply their move.

    Re-prompts (recursively) until a free cell — one still showing its
    digit — is chosen.  Increments the global move counter on success.
    """
    number = 1 if icon == "X" else 2
    print("Your turn player {0}".format(number))
    choice = int(input("Enter your move (1-9): ").strip())
    if board[choice - 1] in "123456789":
        board[choice - 1] = icon
        global current_board_number
        current_board_number = current_board_number + 1
    else:
        print("You cant move there.")
        player_move(icon)


def is_victory(icon):
    """Return True if *icon* occupies any full row, column or diagonal."""
    winning_lines = [
        (0, 1, 2), (3, 4, 5), (6, 7, 8),   # rows
        (0, 3, 6), (1, 4, 7), (2, 5, 8),   # columns
        # BUG FIX: the original tested (3, 6, 8), which is not a line;
        # the main diagonal is (0, 4, 8).
        (0, 4, 8), (2, 4, 6),              # diagonals
    ]
    return any(board[a] == icon and board[b] == icon and board[c] == icon
               for a, b, c in winning_lines)


def is_draw():
    """Return True when all nine cells have been played.

    BUG FIX: the original compared against 8, declaring a draw with one
    cell still empty (and before X's potentially winning ninth move).
    """
    return current_board_number == 9


def main():
    """Run the interactive game loop until somebody wins or the board fills."""
    while True:
        print_board()
        player_move("X")
        print_board()
        if is_victory("X"):
            print("X wins! Congratulations!")
            break
        # X always makes the ninth move, so a draw can only occur here.
        if is_draw():
            print("Game was a draw!")
            break
        player_move("O")
        print_board()
        # BUG FIX: the original checked is_victory("X") after O's move and
        # credited X, so player O could never win.
        if is_victory("O"):
            print("O wins! Congratulations!")
            break


if __name__ == "__main__":
    main()

# ------
# Output
# ------

# | 1 | 2 | 3 |
# | 4 | 5 | 6 |
# | 7 | 8 | 9 |

# Your turn player 1
# Enter your move (1-9): 1

# | X | 2 | 3 |
# | 4 | 5 | 6 |
# | 7 | 8 | 9 |

# Your turn player 2
# Enter your move (1-9): 3

# | X | 2 | O |
# | 4 | 5 | 6 |
# | 7 | 8 | 9 |

# Your turn player 1
# Enter your move (1-9): 4

# | X | 2 | O |
# | X | 5 | 6 |
# | 7 | 8 | 9 |

# Your turn player 2
# Enter your move (1-9): 6

# | X | 2 | O |
# | X | 5 | O |
# | 7 | 8 | 9 |

# Your turn player 1
# Enter your move (1-9): 7

# | X | 2 | O |
# | X | 5 | O |
# | X | 8 | 9 |

# X wins! Congratulations!
gpl-3.0
IsCoolEntertainment/debpkg_python-boto
boto/mturk/__init__.py
782
1108
# Copyright (c) 2006,2007 Mitch Garnaat http://garnaat.org/ # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. #
mit
popazerty/dvbapp2-gui
lib/python/Plugins/SystemPlugins/VideoClippingSetup/plugin.py
7
5719
from Screens.Screen import Screen from Components.ConfigList import ConfigListScreen from Components.config import config, ConfigSubsection, ConfigInteger, ConfigSlider, getConfigListEntry config.plugins.VideoClippingSetup = ConfigSubsection() config.plugins.VideoClippingSetup.clip_left = ConfigInteger(default = 0) config.plugins.VideoClippingSetup.clip_width = ConfigInteger(default = 720) config.plugins.VideoClippingSetup.clip_top = ConfigInteger(default = 0) config.plugins.VideoClippingSetup.clip_height = ConfigInteger(default = 576) class VideoClippingCoordinates(Screen, ConfigListScreen): skin = """ <screen position="0,0" size="e,e" title="Video clipping setup" backgroundColor="transparent"> <widget name="config" position="c-175,c-75" size="350,150" foregroundColor="black" backgroundColor="transparent" /> <ePixmap pixmap="buttons/green.png" position="c-145,e-100" zPosition="0" size="140,40" alphatest="on" /> <ePixmap pixmap="buttons/red.png" position="c+5,e-100" zPosition="0" size="140,40" alphatest="on" /> <widget name="ok" position="c-145,e-100" size="140,40" valign="center" halign="center" zPosition="1" font="Regular;20" transparent="1" backgroundColor="green" /> <widget name="cancel" position="c+5,e-100" size="140,40" valign="center" halign="center" zPosition="1" font="Regular;20" transparent="1" backgroundColor="red" /> </screen>""" def __init__(self, session): self.skin = VideoClippingCoordinates.skin Screen.__init__(self, session) from Components.ActionMap import ActionMap from Components.Button import Button self["ok"] = Button(_("OK")) self["cancel"] = Button(_("Cancel")) self["actions"] = ActionMap(["SetupActions", "ColorActions", "MenuActions"], { "ok": self.keyGo, "save": self.keyGo, "cancel": self.keyCancel, "green": self.keyGo, "red": self.keyCancel, "menu": self.closeRecursive, }, -2) self.list = [] ConfigListScreen.__init__(self, self.list, session = self.session) left = config.plugins.VideoClippingSetup.clip_left.getValue() width = 
config.plugins.VideoClippingSetup.clip_width.getValue() top = config.plugins.VideoClippingSetup.clip_top.getValue() height = config.plugins.VideoClippingSetup.clip_height.getValue() self.clip_step = ConfigSlider(default = 1, increment = 1, limits = (1, 20)) self.clip_left = ConfigSlider(default = left, increment = self.clip_step.getValue(), limits = (0, 720)) self.clip_width = ConfigSlider(default = width, increment = self.clip_step.getValue(), limits = (0, 720)) self.clip_top = ConfigSlider(default = top, increment = self.clip_step.getValue(), limits = (0, 576)) self.clip_height = ConfigSlider(default = height, increment = self.clip_step.getValue(), limits = (0, 576)) self.list.append(getConfigListEntry(_("stepsize"), self.clip_step)) self.list.append(getConfigListEntry(_("left"), self.clip_left)) self.list.append(getConfigListEntry(_("width"), self.clip_width)) self.list.append(getConfigListEntry(_("top"), self.clip_top)) self.list.append(getConfigListEntry(_("height"), self.clip_height)) self["config"].list = self.list self["config"].l.setList(self.list) def adjustStep(self): self.clip_left.increment = self.clip_step.getValue() self.clip_width.increment = self.clip_step.getValue() self.clip_top.increment = self.clip_step.getValue() self.clip_height.increment = self.clip_step.getValue() def keyLeft(self): ConfigListScreen.keyLeft(self) self.adjustStep() self.setPreviewPosition() def keyRight(self): ConfigListScreen.keyRight(self) self.adjustStep() self.setPreviewPosition() def setPreviewPosition(self): setPosition(int(self.clip_left.getValue()), int(self.clip_width.getValue()), int(self.clip_top.getValue()), int(self.clip_height.getValue())) def keyGo(self): config.plugins.VideoClippingSetup.clip_left.value = self.clip_left.getValue() config.plugins.VideoClippingSetup.clip_width.value = self.clip_width.getValue() config.plugins.VideoClippingSetup.clip_top.value = self.clip_top.getValue() config.plugins.VideoClippingSetup.clip_height.value = 
self.clip_height.getValue() config.plugins.VideoClippingSetup.save() self.close() def keyCancel(self): setConfiguredPosition() self.close() def setPosition(clip_left, clip_width, clip_top, clip_height): if clip_left + clip_width > 720: clip_width = 720 - clip_left if clip_top + clip_height > 576: clip_height = 576 - clip_top try: file = open("/proc/stb/vmpeg/0/clip_left", "w") file.write('%X' % clip_left) file.close() file = open("/proc/stb/vmpeg/0/clip_width", "w") file.write('%X' % clip_width) file.close() file = open("/proc/stb/vmpeg/0/clip_top", "w") file.write('%X' % clip_top) file.close() file = open("/proc/stb/vmpeg/0/clip_height", "w") file.write('%X' % clip_height) file.close() except: return def setConfiguredPosition(): setPosition(int(config.plugins.VideoClippingSetup.clip_left.getValue()), int(config.plugins.VideoClippingSetup.clip_width.getValue()), int(config.plugins.VideoClippingSetup.clip_top.getValue()), int(config.plugins.VideoClippingSetup.clip_height.getValue())) def main(session, **kwargs): session.open(VideoClippingCoordinates) def startup(reason, **kwargs): setConfiguredPosition() def Plugins(**kwargs): from os import path if path.exists("/proc/stb/vmpeg/0/clip_left"): from Plugins.Plugin import PluginDescriptor return [PluginDescriptor(name = "Video clipping setup", description = "clip overscan / letterbox borders", where = PluginDescriptor.WHERE_PLUGINMENU, fnc = main), PluginDescriptor(name = "Video clipping setup", description = "", where = PluginDescriptor.WHERE_SESSIONSTART, fnc = startup)] return []
gpl-2.0
blighj/django
tests/utils_tests/test_safestring.py
58
3447
# Regression tests for django.utils.safestring.mark_safe: template
# rendering, __html__ interaction, lazy strings and decorator usage.
from django.template import Context, Template
from django.test import SimpleTestCase
from django.utils import html, text
from django.utils.functional import lazy, lazystr
from django.utils.safestring import SafeData, mark_safe


class customescape(str):
    def __html__(self):
        # implement specific and obviously wrong escaping
        # in order to be able to tell for sure when it runs
        return self.replace('<', '<<').replace('>', '>>')


class SafeStringTest(SimpleTestCase):
    def assertRenderEqual(self, tpl, expected, **context):
        # Helper: render *tpl* with **context and compare to *expected*.
        context = Context(context)
        tpl = Template(tpl)
        self.assertEqual(tpl.render(context), expected)

    def test_mark_safe(self):
        # A safe string renders unescaped, but force_escape still escapes it.
        s = mark_safe('a&b')
        self.assertRenderEqual('{{ s }}', 'a&b', s=s)
        self.assertRenderEqual('{{ s|force_escape }}', 'a&amp;b', s=s)

    def test_mark_safe_str(self):
        """
        Calling str() on a SafeText instance doesn't lose the safe status.
        """
        s = mark_safe('a&b')
        self.assertIsInstance(str(s), type(s))

    def test_mark_safe_object_implementing_dunder_html(self):
        # mark_safe returns the very same object when it defines __html__,
        # and rendering uses that (deliberately wrong) escaping.
        e = customescape('<a&b>')
        s = mark_safe(e)
        self.assertIs(s, e)
        self.assertRenderEqual('{{ s }}', '<<a&b>>', s=s)
        self.assertRenderEqual('{{ s|force_escape }}', '&lt;a&amp;b&gt;', s=s)

    def test_mark_safe_lazy(self):
        # Lazy strings become SafeData and render unescaped.
        s = lazystr('a&b')
        self.assertIsInstance(mark_safe(s), SafeData)
        self.assertRenderEqual('{{ s }}', 'a&b', s=mark_safe(s))

    def test_mark_safe_object_implementing_dunder_str(self):
        # Objects without __html__ are rendered via __str__, unescaped.
        class Obj:
            def __str__(self):
                return '<obj>'

        s = mark_safe(Obj())
        self.assertRenderEqual('{{ s }}', '<obj>', s=s)

    def test_mark_safe_result_implements_dunder_html(self):
        self.assertEqual(mark_safe('a&b').__html__(), 'a&b')

    def test_mark_safe_lazy_result_implements_dunder_html(self):
        self.assertEqual(mark_safe(lazystr('a&b')).__html__(), 'a&b')

    def test_add_lazy_safe_text_and_safe_text(self):
        # Concatenating a lazily-escaped safe string with a safe string
        # must preserve safeness for each escaping helper.
        s = html.escape(lazystr('a'))
        s += mark_safe('&b')
        self.assertRenderEqual('{{ s }}', 'a&b', s=s)

        s = html.escapejs(lazystr('a'))
        s += mark_safe('&b')
        self.assertRenderEqual('{{ s }}', 'a&b', s=s)

        s = text.slugify(lazystr('a'))
        s += mark_safe('&b')
        self.assertRenderEqual('{{ s }}', 'a&b', s=s)

    def test_mark_safe_as_decorator(self):
        """
        mark_safe used as a decorator leaves the result of a function
        unchanged.
        """
        def clean_string_provider():
            return '<html><body>dummy</body></html>'

        self.assertEqual(mark_safe(clean_string_provider)(), clean_string_provider())

    def test_mark_safe_decorator_does_not_affect_dunder_html(self):
        """
        mark_safe doesn't affect a callable that has an __html__() method.
        """
        class SafeStringContainer:
            def __html__(self):
                return '<html></html>'

        self.assertIs(mark_safe(SafeStringContainer), SafeStringContainer)

    def test_mark_safe_decorator_does_not_affect_promises(self):
        """
        mark_safe doesn't affect lazy strings (Promise objects).
        """
        def html_str():
            return '<html></html>'

        lazy_str = lazy(html_str, str)()
        self.assertEqual(mark_safe(lazy_str), html_str())
bsd-3-clause
mancoast/CPythonPyc_test
cpython/272_test_contains.py
136
3153
# NOTE: Python 2 test module (uses `unicode`, old-style classes, __cmp__
# and `raise E, msg` syntax).  Exercises the `in` / `not in` operators
# against __contains__, __getitem__ fallback, and builtin sequences.
from test.test_support import have_unicode, run_unittest
import unittest

class base_set:
    # No __contains__ and no __getitem__: `in` must raise TypeError.
    def __init__(self, el):
        self.el = el

class set(base_set):
    # Membership via __contains__: contains exactly its one element.
    def __contains__(self, el):
        return self.el == el

class seq(base_set):
    # No __contains__; `in` falls back to iteration via __getitem__.
    def __getitem__(self, n):
        return [self.el][n]

class TestContains(unittest.TestCase):
    def test_common_tests(self):
        a = base_set(1)
        b = set(1)
        c = seq(1)
        self.assertIn(1, b)
        self.assertNotIn(0, b)
        self.assertIn(1, c)
        self.assertNotIn(0, c)
        # base_set supports neither protocol, so `in` raises TypeError.
        self.assertRaises(TypeError, lambda: 1 in a)
        self.assertRaises(TypeError, lambda: 1 not in a)

        # test char in string
        self.assertIn('c', 'abc')
        self.assertNotIn('d', 'abc')

        # The empty string is a substring of every string.
        self.assertIn('', '')
        self.assertIn('', 'abc')

        self.assertRaises(TypeError, lambda: None in 'abc')

    if have_unicode:
        def test_char_in_unicode(self):
            self.assertIn('c', unicode('abc'))
            self.assertNotIn('d', unicode('abc'))

            # Empty str/unicode are substrings in every str/unicode mix.
            self.assertIn('', unicode(''))
            self.assertIn(unicode(''), '')
            self.assertIn(unicode(''), unicode(''))
            self.assertIn('', unicode('abc'))
            self.assertIn(unicode(''), 'abc')
            self.assertIn(unicode(''), unicode('abc'))

            self.assertRaises(TypeError, lambda: None in unicode('abc'))

            # test Unicode char in Unicode
            self.assertIn(unicode('c'), unicode('abc'))
            self.assertNotIn(unicode('d'), unicode('abc'))

            # test Unicode char in string
            self.assertIn(unicode('c'), 'abc')
            self.assertNotIn(unicode('d'), 'abc')

    def test_builtin_sequence_types(self):
        # a collection of tests on builtin sequence types
        a = range(10)
        for i in a:
            self.assertIn(i, a)
        self.assertNotIn(16, a)
        # A container is not an element of itself.
        self.assertNotIn(a, a)

        a = tuple(a)
        for i in a:
            self.assertIn(i, a)
        self.assertNotIn(16, a)
        self.assertNotIn(a, a)

        class Deviant1:
            """Behaves strangely when compared

            This class is designed to make sure that the contains code
            works when the list is modified during the check.
            """
            aList = range(15)
            def __cmp__(self, other):
                # Shrink the list mid-scan once element 12 is reached.
                if other == 12:
                    self.aList.remove(12)
                    self.aList.remove(13)
                    self.aList.remove(14)
                return 1

        self.assertNotIn(Deviant1(), Deviant1.aList)

        class Deviant2:
            """Behaves strangely when compared

            This class raises an exception during comparison.  That in
            turn causes the comparison to fail with a TypeError.
            """
            def __cmp__(self, other):
                if other == 4:
                    raise RuntimeError, "gotcha"

        try:
            self.assertNotIn(Deviant2(), a)
        except TypeError:
            pass

def test_main():
    run_unittest(TestContains)

if __name__ == '__main__':
    test_main()
gpl-3.0
milkpku/BetaElephant
policy_experiment/analysis.py
1
1189
import numpy as np


def load_log_file(file_path):
    """Parse a training log and return a numpy array of per-line accuracies.

    Each line is expected to carry a fixed-width accuracy figure in its
    last four characters before the newline (e.g. ``"... acc 0.93\n"``).
    """
    # 'with' closes the handle even on a parse error (the original leaked it).
    with open(file_path) as fh:
        lines = fh.readlines()
    accuracy = np.zeros((len(lines),))
    for i, line in enumerate(lines):
        # Characters [-5:-1] hold the accuracy digits, e.g. "0.93".
        accuracy[i] = float(line[-5:-1])
    return accuracy


def smooth(array, window=250):
    """Return the *window*-sample moving average of *array*.

    The result has ``len(array) - window`` entries.

    BUG FIX: the original looped over ``range(1, window)``, summing only
    ``window - 1`` shifted slices while still dividing by ``window`` —
    every output was biased low and the first sample of each window was
    ignored.
    """
    total = np.zeros(len(array) - window)
    for offset in range(window):
        # offset - window is always negative, so the slice end counts from
        # the end of the array and every slice has len(array) - window items.
        total += array[offset:offset - window]
    return total / window


if __name__ == '__main__':
    # matplotlib is only needed for the interactive plot; importing it
    # lazily keeps the module importable on headless machines.
    import matplotlib.pyplot as plt

    watch_list = [
        #'policy.orign',
        #'policy.add-enemymove',
        #'policy.add-enemyprot',
        'policy.add-all',
        'policy.fast-policy',
        'policy.resNet.add-all',
        'policy.pip.add-all',
        'policy.fc.add-all'
    ]
    plot_list = []
    for folder in watch_list:
        a = load_log_file(folder + '/log.txt')
        a = a[a < 1]          # discard bogus readings >= 1.0
        a = smooth(a)
        f, = plt.plot(a)
        plot_list.append(f)
    plt.legend(plot_list, watch_list, loc=4)
    plt.xlim(xmin=0, xmax=10000)
    plt.xlabel('epoch')
    plt.ylabel('accuracy')
    # plt.title('Validation Accuracy for Different Feature')
    plt.title('Validation Accuracy for Different Model')
    plt.show()
mit
mwegrzynek/auth_ldap_set_email
__openerp__.py
1
1566
# -*- coding: utf-8 -*- ############################################################################## # # Odoo, Open Source Management Solution # Copyright (C) 2005-Today Litex Service Sp. z o.o. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { 'name': 'Set e-mail and signature for users autocreated by auth_ldap', 'summary': '''Small extension module for correctly setting new user's email and signature based on data from LDAP directory''', 'description': ''' Small extension module for correctly setting new user's email and signature based on data from LDAP directory. ''', 'author': 'Litex Service Sp. z o.o.', 'website': 'http://www.litex.pl', 'category': 'Authentication', 'version': '1.0.1', 'depends': [ 'auth_ldap' ], 'data': [], 'demo': [], 'tests': [], }
agpl-3.0
Metronotes/bitcoin
contrib/devtools/update-translations.py
54
2334
#!/usr/bin/python
# Copyright (c) 2014 Wladimir J. van der Laan
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Run this script from the root of the repository to update all translations from transifex.
It will do the following automatically:

- fetch all translations using the tx tool
- post-process them into valid and committable format
- remove invalid control characters
- remove location tags (makes diffs less noisy)

TODO:
- auto-add new translations to the build system according to the translation process
- remove 'unfinished' translation items
'''
# NOTE: Python 2 script — postprocess_translations() applies str regexes to
# bytes read in 'rb' mode, which only works on Python 2.
from __future__ import division, print_function
import subprocess
import re
import sys
import os

# Name of transifex tool
TX = 'tx'
# Name of source language file
SOURCE_LANG = 'bitcoin_en.ts'
# Directory with locale files
LOCALE_DIR = 'src/qt/locale'

def check_at_repository_root():
    # Guard: the tx tool and LOCALE_DIR paths are relative to the repo root.
    if not os.path.exists('.git'):
        print('No .git directory found')
        print('Execute this script at the root of the repository', file=sys.stderr)
        exit(1)

def fetch_all_translations():
    # `tx pull -f` force-downloads every translation; nonzero exit is fatal.
    if subprocess.call([TX, 'pull', '-f']):
        print('Error while fetching translations', file=sys.stderr)
        exit(1)

def postprocess_translations():
    # Rewrite each pulled .ts file in place so the result is valid XML and
    # produces minimal diffs when committed.
    print('Postprocessing...')
    for filename in os.listdir(LOCALE_DIR):
        # process only language files, and do not process source language
        if not filename.endswith('.ts') or filename == SOURCE_LANG:
            continue
        filepath = os.path.join(LOCALE_DIR, filename)
        with open(filepath, 'rb') as f:
            data = f.read()
        # remove non-allowed control characters
        data = re.sub('[\x00-\x09\x0b\x0c\x0e-\x1f]', '', data)
        data = data.split('\n')
        # strip locations from non-origin translation
        # location tags are used to guide translators, they are not necessary for compilation
        # TODO: actually process XML instead of relying on Transifex's one-tag-per-line output format
        data = [line for line in data if not '<location' in line]
        with open(filepath, 'wb') as f:
            f.write('\n'.join(data))

if __name__ == '__main__':
    check_at_repository_root()
    fetch_all_translations()
    postprocess_translations()
mit
pouyana/teireader
webui/applications/admin/languages/uk.py
16
43559
# coding: utf8 { '!langcode!': 'uk', '!langname!': 'Українська', '"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"Оновити" це додатковий вираз, такий, як "field1=\'нове_значення\'". Ви не можете змінювати або вилучати дані об\'єднаних таблиць', '"User Exception" debug mode. ': 'Режим ладнання "Сигнали від користувачів" ("User Exception" debug mode) ', '"User Exception" debug mode. An error ticket could be issued!': 'Режим ладнання "Сигнали від користувачів" ("User Exception" debug mode). Користувачі можуть залишати позначки про помилки!', '%s': '%s', '%s %%{row} deleted': 'Вилучено %s %%{рядок}', '%s %%{row} updated': 'Вилучено %s %%{рядок}', '%s Recent Tweets': '%s %%{останній} %%{твіт}', '%s students registered': '%s студентів зареєстровано', '%Y-%m-%d': '%Y/%m/%d', '%Y-%m-%d %H:%M:%S': '%Y/%m/%d %H:%M:%S', '(requires internet access)': '(потрібно мати доступ в інтернет)', '(requires internet access, experimental)': '(потрібно мати доступ в інтернет, експериментально)', '(something like "it-it")': '(щось схоже на "uk-ua")', '@markmin\x01(file **gluon/contrib/plural_rules/%s.py** is not found)': '(не існує файлу **gluon/contrib/plural_rules/%s.py**)', '@markmin\x01An error occured, please [[reload %s]] the page': 'Сталась помилка, будь-ласка [[переватажте %s]] сторінку', "@markmin\x01Mercurial Version Control System Interface[[NEWLINE]]for application '%s'": "Інтерфейс системи контролю версій Mercurial[[NEWLINE]]для додатку '%s'", '@markmin\x01Searching: **%s** %%{file}': 'Знайдено: **%s** %%{файл}', 'Abort': 'Припинити', 'About': 'Про', 'about': 'про', 'About application': 'Про додаток', 'Add breakpoint': 'Додати точку зупинки', 'Additional code for your application': 'Додатковий код для вашого додатку', 'admin disabled because no admin password': 'адмін.інтерфейс відключено, бо не вказано пароль адміністратора', 'admin disabled because not supported on google app engine': 'адмін.інтерфейс 
відключено через те, що Google Application Engine його не підтримує', 'admin disabled because too many invalid login attempts': 'адмін.інтерфейс заблоковано, бо кількість хибних спроб входу перевищило граничний рівень', 'admin disabled because unable to access password file': 'адмін.інтерфейс відключено через відсутність доступу до файлу паролів', 'Admin is disabled because insecure channel': "Адмін.інтерфейс відключено через використання ненадійного каналу звя'зку", 'Admin language': 'Мова інтерфейсу:', 'administrative interface': 'інтерфейс адміністратора', 'Administrator Password:': 'Пароль адміністратора:', 'and rename it:': 'i змінити назву на:', 'App does not exist or your are not authorized': 'Додаток не існує, або ви не авторизовані', 'appadmin': 'Aдм.панель', 'appadmin is disabled because insecure channel': "адмін.панель відключено через використання ненадійного каналу зв'язку", 'Application': 'Додаток (Application)', 'application "%s" uninstalled': 'додаток "%s" вилучено', 'application %(appname)s installed with md5sum: %(digest)s': 'додаток %(appname)s встановлено з md5sum: %(digest)s', 'Application cannot be generated in demo mode': 'В демо-режимі генерувати додатки не можна', 'application compiled': 'додаток скомпільовано', 'Application exists already': 'Додаток вже існує', 'application is compiled and cannot be designed': 'додаток скомпільований. 
налаштування змінювати не можна', 'Application name:': 'Назва додатку:', 'are not used': 'не використовуються', 'are not used yet': 'поки що не використовуються', 'Are you sure you want to delete file "%s"?': 'Ви впевнені, що хочете вилучити файл "%s"?', 'Are you sure you want to delete plugin "%s"?': 'Ви впевнені, що хочете вилучити втулку "%s"?"', 'Are you sure you want to delete this object?': "Ви впевнені, що хочете вилучити цей об'єкт?", 'Are you sure you want to uninstall application "%s"?': 'Ви впевнені, що хочете вилучити (uninstall) додаток "%s"?', 'arguments': 'аргументи', 'at char %s': 'на символі %s', 'at line %s': 'в рядку %s', 'ATTENTION:': 'УВАГА:', 'ATTENTION: Login requires a secure (HTTPS) connection or running on localhost.': "УВАГА: Вхід потребує надійного (HTTPS) з'єднання або запуску на локальному комп'ютері.", 'ATTENTION: TESTING IS NOT THREAD SAFE SO DO NOT PERFORM MULTIPLE TESTS CONCURRENTLY.': 'ОБЕРЕЖНО: ТЕСТУВАННЯ НЕ Є ПОТОКО-БЕЗПЕЧНИМ, ТОЖ НЕ ЗАПУСКАЙТЕ ДЕКІЛЬКА ТЕСТІВ ОДНОЧАСНО.', 'ATTENTION: you cannot edit the running application!': 'УВАГА: Ви не можете редагувати додаток, який зараз виконуєте!', 'Autocomplete Python Code': 'Автозавершення коду на Python', 'Available databases and tables': 'Доступні бази даних та таблиці', 'back': '<< назад', 'bad_resource': 'поганий_ресурс', 'Basics': 'Початок', 'Begin': 'Початок', 'breakpoint': 'точку зупинки', 'Breakpoints': 'Точки зупинок', 'breakpoints': 'точки зупинок', 'Bulk Register': 'Масова реєстрація', 'Bulk Student Registration': 'Масова реєстрація студентів', 'cache': 'кеш', 'Cache Keys': 'Ключі кешу', 'cache, errors and sessions cleaned': 'кеш, список зареєстрованих помилок та сесії очищенні', 'can be a git repo': 'може бути git-репозитарієм', 'Cancel': 'Відмінити', 'Cannot be empty': 'Не може бути порожнім', 'Cannot compile: there are errors in your app:': 'Не вдається скомпілювати: є помилки у вашому додатку:', 'cannot create file': 'не можу створити файл', 'cannot upload file 
"%(filename)s"': 'не можу завантажити файл "%(filename)s"', 'Change admin password': 'Змінити пароль адміністратора', 'change editor settings': 'змінити налаштування редактора', 'check all': 'відмітити всі', 'Check for upgrades': 'Перевірити оновлення', 'Check to delete': 'Помітити на вилучення', 'Checking for upgrades...': 'Відбувається пошук оновлень...', 'Clean': 'Очистити', 'Clear CACHE?': 'Очистити ВЕСЬ кеш?', 'Clear DISK': 'Очистити ДИСКОВИЙ КЕШ', 'Clear RAM': "Очистити КЕШ В ПАМ'ЯТІ", 'Click row to expand traceback': '"Клацніть" мишкою по рядку, щоб розгорнути стек викликів (traceback)', 'Click row to view a ticket': 'Для перегляду позначки (ticket) "клацніть" мишкою по рядку', 'code': 'код', 'Code listing': 'Лістинг', 'collapse/expand all': 'згорнути/розгорнути все', 'Command': 'Команда', 'Comment:': 'Пояснення:', 'Commit': 'Комміт', 'Commit form': 'Commit form', 'Compile': 'Компілювати', 'compiled application removed': 'скомпільований додаток вилучено', 'Condition': 'Умова', 'contact_admin': "зв'язатись_з_адміністратором", 'continue': 'продовжити', 'Controllers': 'Контролери', 'controllers': 'контролери', 'Count': 'К-сть', 'Create': 'Створити', 'create': 'створити', 'create file with filename:': 'створити файл з назвою:', 'create plural-form': 'створити форму множини', 'Create rules': 'Створити правила', 'Create/Upload': 'Створити/Завантажити', 'created by': 'Автор:', 'Created On': 'Створено в', 'crontab': 'таблиця cron', 'Current request': 'Поточний запит', 'Current response': 'Поточна відповідь', 'Current session': 'Поточна сесія', 'currently running': 'наразі, активний додаток', 'currently saved or': 'останній збережений, або', 'data uploaded': 'дані завантажено', 'database': 'база даних', 'database %s select': 'Вибірка з бази даних %s', 'database administration': 'адміністрування бази даних', 'Date and Time': 'Дата і час', 'db': 'база даних', 'Debug': 'Ладнати (Debug)', 'defines tables': "об'являє таблиці", 'Delete': 'Вилучити', 'delete': 'вилучити', 
'delete all checked': 'вилучити всі відмічені', 'delete plugin': 'вилучити втулку', 'Delete this file (you will be asked to confirm deletion)': 'Вилучити цей файл (буде відображено запит на підтвердження операції)', 'Delete:': 'Вилучити:', 'deleted after first hit': 'автоматично вилучається після першого спрацювання', 'Deploy': 'Розгорнути', 'Deploy on Google App Engine': 'Розгорнути на Google App Engine (GAE)', 'Deploy to OpenShift': 'Розгорнути на OpenShift', 'Deployment form': 'Форма розгортання (deployment form)', 'design': 'налаштування', 'Detailed traceback description': 'Детальний опис стеку викликів (traceback)', 'details': 'детальніше', 'direction: ltr': 'напрямок: зліва-направо (ltr)', 'directory not found': 'каталог не знайдено', 'Disable': 'Вимкнути', 'Disabled': 'Вимкнено', 'disabled in demo mode': 'відключено в демо-режимі', 'disabled in multi user mode': 'відключено в багато-користувацькому режимі', 'Disk Cache Keys': 'Ключі дискового кешу', 'Disk Cleared': 'Дисковий кеш очищено', 'docs': 'док.', 'done!': 'зроблено!', 'Downgrade': 'Повернути попередню версію', 'download layouts': 'завантажити макет (layout)', 'download plugins': 'завантажити втулки', 'Edit': 'Редагувати', 'edit all': 'редагувати всі', 'Edit application': 'Налаштування додатку', 'edit controller': 'редагувати контролер', 'edit controller:': 'редагувати контролер:', 'Edit current record': 'Редагувати поточний запис', 'edit views:': 'редагувати відображення (views):', 'Editing %s': 'Редагується %s', 'Editing file "%s"': 'Редагується файл "%s"', 'Editing Language file': 'Редагується файл перекладу', 'Editing Plural Forms File': 'Редагується файл форм множини', 'Enable': 'Увімкнути', 'enter a value': 'введіть значення', 'Error': 'Помилка', 'Error logs for "%(app)s"': 'Список зареєстрованих помилок додатку "%(app)s"', 'Error snapshot': 'Розгорнутий знімок стану (Error snapshot)', 'Error ticket': 'Позначка (ticket) про помилку', 'Errors': 'Помилки', 'Errors in form, please check it out.': 
'Помилка у формі, будь-ласка перевірте її.', 'Exception %(extype)s: %(exvalue)s': 'Виключення %(extype)s: %(exvalue)s', 'Exception %s': 'Виключення %s', 'Exception instance attributes': 'Атрибути примірника класу Exception (виключення)', 'Exit Fullscreen': 'Вийти з повноекранного режиму', 'Expand Abbreviation': 'Розгорнути абревіатуру', 'export as csv file': 'експортувати як файл csv', 'exposes': 'обслуговує', 'exposes:': 'обслуговує:', 'extends': 'розширює', 'failed to compile file because:': 'не вдалось скомпілювати файл через:', 'failed to reload module because:': 'не вдалось перевантажити модуль через:', 'faq': 'ЧаПи (faq)', 'File': 'Файл', 'file "%(filename)s" created': 'файл "%(filename)s" створено', 'file "%(filename)s" deleted': 'файл "%(filename)s" вилучено', 'file "%(filename)s" uploaded': 'файл "%(filename)s" завантажено', 'file "%s" of %s restored': 'файл "%s" з %s відновлено', 'file changed on disk': 'файл змінено на диску', 'file does not exist': 'файлу не існує', 'file not found': 'файл не знайдено', 'file saved on %(time)s': 'файл збережено в %(time)s', 'file saved on %s': 'файл збережено в %s', 'Filename': "Ім'я файлу", 'filter': 'фільтр', 'Find Next': 'Шукати наступний', 'Find Previous': 'Шукати попередній', 'Frames': 'Стек викликів', 'Functions with no doctests will result in [passed] tests.': 'Функції, в яких відсутні док-тести відносяться до функцій, які успішно пройшли тести.', 'GAE Email': 'Ел.пошта GAE', 'GAE Output': 'Відповідь GAE', 'GAE Password': 'Пароль GAE', 'Generate': 'Генерувати', 'Get from URL:': 'Отримати з URL:', 'Git Pull': 'Git Pull', 'Git Push': 'Git Push', 'Globals##debug': 'Глобальні змінні', 'Go to Matching Pair': 'Перейти до відповідної пари', 'go!': 'почали!', 'Google App Engine Deployment Interface': 'Інтерфейс розгортання Google App Engine', 'Google Application Id': 'Ідентифікатор Google Application', 'Goto': 'Перейти до', 'graph model': 'графова модель', 'Help': 'Допомога', 'Hide/Show Translated strings': 
'Сховати/показати ВЖЕ ПЕРЕКЛАДЕНІ рядки', 'Hits': 'Спрацьовувань', 'Home': 'Домівка', 'honored only if the expression evaluates to true': 'точка зупинки активується тільки за істинності умови', 'If start the downgrade, be patient, it may take a while to rollback': 'Запустивши повернення на попередню версію, будьте терплячими, це може зайняти трохи часу', 'If start the upgrade, be patient, it may take a while to download': 'Запустивши оновлення, будьте терплячими, потрібен час для завантаження необхідних даних', 'If the report above contains a ticket number it indicates a failure in executing the controller, before any attempt to execute the doctests. This is usually due to an indentation error or an error outside function code.\n\t\tA green title indicates that all tests (if defined) passed. In this case test results are not shown.': 'Якщо в наданому вище звіті присутня позначка про помилку (ticket number), то це вказує на збій у виконанні контролера ще до початку запуску док-тестів. Це, зазвичай, сигналізує про помилку вирівнювання тексту програми (indention error) або помилку за межами функції (error outside function code).\n\t\tЗелений заголовок сигналізує, що всі тести (з наявних) пройшли успішно. 
В цьому випадку результат тестів показано не буде.', 'Import/Export': 'Імпорт/Експорт', 'In development, use the default Rocket webserver that is currently supported by this debugger.': 'Під час розробки , використовуйте вбудований веб-сервер Rocket, він найкраще налаштований на спільну роботу з інтерактивним ладначем.', 'includes': 'включає', 'index': 'індекс', 'insert new': 'вставити новий', 'insert new %s': 'вставити новий %s', 'inspect attributes': 'інспектувати атрибути', 'Install': 'Встановлення', 'Installed applications': 'Встановлені додатки (applications)', 'Interaction at %s line %s': 'Виконується %s рядок %s', 'Interactive console': 'Інтерактивна консоль', 'internal error': 'внутрішня помилка', 'internal error: %s': 'внутрішня помилка: %s', 'Internal State': 'Внутрішній стан', 'Invalid action': 'Помилкова дія', 'invalid circual reference': 'помилкове циклічне посилання', 'invalid circular reference': 'помилкове циклічне посилання', 'invalid password': 'неправильний пароль', 'invalid password.': 'неправильний пароль.', 'Invalid Query': 'Помилковий запит', 'invalid request': 'хибний запит', 'invalid request ': 'Хибний запит', 'invalid table names (auth_* tables already defined)': 'хибна назва таблиці (таблиці auth_* вже оголошено)', 'invalid ticket': 'недійсна позначка про помилку (ticket)', 'Key': 'Ключ', 'Key bindings': 'Клавіатурні скорочення:', 'Key bindings for ZenCoding Plugin': 'Клавіатурні скорочення для втулки ZenCoding plugin', 'kill process': 'вбити процес', 'language file "%(filename)s" created/updated': 'Файл перекладу "%(filename)s" створено/оновлено', 'Language files (static strings) updated': 'Файли перекладів (із статичних рядків в першоджерелах) оновлено', 'languages': 'переклади', 'Languages': 'Переклади', 'Last saved on:': 'Востаннє збережено:', 'License for': 'Ліцензія додатку', 'Line number': '№ рядка', 'LineNo': '№ рядка', 'lists by exception': 'список виключень (exceptions)', 'lists by ticket': 'список позначок (tickets)', 
'loading...': 'завантаження...', 'locals': 'локальні', 'Locals##debug': 'Локальні змінні', 'Login': 'Вхід', 'login': 'вхід', 'Login to the Administrative Interface': 'Вхід в адміністративний інтерфейс', 'Logout': 'Вихід', 'Main Menu': 'Основне меню', 'Manage': 'Керувати', 'Manage Admin Users/Students': 'Адміністратор керування користувачами/студентами', 'Manage Students': 'Керувати студентами', 'Match Pair': 'Знайти пару', 'merge': "з'єднати", 'Merge Lines': "З'єднати рядки", 'Minimum length is %s': 'Мінімальна довжина становить %s', 'Models': 'Моделі', 'models': 'моделі', 'Modified On': 'Змінено в', 'Modules': 'Модулі', 'modules': 'модулі', 'Must include at least %s %s': 'Має вміщувати щонайменше %s %s', 'Must include at least %s lower case': 'Повинен включати щонайменше %s малих букв', 'Must include at least %s of the following : %s': 'Має включати не менше %s таких символів : %s', 'Must include at least %s upper case': 'Повинен включати щонайменше %s великих букв', 'new application "%s" created': 'новий додаток "%s" створено', 'New Application Wizard': 'Майстер створення нового додатку', 'New application wizard': 'Майстер створення нового додатку', 'new plugin installed': 'нова втулка (plugin) встановлена', 'New Record': 'Новий запис', 'new record inserted': 'новий рядок додано', 'New simple application': 'Новий простий додаток', 'next': 'наступний', 'next 100 rows': 'наступні 100 рядків', 'Next Edit Point': 'Наступне місце редагування', 'NO': 'НІ', 'No databases in this application': 'Даний додаток не використовує бази даних', 'No Interaction yet': 'Ладнач не активовано', 'no match': 'співпадань нема', 'no package selected': 'пакет не вибрано', 'no permission to uninstall "%s"': 'нема прав на вилучення (uninstall) "%s"', 'No ticket_storage.txt found under /private folder': 'В каталозі /private відсутній файл ticket_storage.txt', 'Not Authorized': 'Не дозволено', 'Note: If you receive an error with github status code of 128, ensure the system and account you are 
deploying from has a cooresponding ssh key configured in the openshift account.': 'Примітка: Якщо ви отримали повідомлення про помилку з кодом стану github рівним 128, переконайтесь, що система та обліковий запис, з якого відбувається розгортання використовують відповідний ключ ssh, налаштований в обліковому записі openshift.', "On production, you'll have to configure your webserver to use one process and multiple threads to use this debugger.": 'У промисловій експлуатації, ви повинні налаштувати ваш веб-сервер на використання одного процесу та багатьох потоків, якщо бажаєте скористатись цим ладначем.', 'online designer': 'дизайнер БД', 'OpenShift Deployment Interface': 'OpenShift: Інтерфейс розгортання', 'OpenShift Output': 'Вивід OpenShift', 'or alternatively': 'або альтернативно', 'Or Get from URL:': 'Або Отримати з мережі (ч/з URL):', 'or import from csv file': 'або імпортувати через csv-файл', 'Original/Translation': 'Оригінал/переклад', 'Overwrite installed app': 'Перезаписати встановлений додаток', 'Pack all': 'Пакувати все', 'Pack compiled': 'Пакувати зкомпільоване', 'Pack custom': 'Пакувати вибране', 'pack plugin': 'запакувати втулку', 'PAM authenticated user, cannot change password here': 'Ввімкнена система ідентифікації користувачів PAM. 
Для зміни паролю скористайтесь командами вашої ОС ', 'password changed': 'пароль змінено', 'Path to appcfg.py': 'Шлях до appcfg.py', 'Path to local openshift repo root.': 'Шлях до локального корня репозитарія openshift.', 'peek': 'глянути', 'Peeking at file': 'Перегляд файлу', 'Please': 'Будь-ласка', 'plugin': 'втулка', 'plugin "%(plugin)s" deleted': 'втулку "%(plugin)s" вилучено', 'Plugin "%s" in application': 'Втулка "%s" в додатку', 'plugin not specified': 'втулку не визначено', 'plugins': 'втулки (plugins)', 'Plugins': 'Втулки (Plugins)', 'Plural Form #%s': 'Форма множини #%s', 'Plural-Forms:': 'Форми множини:', 'Powered by': 'Працює на', 'previous 100 rows': 'попередні 100 рядків', 'Previous Edit Point': 'Попереднє місце редагування', 'Private files': 'Приватні файли', 'private files': 'приватні файли', 'Project Progress': 'Поступ проекту', 'Pull': 'Втягнути', 'Push': 'Проштовхнути', 'Query:': 'Запит:', 'RAM Cache Keys': 'Ключ ОЗП-кешу (RAM Cache)', 'Ram Cleared': "Кеш в пам'яті очищено", 'Rapid Search': 'Миттєвий пошук', 'record': 'запис', 'record does not exist': 'запису не існує', 'record id': 'Ід.запису', 'refresh': 'оновіть', 'reload': 'перевантажити', 'Reload routes': 'Перезавантажити маршрути', 'Remove compiled': 'Вилуч.компл', 'Removed Breakpoint on %s at line %s': 'Вилучено точку зупинки у %s в рядку %s', 'Replace': 'Замінити', 'Replace All': 'Замінити все', 'request': 'запит', 'requires python-git, but not installed': 'Для розгортання необхідний пакет python-git, але він не встановлений', 'resolve': "розв'язати", 'Resolve Conflict file': "Файл розв'язування конфлікту", 'response': 'відповідь', 'restart': 'перезапустити майстра', 'restore': 'повернути', 'return': 'повернутись', 'revert': 'відновитись', 'Rows in table': 'Рядків у таблиці', 'Rows selected': 'Рядків вибрано', 'rules are not defined': 'правила не визначені', 'rules parsed with errors': 'в правилах є помилка', 'rules:': 'правила:', 'Run tests': 'Запустити всі тести', 'Run tests in this 
file': 'Запустити тести у цьому файлі', "Run tests in this file (to run all files, you may also use the button labelled 'test')": "Запустити тести з цього файлу (для тестування всіх файлів, вам необхідно натиснути кнопку з назвою 'тестувати всі')", 'Running on %s': 'Запущено на %s', 'runonce': 'одноразово', 'Save': 'Зберегти', 'Save file:': 'Зберегти файл:', 'Save file: %s': 'Зберегти файл: %s', 'Save via Ajax': 'зберегти через Ajax', 'Saved file hash:': 'Хеш збереженого файлу:', 'search': 'пошук', 'selected': 'відмічено', 'session': 'сесія', 'session expired': 'час даної сесії вичерпано', 'Set Breakpoint on %s at line %s: %s': 'Додано точку зупинки в %s на рядок %s: %s', 'shell': 'консоль', 'signup': 'підписатись', 'signup_requested': 'необхідна_реєстрація', 'Singular Form': 'Форма однини', 'site': 'сайт', 'Site': 'Сайт', 'skip to generate': 'перейти до генерування результату', 'some files could not be removed': 'деякі файли не можна вилучити', 'Sorry, could not find mercurial installed': 'Не вдалось виявити встановлену систему контролю версій Mercurial', 'Start a new app': 'Створюється новий додаток', 'Start searching': 'Розпочати пошук', 'Start wizard': 'Активувати майстра', 'state': 'стан', 'Static': 'Статичні', 'static': 'статичні', 'Static files': 'Статичні файли', 'Step': 'Крок', 'step': 'крок', 'stop': 'зупинити', 'submit': 'застосувати', 'Submit': 'Застосувати', 'successful': 'успішно', 'switch to : db': 'перемкнути на : БД', 'table': 'таблиця', 'tags': 'мітки (tags)', 'Temporary': 'Тимчасово', 'test': 'тестувати всі', 'Testing application': 'Тестування додатку', 'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': '"Запит" це умова, на зразок "db.table1.field1==\'значення\'". 
Вираз "db.table1.field1==db.table2.field2" повертає результат об\'єднання (SQL JOIN) таблиць.', 'The app exists, was created by wizard, continue to overwrite!': 'Додаток вже існує і його було створено майстром. Продовжуємо переписування!', 'The app exists, was NOT created by wizard, continue to overwrite!': 'Додаток вже існує, і його НЕ було створено майстром. Продовжуємо перезаписування!', 'The application logic, each URL path is mapped in one exposed function in the controller': 'Логіка додатку, кожний шлях URL проектується на одну з функцій обслуговування в контролері', 'The data representation, define database tables and sets': 'Представлення даних, опис таблиць БД та наборів', 'The presentations layer, views are also known as templates': 'Презентаційний рівень, відображення, відомі також як шаблони', 'There are no controllers': 'Жодного контролера, наразі, не існує', 'There are no models': 'Моделей, наразі, нема', 'There are no modules': 'Модулів поки що нема', 'There are no plugins': 'Жодної втулки, наразі, не встановлено', 'There are no private files': 'Приватних файлів поки що нема', 'There are no static files': 'Статичних файлів, наразі, нема', 'There are no translators': 'Перекладів нема', 'There are no translators, only default language is supported': 'Перекладів нема, підтримується тільки мова оригіналу', 'There are no views': 'Відображень нема', 'These files are not served, they are only available from within your app': 'Ці файли ніяк не обробляються, вони доступні тільки в межах вашого додатку', 'These files are served without processing, your images go here': 'Ці файли обслуговуються "як є", без обробки, ваші графічні файли та інші супутні файли даних можуть знаходитись тут', "This debugger may not work properly if you don't have a threaded webserver or you're using multiple daemon processes.": 'Цей ладнач може працювати некоректно, якщо ви використовуєте веб-сервер без підтримки потоків або використовуєте декілька сервісних процесів.', 'This is an 
experimental feature and it needs more testing. If you decide to downgrade you do it at your own risk': 'Це експериментальна властивість, яка вимагає подальшого тестування. Якщо ви вирішили повернутись на попередню версію, ви це робити на ваш власний розсуд.', 'This is an experimental feature and it needs more testing. If you decide to upgrade you do it at your own risk': 'Це експериментальна властивість, яка вимагає подальшого тестування. Якщо ви вирішили розпочати оновлення, ви це робите на ваш власний розсуд', 'This is the %(filename)s template': 'Це шаблон %(filename)s', "This page can commit your changes to an openshift app repo and push them to your cloud instance. This assumes that you've already created the application instance using the web2py skeleton and have that repo somewhere on a filesystem that this web2py instance can access. This functionality requires GitPython installed and on the python path of the runtime that web2py is operating in.": 'На цій сторінці можна закомітити ваші зміни в репозитарій додатків openshift та проштовхнути їх у ваш примірник в хмарі. Це передбачає, що ви вже створили примірник додатку, використовуючи базовий додаток web2py, як скелет, і маєте репозитарій десь на вашій файловій системі, причому екземпляр web2py має до нього доступ. Ця властивість вимагає наявності встановленого модулю GitPython так, щоб web2py міг його викликати.', 'This page can upload your application to the Google App Engine computing cloud. Mind that you must first create indexes locally and this is done by installing the Google appserver and running the app locally with it once, or there will be errors when selecting records. Attention: deployment may take long time, depending on the network speed. Attention: it will overwrite your app.yaml. DO NOT SUBMIT TWICE.': 'На цій сторінці ви можете завантажити свій додаток в сервіс хмарних обчислень Google App Engine. 
Майте на увазі, що спочатку необхідно локально створити індекси, і це можна зробити встановивши сервер додатків Google appserver та запустивши в ньому додаток один раз, інакше при виборі записів виникатимуть помилки. Увага: розгортання може зайняти тривалий час, в залежності від швидкості мережі. Увага: це призведе до перезапису app.yaml. НЕ ПУБЛІКУЙТЕ ДВІЧІ.', 'this page to see if a breakpoint was hit and debug interaction is required.': 'цю сторінку, щоб побачити, чи була досягнута точка зупинки і процес ладнання розпочато.', 'This will pull changes from the remote repo for application "%s"?': '"Втягнути" (pull) зміни з віддаленого репозитарію для додатку "%s"?', 'This will push changes to the remote repo for application "%s".': 'Проштовхнути (push) зміни у віддалений репозитарій для додатку "%s"?', 'ticket': 'позначка', 'Ticket': 'Позначка (Ticket)', 'Ticket ID': 'Ід.позначки (Ticket ID)', 'Ticket Missing': 'Позначка (ticket) відсутня', 'tickets': 'позначки (tickets)', 'Time in Cache (h:m:s)': 'Час в кеші (г:хв:сек)', 'to previous version.': 'до попередньої версії.', 'To create a plugin, name a file/folder plugin_[name]': 'Для створення втулки, назвіть файл/каталог plugin_[name]', 'To emulate a breakpoint programatically, write:': 'Для встановлення точки зупинки програмним чином напишіть:', 'to use the debugger!': 'щоб активувати ладнач!', 'toggle breakpoint': '+/- точку зупинки', 'Toggle Fullscreen': 'Перемкнути на весь екран', 'Traceback': 'Стек викликів (Traceback)', 'Translation strings for the application': 'Пари рядків <оригінал>:<переклад> для вибраної мови', 'try something like': 'спробуйте щось схоже на', 'Try the mobile interface': 'Спробуйте мобільний інтерфейс', 'try view': 'дивитись результат', 'Type PDB debugger command in here and hit Return (Enter) to execute it.': 'наберіть тут будь-які команди ладнача PDB і натисніть клавішу [Return] ([Enter]), щоб запустити їх на виконання.', 'Type python statement in here and hit Return (Enter) to execute 
it.': 'Наберіть тут будь-які вирази Python і натисніть клавішу [Return] ([Enter]), щоб запустити їх на виконання.', 'Unable to check for upgrades': 'Неможливо перевірити оновлення', 'unable to create application "%s"': 'не можу створити додаток "%s"', 'unable to create application "%s" (it may exist already)': 'не можу створити додаток "%s" (можливо його вже створено)', 'unable to delete file "%(filename)s"': 'не можу вилучити файл "%(filename)s"', 'unable to delete file plugin "%(plugin)s"': 'не можу вилучити файл втулки "%(plugin)s"', 'Unable to determine the line number!': 'Не можу визначити номер рядка!', 'Unable to download app because:': 'Не можу завантажити додаток через:', 'Unable to download because:': 'Неможливо завантажити через:', 'unable to download layout': 'не вдається завантажити макет', 'unable to download plugin: %s': 'не вдається завантажити втулку: %s', 'unable to install application "%(appname)s"': 'не вдається встановити додаток "%(appname)s"', 'unable to parse csv file': 'не вдається розібрати csv-файл', 'unable to uninstall "%s"': 'не вдається вилучити "%s"', 'unable to upgrade because "%s"': 'не вдається оновити, тому що "%s"', 'unauthorized': 'неавторизовано', 'uncheck all': 'зняти відмітку з усіх', 'uninstall': 'вилучити', 'Uninstall': 'Вилучити', 'Unsupported webserver working mode: %s': 'Веб-сервер знаходиться в режимі, який не підтримується: %s', 'update': 'оновити', 'update all languages': 'оновити всі переклади', 'Update:': 'Поновити:', 'Upgrade': 'Оновити', 'upgrade now': 'оновитись зараз', 'upgrade now to %s': 'оновити зараз до %s', 'upgrade_web2py': 'оновити web2py', 'upload': 'завантажити', 'Upload': 'Завантажити', 'Upload a package:': 'Завантажити пакет:', 'Upload and install packed application': 'Завантажити та встановити запакований додаток', 'upload file:': 'завантажити файл:', 'upload plugin file:': 'завантажити файл втулки:', 'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) 
for NOT to build more complex queries.': 'Для створення складних запитів використовуйте (...)&(...) замість AND, (...)|(...) замість OR, та ~(...) замість NOT.', 'user': 'користувач', 'Using the shell may lock the database to other users of this app.': 'Використання оболонки може заблокувати базу даних від сумісного використання іншими користувачами цього додатку.', 'value not allowed': 'недопустиме значення', 'variables': 'змінні', 'Version': 'Версія', 'Version %s.%s.%s (%s) %s': 'Версія %s.%s.%s (%s) %s', 'Versioning': 'Контроль версій', 'view': 'перегляд', 'Views': 'Відображення (Views)', 'views': 'відображення', 'WARNING:': 'ПОПЕРЕДЖЕННЯ:', 'Web Framework': 'Веб-каркас (Web Framework)', 'web2py apps to deploy': 'Готові до розгортання додатки web2py', 'web2py Debugger': 'Ладнач web2py', 'web2py downgrade': 'повернення на попередню версію web2py', 'web2py is up to date': 'web2py оновлено до актуальної версії', 'web2py online debugger': 'оперативний ладнач (online debugger) web2py', 'web2py Recent Tweets': 'Останні твіти web2py', 'web2py upgrade': 'оновлення web2py', 'web2py upgraded; please restart it': 'web2py оновлено; будь-ласка перезапустіть його', 'Wrap with Abbreviation': 'Загорнути з абревіатурою', 'WSGI reference name': "ім'я посилання WSGI", 'YES': 'ТАК', 'You can also set and remove breakpoint in the edit window, using the Toggle Breakpoint button': 'Ви також можете встановлювати/вилучати точки зупинок під час редагування першоджерел (sources), використовуючи кнопку "+/- точку зупинки"', 'You have one more login attempt before you are locked out': 'У вас є ще одна спроба перед тим, як вхід буде заблоковано', 'you must specify a name for the uploaded application': "ви повинні вказати ім'я додатка, перед ти, як завантажити його", 'You need to set up and reach a': 'Треба встановити та досягнути', 'Your application will be blocked until you click an action button (next, step, continue, etc.)': 'Ваш додаток буде заблоковано, поки ви не клацнете по одній з 
кнопок керування ("наступний", "крок", "продовжити", та ін.)', 'Your can inspect variables using the console bellow': 'Ви можете досліджувати змінні, використовуючи інтерактивну консоль', }
mit
CUCWD/edx-platform
common/djangoapps/microsite_configuration/tests/backends/test_database.py
18
7307
""" Test Microsite database backends. """ import logging from mock import patch from django.conf import settings from microsite_configuration.backends.base import ( BaseMicrositeBackend, BaseMicrositeTemplateBackend, ) from microsite_configuration import microsite from microsite_configuration.models import ( Microsite, MicrositeHistory, MicrositeTemplate, ) from microsite_configuration.tests.tests import ( DatabaseMicrositeTestCase, ) from microsite_configuration.tests.factories import ( SiteFactory, MicrositeFactory, MicrositeTemplateFactory, ) log = logging.getLogger(__name__) @patch( 'microsite_configuration.microsite.BACKEND', microsite.get_backend( 'microsite_configuration.backends.database.DatabaseMicrositeBackend', BaseMicrositeBackend ) ) class DatabaseMicrositeBackendTests(DatabaseMicrositeTestCase): """ Go through and test the DatabaseMicrositeBackend class """ def setUp(self): super(DatabaseMicrositeBackendTests, self).setUp() self.addCleanup(microsite.clear) def test_get_value(self): """ Tests microsite.get_value works as expected. """ microsite.set_by_domain(self.microsite.site.domain) self.assertEqual(microsite.get_value('email_from_address'), self.microsite.values['email_from_address']) def test_is_request_in_microsite(self): """ Tests microsite.is_request_in_microsite works as expected. """ microsite.set_by_domain(self.microsite.site.domain) self.assertTrue(microsite.is_request_in_microsite()) def test_get_dict(self): """ Tests microsite.get_dict works as expected. """ microsite.set_by_domain(self.microsite.site.domain) self.assertEqual(microsite.get_dict('nested_dict'), self.microsite.values['nested_dict']) def test_has_override_value(self): """ Tests microsite.has_override_value works as expected. """ microsite.set_by_domain(self.microsite.site.domain) self.assertTrue(microsite.has_override_value('platform_name')) def test_get_value_for_org(self): """ Tests microsite.get_value_for_org works as expected. 
""" microsite.set_by_domain(self.microsite.site.domain) self.assertEqual( microsite.get_value_for_org(self.microsite.get_organizations()[0], 'platform_name'), self.microsite.values['platform_name'] ) def test_get_all_orgs(self): """ Tests microsite.get_all_orgs works as expected. """ microsite.set_by_domain(self.microsite.site.domain) self.assertEqual( microsite.get_all_orgs(), set(self.microsite.get_organizations()) ) def test_clear(self): """ Tests microsite.clear works as expected. """ microsite.set_by_domain(self.microsite.site.domain) self.assertEqual( microsite.get_value('platform_name'), self.microsite.values['platform_name'] ) microsite.clear() self.assertIsNone(microsite.get_value('platform_name')) @patch('edxmako.paths.add_lookup') def test_enable_microsites(self, add_lookup): """ Tests microsite.enable_microsites works as expected. """ # remove microsite root directory paths first settings.STATICFILES_DIRS = [ path for path in settings.STATICFILES_DIRS if path != settings.MICROSITE_ROOT_DIR ] with patch.dict('django.conf.settings.FEATURES', {'USE_MICROSITES': False}): microsite.enable_microsites(log) self.assertNotIn(settings.MICROSITE_ROOT_DIR, settings.STATICFILES_DIRS) add_lookup.assert_not_called() with patch.dict('django.conf.settings.FEATURES', {'USE_MICROSITES': True}): microsite.enable_microsites(log) self.assertIn(settings.MICROSITE_ROOT_DIR, settings.STATICFILES_DIRS) def test_get_all_configs(self): """ Tests microsite.get_all_config works as expected. """ microsite.set_by_domain(self.microsite.site.domain) configs = microsite.get_all_config() self.assertEqual(len(configs.keys()), 1) self.assertEqual(configs[self.microsite.key], self.microsite.values) def test_set_config_by_domain(self): """ Tests microsite.set_config_by_domain works as expected. 
""" microsite.clear() # if microsite config does not exist microsite.set_by_domain('unknown') self.assertIsNone(microsite.get_value('platform_name')) # if no microsite exists Microsite.objects.all().delete() microsite.clear() microsite.set_by_domain('unknown') self.assertIsNone(microsite.get_value('platform_name')) # if microsite site has no organization it should raise exception new_microsite = MicrositeFactory.create(key="test_microsite2") new_microsite.site = SiteFactory.create(domain='test.microsite2.com') # This would update microsite so we test MicrositeHistory has old microsite new_microsite.save() self.assertEqual(MicrositeHistory.objects.all().count(), 2) with self.assertRaises(Exception): microsite.set_by_domain('test.microsite2.com') def test_has_configuration_set(self): """ Tests microsite.has_configuration_set works as expected on this backend. """ self.assertTrue(microsite.BACKEND.has_configuration_set()) Microsite.objects.all().delete() self.assertFalse(microsite.BACKEND.has_configuration_set()) @patch( 'microsite_configuration.microsite.TEMPLATES_BACKEND', microsite.get_backend( 'microsite_configuration.backends.database.DatabaseMicrositeTemplateBackend', BaseMicrositeTemplateBackend ) ) class DatabaseMicrositeTemplateBackendTests(DatabaseMicrositeTestCase): """ Go through and test the DatabaseMicrositeTemplateBackend class """ def setUp(self): super(DatabaseMicrositeTemplateBackendTests, self).setUp() MicrositeTemplateFactory.create( microsite=self.microsite, template_uri='about.html', template=""" <html> <body> About this microsite. </body> </html> """, ) def tearDown(self): super(DatabaseMicrositeTemplateBackendTests, self).tearDown() microsite.clear() def test_microsite_get_template_when_no_template_exists(self): """ Test microsite.get_template return None if there is not template in DB. 
""" MicrositeTemplate.objects.all().delete() microsite.set_by_domain(self.microsite.site.domain) template = microsite.get_template('about.html') self.assertIsNone(template) def test_microsite_get_template(self): """ Test microsite.get_template return appropriate template. """ microsite.set_by_domain(self.microsite.site.domain) template = microsite.get_template('about.html') self.assertIn('About this microsite', template.render())
agpl-3.0
HutchinHuang/New_Intern_Reminder
change_input.py
1
2026
# -*- coding:utf-8 -*- # A small script to get the city-code in shixiseng.com and use shelve module to save all the parameters' change rules. import re import requests import shelve def get_city_code(): """ This Function is used to get the names and codes of the cities supported by shixiseng.com. :return: A dictionary with all city names as its key and corresponding city codes as its value. """ html = requests.get("http://www.shixiseng.com/interns/").text name_code_tuple = re.findall(r'<dd class="city_btn" data-val="(.+?)" *?>(.+?)</dd>', html) name_code_dict = {name_code_tuple[i][1]: name_code_tuple[i][0] for i in range(len(name_code_tuple))} name_code_dict["全国"] = None name_code_dict[""] = None # If the input is "", It should be None. return name_code_dict # Then we use three other dicts to set the rules for changing parameters. salary_dict = { "": None, "0": None, "1": "0,50", "2": "50,100", "3": "100,150", "4": "150,200", "5": "200,300", "6": "300,0" } degree_dict = { "": None, "0": None, "1": "dazhuan", "2": "benke", "3": "shuoshi", "4": "boshi" } remain_dict = { "": None, "0": None, "1": "entry", "2": "noentry", "3": "notsure" } day_dict = { "0": None, "1": "1", "2": "2", "3": "3", "4": "4", "5": "5" } month_dict = { "0": None, "1": "1", "2": "2", "3": "3", "4": "4", "5": "5", "6": "6", "7": "8", "8": "10", "9": "12" } frequency_dict = { "0": 1, "1": 3, "2": 7 } if __name__ == "__main__": city_dict = get_city_code() for key in city_dict: print("'" + key + "'", end=", ") with shelve.open("shelve/para_change_dict") as slv: slv["city"] = city_dict slv["salary"] = salary_dict slv["degree"] = degree_dict slv["remain"] = remain_dict slv["day"] = day_dict slv["month"] = month_dict slv["frequency"] = frequency_dict
mit
GeraldLoeffler/nupic
nupic/encoders/__init__.py
40
1580
# ---------------------------------------------------------------------- # Numenta Platform for Intelligent Computing (NuPIC) # Copyright (C) 2013, Numenta, Inc. Unless you have an agreement # with Numenta, Inc., for a separate license for this software code, the # following terms and conditions apply: # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero Public License version 3 as # published by the Free Software Foundation. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # See the GNU Affero Public License for more details. # # You should have received a copy of the GNU Affero Public License # along with this program. If not, see http://www.gnu.org/licenses. # # http://numenta.org/licenses/ # ---------------------------------------------------------------------- from scalar import ScalarEncoder from adaptivescalar import AdaptiveScalarEncoder from date import DateEncoder from logenc import LogEncoder from category import CategoryEncoder from sdrcategory import SDRCategoryEncoder from delta import DeltaEncoder from scalarspace import ScalarSpaceEncoder from coordinate import CoordinateEncoder from geospatial_coordinate import GeospatialCoordinateEncoder from nupic.encoders.pass_through_encoder import PassThroughEncoder # multiencoder must be imported last because it imports * from this module! from multi import MultiEncoder from utils import bitsToString
agpl-3.0
JioCloud/neutron
neutron/tests/api/test_subnetpools.py
10
12170
# Copyright 2015 Hewlett-Packard Development Company, L.P. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import copy from tempest_lib.common.utils import data_utils from neutron.tests.api import base from neutron.tests.api import clients from neutron.tests.tempest import config from neutron.tests.tempest import test CONF = config.CONF SUBNETPOOL_NAME = 'smoke-subnetpool' SUBNET_NAME = 'smoke-subnet' class SubnetPoolsTest(base.BaseNetworkTest): min_prefixlen = '28' max_prefixlen = '31' ip_version = 4 subnet_cidr = u'10.11.12.0/31' new_prefix = u'10.11.15.0/24' larger_prefix = u'10.11.0.0/16' """ Tests the following operations in the Neutron API using the REST client for Neutron: create a subnetpool for a tenant list tenant's subnetpools show a tenant subnetpool details subnetpool update delete a subnetpool All subnetpool tests are run once with ipv4 and once with ipv6. v2.0 of the Neutron API is assumed. 
""" @classmethod def resource_setup(cls): super(SubnetPoolsTest, cls).resource_setup() prefixes = [u'10.11.12.0/24'] cls._subnetpool_data = {'subnetpool': {'min_prefixlen': '29', 'prefixes': prefixes}} try: creds = cls.isolated_creds.get_admin_creds() cls.os_adm = clients.Manager(credentials=creds) except NotImplementedError: msg = ("Missing Administrative Network API credentials " "in configuration.") raise cls.skipException(msg) cls.admin_client = cls.os_adm.network_client def _create_subnetpool(self, client, pool_values=None): name = data_utils.rand_name(SUBNETPOOL_NAME) subnetpool_data = copy.deepcopy(self._subnetpool_data) if pool_values: subnetpool_data['subnetpool'].update(pool_values) subnetpool_data['subnetpool']['name'] = name body = client.create_subnetpool(subnetpool_data) created_subnetpool = body['subnetpool'] subnetpool_id = created_subnetpool['id'] return name, subnetpool_id def _new_subnetpool_attributes(self): new_name = data_utils.rand_name(SUBNETPOOL_NAME) subnetpool_data = {'subnetpool': {'name': new_name, 'min_prefixlen': self.min_prefixlen, 'max_prefixlen': self.max_prefixlen}} return subnetpool_data def _check_equality_updated_subnetpool(self, expected_values, updated_pool): self.assertEqual(expected_values['name'], updated_pool['name']) self.assertEqual(expected_values['min_prefixlen'], updated_pool['min_prefixlen']) self.assertEqual(expected_values['max_prefixlen'], updated_pool['max_prefixlen']) # expected_values may not contains all subnetpool values if 'prefixes' in expected_values: self.assertEqual(expected_values['prefixes'], updated_pool['prefixes']) @test.attr(type='smoke') @test.idempotent_id('6e1781ec-b45b-4042-aebe-f485c022996e') def test_create_list_subnetpool(self): name, pool_id = self._create_subnetpool(self.client) body = self.client.list_subnetpools() subnetpools = body['subnetpools'] self.addCleanup(self.client.delete_subnetpool, pool_id) self.assertIn(pool_id, [sp['id'] for sp in subnetpools], "Created subnetpool id 
should be in the list") self.assertIn(name, [sp['name'] for sp in subnetpools], "Created subnetpool name should be in the list") @test.attr(type='smoke') @test.idempotent_id('741d08c2-1e3f-42be-99c7-0ea93c5b728c') def test_get_subnetpool(self): name, pool_id = self._create_subnetpool(self.client) self.addCleanup(self.client.delete_subnetpool, pool_id) prefixlen = self._subnetpool_data['subnetpool']['min_prefixlen'] body = self.client.get_subnetpool(pool_id) subnetpool = body['subnetpool'] self.assertEqual(name, subnetpool['name']) self.assertEqual(pool_id, subnetpool['id']) self.assertEqual(prefixlen, subnetpool['min_prefixlen']) self.assertEqual(prefixlen, subnetpool['default_prefixlen']) self.assertFalse(subnetpool['shared']) @test.attr(type='smoke') @test.idempotent_id('764f1b93-1c4a-4513-9e7b-6c2fc5e9270c') def test_tenant_update_subnetpool(self): name, pool_id = self._create_subnetpool(self.client) subnetpool_data = self._new_subnetpool_attributes() self.client.update_subnetpool(pool_id, subnetpool_data) body = self.client.get_subnetpool(pool_id) subnetpool = body['subnetpool'] self.addCleanup(self.client.delete_subnetpool, pool_id) self._check_equality_updated_subnetpool(subnetpool_data['subnetpool'], subnetpool) self.assertFalse(subnetpool['shared']) @test.attr(type='smoke') @test.idempotent_id('4b496082-c992-4319-90be-d4a7ce646290') def test_update_subnetpool_prefixes_append(self): # We can append new prefixes to subnetpool name, pool_id = self._create_subnetpool(self.client) old_prefixes = self._subnetpool_data['subnetpool']['prefixes'] new_prefixes = old_prefixes[:] new_prefixes.append(self.new_prefix) subnetpool_data = {'subnetpool': {'prefixes': new_prefixes}} self.addCleanup(self.client.delete_subnetpool, pool_id) self.client.update_subnetpool(pool_id, subnetpool_data) body = self.client.get_subnetpool(pool_id) prefixes = body['subnetpool']['prefixes'] self.assertIn(self.new_prefix, prefixes) self.assertIn(old_prefixes[0], prefixes) 
@test.attr(type='smoke') @test.idempotent_id('2cae5d6a-9d32-42d8-8067-f13970ae13bb') def test_update_subnetpool_prefixes_extend(self): # We can extend current subnetpool prefixes name, pool_id = self._create_subnetpool(self.client) old_prefixes = self._subnetpool_data['subnetpool']['prefixes'] subnetpool_data = {'subnetpool': {'prefixes': [self.larger_prefix]}} self.addCleanup(self.client.delete_subnetpool, pool_id) self.client.update_subnetpool(pool_id, subnetpool_data) body = self.client.get_subnetpool(pool_id) prefixes = body['subnetpool']['prefixes'] self.assertIn(self.larger_prefix, prefixes) self.assertNotIn(old_prefixes[0], prefixes) @test.attr(type='smoke') @test.idempotent_id('d70c6c35-913b-4f24-909f-14cd0d29b2d2') def test_admin_create_shared_subnetpool(self): pool_values = {'shared': 'True'} name, pool_id = self._create_subnetpool(self.admin_client, pool_values) # Shared subnetpool can be retrieved by tenant user. body = self.client.get_subnetpool(pool_id) self.addCleanup(self.admin_client.delete_subnetpool, pool_id) subnetpool = body['subnetpool'] self.assertEqual(name, subnetpool['name']) self.assertTrue(subnetpool['shared']) def _create_subnet_from_pool(self, subnet_values=None, pool_values=None): pool_name, pool_id = self._create_subnetpool(self.client, pool_values) subnet_name = data_utils.rand_name(SUBNETPOOL_NAME) network = self.create_network() network_id = network['id'] kwargs = {'name': subnet_name, 'subnetpool_id': pool_id} if subnet_values: kwargs.update(subnet_values) body = self.client.create_subnet( network_id=network_id, ip_version=self.ip_version, **kwargs) subnet = body['subnet'] self.addCleanup(self.client.delete_subnetpool, pool_id) self.addCleanup(self.client.delete_network, network_id) return pool_id, subnet @test.attr(type='smoke') @test.idempotent_id('1362ed7d-3089-42eb-b3a5-d6cb8398ee77') def test_create_subnet_from_pool_with_prefixlen(self): subnet_values = {"prefixlen": self.max_prefixlen} pool_id, subnet = 
self._create_subnet_from_pool(subnet_values) cidr = str(subnet['cidr']) self.assertEqual(pool_id, subnet['subnetpool_id']) self.assertTrue(cidr.endswith(str(self.max_prefixlen))) @test.attr(type='smoke') @test.idempotent_id('86b86189-9789-4582-9c3b-7e2bfe5735ee') def test_create_subnet_from_pool_with_subnet_cidr(self): subnet_values = {"cidr": self.subnet_cidr} pool_id, subnet = self._create_subnet_from_pool(subnet_values) cidr = str(subnet['cidr']) self.assertEqual(pool_id, subnet['subnetpool_id']) self.assertEqual(cidr, self.subnet_cidr) @test.attr(type='smoke') @test.idempotent_id('83f76e3a-9c40-40c2-a015-b7c5242178d8') def test_create_subnet_from_pool_with_default_prefixlen(self): # If neither cidr nor prefixlen is specified, # subnet will use subnetpool default_prefixlen for cidr. pool_id, subnet = self._create_subnet_from_pool() cidr = str(subnet['cidr']) self.assertEqual(pool_id, subnet['subnetpool_id']) prefixlen = self._subnetpool_data['subnetpool']['min_prefixlen'] self.assertTrue(cidr.endswith(str(prefixlen))) @test.attr(type='smoke') @test.idempotent_id('a64af292-ec52-4bde-b654-a6984acaf477') def test_create_subnet_from_pool_with_quota(self): pool_values = {'default_quota': 4} subnet_values = {"prefixlen": self.max_prefixlen} pool_id, subnet = self._create_subnet_from_pool(subnet_values, pool_values) cidr = str(subnet['cidr']) self.assertEqual(pool_id, subnet['subnetpool_id']) self.assertTrue(cidr.endswith(str(self.max_prefixlen))) class SubnetPoolsTestV6(SubnetPoolsTest): min_prefixlen = '48' max_prefixlen = '64' ip_version = 6 subnet_cidr = '2001:db8:3::/64' new_prefix = u'2001:db8:5::/64' larger_prefix = u'2001:db8::/32' @classmethod def resource_setup(cls): super(SubnetPoolsTestV6, cls).resource_setup() min_prefixlen = '64' prefixes = [u'2001:db8:3::/48'] cls._subnetpool_data = {'subnetpool': {'min_prefixlen': min_prefixlen, 'prefixes': prefixes}} @test.attr(type='smoke') @test.idempotent_id('f62d73dc-cf6f-4879-b94b-dab53982bf3b') def 
test_create_dual_stack_subnets_from_subnetpools(self): pool_id_v6, subnet_v6 = self._create_subnet_from_pool() self.addCleanup(self.client.delete_subnet, subnet_v6['id']) pool_values_v4 = {'prefixes': ['192.168.0.0/16'], 'min_prefixlen': 21, 'max_prefixlen': 32} pool_name_v4, pool_id_v4 = self._create_subnetpool(self.client, pool_values=pool_values_v4) subnet_v4 = self.client.create_subnet( network_id=subnet_v6['network_id'], ip_version=4, subnetpool_id=pool_id_v4)['subnet'] self.addCleanup(self.client.delete_subnet, subnet_v4['id']) self.assertEqual(subnet_v4['network_id'], subnet_v6['network_id'])
apache-2.0
hdznrrd/siid
apps/powermeter.py
1
1250
#!/usr/bin/python # # get the latest power consumption values from redis in JSON format # pass parameter ?n=<number> to control the number of items returned # # {"Total": [3734, ...], "Minutes ago": [0, ...], "L1.Power": [1612, ...], "L3.Power": [1363, ...], "L2.Power": [759, ...]} # import redis import json import cgi getp = cgi.FieldStorage(); baseKey = "sensordata.shackspace.20745965.data." sensors = ["L1.Power","L2.Power","L3.Power"] numValues = 500 if "n" in getp: numValues = int(getp["n"].value) rc = redis.Redis("glados.shack") # ensure all queries are atomic so the time values are sync pipe = rc.pipeline(transaction=True) for sensor in sensors: # get the numValues latest values pipe.lrange(baseKey+sensor, -numValues, -1) res = pipe.execute() output = {} # convert time on the fly latest = json.loads(res[0][-1])[0] output["Minutes ago"] = map(lambda kv: (json.loads(kv)[0]-latest)/60000.0 , res[0]) for idx, sensordata in enumerate(res): output[sensors[idx]] = map(lambda kv: json.loads(kv)[1] , sensordata) from operator import add output["Total"] = map( add, map( add, output[sensors[0]], output[sensors[1]]), output[sensors[2]] ) # off we go print "Content-Type: application/json" print print json.dumps(output)
mit
Midrya/chromium
testing_support/trial_dir.py
47
2749
# Copyright (c) 2011 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import atexit import logging import os import sys import tempfile from testing_support import auto_stub import gclient_utils class TrialDir(object): """Manages a temporary directory. On first object creation, TrialDir.TRIAL_ROOT will be set to a new temporary directory created in /tmp or the equivalent. It will be deleted on process exit unless TrialDir.SHOULD_LEAK is set to True. """ # When SHOULD_LEAK is set to True, temporary directories created while the # tests are running aren't deleted at the end of the tests. Expect failures # when running more than one test due to inter-test side-effects. Helps with # debugging. SHOULD_LEAK = False # Main root directory. TRIAL_ROOT = None def __init__(self, subdir, leak=False): self.leak = self.SHOULD_LEAK or leak self.subdir = subdir self.root_dir = None def set_up(self): """All late initialization comes here.""" # You can override self.TRIAL_ROOT. if not self.TRIAL_ROOT: # Was not yet initialized. TrialDir.TRIAL_ROOT = os.path.realpath(tempfile.mkdtemp(prefix='trial')) atexit.register(self._clean) self.root_dir = os.path.join(TrialDir.TRIAL_ROOT, self.subdir) gclient_utils.rmtree(self.root_dir) os.makedirs(self.root_dir) def tear_down(self): """Cleans the trial subdirectory for this instance.""" if not self.leak: logging.debug('Removing %s' % self.root_dir) gclient_utils.rmtree(self.root_dir) else: logging.error('Leaking %s' % self.root_dir) self.root_dir = None @staticmethod def _clean(): """Cleans the root trial directory.""" if not TrialDir.SHOULD_LEAK: logging.debug('Removing %s' % TrialDir.TRIAL_ROOT) gclient_utils.rmtree(TrialDir.TRIAL_ROOT) else: logging.error('Leaking %s' % TrialDir.TRIAL_ROOT) class TrialDirMixIn(object): def setUp(self): # Create a specific directory just for the test. 
self.trial = TrialDir(self.id()) self.trial.set_up() def tearDown(self): self.trial.tear_down() @property def root_dir(self): return self.trial.root_dir class TestCase(auto_stub.TestCase, TrialDirMixIn): """Base unittest class that cleans off a trial directory in tearDown().""" def setUp(self): auto_stub.TestCase.setUp(self) TrialDirMixIn.setUp(self) def tearDown(self): TrialDirMixIn.tearDown(self) auto_stub.TestCase.tearDown(self) if '-l' in sys.argv: # See SHOULD_LEAK definition in TrialDir for its purpose. TrialDir.SHOULD_LEAK = True print 'Leaking!' sys.argv.remove('-l')
bsd-3-clause
mKeRix/home-assistant
homeassistant/components/xiaomi_miio/sensor.py
4
7833
"""Support for Xiaomi Mi Air Quality Monitor (PM2.5).""" from dataclasses import dataclass import logging from miio import AirQualityMonitor, DeviceException # pylint: disable=import-error from miio.gateway import DeviceType import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import ( CONF_HOST, CONF_NAME, CONF_TOKEN, DEVICE_CLASS_HUMIDITY, DEVICE_CLASS_PRESSURE, DEVICE_CLASS_TEMPERATURE, PRESSURE_HPA, TEMP_CELSIUS, UNIT_PERCENTAGE, ) from homeassistant.exceptions import PlatformNotReady import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity from .config_flow import CONF_FLOW_TYPE, CONF_GATEWAY from .const import DOMAIN from .gateway import XiaomiGatewayDevice _LOGGER = logging.getLogger(__name__) DEFAULT_NAME = "Xiaomi Miio Sensor" DATA_KEY = "sensor.xiaomi_miio" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_HOST): cv.string, vol.Required(CONF_TOKEN): vol.All(cv.string, vol.Length(min=32, max=32)), vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, } ) ATTR_POWER = "power" ATTR_CHARGING = "charging" ATTR_BATTERY_LEVEL = "battery_level" ATTR_DISPLAY_CLOCK = "display_clock" ATTR_NIGHT_MODE = "night_mode" ATTR_NIGHT_TIME_BEGIN = "night_time_begin" ATTR_NIGHT_TIME_END = "night_time_end" ATTR_SENSOR_STATE = "sensor_state" ATTR_MODEL = "model" SUCCESS = ["ok"] @dataclass class SensorType: """Class that holds device specific info for a xiaomi aqara sensor.""" unit: str = None icon: str = None device_class: str = None GATEWAY_SENSOR_TYPES = { "temperature": SensorType( unit=TEMP_CELSIUS, icon=None, device_class=DEVICE_CLASS_TEMPERATURE ), "humidity": SensorType( unit=UNIT_PERCENTAGE, icon=None, device_class=DEVICE_CLASS_HUMIDITY ), "pressure": SensorType( unit=PRESSURE_HPA, icon=None, device_class=DEVICE_CLASS_PRESSURE ), } async def async_setup_entry(hass, config_entry, async_add_entities): """Set up the Xiaomi sensor from a config entry.""" 
entities = [] # Gateway sub devices if config_entry.data[CONF_FLOW_TYPE] == CONF_GATEWAY: gateway = hass.data[DOMAIN][config_entry.entry_id] sub_devices = gateway.devices for sub_device in sub_devices.values(): sensor_variables = None if sub_device.type == DeviceType.SensorHT: sensor_variables = ["temperature", "humidity"] if sub_device.type == DeviceType.AqaraHT: sensor_variables = ["temperature", "humidity", "pressure"] if sensor_variables is not None: entities.extend( [ XiaomiGatewaySensor(sub_device, config_entry, variable) for variable in sensor_variables ] ) async_add_entities(entities, update_before_add=True) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the sensor from config.""" if DATA_KEY not in hass.data: hass.data[DATA_KEY] = {} host = config[CONF_HOST] token = config[CONF_TOKEN] name = config[CONF_NAME] _LOGGER.info("Initializing with host %s (token %s...)", host, token[:5]) try: air_quality_monitor = AirQualityMonitor(host, token) device_info = await hass.async_add_executor_job(air_quality_monitor.info) model = device_info.model unique_id = f"{model}-{device_info.mac_address}" _LOGGER.info( "%s %s %s detected", model, device_info.firmware_version, device_info.hardware_version, ) device = XiaomiAirQualityMonitor(name, air_quality_monitor, model, unique_id) except DeviceException: raise PlatformNotReady hass.data[DATA_KEY][host] = device async_add_entities([device], update_before_add=True) class XiaomiAirQualityMonitor(Entity): """Representation of a Xiaomi Air Quality Monitor.""" def __init__(self, name, device, model, unique_id): """Initialize the entity.""" self._name = name self._device = device self._model = model self._unique_id = unique_id self._icon = "mdi:cloud" self._unit_of_measurement = "AQI" self._available = None self._state = None self._state_attrs = { ATTR_POWER: None, ATTR_BATTERY_LEVEL: None, ATTR_CHARGING: None, ATTR_DISPLAY_CLOCK: None, ATTR_NIGHT_MODE: None, ATTR_NIGHT_TIME_BEGIN: 
None, ATTR_NIGHT_TIME_END: None, ATTR_SENSOR_STATE: None, ATTR_MODEL: self._model, } @property def should_poll(self): """Poll the miio device.""" return True @property def unique_id(self): """Return an unique ID.""" return self._unique_id @property def name(self): """Return the name of this entity, if any.""" return self._name @property def unit_of_measurement(self): """Return the unit of measurement of this entity, if any.""" return self._unit_of_measurement @property def icon(self): """Return the icon to use for device if any.""" return self._icon @property def available(self): """Return true when state is known.""" return self._available @property def state(self): """Return the state of the device.""" return self._state @property def device_state_attributes(self): """Return the state attributes of the device.""" return self._state_attrs async def async_update(self): """Fetch state from the miio device.""" try: state = await self.hass.async_add_executor_job(self._device.status) _LOGGER.debug("Got new state: %s", state) self._available = True self._state = state.aqi self._state_attrs.update( { ATTR_POWER: state.power, ATTR_CHARGING: state.usb_power, ATTR_BATTERY_LEVEL: state.battery, ATTR_DISPLAY_CLOCK: state.display_clock, ATTR_NIGHT_MODE: state.night_mode, ATTR_NIGHT_TIME_BEGIN: state.night_time_begin, ATTR_NIGHT_TIME_END: state.night_time_end, ATTR_SENSOR_STATE: state.sensor_state, } ) except DeviceException as ex: self._available = False _LOGGER.error("Got exception while fetching the state: %s", ex) class XiaomiGatewaySensor(XiaomiGatewayDevice): """Representation of a XiaomiGatewaySensor.""" def __init__(self, sub_device, entry, data_key): """Initialize the XiaomiSensor.""" super().__init__(sub_device, entry) self._data_key = data_key self._unique_id = f"{sub_device.sid}-{data_key}" self._name = f"{data_key} ({sub_device.sid})".capitalize() @property def icon(self): """Return the icon to use in the frontend.""" return 
GATEWAY_SENSOR_TYPES[self._data_key].icon @property def unit_of_measurement(self): """Return the unit of measurement of this entity, if any.""" return GATEWAY_SENSOR_TYPES[self._data_key].unit @property def device_class(self): """Return the device class of this entity.""" return GATEWAY_SENSOR_TYPES[self._data_key].device_class @property def state(self): """Return the state of the sensor.""" return self._sub_device.status[self._data_key]
mit
mwv/scikit-learn
sklearn/lda.py
72
17751
""" Linear Discriminant Analysis (LDA) """ # Authors: Clemens Brunner # Martin Billinger # Matthieu Perrot # Mathieu Blondel # License: BSD 3-Clause from __future__ import print_function import warnings import numpy as np from scipy import linalg from .externals.six import string_types from .base import BaseEstimator, TransformerMixin from .linear_model.base import LinearClassifierMixin from .covariance import ledoit_wolf, empirical_covariance, shrunk_covariance from .utils.multiclass import unique_labels from .utils import check_array, check_X_y from .utils.validation import check_is_fitted from .utils.fixes import bincount from .preprocessing import StandardScaler __all__ = ['LDA'] def _cov(X, shrinkage=None): """Estimate covariance matrix (using optional shrinkage). Parameters ---------- X : array-like, shape (n_samples, n_features) Input data. shrinkage : string or float, optional Shrinkage parameter, possible values: - None or 'empirical': no shrinkage (default). - 'auto': automatic shrinkage using the Ledoit-Wolf lemma. - float between 0 and 1: fixed shrinkage parameter. Returns ------- s : array, shape (n_features, n_features) Estimated covariance matrix. """ shrinkage = "empirical" if shrinkage is None else shrinkage if isinstance(shrinkage, string_types): if shrinkage == 'auto': sc = StandardScaler() # standardize features X = sc.fit_transform(X) s = ledoit_wolf(X)[0] s = sc.std_[:, np.newaxis] * s * sc.std_[np.newaxis, :] # rescale elif shrinkage == 'empirical': s = empirical_covariance(X) else: raise ValueError('unknown shrinkage parameter') elif isinstance(shrinkage, float) or isinstance(shrinkage, int): if shrinkage < 0 or shrinkage > 1: raise ValueError('shrinkage parameter must be between 0 and 1') s = shrunk_covariance(empirical_covariance(X), shrinkage) else: raise TypeError('shrinkage must be of string or int type') return s def _class_means(X, y): """Compute class means. 
Parameters ---------- X : array-like, shape (n_samples, n_features) Input data. y : array-like, shape (n_samples,) or (n_samples, n_targets) Target values. Returns ------- means : array-like, shape (n_features,) Class means. """ means = [] classes = np.unique(y) for group in classes: Xg = X[y == group, :] means.append(Xg.mean(0)) return np.asarray(means) def _class_cov(X, y, priors=None, shrinkage=None): """Compute class covariance matrix. Parameters ---------- X : array-like, shape (n_samples, n_features) Input data. y : array-like, shape (n_samples,) or (n_samples, n_targets) Target values. priors : array-like, shape (n_classes,) Class priors. shrinkage : string or float, optional Shrinkage parameter, possible values: - None: no shrinkage (default). - 'auto': automatic shrinkage using the Ledoit-Wolf lemma. - float between 0 and 1: fixed shrinkage parameter. Returns ------- cov : array-like, shape (n_features, n_features) Class covariance matrix. """ classes = np.unique(y) covs = [] for group in classes: Xg = X[y == group, :] covs.append(np.atleast_2d(_cov(Xg, shrinkage))) return np.average(covs, axis=0, weights=priors) class LDA(BaseEstimator, LinearClassifierMixin, TransformerMixin): """Linear Discriminant Analysis (LDA). A classifier with a linear decision boundary, generated by fitting class conditional densities to the data and using Bayes' rule. The model fits a Gaussian density to each class, assuming that all classes share the same covariance matrix. The fitted model can also be used to reduce the dimensionality of the input by projecting it to the most discriminative directions. Read more in the :ref:`User Guide <lda_qda>`. Parameters ---------- solver : string, optional Solver to use, possible values: - 'svd': Singular value decomposition (default). Does not compute the covariance matrix, therefore this solver is recommended for data with a large number of features. - 'lsqr': Least squares solution, can be combined with shrinkage. 
- 'eigen': Eigenvalue decomposition, can be combined with shrinkage. shrinkage : string or float, optional Shrinkage parameter, possible values: - None: no shrinkage (default). - 'auto': automatic shrinkage using the Ledoit-Wolf lemma. - float between 0 and 1: fixed shrinkage parameter. Note that shrinkage works only with 'lsqr' and 'eigen' solvers. priors : array, optional, shape (n_classes,) Class priors. n_components : int, optional Number of components (< n_classes - 1) for dimensionality reduction. store_covariance : bool, optional Additionally compute class covariance matrix (default False). tol : float, optional Threshold used for rank estimation in SVD solver. Attributes ---------- coef_ : array, shape (n_features,) or (n_classes, n_features) Weight vector(s). intercept_ : array, shape (n_features,) Intercept term. covariance_ : array-like, shape (n_features, n_features) Covariance matrix (shared by all classes). means_ : array-like, shape (n_classes, n_features) Class means. priors_ : array-like, shape (n_classes,) Class priors (sum to 1). scalings_ : array-like, shape (rank, n_classes - 1) Scaling of the features in the space spanned by the class centroids. xbar_ : array-like, shape (n_features,) Overall mean. classes_ : array-like, shape (n_classes,) Unique class labels. See also -------- sklearn.qda.QDA: Quadratic discriminant analysis Notes ----- The default solver is 'svd'. It can perform both classification and transform, and it does not rely on the calculation of the covariance matrix. This can be an advantage in situations where the number of features is large. However, the 'svd' solver cannot be used with shrinkage. The 'lsqr' solver is an efficient algorithm that only works for classification. It supports shrinkage. The 'eigen' solver is based on the optimization of the between class scatter to within class scatter ratio. It can be used for both classification and transform, and it supports shrinkage. 
However, the 'eigen' solver needs to compute the covariance matrix, so it might not be suitable for situations with a high number of features. Examples -------- >>> import numpy as np >>> from sklearn.lda import LDA >>> X = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]]) >>> y = np.array([1, 1, 1, 2, 2, 2]) >>> clf = LDA() >>> clf.fit(X, y) LDA(n_components=None, priors=None, shrinkage=None, solver='svd', store_covariance=False, tol=0.0001) >>> print(clf.predict([[-0.8, -1]])) [1] """ def __init__(self, solver='svd', shrinkage=None, priors=None, n_components=None, store_covariance=False, tol=1e-4): self.solver = solver self.shrinkage = shrinkage self.priors = priors self.n_components = n_components self.store_covariance = store_covariance # used only in svd solver self.tol = tol # used only in svd solver def _solve_lsqr(self, X, y, shrinkage): """Least squares solver. The least squares solver computes a straightforward solution of the optimal decision rule based directly on the discriminant functions. It can only be used for classification (with optional shrinkage), because estimation of eigenvectors is not performed. Therefore, dimensionality reduction with the transform is not supported. Parameters ---------- X : array-like, shape (n_samples, n_features) Training data. y : array-like, shape (n_samples,) or (n_samples, n_classes) Target values. shrinkage : string or float, optional Shrinkage parameter, possible values: - None: no shrinkage (default). - 'auto': automatic shrinkage using the Ledoit-Wolf lemma. - float between 0 and 1: fixed shrinkage parameter. Notes ----- This solver is based on [1]_, section 2.6.2, pp. 39-41. References ---------- .. [1] R. O. Duda, P. E. Hart, D. G. Stork. Pattern Classification (Second Edition). John Wiley & Sons, Inc., New York, 2001. ISBN 0-471-05669-3. 
""" self.means_ = _class_means(X, y) self.covariance_ = _class_cov(X, y, self.priors_, shrinkage) self.coef_ = linalg.lstsq(self.covariance_, self.means_.T)[0].T self.intercept_ = (-0.5 * np.diag(np.dot(self.means_, self.coef_.T)) + np.log(self.priors_)) def _solve_eigen(self, X, y, shrinkage): """Eigenvalue solver. The eigenvalue solver computes the optimal solution of the Rayleigh coefficient (basically the ratio of between class scatter to within class scatter). This solver supports both classification and dimensionality reduction (with optional shrinkage). Parameters ---------- X : array-like, shape (n_samples, n_features) Training data. y : array-like, shape (n_samples,) or (n_samples, n_targets) Target values. shrinkage : string or float, optional Shrinkage parameter, possible values: - None: no shrinkage (default). - 'auto': automatic shrinkage using the Ledoit-Wolf lemma. - float between 0 and 1: fixed shrinkage constant. Notes ----- This solver is based on [1]_, section 3.8.3, pp. 121-124. References ---------- .. [1] R. O. Duda, P. E. Hart, D. G. Stork. Pattern Classification (Second Edition). John Wiley & Sons, Inc., New York, 2001. ISBN 0-471-05669-3. """ self.means_ = _class_means(X, y) self.covariance_ = _class_cov(X, y, self.priors_, shrinkage) Sw = self.covariance_ # within scatter St = _cov(X, shrinkage) # total scatter Sb = St - Sw # between scatter evals, evecs = linalg.eigh(Sb, Sw) evecs = evecs[:, np.argsort(evals)[::-1]] # sort eigenvectors # evecs /= np.linalg.norm(evecs, axis=0) # doesn't work with numpy 1.6 evecs /= np.apply_along_axis(np.linalg.norm, 0, evecs) self.scalings_ = evecs self.coef_ = np.dot(self.means_, evecs).dot(evecs.T) self.intercept_ = (-0.5 * np.diag(np.dot(self.means_, self.coef_.T)) + np.log(self.priors_)) def _solve_svd(self, X, y, store_covariance=False, tol=1.0e-4): """SVD solver. Parameters ---------- X : array-like, shape (n_samples, n_features) Training data. 
y : array-like, shape (n_samples,) or (n_samples, n_targets) Target values. store_covariance : bool, optional Additionally compute class covariance matrix (default False). tol : float, optional Threshold used for rank estimation. """ n_samples, n_features = X.shape n_classes = len(self.classes_) self.means_ = _class_means(X, y) if store_covariance: self.covariance_ = _class_cov(X, y, self.priors_) Xc = [] for idx, group in enumerate(self.classes_): Xg = X[y == group, :] Xc.append(Xg - self.means_[idx]) self.xbar_ = np.dot(self.priors_, self.means_) Xc = np.concatenate(Xc, axis=0) # 1) within (univariate) scaling by with classes std-dev std = Xc.std(axis=0) # avoid division by zero in normalization std[std == 0] = 1. fac = 1. / (n_samples - n_classes) # 2) Within variance scaling X = np.sqrt(fac) * (Xc / std) # SVD of centered (within)scaled data U, S, V = linalg.svd(X, full_matrices=False) rank = np.sum(S > tol) if rank < n_features: warnings.warn("Variables are collinear.") # Scaling of within covariance is: V' 1/S scalings = (V[:rank] / std).T / S[:rank] # 3) Between variance scaling # Scale weighted centers X = np.dot(((np.sqrt((n_samples * self.priors_) * fac)) * (self.means_ - self.xbar_).T).T, scalings) # Centers are living in a space with n_classes-1 dim (maximum) # Use SVD to find projection in the space spanned by the # (n_classes) centers _, S, V = linalg.svd(X, full_matrices=0) rank = np.sum(S > tol * S[0]) self.scalings_ = np.dot(scalings, V.T[:, :rank]) coef = np.dot(self.means_ - self.xbar_, self.scalings_) self.intercept_ = (-0.5 * np.sum(coef ** 2, axis=1) + np.log(self.priors_)) self.coef_ = np.dot(coef, self.scalings_.T) self.intercept_ -= np.dot(self.xbar_, self.coef_.T) def fit(self, X, y, store_covariance=False, tol=1.0e-4): """Fit LDA model according to the given training data and parameters. Parameters ---------- X : array-like, shape (n_samples, n_features) Training data. y : array, shape (n_samples,) Target values. 
""" if store_covariance: warnings.warn("'store_covariance' was moved to the __init__()" "method in version 0.16 and will be removed from" "fit() in version 0.18.", DeprecationWarning) else: store_covariance = self.store_covariance if tol != 1.0e-4: warnings.warn("'tol' was moved to __init__() method in version" " 0.16 and will be removed from fit() in 0.18", DeprecationWarning) self.tol = tol X, y = check_X_y(X, y) self.classes_ = unique_labels(y) if self.priors is None: # estimate priors from sample _, y_t = np.unique(y, return_inverse=True) # non-negative ints self.priors_ = bincount(y_t) / float(len(y)) else: self.priors_ = self.priors if self.solver == 'svd': if self.shrinkage is not None: raise NotImplementedError('shrinkage not supported') self._solve_svd(X, y, store_covariance=store_covariance, tol=tol) elif self.solver == 'lsqr': self._solve_lsqr(X, y, shrinkage=self.shrinkage) elif self.solver == 'eigen': self._solve_eigen(X, y, shrinkage=self.shrinkage) else: raise ValueError("unknown solver {} (valid solvers are 'svd', " "'lsqr', and 'eigen').".format(self.solver)) if self.classes_.size == 2: # treat binary case as a special case self.coef_ = np.array(self.coef_[1, :] - self.coef_[0, :], ndmin=2) self.intercept_ = np.array(self.intercept_[1] - self.intercept_[0], ndmin=1) return self def transform(self, X): """Project data to maximize class separation. Parameters ---------- X : array-like, shape (n_samples, n_features) Input data. Returns ------- X_new : array, shape (n_samples, n_components) Transformed data. 
""" if self.solver == 'lsqr': raise NotImplementedError("transform not implemented for 'lsqr' " "solver (use 'svd' or 'eigen').") check_is_fitted(self, ['xbar_', 'scalings_'], all_or_any=any) X = check_array(X) if self.solver == 'svd': X_new = np.dot(X - self.xbar_, self.scalings_) elif self.solver == 'eigen': X_new = np.dot(X, self.scalings_) n_components = X.shape[1] if self.n_components is None \ else self.n_components return X_new[:, :n_components] def predict_proba(self, X): """Estimate probability. Parameters ---------- X : array-like, shape (n_samples, n_features) Input data. Returns ------- C : array, shape (n_samples, n_classes) Estimated probabilities. """ prob = self.decision_function(X) prob *= -1 np.exp(prob, prob) prob += 1 np.reciprocal(prob, prob) if len(self.classes_) == 2: # binary case return np.column_stack([1 - prob, prob]) else: # OvR normalization, like LibLinear's predict_probability prob /= prob.sum(axis=1).reshape((prob.shape[0], -1)) return prob def predict_log_proba(self, X): """Estimate log probability. Parameters ---------- X : array-like, shape (n_samples, n_features) Input data. Returns ------- C : array, shape (n_samples, n_classes) Estimated log probabilities. """ return np.log(self.predict_proba(X))
bsd-3-clause
HerlanAssis/Django-AulaOsvandoSantana
lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/sjisprober.py
1182
3734
######################## BEGIN LICENSE BLOCK ######################## # The Original Code is mozilla.org code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### import sys from .mbcharsetprober import MultiByteCharSetProber from .codingstatemachine import CodingStateMachine from .chardistribution import SJISDistributionAnalysis from .jpcntx import SJISContextAnalysis from .mbcssm import SJISSMModel from . 
import constants class SJISProber(MultiByteCharSetProber): def __init__(self): MultiByteCharSetProber.__init__(self) self._mCodingSM = CodingStateMachine(SJISSMModel) self._mDistributionAnalyzer = SJISDistributionAnalysis() self._mContextAnalyzer = SJISContextAnalysis() self.reset() def reset(self): MultiByteCharSetProber.reset(self) self._mContextAnalyzer.reset() def get_charset_name(self): return "SHIFT_JIS" def feed(self, aBuf): aLen = len(aBuf) for i in range(0, aLen): codingState = self._mCodingSM.next_state(aBuf[i]) if codingState == constants.eError: if constants._debug: sys.stderr.write(self.get_charset_name() + ' prober hit error at byte ' + str(i) + '\n') self._mState = constants.eNotMe break elif codingState == constants.eItsMe: self._mState = constants.eFoundIt break elif codingState == constants.eStart: charLen = self._mCodingSM.get_current_charlen() if i == 0: self._mLastChar[1] = aBuf[0] self._mContextAnalyzer.feed(self._mLastChar[2 - charLen:], charLen) self._mDistributionAnalyzer.feed(self._mLastChar, charLen) else: self._mContextAnalyzer.feed(aBuf[i + 1 - charLen:i + 3 - charLen], charLen) self._mDistributionAnalyzer.feed(aBuf[i - 1:i + 1], charLen) self._mLastChar[0] = aBuf[aLen - 1] if self.get_state() == constants.eDetecting: if (self._mContextAnalyzer.got_enough_data() and (self.get_confidence() > constants.SHORTCUT_THRESHOLD)): self._mState = constants.eFoundIt return self.get_state() def get_confidence(self): contxtCf = self._mContextAnalyzer.get_confidence() distribCf = self._mDistributionAnalyzer.get_confidence() return max(contxtCf, distribCf)
mit
xsmart/bluecherry-client
breakpad/src/testing/test/gmock_test_utils.py
11
4157
#!/usr/bin/python2.4 # # Copyright 2006, Google Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """Unit test utilities for Google C++ Mocking Framework.""" __author__ = 'wan@google.com (Zhanyong Wan)' import os import sys import unittest # Initially maps a flag to its default value. After # _ParseAndStripGMockFlags() is called, maps a flag to its actual # value. 
_flag_map = {'gmock_source_dir': os.path.dirname(sys.argv[0]), 'gmock_build_dir': os.path.dirname(sys.argv[0])} _gmock_flags_are_parsed = False def _ParseAndStripGMockFlags(argv): """Parses and strips Google Test flags from argv. This is idempotent.""" global _gmock_flags_are_parsed if _gmock_flags_are_parsed: return _gmock_flags_are_parsed = True for flag in _flag_map: # The environment variable overrides the default value. if flag.upper() in os.environ: _flag_map[flag] = os.environ[flag.upper()] # The command line flag overrides the environment variable. i = 1 # Skips the program name. while i < len(argv): prefix = '--' + flag + '=' if argv[i].startswith(prefix): _flag_map[flag] = argv[i][len(prefix):] del argv[i] break else: # We don't increment i in case we just found a --gmock_* flag # and removed it from argv. i += 1 def GetFlag(flag): """Returns the value of the given flag.""" # In case GetFlag() is called before Main(), we always call # _ParseAndStripGMockFlags() here to make sure the --gmock_* flags # are parsed. _ParseAndStripGMockFlags(sys.argv) return _flag_map[flag] def GetSourceDir(): """Returns the absolute path of the directory where the .py files are.""" return os.path.abspath(GetFlag('gmock_source_dir')) def GetBuildDir(): """Returns the absolute path of the directory where the test binaries are.""" return os.path.abspath(GetFlag('gmock_build_dir')) def GetExitStatus(exit_code): """Returns the argument to exit(), or -1 if exit() wasn't called. Args: exit_code: the result value of os.system(command). """ if os.name == 'nt': # On Windows, os.WEXITSTATUS() doesn't work and os.system() returns # the argument to exit() directly. return exit_code else: # On Unix, os.WEXITSTATUS() must be used to extract the exit status # from the result of os.system(). if os.WIFEXITED(exit_code): return os.WEXITSTATUS(exit_code) else: return -1 def Main(): """Runs the unit test.""" # We must call _ParseAndStripGMockFlags() before calling # unittest.main(). 
Otherwise the latter will be confused by the # --gmock_* flags. _ParseAndStripGMockFlags(sys.argv) unittest.main()
gpl-2.0
glove747/liberty-neutron
neutron/tests/unit/plugins/ml2/drivers/mech_fake_agent.py
47
2697
# Copyright (C) 2014,2015 VA Linux Systems Japan K.K. # Copyright (C) 2014 Fumihiko Kakuma <kakuma at valinux co jp> # Copyright (C) 2014,2015 YAMAMOTO Takashi <yamamoto at valinux co jp> # All Rights Reserved. # # Based on openvswitch mechanism driver. # # Copyright (c) 2013 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_log import log as logging from neutron.agent import securitygroups_rpc from neutron.common import constants from neutron.extensions import portbindings from neutron.plugins.common import constants as p_constants from neutron.plugins.ml2.drivers import mech_agent LOG = logging.getLogger(__name__) class FakeAgentMechanismDriver(mech_agent.SimpleAgentMechanismDriverBase): """ML2 mechanism driver for testing. This is a ML2 mechanism driver used by UTs in test_l2population. This driver implements minimum requirements for L2pop mech driver. As there are some agent-based mechanism drivers and OVS agent mech driver is not the only one to support L2pop, it is useful to test L2pop with multiple drivers like this to check the minimum requirements. NOTE(yamamoto): This is a modified copy of ofagent mechanism driver as of writing this. There's no need to keep this synced with the "real" ofagent mechansim driver or its agent. 
""" def __init__(self): sg_enabled = securitygroups_rpc.is_firewall_enabled() vif_details = {portbindings.CAP_PORT_FILTER: sg_enabled, portbindings.OVS_HYBRID_PLUG: sg_enabled} super(FakeAgentMechanismDriver, self).__init__( # NOTE(yamamoto): l2pop driver has a hardcoded list of # supported agent types. constants.AGENT_TYPE_OFA, portbindings.VIF_TYPE_OVS, vif_details) def get_allowed_network_types(self, agent): return (agent['configurations'].get('tunnel_types', []) + [p_constants.TYPE_LOCAL, p_constants.TYPE_FLAT, p_constants.TYPE_VLAN]) def get_mappings(self, agent): return dict(agent['configurations'].get('interface_mappings', {}))
apache-2.0
1adrianb/face-alignment
face_alignment/detection/blazeface/net_blazeface.py
1
16693
import numpy as np import torch import torch.nn as nn import torch.nn.functional as F class BlazeBlock(nn.Module): def __init__(self, in_channels, out_channels, kernel_size=3, stride=1): super(BlazeBlock, self).__init__() self.stride = stride self.channel_pad = out_channels - in_channels # TFLite uses slightly different padding than PyTorch # on the depthwise conv layer when the stride is 2. if stride == 2: self.max_pool = nn.MaxPool2d(kernel_size=stride, stride=stride) padding = 0 else: padding = (kernel_size - 1) // 2 self.convs = nn.Sequential( nn.Conv2d(in_channels=in_channels, out_channels=in_channels, kernel_size=kernel_size, stride=stride, padding=padding, groups=in_channels, bias=True), nn.Conv2d(in_channels=in_channels, out_channels=out_channels, kernel_size=1, stride=1, padding=0, bias=True), ) self.act = nn.ReLU(inplace=True) def forward(self, x): if self.stride == 2: h = F.pad(x, (0, 2, 0, 2), "constant", 0) x = self.max_pool(x) else: h = x if self.channel_pad > 0: x = F.pad(x, (0, 0, 0, 0, 0, self.channel_pad), "constant", 0) return self.act(self.convs(h) + x) class BlazeFace(nn.Module): """The BlazeFace face detection model from MediaPipe. The version from MediaPipe is simpler than the one in the paper; it does not use the "double" BlazeBlocks. Because we won't be training this model, it doesn't need to have batchnorm layers. These have already been "folded" into the conv weights by TFLite. The conversion to PyTorch is fairly straightforward, but there are some small differences between TFLite and PyTorch in how they handle padding on conv layers with stride 2. This version works on batches, while the MediaPipe version can only handle a single image at a time. 
Based on code from https://github.com/tkat0/PyTorch_BlazeFace/ and https://github.com/google/mediapipe/ """ def __init__(self): super(BlazeFace, self).__init__() # These are the settings from the MediaPipe example graph # mediapipe/graphs/face_detection/face_detection_mobile_gpu.pbtxt self.num_classes = 1 self.num_anchors = 896 self.num_coords = 16 self.score_clipping_thresh = 100.0 self.x_scale = 128.0 self.y_scale = 128.0 self.h_scale = 128.0 self.w_scale = 128.0 self.min_score_thresh = 0.75 self.min_suppression_threshold = 0.3 self._define_layers() def _define_layers(self): self.backbone1 = nn.Sequential( nn.Conv2d(in_channels=3, out_channels=24, kernel_size=5, stride=2, padding=0, bias=True), nn.ReLU(inplace=True), BlazeBlock(24, 24), BlazeBlock(24, 28), BlazeBlock(28, 32, stride=2), BlazeBlock(32, 36), BlazeBlock(36, 42), BlazeBlock(42, 48, stride=2), BlazeBlock(48, 56), BlazeBlock(56, 64), BlazeBlock(64, 72), BlazeBlock(72, 80), BlazeBlock(80, 88), ) self.backbone2 = nn.Sequential( BlazeBlock(88, 96, stride=2), BlazeBlock(96, 96), BlazeBlock(96, 96), BlazeBlock(96, 96), BlazeBlock(96, 96), ) self.classifier_8 = nn.Conv2d(88, 2, 1, bias=True) self.classifier_16 = nn.Conv2d(96, 6, 1, bias=True) self.regressor_8 = nn.Conv2d(88, 32, 1, bias=True) self.regressor_16 = nn.Conv2d(96, 96, 1, bias=True) def forward(self, x): # TFLite uses slightly different padding on the first conv layer # than PyTorch, so do it manually. x = F.pad(x, (1, 2, 1, 2), "constant", 0) b = x.shape[0] # batch size, needed for reshaping later x = self.backbone1(x) # (b, 88, 16, 16) h = self.backbone2(x) # (b, 96, 8, 8) # Note: Because PyTorch is NCHW but TFLite is NHWC, we need to # permute the output from the conv layers before reshaping it. 
c1 = self.classifier_8(x) # (b, 2, 16, 16) c1 = c1.permute(0, 2, 3, 1) # (b, 16, 16, 2) c1 = c1.reshape(b, -1, 1) # (b, 512, 1) c2 = self.classifier_16(h) # (b, 6, 8, 8) c2 = c2.permute(0, 2, 3, 1) # (b, 8, 8, 6) c2 = c2.reshape(b, -1, 1) # (b, 384, 1) c = torch.cat((c1, c2), dim=1) # (b, 896, 1) r1 = self.regressor_8(x) # (b, 32, 16, 16) r1 = r1.permute(0, 2, 3, 1) # (b, 16, 16, 32) r1 = r1.reshape(b, -1, 16) # (b, 512, 16) r2 = self.regressor_16(h) # (b, 96, 8, 8) r2 = r2.permute(0, 2, 3, 1) # (b, 8, 8, 96) r2 = r2.reshape(b, -1, 16) # (b, 384, 16) r = torch.cat((r1, r2), dim=1) # (b, 896, 16) return [r, c] def _device(self): """Which device (CPU or GPU) is being used by this model?""" return self.classifier_8.weight.device def load_weights(self, path): self.load_state_dict(torch.load(path)) self.eval() def load_anchors(self, path, device=None): device = device or self._device() self.anchors = torch.tensor( np.load(path), dtype=torch.float32, device=device) assert(self.anchors.ndimension() == 2) assert(self.anchors.shape[0] == self.num_anchors) assert(self.anchors.shape[1] == 4) def load_anchors_from_npy(self, arr, device=None): device = device or self._device() self.anchors = torch.tensor( arr, dtype=torch.float32, device=device) assert(self.anchors.ndimension() == 2) assert(self.anchors.shape[0] == self.num_anchors) assert(self.anchors.shape[1] == 4) def _preprocess(self, x): """Converts the image pixels to the range [-1, 1].""" return x.float() / 127.5 - 1.0 def predict_on_image(self, img): """Makes a prediction on a single image. Arguments: img: a NumPy array of shape (H, W, 3) or a PyTorch tensor of shape (3, H, W). The image's height and width should be 128 pixels. Returns: A tensor with face detections. """ if isinstance(img, np.ndarray): img = torch.from_numpy(img).permute((2, 0, 1)) return self.predict_on_batch(img.unsqueeze(0))[0] def predict_on_batch(self, x): """Makes a prediction on a batch of images. 
Arguments: x: a NumPy array of shape (b, H, W, 3) or a PyTorch tensor of shape (b, 3, H, W). The height and width should be 128 pixels. Returns: A list containing a tensor of face detections for each image in the batch. If no faces are found for an image, returns a tensor of shape (0, 17). Each face detection is a PyTorch tensor consisting of 17 numbers: - ymin, xmin, ymax, xmax - x,y-coordinates for the 6 keypoints - confidence score """ if isinstance(x, np.ndarray): x = torch.from_numpy(x).permute((0, 3, 1, 2)) assert x.shape[1] == 3 assert x.shape[2] == 128 assert x.shape[3] == 128 # 1. Preprocess the images into tensors: x = x.to(self._device()) x = self._preprocess(x) # 2. Run the neural network: with torch.no_grad(): out = self.__call__(x) # 3. Postprocess the raw predictions: detections = self._tensors_to_detections(out[0], out[1], self.anchors) # 4. Non-maximum suppression to remove overlapping detections: filtered_detections = [] for i in range(len(detections)): faces = self._weighted_non_max_suppression(detections[i]) faces = torch.stack(faces) if len( faces) > 0 else torch.zeros((0, 17)) filtered_detections.append(faces) return filtered_detections def _tensors_to_detections(self, raw_box_tensor, raw_score_tensor, anchors): """The output of the neural network is a tensor of shape (b, 896, 16) containing the bounding box regressor predictions, as well as a tensor of shape (b, 896, 1) with the classification confidences. This function converts these two "raw" tensors into proper detections. Returns a list of (num_detections, 17) tensors, one for each image in the batch. 
This is based on the source code from: mediapipe/calculators/tflite/tflite_tensors_to_detections_calculator.cc mediapipe/calculators/tflite/tflite_tensors_to_detections_calculator.proto """ assert raw_box_tensor.ndimension() == 3 assert raw_box_tensor.shape[1] == self.num_anchors assert raw_box_tensor.shape[2] == self.num_coords assert raw_score_tensor.ndimension() == 3 assert raw_score_tensor.shape[1] == self.num_anchors assert raw_score_tensor.shape[2] == self.num_classes assert raw_box_tensor.shape[0] == raw_score_tensor.shape[0] detection_boxes = self._decode_boxes(raw_box_tensor, anchors) thresh = self.score_clipping_thresh raw_score_tensor = raw_score_tensor.clamp(-thresh, thresh) detection_scores = raw_score_tensor.sigmoid().squeeze(dim=-1) # Note: we stripped off the last dimension from the scores tensor # because there is only has one class. Now we can simply use a mask # to filter out the boxes with too low confidence. mask = detection_scores >= self.min_score_thresh # Because each image from the batch can have a different number of # detections, process them one at a time using a loop. output_detections = [] for i in range(raw_box_tensor.shape[0]): boxes = detection_boxes[i, mask[i]] scores = detection_scores[i, mask[i]].unsqueeze(dim=-1) output_detections.append(torch.cat((boxes, scores), dim=-1).to('cpu')) return output_detections def _decode_boxes(self, raw_boxes, anchors): """Converts the predictions into actual coordinates using the anchor boxes. Processes the entire batch at once. """ boxes = torch.zeros_like(raw_boxes) x_center = raw_boxes[..., 0] / self.x_scale * \ anchors[:, 2] + anchors[:, 0] y_center = raw_boxes[..., 1] / self.y_scale * \ anchors[:, 3] + anchors[:, 1] w = raw_boxes[..., 2] / self.w_scale * anchors[:, 2] h = raw_boxes[..., 3] / self.h_scale * anchors[:, 3] boxes[..., 0] = y_center - h / 2. # ymin boxes[..., 1] = x_center - w / 2. # xmin boxes[..., 2] = y_center + h / 2. # ymax boxes[..., 3] = x_center + w / 2. 
# xmax for k in range(6): offset = 4 + k * 2 keypoint_x = raw_boxes[..., offset] / \ self.x_scale * anchors[:, 2] + anchors[:, 0] keypoint_y = raw_boxes[..., offset + 1] / \ self.y_scale * anchors[:, 3] + anchors[:, 1] boxes[..., offset] = keypoint_x boxes[..., offset + 1] = keypoint_y return boxes def _weighted_non_max_suppression(self, detections): """The alternative NMS method as mentioned in the BlazeFace paper: "We replace the suppression algorithm with a blending strategy that estimates the regression parameters of a bounding box as a weighted mean between the overlapping predictions." The original MediaPipe code assigns the score of the most confident detection to the weighted detection, but we take the average score of the overlapping detections. The input detections should be a Tensor of shape (count, 17). Returns a list of PyTorch tensors, one for each detected face. This is based on the source code from: mediapipe/calculators/util/non_max_suppression_calculator.cc mediapipe/calculators/util/non_max_suppression_calculator.proto """ if len(detections) == 0: return [] output_detections = [] # Sort the detections from highest to lowest score. remaining = torch.argsort(detections[:, 16], descending=True) while len(remaining) > 0: detection = detections[remaining[0]] # Compute the overlap between the first box and the other # remaining boxes. (Note that the other_boxes also include # the first_box.) first_box = detection[:4] other_boxes = detections[remaining, :4] ious = overlap_similarity(first_box, other_boxes) # If two detections don't overlap enough, they are considered # to be from different faces. mask = ious > self.min_suppression_threshold overlapping = remaining[mask] remaining = remaining[~mask] # Take an average of the coordinates from the overlapping # detections, weighted by their confidence scores. 
weighted_detection = detection.clone() if len(overlapping) > 1: coordinates = detections[overlapping, :16] scores = detections[overlapping, 16:17] total_score = scores.sum() weighted = (coordinates * scores).sum(dim=0) / total_score weighted_detection[:16] = weighted weighted_detection[16] = total_score / len(overlapping) output_detections.append(weighted_detection) return output_detections # IOU code from https://github.com/amdegroot/ssd.pytorch/blob/master/layers/box_utils.py def intersect(box_a, box_b): """ We resize both tensors to [A,B,2] without new malloc: [A,2] -> [A,1,2] -> [A,B,2] [B,2] -> [1,B,2] -> [A,B,2] Then we compute the area of intersect between box_a and box_b. Args: box_a: (tensor) bounding boxes, Shape: [A,4]. box_b: (tensor) bounding boxes, Shape: [B,4]. Return: (tensor) intersection area, Shape: [A,B]. """ A = box_a.size(0) B = box_b.size(0) max_xy = torch.min(box_a[:, 2:].unsqueeze(1).expand(A, B, 2), box_b[:, 2:].unsqueeze(0).expand(A, B, 2)) min_xy = torch.max(box_a[:, :2].unsqueeze(1).expand(A, B, 2), box_b[:, :2].unsqueeze(0).expand(A, B, 2)) inter = torch.clamp((max_xy - min_xy), min=0) return inter[:, :, 0] * inter[:, :, 1] def jaccard(box_a, box_b): """Compute the jaccard overlap of two sets of boxes. The jaccard overlap is simply the intersection over union of two boxes. Here we operate on ground truth boxes and default boxes. 
E.g.: A ∩ B / A ∪ B = A ∩ B / (area(A) + area(B) - A ∩ B) Args: box_a: (tensor) Ground truth bounding boxes, Shape: [num_objects,4] box_b: (tensor) Prior boxes from priorbox layers, Shape: [num_priors,4] Return: jaccard overlap: (tensor) Shape: [box_a.size(0), box_b.size(0)] """ inter = intersect(box_a, box_b) area_a = ((box_a[:, 2] - box_a[:, 0]) * (box_a[:, 3] - box_a[:, 1])).unsqueeze(1).expand_as(inter) # [A,B] area_b = ((box_b[:, 2] - box_b[:, 0]) * (box_b[:, 3] - box_b[:, 1])).unsqueeze(0).expand_as(inter) # [A,B] union = area_a + area_b - inter return inter / union # [A,B] def overlap_similarity(box, other_boxes): """Computes the IOU between a bounding box and set of other boxes.""" return jaccard(box.unsqueeze(0), other_boxes).squeeze(0) def init_model(): net = BlazeFace().to('cuda') net.load_weights("BlazeFace/params/blazeface.pth") net.load_anchors("BlazeFace/params/anchors.npy") # Optionally change the thresholds: net.min_score_thresh = 0.75 net.min_suppression_threshold = 0.3 return net
bsd-3-clause
akshatharaj/django
tests/proxy_model_inheritance/tests.py
278
1764
from __future__ import absolute_import, unicode_literals import os from django.core.management import call_command from django.test import TestCase, TransactionTestCase from django.test.utils import extend_sys_path from django.utils._os import upath from .models import ( ConcreteModel, ConcreteModelSubclass, ConcreteModelSubclassProxy, ) class ProxyModelInheritanceTests(TransactionTestCase): """ Proxy model inheritance across apps can result in migrate not creating the table for the proxied model (as described in #12286). This test creates two dummy apps and calls migrate, then verifies that the table has been created. """ available_apps = [] def test_table_exists(self): with extend_sys_path(os.path.dirname(os.path.abspath(upath(__file__)))): with self.modify_settings(INSTALLED_APPS={'append': ['app1', 'app2']}): call_command('migrate', verbosity=0, run_syncdb=True) from app1.models import ProxyModel from app2.models import NiceModel self.assertEqual(NiceModel.objects.all().count(), 0) self.assertEqual(ProxyModel.objects.all().count(), 0) class MultiTableInheritanceProxyTest(TestCase): def test_model_subclass_proxy(self): """ Deleting an instance of a model proxying a multi-table inherited subclass should cascade delete down the whole inheritance chain (see #18083). """ instance = ConcreteModelSubclassProxy.objects.create() instance.delete() self.assertEqual(0, ConcreteModelSubclassProxy.objects.count()) self.assertEqual(0, ConcreteModelSubclass.objects.count()) self.assertEqual(0, ConcreteModel.objects.count())
bsd-3-clause
pongem/python-bot-project
appengine/standard/localtesting/mail_test.py
9
1506
# Copyright 2015 Google Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# [START mail_example]
import unittest

from google.appengine.api import mail
from google.appengine.ext import testbed


class MailTestCase(unittest.TestCase):
    """Exercises the App Engine mail API against the local testbed stub."""

    def setUp(self):
        # Activate a fresh testbed with only the mail service stubbed out,
        # and keep a handle on the stub so sent messages can be inspected.
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self.testbed.init_mail_stub()
        self.mail_stub = self.testbed.get_stub(testbed.MAIL_SERVICE_NAME)

    def tearDown(self):
        self.testbed.deactivate()

    def testMailSent(self):
        """A sent message must be recorded by the stub for its recipient."""
        mail.send_mail(
            to='alice@example.com',
            subject='This is a test',
            sender='bob@example.com',
            body='This is a test e-mail')
        sent = self.mail_stub.get_sent_messages(to='alice@example.com')
        self.assertEqual(1, len(sent))
        self.assertEqual('alice@example.com', sent[0].to)
# [END mail_example]


if __name__ == '__main__':
    unittest.main()
apache-2.0
madjam/mxnet
example/python-howto/monitor_weights.py
12
1987
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

# pylint: skip-file
import sys
import os

# Make the shared test helpers importable before pulling in MNISTIterator.
curr_path = os.path.dirname(os.path.abspath(os.path.expanduser(__file__)))
sys.path.append(os.path.join(curr_path, "../../tests/python/common"))
from get_data import MNISTIterator
import mxnet as mx
import numpy as np
import logging

# Build a 784 -> 128 -> 64 -> 10 MLP by threading one symbol variable
# through each layer; the string names match the original definitions.
net = mx.symbol.Variable('data')
net = mx.symbol.FullyConnected(data=net, name='fc1', num_hidden=128)
net = mx.symbol.Activation(data=net, name='relu1', act_type="relu")
net = mx.symbol.FullyConnected(data=net, name='fc2', num_hidden=64)
net = mx.symbol.Activation(data=net, name='relu2', act_type="relu")
net = mx.symbol.FullyConnected(data=net, name='fc3', num_hidden=10)
mlp = mx.symbol.SoftmaxOutput(data=net, name='softmax')

# data
train, val = MNISTIterator(batch_size=100, input_shape=(784,))

# train
logging.basicConfig(level=logging.DEBUG)
model = mx.model.FeedForward(
    ctx=mx.cpu(),
    symbol=mlp,
    num_epoch=20,
    learning_rate=0.1,
    momentum=0.9,
    wd=0.00001)


def norm_stat(d):
    """Monitor statistic: L2 norm of a tensor scaled by sqrt(#elements)."""
    return mx.nd.norm(d) / np.sqrt(d.size)


# Sample the weight statistic every 100 batches while training.
mon = mx.mon.Monitor(100, norm_stat)
model.fit(X=train, eval_data=val, monitor=mon,
          batch_end_callback=mx.callback.Speedometer(100, 100))
apache-2.0